Consolidate old/new vendor and category routes, enhance new brands route, update frontend accordingly for all three pages, improve hierarchy on categories page, fix some calculations
@@ -5,9 +5,9 @@ const { Pool } = require('pg'); // Assuming you use 'pg'

// --- Configuration ---
// Toggle these constants to enable/disable specific steps for testing
-const RUN_DAILY_SNAPSHOTS = true;
-const RUN_PRODUCT_METRICS = true;
-const RUN_PERIODIC_METRICS = true;
+const RUN_DAILY_SNAPSHOTS = false;
+const RUN_PRODUCT_METRICS = false;
+const RUN_PERIODIC_METRICS = false;
const RUN_BRAND_METRICS = true;
const RUN_VENDOR_METRICS = true;
const RUN_CATEGORY_METRICS = true;
@@ -401,27 +401,43 @@ async function executeSqlStep(config, progress) {
        start_time TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
        end_time TIMESTAMP WITH TIME ZONE,
        duration_seconds INTEGER,
-       status TEXT, -- 'running', 'completed', 'failed', 'cancelled'
+       status TEXT, -- Will be altered to enum if needed below
        error_message TEXT,
        additional_info JSONB
      );
    `);

+   // Ensure the calculation_status enum type exists if needed
+   await connection.query(`
+     DO $$
+     BEGIN
+       IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'calculation_status') THEN
+         CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled');
+
+         -- If needed, alter the existing table to use the enum
+         ALTER TABLE calculate_history
+           ALTER COLUMN status TYPE calculation_status
+           USING status::calculation_status;
+       END IF;
+     END
+     $$;
+   `);
+
    // Mark previous runs of this type as cancelled
    await connection.query(`
      UPDATE calculate_history
      SET
-       status = 'cancelled',
+       status = 'cancelled'::calculation_status,
        end_time = NOW(),
        duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
        error_message = 'Previous calculation was not completed properly or was superseded.'
-     WHERE status = 'running' AND additional_info->>'type' = $1::text;
+     WHERE status = 'running'::calculation_status AND additional_info->>'type' = $1::text;
    `, [config.historyType]);

    // Create history record for this run
    const historyResult = await connection.query(`
      INSERT INTO calculate_history (status, additional_info)
-     VALUES ('running', jsonb_build_object('type', $1::text, 'sql_file', $2::text))
+     VALUES ('running'::calculation_status, jsonb_build_object('type', $1::text, 'sql_file', $2::text))
      RETURNING id;
    `, [config.historyType, config.sqlFile]);
    calculateHistoryId = historyResult.rows[0].id;
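For reference, a minimal sketch of what the enum migration above buys: once `status` is a `calculation_status`, out-of-range values are rejected by the type system instead of slipping in as free-form text. The table and type names are the ones created above; the second statement is illustrative and would fail.

```sql
-- Accepted: 'running' is a member of calculation_status
INSERT INTO calculate_history (status, additional_info)
VALUES ('running'::calculation_status, jsonb_build_object('type', 'demo'));

-- Rejected at the type level (illustrative):
-- INSERT INTO calculate_history (status) VALUES ('paused');
-- ERROR:  invalid input value for enum calculation_status: "paused"
```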
@@ -502,7 +518,7 @@ async function executeSqlStep(config, progress) {
      SET
        end_time = NOW(),
        duration_seconds = $1::integer,
-       status = 'completed'
+       status = 'completed'::calculation_status
      WHERE id = $2::integer;
    `, [stepDuration, calculateHistoryId]);
@@ -551,7 +567,7 @@ async function executeSqlStep(config, progress) {
      SET
        end_time = NOW(),
        duration_seconds = $1::integer,
-       status = $2::text,
+       status = $2::calculation_status,
        error_message = $3::text
      WHERE id = $4::integer;
    `, [errorDuration, finalStatus, errorMessage.substring(0, 1000), calculateHistoryId]); // Limit error message size
@@ -0,0 +1,200 @@
-- Description: Rebuilds daily product snapshots from scratch using real orders data.
-- Fixes issues with duplicated/inflated metrics.
-- Dependencies: Core import tables (products, orders, purchase_orders).
-- Frequency: One-time run to clear out problematic data.

DO $$
DECLARE
    _module_name TEXT := 'rebuild_daily_snapshots';
    _start_time TIMESTAMPTZ := clock_timestamp();
    _date DATE;
    _count INT;
    _total_records INT := 0;
    _begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2024-01-01'); -- Starting point for data rebuild
    _end_date DATE := CURRENT_DATE;
BEGIN
    RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;

    -- First truncate the existing snapshots to ensure a clean slate
    TRUNCATE TABLE public.daily_product_snapshots;
    RAISE NOTICE 'Cleared existing snapshot data';

    -- Now rebuild the snapshots day by day
    _date := _begin_date;

    WHILE _date <= _end_date LOOP
        RAISE NOTICE 'Processing date %...', _date;

        -- Create snapshots for this date
        WITH SalesData AS (
            SELECT
                p.pid,
                p.sku,
                -- Count orders to ensure we only include products with real activity
                COUNT(o.id) as order_count,
                -- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.landing_cost_price, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,

                -- Aggregate Returns (Quantity < 0 or Status = Returned)
                COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN ABS(o.quantity) ELSE 0 END), 0) AS units_returned,
                COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN o.price * ABS(o.quantity) ELSE 0 END), 0.00) AS returns_revenue
            FROM public.products p
            LEFT JOIN public.orders o
                ON p.pid = o.pid
                AND o.date::date = _date
            GROUP BY p.pid, p.sku
            HAVING COUNT(o.id) > 0 -- Only include products with actual orders for this date
        ),
        ReceivingData AS (
            SELECT
                po.pid,
                -- Count POs to ensure we only include products with real activity
                COUNT(po.po_id) as po_count,
                -- Calculate received quantity for this day
                COALESCE(
                    -- First try the received field from purchase_orders table (if received on this date)
                    SUM(CASE WHEN po.date::date = _date THEN po.received ELSE 0 END),

                    -- Otherwise try receiving_history JSON
                    SUM(
                        CASE
                            WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
                            ELSE 0
                        END
                    ),
                    0
                ) AS units_received,

                COALESCE(
                    -- First try the actual cost_price from purchase_orders
                    SUM(CASE WHEN po.date::date = _date THEN po.received * po.cost_price ELSE 0 END),

                    -- Otherwise try receiving_history JSON
                    SUM(
                        CASE
                            WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
                            ELSE 0
                        END
                        * COALESCE((rh.item->>'cost')::numeric, po.cost_price)
                    ),
                    0.00
                ) AS cost_received
            FROM public.purchase_orders po
            LEFT JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item) ON
                jsonb_typeof(po.receiving_history) = 'array' AND
                jsonb_array_length(po.receiving_history) > 0 AND
                (
                    (rh.item->>'date')::date = _date OR
                    (rh.item->>'received_at')::date = _date OR
                    (rh.item->>'receipt_date')::date = _date
                )
            -- Include POs with the current date or relevant receiving_history
            WHERE
                po.date::date = _date OR
                jsonb_typeof(po.receiving_history) = 'array' AND
                jsonb_array_length(po.receiving_history) > 0
            GROUP BY po.pid
            HAVING COUNT(po.po_id) > 0 OR SUM(
                CASE
                    WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
                    WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
                    WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
                    ELSE 0
                END
            ) > 0
        ),
        -- Get stock quantities for the day - note this is approximate since we're using current products data
        StockData AS (
            SELECT
                p.pid,
                p.stock_quantity,
                COALESCE(p.landing_cost_price, p.cost_price, 0.00) as effective_cost_price,
                COALESCE(p.price, 0.00) as current_price,
                COALESCE(p.regular_price, 0.00) as current_regular_price
            FROM public.products p
        )
        INSERT INTO public.daily_product_snapshots (
            snapshot_date,
            pid,
            sku,
            eod_stock_quantity,
            eod_stock_cost,
            eod_stock_retail,
            eod_stock_gross,
            stockout_flag,
            units_sold,
            units_returned,
            gross_revenue,
            discounts,
            returns_revenue,
            net_revenue,
            cogs,
            gross_regular_revenue,
            profit,
            units_received,
            cost_received,
            calculation_timestamp
        )
        SELECT
            _date AS snapshot_date,
            COALESCE(sd.pid, rd.pid) AS pid,
            sd.sku,
            -- Use current stock as approximation, since historical stock data may not be available
            s.stock_quantity AS eod_stock_quantity,
            s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
            s.stock_quantity * s.current_price AS eod_stock_retail,
            s.stock_quantity * s.current_regular_price AS eod_stock_gross,
            (s.stock_quantity <= 0) AS stockout_flag,
            -- Sales metrics
            COALESCE(sd.units_sold, 0),
            COALESCE(sd.units_returned, 0),
            COALESCE(sd.gross_revenue_unadjusted, 0.00),
            COALESCE(sd.discounts, 0.00),
            COALESCE(sd.returns_revenue, 0.00),
            COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
            COALESCE(sd.cogs, 0.00),
            COALESCE(sd.gross_regular_revenue, 0.00),
            (COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
            -- Receiving metrics
            COALESCE(rd.units_received, 0),
            COALESCE(rd.cost_received, 0.00),
            _start_time
        FROM SalesData sd
        FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
        LEFT JOIN StockData s ON COALESCE(sd.pid, rd.pid) = s.pid
        WHERE (COALESCE(sd.order_count, 0) > 0 OR COALESCE(rd.po_count, 0) > 0);

        -- Get record count for this day
        GET DIAGNOSTICS _count = ROW_COUNT;
        _total_records := _total_records + _count;

        RAISE NOTICE 'Added % snapshot records for date %', _count, _date;

        -- Move to next day
        _date := _date + INTERVAL '1 day';
    END LOOP;

    RAISE NOTICE 'Rebuilding daily snapshots complete. Added % total records across % days.', _total_records, (_end_date - _begin_date)::integer + 1;

    -- Update the status table for daily_snapshots
    INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
    VALUES ('daily_snapshots', _start_time)
    ON CONFLICT (module_name) DO UPDATE SET last_calculation_timestamp = _start_time;

    -- Now update product_metrics based on the rebuilt snapshots
    RAISE NOTICE 'Triggering update of product_metrics table...';

    -- Call the update_product_metrics procedure directly
    -- Your system might use a different method to trigger this update
    PERFORM pg_notify('recalculate_metrics', 'product_metrics');

    RAISE NOTICE 'Rebuild complete. Duration: %', clock_timestamp() - _start_time;
END $$;
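A quick way to sanity-check the rebuild, sketched under the same schema assumptions the script above makes (table and column names as used there): compare per-day snapshot totals against raw positive order quantities.

```sql
SELECT s.snapshot_date,
       SUM(s.units_sold) AS snapshot_units,
       (SELECT COALESCE(SUM(o.quantity), 0)
        FROM public.orders o
        WHERE o.date::date = s.snapshot_date
          AND o.quantity > 0
          AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned')) AS order_units
FROM public.daily_product_snapshots s
GROUP BY s.snapshot_date
ORDER BY s.snapshot_date DESC
LIMIT 7;  -- the two columns should agree day by day after the rebuild
```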
@@ -6,6 +6,7 @@ DO $$
DECLARE
    _module_name VARCHAR := 'brand_metrics';
    _start_time TIMESTAMPTZ := clock_timestamp();
+   _min_revenue NUMERIC := 50.00; -- Minimum revenue threshold for margin calculation
BEGIN
    RAISE NOTICE 'Running % calculation...', _module_name;

@@ -19,14 +20,26 @@ BEGIN
            SUM(pm.current_stock) AS current_stock_units,
            SUM(pm.current_stock_cost) AS current_stock_cost,
            SUM(pm.current_stock_retail) AS current_stock_retail,
-           SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d,
-           SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d,
-           SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d,
-           SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d,
-           SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue
+           -- Only include products with valid sales data in each time period
+           COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
+           SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
+           SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
+
+           COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
+           SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
+           SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
+           SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
+           SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
+
+           COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
+           SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
+           SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
+
+           COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
+           SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
+           SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
        FROM public.product_metrics pm
        JOIN public.products p ON pm.pid = p.pid
        -- WHERE p.visible = true -- Optional: filter only visible products for brand metrics?
        GROUP BY brand_group
    ),
    AllBrands AS (
@@ -58,8 +71,14 @@ BEGIN
        COALESCE(ba.profit_30d, 0.00), COALESCE(ba.cogs_30d, 0.00),
        COALESCE(ba.sales_365d, 0), COALESCE(ba.revenue_365d, 0.00),
        COALESCE(ba.lifetime_sales, 0), COALESCE(ba.lifetime_revenue, 0.00),
-       -- KPIs
-       (ba.profit_30d / NULLIF(ba.revenue_30d, 0)) * 100.0
+       -- KPIs - Calculate margin only for brands with significant revenue
+       CASE
+           WHEN COALESCE(ba.revenue_30d, 0) >= _min_revenue THEN
+               -- Directly calculate margin from revenue and cogs for consistency
+               -- This is mathematically equivalent to profit/revenue but more explicit
+               ((COALESCE(ba.revenue_30d, 0) - COALESCE(ba.cogs_30d, 0)) / COALESCE(ba.revenue_30d, 1)) * 100.0
+           ELSE NULL -- No margin for low/no revenue brands
+       END
    FROM AllBrands b
    LEFT JOIN BrandAggregates ba ON b.brand_group = ba.brand_group
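The effect of the threshold guard, shown on throwaway values (50.00 is the `_min_revenue` declared above):

```sql
SELECT revenue_30d, cogs_30d,
       CASE WHEN COALESCE(revenue_30d, 0) >= 50.00
            THEN ((COALESCE(revenue_30d, 0) - COALESCE(cogs_30d, 0)) / COALESCE(revenue_30d, 1)) * 100.0
       END AS avg_margin_30d
FROM (VALUES (1000.00, 600.00), (10.00, 2.00), (NULL::numeric, NULL::numeric)) AS t(revenue_30d, cogs_30d);
-- 40.0; NULL (below threshold); NULL (no revenue)
```

Noisy margins from tiny-revenue brands come back as NULL instead of skewing the page.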
@@ -9,9 +9,44 @@ DECLARE
BEGIN
    RAISE NOTICE 'Running % calculation...', _module_name;

-   WITH CategoryAggregates AS (
+   WITH
+   -- Identify the hierarchy depth for each category
+   CategoryDepth AS (
+       WITH RECURSIVE CategoryTree AS (
+           -- Base case: Start with categories without parents (root categories)
+           SELECT cat_id, name, parent_id, 0 AS depth
+           FROM public.categories
+           WHERE parent_id IS NULL
+
+           UNION ALL
+
+           -- Recursive step: Add child categories with incremented depth
+           SELECT c.cat_id, c.name, c.parent_id, ct.depth + 1
+           FROM public.categories c
+           JOIN CategoryTree ct ON c.parent_id = ct.cat_id
+       )
+       SELECT cat_id, depth
+       FROM CategoryTree
+   ),
+   -- For each product, find the most specific (deepest) category it belongs to
+   ProductDeepestCategory AS (
+       SELECT
+           pc.pid,
+           pc.cat_id
+       FROM public.product_categories pc
+       JOIN CategoryDepth cd ON pc.cat_id = cd.cat_id
+       -- This is the key part: for each product, select only the category with maximum depth
+       WHERE (pc.pid, cd.depth) IN (
+           SELECT pc2.pid, MAX(cd2.depth)
+           FROM public.product_categories pc2
+           JOIN CategoryDepth cd2 ON pc2.cat_id = cd2.cat_id
+           GROUP BY pc2.pid
+       )
+   ),
+   -- Calculate metrics only at the most specific category level for each product
+   CategoryAggregates AS (
        SELECT
-           pc.cat_id,
+           pdc.cat_id,
            -- Counts
            COUNT(DISTINCT pm.pid) AS product_count,
            COUNT(DISTINCT CASE WHEN pm.is_visible THEN pm.pid END) AS active_product_count,
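The `CategoryDepth` recursion above, runnable standalone on throwaway data to show the depth numbering it produces:

```sql
WITH RECURSIVE cats(cat_id, parent_id) AS (
    VALUES (1, NULL::int), (2, 1), (3, 2)   -- root -> child -> grandchild
), CategoryTree AS (
    SELECT cat_id, 0 AS depth FROM cats WHERE parent_id IS NULL
    UNION ALL
    SELECT c.cat_id, ct.depth + 1
    FROM cats c JOIN CategoryTree ct ON c.parent_id = ct.cat_id
)
SELECT cat_id, depth FROM CategoryTree;  -- (1,0), (2,1), (3,2)
```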
@@ -20,19 +55,74 @@ BEGIN
            SUM(pm.current_stock) AS current_stock_units,
            SUM(pm.current_stock_cost) AS current_stock_cost,
            SUM(pm.current_stock_retail) AS current_stock_retail,
-           -- Rolling Periods (Sum directly from product_metrics)
-           SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d,
-           SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d,
-           SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d,
-           SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d,
-           SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue,
-           -- Data for KPIs
-           SUM(pm.avg_stock_units_30d) AS total_avg_stock_units_30d -- Sum of averages (use cautiously)
+           -- Rolling Periods - Only include products with actual sales in each period
+           SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
+           SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
+           SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
+           SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
+           SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
+           SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
+           SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
+           SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
+           SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
+           SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue,
+           -- Data for KPIs - Only average stock for products with stock
+           SUM(CASE WHEN pm.avg_stock_units_30d > 0 THEN pm.avg_stock_units_30d ELSE 0 END) AS total_avg_stock_units_30d
        FROM public.product_metrics pm
-       JOIN public.product_categories pc ON pm.pid = pc.pid
-       -- Optional: JOIN products p ON pm.pid = p.pid if needed for filtering (e.g., only visible products)
-       -- WHERE p.visible = true -- Example filter
-       GROUP BY pc.cat_id
+       JOIN ProductDeepestCategory pdc ON pm.pid = pdc.pid
+       GROUP BY pdc.cat_id
    ),
+   -- Use a flat approach to build the complete category tree with aggregate values
+   CategoryTree AS (
+       WITH RECURSIVE CategoryHierarchy AS (
+           SELECT
+               c.cat_id,
+               c.name,
+               c.parent_id,
+               c.cat_id as leaf_id, -- Track original leaf category
+               ARRAY[c.cat_id] as path
+           FROM public.categories c
+
+           UNION ALL
+
+           SELECT
+               p.cat_id,
+               p.name,
+               p.parent_id,
+               ch.leaf_id, -- Keep track of the original leaf
+               p.cat_id || ch.path
+           FROM public.categories p
+           JOIN CategoryHierarchy ch ON p.cat_id = ch.parent_id
+       )
+       SELECT
+           ch.cat_id,
+           ch.leaf_id
+       FROM CategoryHierarchy ch
+   ),
+   -- Now aggregate by maintaining the link between leaf categories and ancestors
+   RollupMetrics AS (
+       SELECT
+           ct.cat_id,
+           SUM(ca.product_count) AS product_count,
+           SUM(ca.active_product_count) AS active_product_count,
+           SUM(ca.replenishable_product_count) AS replenishable_product_count,
+           SUM(ca.current_stock_units) AS current_stock_units,
+           SUM(ca.current_stock_cost) AS current_stock_cost,
+           SUM(ca.current_stock_retail) AS current_stock_retail,
+           SUM(ca.sales_7d) AS sales_7d,
+           SUM(ca.revenue_7d) AS revenue_7d,
+           SUM(ca.sales_30d) AS sales_30d,
+           SUM(ca.revenue_30d) AS revenue_30d,
+           SUM(ca.cogs_30d) AS cogs_30d,
+           SUM(ca.profit_30d) AS profit_30d,
+           SUM(ca.sales_365d) AS sales_365d,
+           SUM(ca.revenue_365d) AS revenue_365d,
+           SUM(ca.lifetime_sales) AS lifetime_sales,
+           SUM(ca.lifetime_revenue) AS lifetime_revenue,
+           SUM(ca.total_avg_stock_units_30d) AS total_avg_stock_units_30d
+       FROM CategoryTree ct
+       JOIN CategoryAggregates ca ON ct.leaf_id = ca.cat_id
+       GROUP BY ct.cat_id
+   )
    INSERT INTO public.category_metrics (
        category_id, category_name, category_type, parent_id, last_calculated,
@@ -49,22 +139,22 @@ BEGIN
        c.parent_id,
        _start_time,
        -- Base Aggregates
-       COALESCE(ca.product_count, 0),
-       COALESCE(ca.active_product_count, 0),
-       COALESCE(ca.replenishable_product_count, 0),
-       COALESCE(ca.current_stock_units, 0),
-       COALESCE(ca.current_stock_cost, 0.00),
-       COALESCE(ca.current_stock_retail, 0.00),
-       COALESCE(ca.sales_7d, 0), COALESCE(ca.revenue_7d, 0.00),
-       COALESCE(ca.sales_30d, 0), COALESCE(ca.revenue_30d, 0.00),
-       COALESCE(ca.profit_30d, 0.00), COALESCE(ca.cogs_30d, 0.00),
-       COALESCE(ca.sales_365d, 0), COALESCE(ca.revenue_365d, 0.00),
-       COALESCE(ca.lifetime_sales, 0), COALESCE(ca.lifetime_revenue, 0.00),
+       COALESCE(rm.product_count, 0),
+       COALESCE(rm.active_product_count, 0),
+       COALESCE(rm.replenishable_product_count, 0),
+       COALESCE(rm.current_stock_units, 0),
+       COALESCE(rm.current_stock_cost, 0.00),
+       COALESCE(rm.current_stock_retail, 0.00),
+       COALESCE(rm.sales_7d, 0), COALESCE(rm.revenue_7d, 0.00),
+       COALESCE(rm.sales_30d, 0), COALESCE(rm.revenue_30d, 0.00),
+       COALESCE(rm.profit_30d, 0.00), COALESCE(rm.cogs_30d, 0.00),
+       COALESCE(rm.sales_365d, 0), COALESCE(rm.revenue_365d, 0.00),
+       COALESCE(rm.lifetime_sales, 0), COALESCE(rm.lifetime_revenue, 0.00),
        -- KPIs
-       (ca.profit_30d / NULLIF(ca.revenue_30d, 0)) * 100.0,
-       ca.sales_30d / NULLIF(ca.total_avg_stock_units_30d, 0) -- Simple unit-based turnover
+       (rm.profit_30d / NULLIF(rm.revenue_30d, 0)) * 100.0,
+       rm.sales_30d / NULLIF(rm.total_avg_stock_units_30d, 0) -- Simple unit-based turnover
    FROM public.categories c -- Start from categories to include those with no products yet
-   LEFT JOIN CategoryAggregates ca ON c.cat_id = ca.cat_id
+   LEFT JOIN RollupMetrics rm ON c.cat_id = rm.cat_id

    ON CONFLICT (category_id) DO UPDATE SET
        category_name = EXCLUDED.category_name,
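How the `CategoryHierarchy`/`RollupMetrics` pair above rolls leaf metrics up to every ancestor, as a self-contained sketch on throwaway data:

```sql
WITH RECURSIVE cats(cat_id, parent_id) AS (
    VALUES (1, NULL::int), (2, 1), (3, 2)
), hier(cat_id, parent_id, leaf_id) AS (
    SELECT cat_id, parent_id, cat_id FROM cats        -- every category is its own leaf
    UNION ALL
    SELECT c.cat_id, c.parent_id, h.leaf_id           -- walk upward, carrying the leaf along
    FROM cats c JOIN hier h ON c.cat_id = h.parent_id
)
SELECT cat_id, array_agg(leaf_id ORDER BY leaf_id) AS contributing_leaves
FROM hier GROUP BY cat_id ORDER BY cat_id;
-- 1 -> {1,2,3}, 2 -> {2,3}, 3 -> {3}
```

Each ancestor ends up summing the metrics of every leaf beneath it, while the deepest-category assignment above keeps any single product from being counted twice at the same level.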
@@ -21,11 +21,24 @@ BEGIN
            SUM(pm.current_stock_retail) AS current_stock_retail,
            SUM(pm.on_order_qty) AS on_order_units,
            SUM(pm.on_order_cost) AS on_order_cost,
-           SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d,
-           SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d,
-           SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d,
-           SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d,
-           SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue
+           -- Only include products with valid sales data in each time period
+           COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
+           SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
+           SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
+
+           COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
+           SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
+           SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
+           SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
+           SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
+
+           COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
+           SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
+           SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
+
+           COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
+           SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
+           SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
        FROM public.product_metrics pm
        JOIN public.products p ON pm.pid = p.pid
        WHERE p.vendor IS NOT NULL AND p.vendor <> ''
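What the positive-only CASE guards in the brand and vendor aggregates above change, on toy numbers:

```sql
SELECT SUM(sales_30d)                                         AS plain_sum,      -- 6
       SUM(CASE WHEN sales_30d > 0 THEN sales_30d ELSE 0 END) AS positive_only   -- 10
FROM (VALUES (10), (0), (-4)) AS t(sales_30d);
```

Negative correction rows no longer drag the rollups down, and the new `products_with_*` counts record how many products actually contributed to each window.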
@@ -9,6 +9,8 @@ DECLARE
    _start_time TIMESTAMPTZ := clock_timestamp(); -- Time execution started
    _last_calc_time TIMESTAMPTZ;
    _target_date DATE := CURRENT_DATE; -- Always recalculate today for simplicity with hourly runs
+   _total_records INT := 0;
+   _has_orders BOOLEAN := FALSE;
BEGIN
    -- Get the timestamp before the last successful run of this module
    SELECT last_calculation_timestamp INTO _last_calc_time
@@ -17,11 +19,48 @@ BEGIN

    RAISE NOTICE 'Running % for date %. Start Time: %', _module_name, _target_date, _start_time;

-   -- Use CTEs to aggregate data for the target date
+   -- CRITICAL FIX: Check if we have any orders or receiving activity for today
+   -- to prevent creating artificial records when no real activity exists
+   SELECT EXISTS (
+       SELECT 1 FROM public.orders WHERE date::date = _target_date
+       UNION
+       SELECT 1 FROM public.purchase_orders
+       WHERE date::date = _target_date
+       OR EXISTS (
+           SELECT 1 FROM jsonb_array_elements(receiving_history) AS rh
+           WHERE jsonb_typeof(receiving_history) = 'array'
+           AND (
+               (rh->>'date')::date = _target_date OR
+               (rh->>'received_at')::date = _target_date OR
+               (rh->>'receipt_date')::date = _target_date
+           )
+       )
+       LIMIT 1
+   ) INTO _has_orders;
+
+   -- If no orders or receiving activity found for today, log and exit
+   IF NOT _has_orders THEN
+       RAISE NOTICE 'No orders or receiving activity found for % - skipping daily snapshot creation', _target_date;
+
+       -- Still update the calculate_status to prevent repeated attempts
+       UPDATE public.calculate_status
+       SET last_calculation_timestamp = _start_time
+       WHERE module_name = _module_name;
+
+       RETURN; -- Exit without creating snapshots
+   END IF;
+
+   -- IMPORTANT: First delete any existing data for this date to prevent duplication
+   DELETE FROM public.daily_product_snapshots
+   WHERE snapshot_date = _target_date;
+
+   -- Proceed with calculating daily metrics only for products with actual activity
    WITH SalesData AS (
        SELECT
            p.pid,
            p.sku,
+           -- Track number of orders to ensure we have real data
+           COUNT(o.id) as order_count,
            -- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted, -- Before discount
@@ -37,10 +76,13 @@ BEGIN
            ON p.pid = o.pid
            AND o.date::date = _target_date -- Cast to date to ensure compatibility regardless of original type
        GROUP BY p.pid, p.sku
+       HAVING COUNT(o.id) > 0 -- CRITICAL: Only include products with actual orders
    ),
    ReceivingData AS (
        SELECT
            po.pid,
+           -- Track number of POs to ensure we have real data
+           COUNT(po.po_id) as po_count,
            -- Prioritize the actual table fields over the JSON data
            COALESCE(
                -- First try the received field from purchase_orders table
@@ -89,6 +131,15 @@ BEGIN
            jsonb_typeof(po.receiving_history) = 'array' AND
            jsonb_array_length(po.receiving_history) > 0
        GROUP BY po.pid
+       -- CRITICAL: Only include products with actual receiving activity
+       HAVING COUNT(po.po_id) > 0 OR SUM(
+           CASE
+               WHEN (rh.item->>'date')::date = _target_date THEN (rh.item->>'qty')::numeric
+               WHEN (rh.item->>'received_at')::date = _target_date THEN (rh.item->>'qty')::numeric
+               WHEN (rh.item->>'receipt_date')::date = _target_date THEN (rh.item->>'qty')::numeric
+               ELSE 0
+           END
+       ) > 0
    ),
    CurrentStock AS (
        -- Select current stock values directly from products table
@@ -100,7 +151,7 @@ BEGIN
            COALESCE(regular_price, 0.00) as current_regular_price
        FROM public.products
    )
-   -- Upsert into the daily snapshots table
+   -- Now insert records, but ONLY for products with actual activity
    INSERT INTO public.daily_product_snapshots (
        snapshot_date,
        pid,
@@ -125,8 +176,8 @@ BEGIN
    )
    SELECT
        _target_date AS snapshot_date,
-       p.pid,
-       p.sku,
+       COALESCE(sd.pid, rd.pid) AS pid, -- Use sales or receiving PID
+       COALESCE(sd.sku, p.sku) AS sku, -- Get SKU from sales data or products table
        -- Inventory Metrics (Using CurrentStock)
        cs.stock_quantity AS eod_stock_quantity,
        cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
@@ -147,31 +198,15 @@ BEGIN
        COALESCE(rd.units_received, 0),
        COALESCE(rd.cost_received, 0.00),
        _start_time -- Timestamp of this calculation run
-   FROM public.products p
-   LEFT JOIN CurrentStock cs ON p.pid = cs.pid
-   LEFT JOIN SalesData sd ON p.pid = sd.pid
-   LEFT JOIN ReceivingData rd ON p.pid = rd.pid
-   WHERE p.pid IS NOT NULL -- Ensure we only insert for existing products
+   FROM SalesData sd
+   FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
+   LEFT JOIN public.products p ON COALESCE(sd.pid, rd.pid) = p.pid
+   LEFT JOIN CurrentStock cs ON COALESCE(sd.pid, rd.pid) = cs.pid
+   WHERE p.pid IS NOT NULL; -- Ensure we only insert for existing products

-   ON CONFLICT (snapshot_date, pid) DO UPDATE SET
-       sku = EXCLUDED.sku,
-       eod_stock_quantity = EXCLUDED.eod_stock_quantity,
-       eod_stock_cost = EXCLUDED.eod_stock_cost,
-       eod_stock_retail = EXCLUDED.eod_stock_retail,
-       eod_stock_gross = EXCLUDED.eod_stock_gross,
-       stockout_flag = EXCLUDED.stockout_flag,
-       units_sold = EXCLUDED.units_sold,
-       units_returned = EXCLUDED.units_returned,
-       gross_revenue = EXCLUDED.gross_revenue,
-       discounts = EXCLUDED.discounts,
-       returns_revenue = EXCLUDED.returns_revenue,
-       net_revenue = EXCLUDED.net_revenue,
-       cogs = EXCLUDED.cogs,
-       gross_regular_revenue = EXCLUDED.gross_regular_revenue,
-       profit = EXCLUDED.profit,
-       units_received = EXCLUDED.units_received,
-       cost_received = EXCLUDED.cost_received,
-       calculation_timestamp = EXCLUDED.calculation_timestamp; -- Use the timestamp from this run
+   -- Get the total number of records inserted
+   GET DIAGNOSTICS _total_records = ROW_COUNT;
+   RAISE NOTICE 'Created % daily snapshot records for % with sales/receiving activity', _total_records, _target_date;

    -- Update the status table with the timestamp from the START of this run
    UPDATE public.calculate_status
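The `FULL OUTER JOIN` + `COALESCE` key pattern that replaces the products-driven scan above, shown on toy data:

```sql
WITH sales(pid, units) AS (VALUES (1, 5), (2, 3)),
     recv(pid, qty)    AS (VALUES (2, 10), (3, 7))
SELECT COALESCE(s.pid, r.pid) AS pid, s.units, r.qty
FROM sales s
FULL OUTER JOIN recv r ON s.pid = r.pid
ORDER BY 1;
-- pid 1 (sales only), pid 2 (both), pid 3 (receiving only) each appear once;
-- products with neither sales nor receiving never enter the result
```

Driving the insert from the activity CTEs instead of from `products` is what stops the job from emitting an artificial row for every product every day.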
@@ -110,31 +110,37 @@ BEGIN
            SUM(units_sold) AS total_units_sold,
            SUM(net_revenue) AS total_net_revenue,

-           -- Specific time windows if we have enough data
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN units_sold ELSE 0 END) AS sales_7d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS revenue_7d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN units_sold ELSE 0 END) AS sales_14d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN net_revenue ELSE 0 END) AS revenue_14d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_sold ELSE 0 END) AS sales_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS revenue_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cogs ELSE 0 END) AS cogs_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN profit ELSE 0 END) AS profit_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_returned ELSE 0 END) AS returns_units_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN discounts ELSE 0 END) AS discounts_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN units_sold ELSE 0 END) AS sales_365d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN net_revenue ELSE 0 END) AS revenue_365d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_received ELSE 0 END) AS received_qty_30d,
-           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cost_received ELSE 0 END) AS received_cost_30d,
+           -- Specific time windows using date range boundaries precisely
+           -- Use _current_date - INTERVAL '6 days' to include 7 days (today + 6 previous days)
+           -- This ensures we count exactly the right number of days in each period
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_7d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_7d,
+
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_14d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_14d,
+
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN cogs ELSE 0 END) AS cogs_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN profit ELSE 0 END) AS profit_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_returned ELSE 0 END) AS returns_units_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN discounts ELSE 0 END) AS discounts_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
+
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_365d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_365d,
+
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_received ELSE 0 END) AS received_qty_30d,
+           SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN cost_received ELSE 0 END) AS received_cost_30d,

-           -- Averages (check for NULLIF 0 days in period if filtering dates)
-           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_quantity END) AS avg_stock_units_30d,
-           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_cost END) AS avg_stock_cost_30d,
-           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_retail END) AS avg_stock_retail_30d,
-           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_gross END) AS avg_stock_gross_30d,
+           -- Averages for stock levels - only include dates within the specified period
+           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_quantity END) AS avg_stock_units_30d,
+           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_cost END) AS avg_stock_cost_30d,
+           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_retail END) AS avg_stock_retail_30d,
+           AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_gross END) AS avg_stock_gross_30d,

            -- Lifetime - should match total values above
            SUM(units_sold) AS lifetime_sales,
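Why the bounds read `>= today - 6 days AND <= today`: the inclusive pair spans exactly seven calendar days, which generate_series confirms:

```sql
SELECT COUNT(*) AS days_in_window  -- 7
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, INTERVAL '1 day') AS d;
```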
@@ -150,14 +156,14 @@ BEGIN
        SELECT
            pid,
            date_first_sold,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales,
-           SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales,
+           SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue
        FROM public.daily_product_snapshots ds
        JOIN HistoricalDates hd USING(pid)
        WHERE date_first_sold IS NOT NULL
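For dates, `BETWEEN` and the explicit pair are equivalent; the rewrite here is purely for symmetry with the windows above:

```sql
SELECT d,
       d BETWEEN DATE '2024-01-01' AND DATE '2024-01-07'  AS via_between,
       d >= DATE '2024-01-01' AND d <= DATE '2024-01-07'  AS via_pair
FROM (VALUES (DATE '2024-01-04'), (DATE '2024-01-09')) AS t(d);  -- true/true, false/false
```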
@@ -246,45 +252,314 @@ BEGIN
|
||||
(sa.sales_30d / NULLIF(ci.current_stock + sa.sales_30d, 0)) * 100 AS sell_through_30d,
|
||||
|
||||
-- Forecasting intermediate values
|
||||
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) AS sales_velocity_daily,
|
||||
-- CRITICAL FIX: Use safer velocity calculation to prevent extreme values
|
||||
-- Original problematic calculation: (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0))
|
||||
-- Use available days (not stockout days) as denominator with a minimum safety value
|
||||
(sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d, -- Standard calculation
|
||||
CASE
|
||||
WHEN sa.sales_30d > 0 THEN 14.0 -- If we have sales, ensure at least 14 days denominator
|
||||
ELSE 30.0 -- If no sales, use full period
|
||||
END
|
||||
),
|
||||
0
|
||||
)
|
||||
) AS sales_velocity_daily,
|
||||
s.effective_lead_time AS config_lead_time,
|
||||
s.effective_days_of_stock AS config_days_of_stock,
|
||||
s.effective_safety_stock AS config_safety_stock,
|
||||
(s.effective_lead_time + s.effective_days_of_stock) AS planning_period_days,
|
||||
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time AS lead_time_forecast_units,
|
||||
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock AS days_of_stock_forecast_units,
|
||||
((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS planning_period_forecast_units,
|
||||
(ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time)) AS lead_time_closing_stock,
|
||||
((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS days_of_stock_closing_stock,
|
||||
(((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw,
|
||||
|
||||
-- Apply the same fix to all derived calculations
|
||||
(sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time AS lead_time_forecast_units,
|
||||
|
||||
(sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock AS days_of_stock_forecast_units,
|
||||
|
||||
(sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * (s.effective_lead_time + s.effective_days_of_stock) AS planning_period_forecast_units,
|
||||
|
||||
(ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time)) AS lead_time_closing_stock,
|
||||
|
||||
((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time))) - ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock) AS days_of_stock_closing_stock,
|
||||
|
||||
(((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time) + ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw,
|
||||
|
||||
-- Final Forecasting / Replenishment Metrics (apply CEILING/GREATEST/etc.)
|
||||
-- Note: These calculations are nested for clarity, can be simplified in prod
|
||||
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit,
|
||||
CEILING(GREATEST(0, ((((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time) + ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time) + ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time) + ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time) + ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit,
|
||||
|
||||
-- Placeholder for To Order (Apply MOQ/UOM logic here if needed, otherwise equals replenishment)
|
||||
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units,
|
||||
CEILING(GREATEST(0, ((((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time) + ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units,
|
||||
|
||||
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) AS forecast_lost_sales_units,
|
||||
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue,
|
||||
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time))) AS forecast_lost_sales_units,
|
||||
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue,
|
||||
|
||||
ci.current_stock / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS stock_cover_in_days,
|
||||
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS po_cover_in_days,
|
||||
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS sells_out_in_days,
|
||||
ci.current_stock / NULLIF((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
), 0) AS stock_cover_in_days,
|
||||
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
), 0) AS po_cover_in_days,
|
||||
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
), 0) AS sells_out_in_days,
|
||||
|
||||
-- Replenish Date: Date when stock is projected to hit safety stock, minus lead time
|
||||
CASE
|
||||
WHEN (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) > 0
|
||||
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)))::int - s.effective_lead_time
|
||||
WHEN (sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
) > 0
|
||||
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d /
|
||||
NULLIF(
|
||||
GREATEST(
|
||||
30.0 - sa.stockout_days_30d,
|
||||
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
|
||||
),
|
||||
0
|
||||
)
|
||||
))::int - s.effective_lead_time
|
||||
ELSE NULL
|
||||
END AS replenish_date,
|
||||
|
||||
        GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))::int AS overstocked_units,
        (GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost,
        (GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail,
        GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
            NULLIF(
                GREATEST(
                    30.0 - sa.stockout_days_30d,
                    CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
                ),
                0
            )
        ) * s.effective_lead_time) + ((sa.sales_30d /
            NULLIF(
                GREATEST(
                    30.0 - sa.stockout_days_30d,
                    CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
                ),
                0
            )
        ) * s.effective_days_of_stock)))::int AS overstocked_units,
        (GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
            NULLIF(
                GREATEST(
                    30.0 - sa.stockout_days_30d,
                    CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
                ),
                0
            )
        ) * s.effective_lead_time) + ((sa.sales_30d /
            NULLIF(
                GREATEST(
                    30.0 - sa.stockout_days_30d,
                    CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
                ),
                0
            )
        ) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost,
        (GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
            NULLIF(
                GREATEST(
                    30.0 - sa.stockout_days_30d,
                    CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
                ),
                0
            )
        ) * s.effective_lead_time) + ((sa.sales_30d /
            NULLIF(
                GREATEST(
                    30.0 - sa.stockout_days_30d,
                    CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
                ),
                0
            )
        ) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail,

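The overstock threshold is expected demand over the planning horizon, velocity × (lead time + desired days of stock), plus safety stock; units above that count as overstock, valued at effective cost and at retail. The same arithmetic with invented numbers:

// Overstock arithmetic from the expressions above (invented inputs).
const currentStock = 500, safetyStock = 30;
const dailyVelocity = 4;              // adjusted units/day
const leadTimeDays = 14, daysOfStock = 30;

const horizonDemand = dailyVelocity * (leadTimeDays + daysOfStock); // 176
const overstockedUnits = Math.trunc(
    Math.max(0, currentStock - safetyStock - horizonDemand)
); // 500 - 30 - 176 = 294, mirroring GREATEST(0, ...)::int

const unitCost = 2.5, unitPrice = 6.0;
console.log(overstockedUnits, overstockedUnits * unitCost, overstockedUnits * unitPrice);
// 294 units → 735 at cost, 1764 at retail
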
        -- Old Stock Flag
        (ci.created_at::date < _current_date - INTERVAL '60 day') AND

@@ -28,6 +28,8 @@ const COLUMN_MAP = {
    avgMargin30d: { dbCol: 'bm.avg_margin_30d', type: 'number' },
    // Add aliases if needed
    name: { dbCol: 'bm.brand_name', type: 'string' },
    // Add status for filtering
    status: { dbCol: 'brand_status', type: 'string' },
};

function getSafeColumnInfo(queryParamKey) {
@@ -41,11 +43,26 @@ router.get('/filter-options', async (req, res) => {
    const pool = req.app.locals.pool;
    console.log('GET /brands-aggregate/filter-options');
    try {
        // Get brand names
        const { rows: brandRows } = await pool.query(`
            SELECT DISTINCT brand_name FROM public.brand_metrics ORDER BY brand_name
        `);

        // Get status values - calculate them since they're derived
        const { rows: statusRows } = await pool.query(`
            SELECT DISTINCT
                CASE
                    WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
                    WHEN active_product_count > 0 THEN 'inactive'
                    ELSE 'pending'
                END as status
            FROM public.brand_metrics
            ORDER BY status
        `);

        res.json({
            brands: brandRows.map(r => r.brand_name),
            statuses: statusRows.map(r => r.status)
        });
    } catch (error) {
        console.error('Error fetching brand filter options:', error);
@@ -61,16 +78,17 @@ router.get('/stats', async (req, res) => {
        const { rows: [stats] } = await pool.query(`
            SELECT
                COUNT(*) AS total_brands,
                COUNT(CASE WHEN active_product_count > 0 THEN 1 END) AS active_brands,
                SUM(active_product_count) AS total_active_products,
                SUM(current_stock_cost) AS total_stock_value,
                -- Weighted Average Margin
                SUM(profit_30d) * 100.0 / NULLIF(SUM(revenue_30d), 0) AS overall_avg_margin_weighted
                -- Add other stats
            FROM public.brand_metrics bm
        `);

        res.json({
            totalBrands: parseInt(stats?.total_brands || 0),
            activeBrands: parseInt(stats?.active_brands || 0),
            totalActiveProducts: parseInt(stats?.total_active_products || 0),
            totalValue: parseFloat(stats?.total_stock_value || 0),
            avgMargin: parseFloat(stats?.overall_avg_margin_weighted || 0),
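Weighting the margin by revenue (total profit over total revenue) keeps small brands from skewing the headline number, which a simple average of per-brand margins would. A sketch of the difference, using two invented brands:

// Weighted vs. simple average margin (invented example brands).
const brands = [
    { revenue30d: 90_000, profit30d: 18_000 }, // 20% margin, big brand
    { revenue30d: 1_000,  profit30d: 600 },    // 60% margin, tiny brand
];

const weighted =
    (brands.reduce((s, b) => s + b.profit30d, 0) * 100) /
    brands.reduce((s, b) => s + b.revenue30d, 0); // 18600 / 91000 ≈ 20.4%

const simple =
    brands.reduce((s, b) => s + (b.profit30d / b.revenue30d) * 100, 0) /
    brands.length; // (20 + 60) / 2 = 40%

console.log(weighted.toFixed(1), simple.toFixed(1)); // "20.4" "40.0"
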
@@ -177,10 +195,40 @@ router.get('/', async (req, res) => {

        // --- Execute Queries ---
        const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
        const baseSql = `FROM public.brand_metrics bm ${whereClause}`;

        // Status calculation similar to vendors
        const statusCase = `
            CASE
                WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
                WHEN active_product_count > 0 THEN 'inactive'
                ELSE 'pending'
            END as brand_status
        `;

        const baseSql = `
            FROM (
                SELECT
                    bm.*,
                    ${statusCase}
                FROM public.brand_metrics bm
            ) bm
            ${whereClause}
        `;

        const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
        const dataSql = `SELECT bm.* ${baseSql} ${sortClause} LIMIT $${paramCounter} OFFSET $${paramCounter + 1}`;
        const dataSql = `
            WITH brand_data AS (
                SELECT
                    bm.*,
                    ${statusCase}
                FROM public.brand_metrics bm
            )
            SELECT bm.*
            FROM brand_data bm
            ${whereClause}
            ${sortClause}
            LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
        `;
        const dataParams = [...params, limit, offset];

        console.log("Count SQL:", countSql, params);

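Computing `brand_status` inside a derived table (and a CTE in the data query) is what lets the WHERE clause filter on it; Postgres won't resolve a SELECT-list alias in WHERE. A hypothetical rendering of the final query for a `status` filter (placeholder numbering assumed for illustration only):

// Hypothetical rendered query for ?status=active (sketch, not the actual output).
const renderedSql = `
    WITH brand_data AS (
        SELECT bm.*,
               CASE
                   WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
                   WHEN active_product_count > 0 THEN 'inactive'
                   ELSE 'pending'
               END as brand_status
        FROM public.brand_metrics bm
    )
    SELECT bm.* FROM brand_data bm
    WHERE brand_status = $1
    ORDER BY bm.brand_name ASC
    LIMIT $2 OFFSET $3
`;
console.log(renderedSql); // WHERE can now see the computed brand_status
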
@@ -1,100 +0,0 @@
const express = require('express');
const router = express.Router();

// Get all categories
router.get('/', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        // Get all categories with metrics and hierarchy info
        const { rows: categories } = await pool.query(`
            SELECT
                c.cat_id,
                c.name,
                c.type,
                c.parent_id,
                c.description,
                c.status,
                p.name as parent_name,
                p.type as parent_type,
                COALESCE(cm.product_count, 0) as product_count,
                COALESCE(cm.active_products, 0) as active_products,
                ROUND(COALESCE(cm.total_value, 0)::numeric, 3) as total_value,
                COALESCE(cm.avg_margin, 0) as avg_margin,
                COALESCE(cm.turnover_rate, 0) as turnover_rate,
                COALESCE(cm.growth_rate, 0) as growth_rate
            FROM categories c
            LEFT JOIN categories p ON c.parent_id = p.cat_id
            LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
            ORDER BY
                CASE
                    WHEN c.type = 10 THEN 1 -- sections first
                    WHEN c.type = 11 THEN 2 -- categories second
                    WHEN c.type = 12 THEN 3 -- subcategories third
                    WHEN c.type = 13 THEN 4 -- subsubcategories fourth
                    WHEN c.type = 20 THEN 5 -- themes fifth
                    WHEN c.type = 21 THEN 6 -- subthemes last
                    ELSE 7
                END,
                c.name ASC
        `);

        // Get overall stats
        const { rows: [stats] } = await pool.query(`
            SELECT
                COUNT(DISTINCT c.cat_id) as totalCategories,
                COUNT(DISTINCT CASE WHEN c.status = 'active' THEN c.cat_id END) as activeCategories,
                ROUND(COALESCE(SUM(cm.total_value), 0)::numeric, 3) as totalValue,
                COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0))::numeric, 1), 0) as avgMargin,
                COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0))::numeric, 1), 0) as avgGrowth
            FROM categories c
            LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
        `);

        // Get type counts for filtering
        const { rows: typeCounts } = await pool.query(`
            SELECT
                type,
                COUNT(*)::integer as count
            FROM categories
            GROUP BY type
            ORDER BY type
        `);

        res.json({
            categories: categories.map(cat => ({
                cat_id: cat.cat_id,
                name: cat.name,
                type: cat.type,
                parent_id: cat.parent_id,
                parent_name: cat.parent_name,
                parent_type: cat.parent_type,
                description: cat.description,
                status: cat.status,
                metrics: {
                    product_count: parseInt(cat.product_count),
                    active_products: parseInt(cat.active_products),
                    total_value: parseFloat(cat.total_value),
                    avg_margin: parseFloat(cat.avg_margin),
                    turnover_rate: parseFloat(cat.turnover_rate),
                    growth_rate: parseFloat(cat.growth_rate)
                }
            })),
            typeCounts: typeCounts.map(tc => ({
                type: tc.type,
                count: tc.count // Already cast to integer in the query
            })),
            stats: {
                totalCategories: parseInt(stats.totalcategories),
                activeCategories: parseInt(stats.activecategories),
                totalValue: parseFloat(stats.totalvalue),
                avgMargin: parseFloat(stats.avgmargin),
                avgGrowth: parseFloat(stats.avggrowth)
            }
        });
    } catch (error) {
        console.error('Error fetching categories:', error);
        res.status(500).json({ error: 'Failed to fetch categories' });
    }
});

module.exports = router;
@@ -4,7 +4,7 @@ const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed

// --- Configuration & Helpers ---
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200;
const MAX_PAGE_LIMIT = 5000; // Increase this to allow retrieving all categories in one request

// Maps query keys to DB columns in category_metrics and categories tables
const COLUMN_MAP = {
@@ -31,8 +31,8 @@ const COLUMN_MAP = {
    lifetimeRevenue: { dbCol: 'cm.lifetime_revenue', type: 'number' },
    avgMargin30d: { dbCol: 'cm.avg_margin_30d', type: 'number' },
    stockTurn30d: { dbCol: 'cm.stock_turn_30d', type: 'number' },
    // Add 'status' if filtering by category status is needed (requires JOIN)
    // status: { dbCol: 'c.status', type: 'string' },
    // Add status from the categories table for filtering
    status: { dbCol: 'c.status', type: 'string' },
};

function getSafeColumnInfo(queryParamKey) {
@@ -43,7 +43,7 @@ function getSafeColumnInfo(queryParamKey) {
const TYPE_LABELS = {
    10: 'Section', 11: 'Category', 12: 'Subcategory', 13: 'Sub-subcategory',
    1: 'Company', 2: 'Line', 3: 'Subline', 40: 'Artist', // From old schema comments
    // Add other types if needed
    20: 'Theme', 21: 'Subtheme' // Additional types from categories.js
};

// --- Route Handlers ---
@@ -66,12 +66,28 @@ router.get('/filter-options', async (req, res) => {
            label: TYPE_LABELS[r.category_type] || `Type ${r.category_type}` // Add labels
        }));

        // Add other filter options like status if needed
        // const { rows: statusRows } = await pool.query(`SELECT DISTINCT status FROM public.categories ORDER BY status`);
        // Add status options for filtering (from categories.js)
        const { rows: statusRows } = await pool.query(`
            SELECT DISTINCT status FROM public.categories ORDER BY status
        `);

        // Get type counts (from categories.js)
        const { rows: typeCounts } = await pool.query(`
            SELECT
                type,
                COUNT(*)::integer as count
            FROM categories
            GROUP BY type
            ORDER BY type
        `);

        res.json({
            types: typeOptions,
            // statuses: statusRows.map(r => r.status)
            statuses: statusRows.map(r => r.status),
            typeCounts: typeCounts.map(tc => ({
                type: tc.type,
                count: tc.count
            }))
        });
    } catch (error) {
        console.error('Error fetching category filter options:', error);
@@ -96,7 +112,7 @@ router.get('/stats', async (req, res) => {
                SUM(cm.profit_30d) * 100.0 / NULLIF(SUM(cm.revenue_30d), 0) AS overall_avg_margin_weighted,
                -- Simple Average Margin (less accurate if categories vary greatly in size)
                AVG(NULLIF(cm.avg_margin_30d, 0)) AS overall_avg_margin_simple
                -- Add SUM(revenue_30d) / SUM(revenue_30d_previous) for growth if needed
                -- Growth rate can be calculated from 30d vs previous 30d revenue if needed
            FROM public.category_metrics cm
            JOIN public.categories c ON cm.category_id = c.cat_id -- Join to check category status
        `);
@@ -107,8 +123,8 @@ router.get('/stats', async (req, res) => {
            totalActiveProducts: parseInt(stats?.total_active_products || 0),
            totalValue: parseFloat(stats?.total_stock_value || 0),
            // Choose which avg margin calculation to expose
            avgMargin: parseFloat(stats?.overall_avg_margin_weighted || stats?.overall_avg_margin_simple || 0),
            // avgGrowth: ... // Calculate if needed
            avgMargin: parseFloat(stats?.overall_avg_margin_weighted || stats?.overall_avg_margin_simple || 0)
            // Growth rate could be added if we implement the calculation
        });
    } catch (error) {
        console.error('Error fetching category stats:', error);
@@ -130,10 +146,27 @@ router.get('/', async (req, res) => {
        // --- Sorting ---
        const sortQueryKey = req.query.sort || 'categoryName';
        const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
        // Default sort order: Type then Name
        const defaultSortOrder = 'ORDER BY cm.category_type ASC, cm.category_name ASC';
        let sortClause = defaultSortOrder;
        if (sortColumnInfo) {

        // Hierarchical sorting logic from categories.js
        const hierarchicalSortOrder = `
            ORDER BY
                CASE
                    WHEN cm.category_type = 10 THEN 1 -- sections first
                    WHEN cm.category_type = 11 THEN 2 -- categories second
                    WHEN cm.category_type = 12 THEN 3 -- subcategories third
                    WHEN cm.category_type = 13 THEN 4 -- subsubcategories fourth
                    WHEN cm.category_type = 20 THEN 5 -- themes fifth
                    WHEN cm.category_type = 21 THEN 6 -- subthemes last
                    ELSE 7
                END,
                cm.category_name ASC
        `;

        // Use hierarchical sort as default
        let sortClause = hierarchicalSortOrder;

        // Override with custom sort if specified
        if (sortColumnInfo && sortQueryKey !== 'categoryName') {
            const sortColumn = sortColumnInfo.dbCol;
            const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
            const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
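The effect: requests without a `sort` key (or with `sort=categoryName`) get the hierarchical type-then-name ordering, and any other mapped key falls through to a single-column sort. A hypothetical request traced through the branch above:

// Hypothetical request: GET /api/categories?sort=avgMargin30d&order=desc
const sortColumn = 'cm.avg_margin_30d';  // from COLUMN_MAP.avgMargin30d
const sortDirection = 'DESC';            // req.query.order === 'desc'
const nullsOrder = 'NULLS LAST';         // DESC puts NULLs last
const sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;
console.log(sortClause); // ORDER BY cm.avg_margin_30d DESC NULLS LAST
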
@@ -224,18 +257,26 @@ router.get('/', async (req, res) => {
        // Need JOIN for parent_name if sorting/filtering by it, or always include for display
        const sortColumn = sortColumnInfo?.dbCol;

        // Always include the parent join for consistency
        const parentJoinSql = 'LEFT JOIN public.categories p ON cm.parent_id = p.cat_id';
        // Always include the category and parent joins for status and parent_name
        const joinSql = `
            JOIN public.categories c ON cm.category_id = c.cat_id
            LEFT JOIN public.categories p ON cm.parent_id = p.cat_id
        `;

        const baseSql = `
            FROM public.category_metrics cm
            ${parentJoinSql}
            ${joinSql}
            ${whereClause}
        `;

        const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
        const dataSql = `
            SELECT cm.*, p.name as parent_name
            SELECT
                cm.*,
                c.status,
                c.description,
                p.name as parent_name,
                p.type as parent_type
            ${baseSql}
            ${sortClause}
            LIMIT $${paramCounter} OFFSET $${paramCounter + 1}

@@ -1,108 +0,0 @@
const express = require('express');
const router = express.Router();

// Get vendors with pagination, filtering, and sorting
router.get('/', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        // Get all vendors with metrics
        const { rows: vendors } = await pool.query(`
            SELECT DISTINCT
                p.vendor as name,
                COALESCE(vm.active_products, 0) as active_products,
                COALESCE(vm.total_orders, 0) as total_orders,
                COALESCE(vm.avg_lead_time_days, 0) as avg_lead_time_days,
                COALESCE(vm.on_time_delivery_rate, 0) as on_time_delivery_rate,
                COALESCE(vm.order_fill_rate, 0) as order_fill_rate,
                CASE
                    WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75 THEN 'active'
                    WHEN COALESCE(vm.total_orders, 0) > 0 THEN 'inactive'
                    ELSE 'pending'
                END as status
            FROM products p
            LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor
            WHERE p.vendor IS NOT NULL AND p.vendor != ''
        `);

        // Get cost metrics for all vendors
        const vendorNames = vendors.map(v => v.name);
        const { rows: costMetrics } = await pool.query(`
            SELECT
                vendor,
                ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
                ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
            FROM purchase_orders
            WHERE status = 2
                AND cost_price IS NOT NULL
                AND ordered > 0
                AND vendor = ANY($1)
            GROUP BY vendor
        `, [vendorNames]);

        // Create a map of cost metrics by vendor
        const costMetricsMap = costMetrics.reduce((acc, curr) => {
            acc[curr.vendor] = {
                avg_unit_cost: curr.avg_unit_cost,
                total_spend: curr.total_spend
            };
            return acc;
        }, {});

        // Get overall stats
        const { rows: [stats] } = await pool.query(`
            SELECT
                COUNT(DISTINCT p.vendor) as totalVendors,
                COUNT(DISTINCT CASE
                    WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75
                    THEN p.vendor
                END) as activeVendors,
                COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0))::numeric, 1), 0) as avgLeadTime,
                COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0))::numeric, 1), 0) as avgFillRate,
                COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0))::numeric, 1), 0) as avgOnTimeDelivery
            FROM products p
            LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor
            WHERE p.vendor IS NOT NULL AND p.vendor != ''
        `);

        // Get overall cost metrics
        const { rows: [overallCostMetrics] } = await pool.query(`
            SELECT
                ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
                ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
            FROM purchase_orders
            WHERE status = 2
                AND cost_price IS NOT NULL
                AND ordered > 0
                AND vendor IS NOT NULL AND vendor != ''
        `);

        res.json({
            vendors: vendors.map(vendor => ({
                vendor_id: vendor.name,
                name: vendor.name,
                status: vendor.status,
                avg_lead_time_days: parseFloat(vendor.avg_lead_time_days),
                on_time_delivery_rate: parseFloat(vendor.on_time_delivery_rate),
                order_fill_rate: parseFloat(vendor.order_fill_rate),
                total_orders: parseInt(vendor.total_orders),
                active_products: parseInt(vendor.active_products),
                avg_unit_cost: parseFloat(costMetricsMap[vendor.name]?.avg_unit_cost || 0),
                total_spend: parseFloat(costMetricsMap[vendor.name]?.total_spend || 0)
            })),
            stats: {
                totalVendors: parseInt(stats.totalvendors),
                activeVendors: parseInt(stats.activevendors),
                avgLeadTime: parseFloat(stats.avgleadtime),
                avgFillRate: parseFloat(stats.avgfillrate),
                avgOnTimeDelivery: parseFloat(stats.avgontimedelivery),
                avgUnitCost: parseFloat(overallCostMetrics.avg_unit_cost),
                totalSpend: parseFloat(overallCostMetrics.total_spend)
            }
        });
    } catch (error) {
        console.error('Error fetching vendors:', error);
        res.status(500).json({ error: 'Failed to fetch vendors' });
    }
});

module.exports = router;
@@ -33,6 +33,8 @@ const COLUMN_MAP = {
    // Add aliases if needed for frontend compatibility
    name: { dbCol: 'vm.vendor_name', type: 'string' },
    leadTime: { dbCol: 'vm.avg_lead_time_days', type: 'number' },
    // Add status for filtering
    status: { dbCol: 'vendor_status', type: 'string' },
};

function getSafeColumnInfo(queryParamKey) {
@@ -46,11 +48,26 @@ router.get('/filter-options', async (req, res) => {
    const pool = req.app.locals.pool;
    console.log('GET /vendors-aggregate/filter-options');
    try {
        // Get vendor names
        const { rows: vendorRows } = await pool.query(`
            SELECT DISTINCT vendor_name FROM public.vendor_metrics ORDER BY vendor_name
        `);

        // Get status values - calculate them since they're derived
        const { rows: statusRows } = await pool.query(`
            SELECT DISTINCT
                CASE
                    WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
                    WHEN po_count_365d > 0 THEN 'inactive'
                    ELSE 'pending'
                END as status
            FROM public.vendor_metrics
            ORDER BY status
        `);

        res.json({
            vendors: vendorRows.map(r => r.vendor_name),
            statuses: statusRows.map(r => r.status)
        });
    } catch (error) {
        console.error('Error fetching vendor filter options:', error);
@@ -63,23 +80,47 @@ router.get('/stats', async (req, res) => {
    const pool = req.app.locals.pool;
    console.log('GET /vendors-aggregate/stats');
    try {
        // Get basic vendor stats from aggregate table
        const { rows: [stats] } = await pool.query(`
            SELECT
                COUNT(*) AS total_vendors,
                SUM(active_product_count) AS total_active_products,
                SUM(current_stock_cost) AS total_stock_value,
                SUM(on_order_cost) AS total_on_order_value,
                AVG(NULLIF(avg_lead_time_days, 0)) AS overall_avg_lead_time -- Simple average
                -- Add more overall stats: weighted margin, total POs etc.
                AVG(NULLIF(avg_lead_time_days, 0)) AS overall_avg_lead_time
            FROM public.vendor_metrics vm
        `);

        // Count active vendors based on criteria (from old vendors.js)
        const { rows: [activeStats] } = await pool.query(`
            SELECT
                COUNT(DISTINCT CASE
                    WHEN po_count_365d > 0
                    THEN vendor_name
                END) as active_vendors
            FROM public.vendor_metrics
        `);

        // Get overall cost metrics from purchase orders
        const { rows: [overallCostMetrics] } = await pool.query(`
            SELECT
                ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
                ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
            FROM purchase_orders
            WHERE cost_price IS NOT NULL
                AND ordered > 0
                AND vendor IS NOT NULL AND vendor != ''
        `);

        res.json({
            totalVendors: parseInt(stats?.total_vendors || 0),
            activeVendors: parseInt(activeStats?.active_vendors || 0),
            totalActiveProducts: parseInt(stats?.total_active_products || 0),
            totalValue: parseFloat(stats?.total_stock_value || 0),
            totalOnOrderValue: parseFloat(stats?.total_on_order_value || 0),
            avgLeadTime: parseFloat(stats?.overall_avg_lead_time || 0)
            avgLeadTime: parseFloat(stats?.overall_avg_lead_time || 0),
            avgUnitCost: parseFloat(overallCostMetrics?.avg_unit_cost || 0),
            totalSpend: parseFloat(overallCostMetrics?.total_spend || 0)
        });
    } catch (error) {
        console.error('Error fetching vendor stats:', error);
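Note that `avg_unit_cost` is quantity-weighted, total spend divided by total units ordered, not an average of line prices. Worked through with two invented purchase-order lines:

// Quantity-weighted average unit cost (invented PO lines).
const poLines = [
    { ordered: 100, costPrice: 2.0 }, // 200 spend
    { ordered: 10,  costPrice: 8.0 }, // 80 spend
];
const totalSpend = poLines.reduce((s, l) => s + l.ordered * l.costPrice, 0); // 280
const totalUnits = poLines.reduce((s, l) => s + l.ordered, 0);               // 110
const avgUnitCost = totalUnits ? totalSpend / totalUnits : 0; // ≈ 2.55, not (2+8)/2 = 5
console.log(avgUnitCost.toFixed(2), totalSpend.toFixed(3)); // "2.55" "280.000"
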
@@ -181,10 +222,52 @@ router.get('/', async (req, res) => {

        // --- Execute Queries ---
        const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
        const baseSql = `FROM public.vendor_metrics vm ${whereClause}`;

        // Status calculation from vendors.js
        const statusCase = `
            CASE
                WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
                WHEN po_count_365d > 0 THEN 'inactive'
                ELSE 'pending'
            END as vendor_status
        `;

        const baseSql = `
            FROM (
                SELECT
                    vm.*,
                    ${statusCase}
                FROM public.vendor_metrics vm
            ) vm
            ${whereClause}
        `;

        const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
        const dataSql = `SELECT vm.* ${baseSql} ${sortClause} LIMIT $${paramCounter} OFFSET $${paramCounter + 1}`;
        const dataSql = `
            WITH vendor_data AS (
                SELECT
                    vm.*,
                    ${statusCase}
                FROM public.vendor_metrics vm
            )
            SELECT
                vm.*,
                COALESCE(po.avg_unit_cost, 0) as avg_unit_cost,
                COALESCE(po.total_spend, 0) as total_spend
            FROM vendor_data vm
            LEFT JOIN (
                SELECT
                    vendor,
                    ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
                    ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
                FROM purchase_orders
                WHERE cost_price IS NOT NULL AND ordered > 0
                GROUP BY vendor
            ) po ON vm.vendor_name = po.vendor
            ${whereClause}
            ${sortClause}
            LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
        `;
        const dataParams = [...params, limit, offset];

        console.log("Count SQL:", countSql, params);

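The LEFT JOIN onto the pre-aggregated purchase-order subquery, together with the COALESCEs in the select list, keeps vendors that have no qualifying POs in the result with zeroed cost metrics rather than dropping them. A reduced sketch of that shape in plain JavaScript:

// Reduced sketch: LEFT JOIN keeps vendors with no matching PO rollup.
const vendorRows = [{ vendor_name: 'Acme' }, { vendor_name: 'Globex' }];
const poRollup = { Acme: { avg_unit_cost: 2.55, total_spend: 280 } };

const joined = vendorRows.map(vm => ({
    ...vm,
    avg_unit_cost: poRollup[vm.vendor_name]?.avg_unit_cost ?? 0, // COALESCE(..., 0)
    total_spend: poRollup[vm.vendor_name]?.total_spend ?? 0,
}));
console.log(joined); // Globex is still present, with zeroed cost metrics
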
@@ -13,8 +13,6 @@ const analyticsRouter = require('./routes/analytics');
const purchaseOrdersRouter = require('./routes/purchase-orders');
const configRouter = require('./routes/config');
const metricsRouter = require('./routes/metrics');
const vendorsRouter = require('./routes/vendors');
const categoriesRouter = require('./routes/categories');
const importRouter = require('./routes/import');
const aiValidationRouter = require('./routes/ai-validation');
const templatesRouter = require('./routes/templates');
@@ -103,16 +101,18 @@ async function startServer() {
    app.use('/api/purchase-orders', purchaseOrdersRouter);
    app.use('/api/config', configRouter);
    app.use('/api/metrics', metricsRouter);
    app.use('/api/vendors', vendorsRouter);
    app.use('/api/categories', categoriesRouter);
    // Use only the aggregate routes for vendors and categories
    app.use('/api/vendors', vendorsAggregateRouter);
    app.use('/api/categories', categoriesAggregateRouter);
    // Keep the aggregate-specific endpoints for backward compatibility
    app.use('/api/categories-aggregate', categoriesAggregateRouter);
    app.use('/api/vendors-aggregate', vendorsAggregateRouter);
    app.use('/api/brands-aggregate', brandsAggregateRouter);
    app.use('/api/import', importRouter);
    app.use('/api/ai-validation', aiValidationRouter);
    app.use('/api/templates', templatesRouter);
    app.use('/api/ai-prompts', aiPromptsRouter);
    app.use('/api/reusable-images', reusableImagesRouter);
    app.use('/api/categories-aggregate', categoriesAggregateRouter);
    app.use('/api/vendors-aggregate', vendorsAggregateRouter);
    app.use('/api/brands-aggregate', brandsAggregateRouter);

    // Basic health check route
    app.get('/health', (req, res) => {

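With this wiring, `/api/vendors` and `/api/vendors-aggregate` (and likewise the categories and brands pairs) resolve to the same router instance, so existing frontend calls keep working while newer code can use the shorter paths. A hypothetical client check, assuming a local dev server on port 3000 and Node 18+ for global fetch:

// Hypothetical client calls; both paths hit vendorsAggregateRouter.
(async () => {
    const base = 'http://localhost:3000'; // assumed dev host/port
    const [a, b] = await Promise.all([
        fetch(`${base}/api/vendors?limit=10`).then(r => r.json()),
        fetch(`${base}/api/vendors-aggregate?limit=10`).then(r => r.json()),
    ]);
    console.log(JSON.stringify(a) === JSON.stringify(b)); // same router, same response
})();
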