Consolidate old and new vendor and category routes, enhance the new brands route, update the frontend for all three pages, improve the hierarchy on the categories page, and fix several metric calculations

2025-04-02 14:28:18 -04:00
parent dbd0232285
commit 6051b849d6
16 changed files with 2273 additions and 880 deletions

View File

@@ -5,9 +5,9 @@ const { Pool } = require('pg'); // Assuming you use 'pg'
// --- Configuration ---
// Toggle these constants to enable/disable specific steps for testing
const RUN_DAILY_SNAPSHOTS = true;
const RUN_PRODUCT_METRICS = true;
const RUN_PERIODIC_METRICS = true;
const RUN_DAILY_SNAPSHOTS = false;
const RUN_PRODUCT_METRICS = false;
const RUN_PERIODIC_METRICS = false;
const RUN_BRAND_METRICS = true;
const RUN_VENDOR_METRICS = true;
const RUN_CATEGORY_METRICS = true;
@@ -401,27 +401,43 @@ async function executeSqlStep(config, progress) {
start_time TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE,
duration_seconds INTEGER,
status TEXT, -- 'running', 'completed', 'failed', 'cancelled'
status TEXT, -- Will be altered to enum if needed below
error_message TEXT,
additional_info JSONB
);
`);
// Ensure the calculation_status enum type exists if needed
await connection.query(`
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'calculation_status') THEN
CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled');
-- If needed, alter the existing table to use the enum
ALTER TABLE calculate_history
ALTER COLUMN status TYPE calculation_status
USING status::calculation_status;
END IF;
END
$$;
`);
// Mark previous runs of this type as cancelled
await connection.query(`
UPDATE calculate_history
SET
status = 'cancelled',
status = 'cancelled'::calculation_status,
end_time = NOW(),
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
error_message = 'Previous calculation was not completed properly or was superseded.'
WHERE status = 'running' AND additional_info->>'type' = $1::text;
WHERE status = 'running'::calculation_status AND additional_info->>'type' = $1::text;
`, [config.historyType]);
// Create history record for this run
const historyResult = await connection.query(`
INSERT INTO calculate_history (status, additional_info)
VALUES ('running', jsonb_build_object('type', $1::text, 'sql_file', $2::text))
VALUES ('running'::calculation_status, jsonb_build_object('type', $1::text, 'sql_file', $2::text))
RETURNING id;
`, [config.historyType, config.sqlFile]);
calculateHistoryId = historyResult.rows[0].id;
@@ -502,7 +518,7 @@ async function executeSqlStep(config, progress) {
SET
end_time = NOW(),
duration_seconds = $1::integer,
status = 'completed'
status = 'completed'::calculation_status
WHERE id = $2::integer;
`, [stepDuration, calculateHistoryId]);
@@ -551,7 +567,7 @@ async function executeSqlStep(config, progress) {
SET
end_time = NOW(),
duration_seconds = $1::integer,
status = $2::text,
status = $2::calculation_status,
error_message = $3::text
WHERE id = $4::integer;
`, [errorDuration, finalStatus, errorMessage.substring(0, 1000), calculateHistoryId]); // Limit error message size
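Once the runner finishes, the history it writes can be inspected directly. A minimal sketch of such a query, assuming psql access to the same database and using only the columns created above (not part of this commit):

-- Most recent calculation runs, newest first
SELECT id, status, start_time, duration_seconds,
       additional_info->>'type' AS run_type,
       error_message
FROM calculate_history
ORDER BY start_time DESC
LIMIT 10;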

View File

@@ -0,0 +1,200 @@
-- Description: Rebuilds daily product snapshots from scratch using real orders data.
-- Fixes issues with duplicated/inflated metrics.
-- Dependencies: Core import tables (products, orders, purchase_orders).
-- Frequency: One-time run to clear out problematic data.
DO $$
DECLARE
_module_name TEXT := 'rebuild_daily_snapshots';
_start_time TIMESTAMPTZ := clock_timestamp();
_date DATE;
_count INT;
_total_records INT := 0;
_begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2024-01-01'); -- Starting point for data rebuild
_end_date DATE := CURRENT_DATE;
BEGIN
RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;
-- First truncate the existing snapshots to ensure a clean slate
TRUNCATE TABLE public.daily_product_snapshots;
RAISE NOTICE 'Cleared existing snapshot data';
-- Now rebuild the snapshots day by day
_date := _begin_date;
WHILE _date <= _end_date LOOP
RAISE NOTICE 'Processing date %...', _date;
-- Create snapshots for this date
WITH SalesData AS (
SELECT
p.pid,
p.sku,
-- Count orders to ensure we only include products with real activity
COUNT(o.id) as order_count,
-- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.landing_cost_price, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,
-- Aggregate Returns (Quantity < 0 or Status = Returned)
COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN ABS(o.quantity) ELSE 0 END), 0) AS units_returned,
COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN o.price * ABS(o.quantity) ELSE 0 END), 0.00) AS returns_revenue
FROM public.products p
LEFT JOIN public.orders o
ON p.pid = o.pid
AND o.date::date = _date
GROUP BY p.pid, p.sku
HAVING COUNT(o.id) > 0 -- Only include products with actual orders for this date
),
ReceivingData AS (
SELECT
po.pid,
-- Count POs to ensure we only include products with real activity
COUNT(po.po_id) as po_count,
-- Calculate received quantity for this day
COALESCE(
-- First try the received field from purchase_orders table (if received on this date)
SUM(CASE WHEN po.date::date = _date THEN po.received ELSE 0 END),
-- Otherwise try receiving_history JSON
SUM(
CASE
WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
ELSE 0
END
),
0
) AS units_received,
COALESCE(
-- First try the actual cost_price from purchase_orders
SUM(CASE WHEN po.date::date = _date THEN po.received * po.cost_price ELSE 0 END),
-- Otherwise try receiving_history JSON
SUM(
CASE
WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
ELSE 0
END
* COALESCE((rh.item->>'cost')::numeric, po.cost_price)
),
0.00
) AS cost_received
FROM public.purchase_orders po
LEFT JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item) ON
jsonb_typeof(po.receiving_history) = 'array' AND
jsonb_array_length(po.receiving_history) > 0 AND
(
(rh.item->>'date')::date = _date OR
(rh.item->>'received_at')::date = _date OR
(rh.item->>'receipt_date')::date = _date
)
-- Include POs with the current date or relevant receiving_history
WHERE
po.date::date = _date OR
jsonb_typeof(po.receiving_history) = 'array' AND
jsonb_array_length(po.receiving_history) > 0
GROUP BY po.pid
HAVING COUNT(po.po_id) > 0 OR SUM(
CASE
WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
ELSE 0
END
) > 0
),
-- Get stock quantities for the day - note this is approximate since we're using current products data
StockData AS (
SELECT
p.pid,
p.stock_quantity,
COALESCE(p.landing_cost_price, p.cost_price, 0.00) as effective_cost_price,
COALESCE(p.price, 0.00) as current_price,
COALESCE(p.regular_price, 0.00) as current_regular_price
FROM public.products p
)
INSERT INTO public.daily_product_snapshots (
snapshot_date,
pid,
sku,
eod_stock_quantity,
eod_stock_cost,
eod_stock_retail,
eod_stock_gross,
stockout_flag,
units_sold,
units_returned,
gross_revenue,
discounts,
returns_revenue,
net_revenue,
cogs,
gross_regular_revenue,
profit,
units_received,
cost_received,
calculation_timestamp
)
SELECT
_date AS snapshot_date,
COALESCE(sd.pid, rd.pid) AS pid,
sd.sku,
-- Use current stock as approximation, since historical stock data may not be available
s.stock_quantity AS eod_stock_quantity,
s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
s.stock_quantity * s.current_price AS eod_stock_retail,
s.stock_quantity * s.current_regular_price AS eod_stock_gross,
(s.stock_quantity <= 0) AS stockout_flag,
-- Sales metrics
COALESCE(sd.units_sold, 0),
COALESCE(sd.units_returned, 0),
COALESCE(sd.gross_revenue_unadjusted, 0.00),
COALESCE(sd.discounts, 0.00),
COALESCE(sd.returns_revenue, 0.00),
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
COALESCE(sd.cogs, 0.00),
COALESCE(sd.gross_regular_revenue, 0.00),
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
-- Receiving metrics
COALESCE(rd.units_received, 0),
COALESCE(rd.cost_received, 0.00),
_start_time
FROM SalesData sd
FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
LEFT JOIN StockData s ON COALESCE(sd.pid, rd.pid) = s.pid
WHERE (COALESCE(sd.order_count, 0) > 0 OR COALESCE(rd.po_count, 0) > 0);
-- Get record count for this day
GET DIAGNOSTICS _count = ROW_COUNT;
_total_records := _total_records + _count;
RAISE NOTICE 'Added % snapshot records for date %', _count, _date;
-- Move to next day
_date := _date + INTERVAL '1 day';
END LOOP;
RAISE NOTICE 'Daily snapshot rebuild complete. Added % total records across % days.', _total_records, (_end_date - _begin_date)::integer + 1;
-- Update the status table for daily_snapshots
INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
VALUES ('daily_snapshots', _start_time)
ON CONFLICT (module_name) DO UPDATE SET last_calculation_timestamp = _start_time;
-- Now update product_metrics based on the rebuilt snapshots
RAISE NOTICE 'Triggering update of product_metrics table...';
-- Call the update_product_metrics procedure directly
-- Your system might use a different method to trigger this update
PERFORM pg_notify('recalculate_metrics', 'product_metrics');
RAISE NOTICE 'Rebuild complete. Duration: %', clock_timestamp() - _start_time;
END $$;
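Because the whole point of this rebuild is to eliminate duplicated or inflated snapshot rows, a quick sanity check afterwards is to confirm that (snapshot_date, pid) is unique. A minimal sketch, using only the table and columns shown above:

-- Should return zero rows after a clean rebuild
SELECT snapshot_date, pid, COUNT(*) AS copies
FROM public.daily_product_snapshots
GROUP BY snapshot_date, pid
HAVING COUNT(*) > 1;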

View File

@@ -6,6 +6,7 @@ DO $$
DECLARE
_module_name VARCHAR := 'brand_metrics';
_start_time TIMESTAMPTZ := clock_timestamp();
_min_revenue NUMERIC := 50.00; -- Minimum revenue threshold for margin calculation
BEGIN
RAISE NOTICE 'Running % calculation...', _module_name;
@@ -19,14 +20,26 @@ BEGIN
SUM(pm.current_stock) AS current_stock_units,
SUM(pm.current_stock_cost) AS current_stock_cost,
SUM(pm.current_stock_retail) AS current_stock_retail,
SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d,
SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d,
SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d,
SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d,
SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue
-- Only include products with valid sales data in each time period
COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
FROM public.product_metrics pm
JOIN public.products p ON pm.pid = p.pid
-- WHERE p.visible = true -- Optional: filter only visible products for brand metrics?
GROUP BY brand_group
),
AllBrands AS (
@@ -58,8 +71,14 @@ BEGIN
COALESCE(ba.profit_30d, 0.00), COALESCE(ba.cogs_30d, 0.00),
COALESCE(ba.sales_365d, 0), COALESCE(ba.revenue_365d, 0.00),
COALESCE(ba.lifetime_sales, 0), COALESCE(ba.lifetime_revenue, 0.00),
-- KPIs
(ba.profit_30d / NULLIF(ba.revenue_30d, 0)) * 100.0
-- KPIs - Calculate margin only for brands with significant revenue
CASE
WHEN COALESCE(ba.revenue_30d, 0) >= _min_revenue THEN
-- Directly calculate margin from revenue and cogs for consistency
-- This is mathematically equivalent to profit/revenue but more explicit
((COALESCE(ba.revenue_30d, 0) - COALESCE(ba.cogs_30d, 0)) / COALESCE(ba.revenue_30d, 1)) * 100.0
ELSE NULL -- No margin for low/no revenue brands
END
FROM AllBrands b
LEFT JOIN BrandAggregates ba ON b.brand_group = ba.brand_group
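The effect of the _min_revenue guard is easiest to see with numbers: a brand with revenue_30d = 40.00 sits below the 50.00 threshold and gets a NULL margin, while a brand with revenue_30d = 200.00 and cogs_30d = 150.00 gets (200 - 150) / 200 * 100 = 25%. A standalone sketch of the same CASE expression over literal values (illustrative only):

SELECT CASE
         WHEN revenue_30d >= 50.00
           THEN ((revenue_30d - cogs_30d) / COALESCE(revenue_30d, 1)) * 100.0
         ELSE NULL
       END AS margin_30d_pct
FROM (VALUES (40.00, 25.00), (200.00, 150.00)) AS t(revenue_30d, cogs_30d);
-- Row 1: NULL (below threshold); Row 2: 25.0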

View File

@@ -9,9 +9,44 @@ DECLARE
BEGIN
RAISE NOTICE 'Running % calculation...', _module_name;
WITH CategoryAggregates AS (
WITH
-- Identify the hierarchy depth for each category
CategoryDepth AS (
WITH RECURSIVE CategoryTree AS (
-- Base case: Start with categories without parents (root categories)
SELECT cat_id, name, parent_id, 0 AS depth
FROM public.categories
WHERE parent_id IS NULL
UNION ALL
-- Recursive step: Add child categories with incremented depth
SELECT c.cat_id, c.name, c.parent_id, ct.depth + 1
FROM public.categories c
JOIN CategoryTree ct ON c.parent_id = ct.cat_id
)
SELECT cat_id, depth
FROM CategoryTree
),
-- For each product, find the most specific (deepest) category it belongs to
ProductDeepestCategory AS (
SELECT
pc.pid,
pc.cat_id
FROM public.product_categories pc
JOIN CategoryDepth cd ON pc.cat_id = cd.cat_id
-- This is the key part: for each product, select only the category with maximum depth
WHERE (pc.pid, cd.depth) IN (
SELECT pc2.pid, MAX(cd2.depth)
FROM public.product_categories pc2
JOIN CategoryDepth cd2 ON pc2.cat_id = cd2.cat_id
GROUP BY pc2.pid
)
),
-- Calculate metrics only at the most specific category level for each product
CategoryAggregates AS (
SELECT
pc.cat_id,
pdc.cat_id,
-- Counts
COUNT(DISTINCT pm.pid) AS product_count,
COUNT(DISTINCT CASE WHEN pm.is_visible THEN pm.pid END) AS active_product_count,
@@ -20,19 +55,74 @@ BEGIN
SUM(pm.current_stock) AS current_stock_units,
SUM(pm.current_stock_cost) AS current_stock_cost,
SUM(pm.current_stock_retail) AS current_stock_retail,
-- Rolling Periods (Sum directly from product_metrics)
SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d,
SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d,
SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d,
SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d,
SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue,
-- Data for KPIs
SUM(pm.avg_stock_units_30d) AS total_avg_stock_units_30d -- Sum of averages (use cautiously)
-- Rolling Periods - Only include products with actual sales in each period
SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue,
-- Data for KPIs - Only average stock for products with stock
SUM(CASE WHEN pm.avg_stock_units_30d > 0 THEN pm.avg_stock_units_30d ELSE 0 END) AS total_avg_stock_units_30d
FROM public.product_metrics pm
JOIN public.product_categories pc ON pm.pid = pc.pid
-- Optional: JOIN products p ON pm.pid = p.pid if needed for filtering (e.g., only visible products)
-- WHERE p.visible = true -- Example filter
GROUP BY pc.cat_id
JOIN ProductDeepestCategory pdc ON pm.pid = pdc.pid
GROUP BY pdc.cat_id
),
-- Use a flat approach to build the complete category tree with aggregate values
CategoryTree AS (
WITH RECURSIVE CategoryHierarchy AS (
SELECT
c.cat_id,
c.name,
c.parent_id,
c.cat_id as leaf_id, -- Track original leaf category
ARRAY[c.cat_id] as path
FROM public.categories c
UNION ALL
SELECT
p.cat_id,
p.name,
p.parent_id,
ch.leaf_id, -- Keep track of the original leaf
p.cat_id || ch.path
FROM public.categories p
JOIN CategoryHierarchy ch ON p.cat_id = ch.parent_id
)
SELECT
ch.cat_id,
ch.leaf_id
FROM CategoryHierarchy ch
),
-- Now aggregate by maintaining the link between leaf categories and ancestors
RollupMetrics AS (
SELECT
ct.cat_id,
SUM(ca.product_count) AS product_count,
SUM(ca.active_product_count) AS active_product_count,
SUM(ca.replenishable_product_count) AS replenishable_product_count,
SUM(ca.current_stock_units) AS current_stock_units,
SUM(ca.current_stock_cost) AS current_stock_cost,
SUM(ca.current_stock_retail) AS current_stock_retail,
SUM(ca.sales_7d) AS sales_7d,
SUM(ca.revenue_7d) AS revenue_7d,
SUM(ca.sales_30d) AS sales_30d,
SUM(ca.revenue_30d) AS revenue_30d,
SUM(ca.cogs_30d) AS cogs_30d,
SUM(ca.profit_30d) AS profit_30d,
SUM(ca.sales_365d) AS sales_365d,
SUM(ca.revenue_365d) AS revenue_365d,
SUM(ca.lifetime_sales) AS lifetime_sales,
SUM(ca.lifetime_revenue) AS lifetime_revenue,
SUM(ca.total_avg_stock_units_30d) AS total_avg_stock_units_30d
FROM CategoryTree ct
JOIN CategoryAggregates ca ON ct.leaf_id = ca.cat_id
GROUP BY ct.cat_id
)
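-- Worked illustration of the rollup (hypothetical data, not from this database):
--   categories: 1 'Root' (parent NULL) -> 2 'Apparel' (parent 1) -> 3 'Shirts' (parent 2)
--   product_categories: product 42 is linked to both 2 and 3
--   CategoryDepth assigns depths 0, 1, 2; ProductDeepestCategory keeps only (42, 3)
--   CategoryHierarchy pairs every category with itself and each ancestor, e.g. leaf 3 -> (3,3), (2,3), (1,3)
--   RollupMetrics joins on leaf_id, so product 42's metrics are credited to 3, 2 and 1 exactly once each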
INSERT INTO public.category_metrics (
category_id, category_name, category_type, parent_id, last_calculated,
@@ -49,22 +139,22 @@ BEGIN
c.parent_id,
_start_time,
-- Base Aggregates
COALESCE(ca.product_count, 0),
COALESCE(ca.active_product_count, 0),
COALESCE(ca.replenishable_product_count, 0),
COALESCE(ca.current_stock_units, 0),
COALESCE(ca.current_stock_cost, 0.00),
COALESCE(ca.current_stock_retail, 0.00),
COALESCE(ca.sales_7d, 0), COALESCE(ca.revenue_7d, 0.00),
COALESCE(ca.sales_30d, 0), COALESCE(ca.revenue_30d, 0.00),
COALESCE(ca.profit_30d, 0.00), COALESCE(ca.cogs_30d, 0.00),
COALESCE(ca.sales_365d, 0), COALESCE(ca.revenue_365d, 0.00),
COALESCE(ca.lifetime_sales, 0), COALESCE(ca.lifetime_revenue, 0.00),
COALESCE(rm.product_count, 0),
COALESCE(rm.active_product_count, 0),
COALESCE(rm.replenishable_product_count, 0),
COALESCE(rm.current_stock_units, 0),
COALESCE(rm.current_stock_cost, 0.00),
COALESCE(rm.current_stock_retail, 0.00),
COALESCE(rm.sales_7d, 0), COALESCE(rm.revenue_7d, 0.00),
COALESCE(rm.sales_30d, 0), COALESCE(rm.revenue_30d, 0.00),
COALESCE(rm.profit_30d, 0.00), COALESCE(rm.cogs_30d, 0.00),
COALESCE(rm.sales_365d, 0), COALESCE(rm.revenue_365d, 0.00),
COALESCE(rm.lifetime_sales, 0), COALESCE(rm.lifetime_revenue, 0.00),
-- KPIs
(ca.profit_30d / NULLIF(ca.revenue_30d, 0)) * 100.0,
ca.sales_30d / NULLIF(ca.total_avg_stock_units_30d, 0) -- Simple unit-based turnover
(rm.profit_30d / NULLIF(rm.revenue_30d, 0)) * 100.0,
rm.sales_30d / NULLIF(rm.total_avg_stock_units_30d, 0) -- Simple unit-based turnover
FROM public.categories c -- Start from categories to include those with no products yet
LEFT JOIN CategoryAggregates ca ON c.cat_id = ca.cat_id
LEFT JOIN RollupMetrics rm ON c.cat_id = rm.cat_id
ON CONFLICT (category_id) DO UPDATE SET
category_name = EXCLUDED.category_name,

View File

@@ -21,11 +21,24 @@ BEGIN
SUM(pm.current_stock_retail) AS current_stock_retail,
SUM(pm.on_order_qty) AS on_order_units,
SUM(pm.on_order_cost) AS on_order_cost,
SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d,
SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d,
SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d,
SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d,
SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue
-- Only include products with valid sales data in each time period
COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
FROM public.product_metrics pm
JOIN public.products p ON pm.pid = p.pid
WHERE p.vendor IS NOT NULL AND p.vendor <> ''

View File

@@ -9,6 +9,8 @@ DECLARE
_start_time TIMESTAMPTZ := clock_timestamp(); -- Time execution started
_last_calc_time TIMESTAMPTZ;
_target_date DATE := CURRENT_DATE; -- Always recalculate today for simplicity with hourly runs
_total_records INT := 0;
_has_orders BOOLEAN := FALSE;
BEGIN
-- Get the timestamp before the last successful run of this module
SELECT last_calculation_timestamp INTO _last_calc_time
@@ -17,11 +19,48 @@ BEGIN
RAISE NOTICE 'Running % for date %. Start Time: %', _module_name, _target_date, _start_time;
-- Use CTEs to aggregate data for the target date
-- CRITICAL FIX: Check if we have any orders or receiving activity for today
-- to prevent creating artificial records when no real activity exists
SELECT EXISTS (
SELECT 1 FROM public.orders WHERE date::date = _target_date
UNION
SELECT 1 FROM public.purchase_orders
WHERE date::date = _target_date
OR EXISTS (
SELECT 1 FROM jsonb_array_elements(receiving_history) AS rh
WHERE jsonb_typeof(receiving_history) = 'array'
AND (
(rh->>'date')::date = _target_date OR
(rh->>'received_at')::date = _target_date OR
(rh->>'receipt_date')::date = _target_date
)
)
LIMIT 1
) INTO _has_orders;
-- If no orders or receiving activity found for today, log and exit
IF NOT _has_orders THEN
RAISE NOTICE 'No orders or receiving activity found for % - skipping daily snapshot creation', _target_date;
-- Still update the calculate_status to prevent repeated attempts
UPDATE public.calculate_status
SET last_calculation_timestamp = _start_time
WHERE module_name = _module_name;
RETURN; -- Exit without creating snapshots
END IF;
-- IMPORTANT: First delete any existing data for this date to prevent duplication
DELETE FROM public.daily_product_snapshots
WHERE snapshot_date = _target_date;
-- Proceed with calculating daily metrics only for products with actual activity
WITH SalesData AS (
SELECT
p.pid,
p.sku,
-- Track number of orders to ensure we have real data
COUNT(o.id) as order_count,
-- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted, -- Before discount
@@ -37,10 +76,13 @@ BEGIN
ON p.pid = o.pid
AND o.date::date = _target_date -- Cast to date to ensure compatibility regardless of original type
GROUP BY p.pid, p.sku
HAVING COUNT(o.id) > 0 -- CRITICAL: Only include products with actual orders
),
ReceivingData AS (
SELECT
po.pid,
-- Track number of POs to ensure we have real data
COUNT(po.po_id) as po_count,
-- Prioritize the actual table fields over the JSON data
COALESCE(
-- First try the received field from purchase_orders table
@@ -89,6 +131,15 @@ BEGIN
jsonb_typeof(po.receiving_history) = 'array' AND
jsonb_array_length(po.receiving_history) > 0
GROUP BY po.pid
-- CRITICAL: Only include products with actual receiving activity
HAVING COUNT(po.po_id) > 0 OR SUM(
CASE
WHEN (rh.item->>'date')::date = _target_date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'received_at')::date = _target_date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'receipt_date')::date = _target_date THEN (rh.item->>'qty')::numeric
ELSE 0
END
) > 0
),
CurrentStock AS (
-- Select current stock values directly from products table
@@ -100,7 +151,7 @@ BEGIN
COALESCE(regular_price, 0.00) as current_regular_price
FROM public.products
)
-- Upsert into the daily snapshots table
-- Now insert records, but ONLY for products with actual activity
INSERT INTO public.daily_product_snapshots (
snapshot_date,
pid,
@@ -125,8 +176,8 @@ BEGIN
)
SELECT
_target_date AS snapshot_date,
p.pid,
p.sku,
COALESCE(sd.pid, rd.pid) AS pid, -- Use sales or receiving PID
COALESCE(sd.sku, p.sku) AS sku, -- Get SKU from sales data or products table
-- Inventory Metrics (Using CurrentStock)
cs.stock_quantity AS eod_stock_quantity,
cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
@@ -147,31 +198,15 @@ BEGIN
COALESCE(rd.units_received, 0),
COALESCE(rd.cost_received, 0.00),
_start_time -- Timestamp of this calculation run
FROM public.products p
LEFT JOIN CurrentStock cs ON p.pid = cs.pid
LEFT JOIN SalesData sd ON p.pid = sd.pid
LEFT JOIN ReceivingData rd ON p.pid = rd.pid
WHERE p.pid IS NOT NULL -- Ensure we only insert for existing products
FROM SalesData sd
FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
LEFT JOIN public.products p ON COALESCE(sd.pid, rd.pid) = p.pid
LEFT JOIN CurrentStock cs ON COALESCE(sd.pid, rd.pid) = cs.pid
WHERE p.pid IS NOT NULL; -- Ensure we only insert for existing products
ON CONFLICT (snapshot_date, pid) DO UPDATE SET
sku = EXCLUDED.sku,
eod_stock_quantity = EXCLUDED.eod_stock_quantity,
eod_stock_cost = EXCLUDED.eod_stock_cost,
eod_stock_retail = EXCLUDED.eod_stock_retail,
eod_stock_gross = EXCLUDED.eod_stock_gross,
stockout_flag = EXCLUDED.stockout_flag,
units_sold = EXCLUDED.units_sold,
units_returned = EXCLUDED.units_returned,
gross_revenue = EXCLUDED.gross_revenue,
discounts = EXCLUDED.discounts,
returns_revenue = EXCLUDED.returns_revenue,
net_revenue = EXCLUDED.net_revenue,
cogs = EXCLUDED.cogs,
gross_regular_revenue = EXCLUDED.gross_regular_revenue,
profit = EXCLUDED.profit,
units_received = EXCLUDED.units_received,
cost_received = EXCLUDED.cost_received,
calculation_timestamp = EXCLUDED.calculation_timestamp; -- Use the timestamp from this run
-- Get the total number of records inserted
GET DIAGNOSTICS _total_records = ROW_COUNT;
RAISE NOTICE 'Created % daily snapshot records for % with sales/receiving activity', _total_records, _target_date;
-- Update the status table with the timestamp from the START of this run
UPDATE public.calculate_status
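The stated goal of this change is to stop writing snapshot rows on days with no real activity. A minimal follow-up check, assuming the columns shown above, that counts any remaining rows carrying neither sales, returns, nor receiving activity:

-- Should be empty (or shrink drastically) once only active products are snapshotted
SELECT snapshot_date, COUNT(*) AS inactive_rows
FROM public.daily_product_snapshots
WHERE units_sold = 0
  AND units_returned = 0
  AND units_received = 0
GROUP BY snapshot_date
ORDER BY snapshot_date;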

View File

@@ -110,31 +110,37 @@ BEGIN
SUM(units_sold) AS total_units_sold,
SUM(net_revenue) AS total_net_revenue,
-- Specific time windows if we have enough data
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN units_sold ELSE 0 END) AS sales_7d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS revenue_7d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN units_sold ELSE 0 END) AS sales_14d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN net_revenue ELSE 0 END) AS revenue_14d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_sold ELSE 0 END) AS sales_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cogs ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN profit ELSE 0 END) AS profit_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_returned ELSE 0 END) AS returns_units_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN discounts ELSE 0 END) AS discounts_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN units_sold ELSE 0 END) AS sales_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN net_revenue ELSE 0 END) AS revenue_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_received ELSE 0 END) AS received_qty_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cost_received ELSE 0 END) AS received_cost_30d,
-- Specific time windows using date range boundaries precisely
-- Use _current_date - INTERVAL '6 days' to include 7 days (today + 6 previous days)
-- This ensures we count exactly the right number of days in each period
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_7d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_7d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_14d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_14d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN cogs ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN profit ELSE 0 END) AS profit_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_returned ELSE 0 END) AS returns_units_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN discounts ELSE 0 END) AS discounts_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_received ELSE 0 END) AS received_qty_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN cost_received ELSE 0 END) AS received_cost_30d,
-- Averages (check for NULLIF 0 days in period if filtering dates)
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_quantity END) AS avg_stock_units_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_cost END) AS avg_stock_cost_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_retail END) AS avg_stock_retail_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_gross END) AS avg_stock_gross_30d,
-- Averages for stock levels - only include dates within the specified period
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_quantity END) AS avg_stock_units_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_cost END) AS avg_stock_cost_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_retail END) AS avg_stock_retail_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_gross END) AS avg_stock_gross_30d,
-- Lifetime - should match total values above
SUM(units_sold) AS lifetime_sales,
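The window boundaries spelled out in the comments above are easy to verify: an inclusive range from _current_date - INTERVAL '6 days' through _current_date covers exactly 7 snapshot dates, and the '29 days' ranges cover exactly 30. A minimal sketch with an illustrative date:

SELECT COUNT(*) AS days_in_window
FROM generate_series(DATE '2025-03-27', DATE '2025-04-02', INTERVAL '1 day');
-- 2025-03-27 through 2025-04-02 inclusive: returns 7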
@@ -150,14 +156,14 @@ BEGIN
SELECT
pid,
date_first_sold,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales,
SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue
FROM public.daily_product_snapshots ds
JOIN HistoricalDates hd USING(pid)
WHERE date_first_sold IS NOT NULL
@@ -246,45 +252,314 @@ BEGIN
(sa.sales_30d / NULLIF(ci.current_stock + sa.sales_30d, 0)) * 100 AS sell_through_30d,
-- Forecasting intermediate values
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) AS sales_velocity_daily,
-- CRITICAL FIX: Use safer velocity calculation to prevent extreme values
-- Original problematic calculation: (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0))
-- Use available days (not stockout days) as denominator with a minimum safety value
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d, -- Standard calculation
CASE
WHEN sa.sales_30d > 0 THEN 14.0 -- If we have sales, ensure at least 14 days denominator
ELSE 30.0 -- If no sales, use full period
END
),
0
)
) AS sales_velocity_daily,
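-- Illustrative numbers (not from real data): with sales_30d = 30 and stockout_days_30d = 28,
-- the original formula gives 30 / (30 - 28) = 15 units/day, inflated by the short in-stock window,
-- while the guarded formula gives 30 / GREATEST(30 - 28, 14) = 30 / 14, roughly 2.1 units/day.
-- With zero sales the denominator falls back to the full 30-day period and the velocity is simply 0.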
s.effective_lead_time AS config_lead_time,
s.effective_days_of_stock AS config_days_of_stock,
s.effective_safety_stock AS config_safety_stock,
(s.effective_lead_time + s.effective_days_of_stock) AS planning_period_days,
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time AS lead_time_forecast_units,
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock AS days_of_stock_forecast_units,
((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS planning_period_forecast_units,
(ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time)) AS lead_time_closing_stock,
((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS days_of_stock_closing_stock,
(((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw,
-- Apply the same fix to all derived calculations
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time AS lead_time_forecast_units,
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock AS days_of_stock_forecast_units,
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * (s.effective_lead_time + s.effective_days_of_stock) AS planning_period_forecast_units,
(ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time)) AS lead_time_closing_stock,
((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time))) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock) AS days_of_stock_closing_stock,
(((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw,
-- Final Forecasting / Replenishment Metrics (apply CEILING/GREATEST/etc.)
-- Note: the full velocity expression is repeated inline in each calculation for explicitness; it can be factored out (e.g., into a CTE) in production
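-- Compact restatement of the repeated expressions (same math, written once):
--   velocity      = sales_30d / GREATEST(30 - stockout_days_30d, CASE WHEN sales_30d > 0 THEN 14 ELSE 30 END)
--   need_raw      = velocity * (effective_lead_time + effective_days_of_stock)
--                   + effective_safety_stock - current_stock - on_order_qty
--   replenishment = CEILING(GREATEST(0, need_raw))::int,
--                   then multiplied by current_effective_cost, current_price, or their difference
--                   for the cost, retail, and profit variants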
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units,
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost,
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail,
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit,
CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units,
(CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost,
(CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail,
(CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit,
-- Placeholder for To Order (Apply MOQ/UOM logic here if needed, otherwise equals replenishment)
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units,
CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units,
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) AS forecast_lost_sales_units,
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue,
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time))) AS forecast_lost_sales_units,
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue,
ci.current_stock / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS stock_cover_in_days,
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS po_cover_in_days,
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS sells_out_in_days,
ci.current_stock / NULLIF((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
), 0) AS stock_cover_in_days,
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
), 0) AS po_cover_in_days,
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
), 0) AS sells_out_in_days,
-- Replenish Date: Date when stock is projected to hit safety stock, minus lead time
CASE
WHEN (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) > 0
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)))::int - s.effective_lead_time
WHEN (sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) > 0
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
))::int - s.effective_lead_time
ELSE NULL
END AS replenish_date,
GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))::int AS overstocked_units,
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost,
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail,
GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)))::int AS overstocked_units,
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost,
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail,
-- Old Stock Flag
(ci.created_at::date < _current_date - INTERVAL '60 day') AND