Add forecasting, lifecycle phases, and associated component and script changes

This commit is contained in:
2026-02-13 22:45:18 -05:00
parent f41b5ab0f6
commit 45ded53530
29 changed files with 3643 additions and 376 deletions

View File

@@ -11,6 +11,7 @@ const RUN_PERIODIC_METRICS = true;
const RUN_BRAND_METRICS = true;
const RUN_VENDOR_METRICS = true;
const RUN_CATEGORY_METRICS = true;
const RUN_LIFECYCLE_FORECASTS = true;
// Maximum execution time for the entire sequence (e.g., 90 minutes)
const MAX_EXECUTION_TIME_TOTAL = 90 * 60 * 1000;
@@ -592,6 +593,13 @@ async function runAllCalculations() {
historyType: 'product_metrics',
statusModule: 'product_metrics'
},
{
run: RUN_LIFECYCLE_FORECASTS,
name: 'Lifecycle Forecast Update',
sqlFile: 'metrics-new/update_lifecycle_forecasts.sql',
historyType: 'lifecycle_forecasts',
statusModule: 'lifecycle_forecasts'
},
{
run: RUN_PERIODIC_METRICS,
name: 'Periodic Metrics Update',

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,5 @@
numpy>=1.24
scipy>=1.10
pandas>=2.0
psycopg2-binary>=2.9
statsmodels>=0.14

View File

@@ -0,0 +1,128 @@
#!/usr/bin/env node
/**
* Forecast Pipeline Orchestrator
*
* Spawns the Python forecast engine with database credentials from the
* environment. Can be run manually, via cron, or integrated into the
* existing metrics pipeline.
*
* Usage:
* node run_forecast.js
*
* Environment:
* Reads DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_PORT from
* /var/www/html/inventory/.env (or current process env).
*/
const { spawn } = require('child_process');
const path = require('path');
const fs = require('fs');
// Load .env file if it exists (production path)
const envPaths = [
  '/var/www/html/inventory/.env',
  path.join(__dirname, '../../.env'),
];

/**
 * Parse the text of a .env file into key/value pairs.
 * Skips blank lines and `#` comments, trims whitespace around the key and
 * value, and strips one pair of matching surrounding quotes from the value
 * (standard dotenv convention — the original code left quotes in place,
 * which breaks quoted DB passwords).
 * @param {string} content - Raw text of the .env file.
 * @returns {Array<[string, string]>} Parsed [key, value] pairs.
 */
function parseEnvContent(content) {
  const pairs = [];
  for (const rawLine of content.split('\n')) {
    const trimmed = rawLine.trim();
    if (!trimmed || trimmed.startsWith('#')) continue;
    const eqIndex = trimmed.indexOf('=');
    if (eqIndex === -1) continue;
    const key = trimmed.slice(0, eqIndex).trim();
    let value = trimmed.slice(eqIndex + 1).trim();
    // Strip matching surrounding quotes: KEY="a b" -> a b
    if (
      value.length >= 2 &&
      (value[0] === '"' || value[0] === "'") &&
      value[value.length - 1] === value[0]
    ) {
      value = value.slice(1, -1);
    }
    pairs.push([key, value]);
  }
  return pairs;
}

for (const envPath of envPaths) {
  if (fs.existsSync(envPath)) {
    const envContent = fs.readFileSync(envPath, 'utf-8');
    for (const [key, value] of parseEnvContent(envContent)) {
      // Real environment variables always win over .env file values.
      if (!process.env[key]) {
        process.env[key] = value;
      }
    }
    console.log(`Loaded env from ${envPath}`);
    break; // first existing file wins
  }
}
// Verify required env vars — fail fast with a clear message rather than
// letting the Python engine crash on a missing credential later.
const required = ['DB_HOST', 'DB_USER', 'DB_PASSWORD', 'DB_NAME'];
const missing = [];
for (const key of required) {
  if (!process.env[key]) missing.push(key);
}
if (missing.length > 0) {
  console.error(`Missing required environment variables: ${missing.join(', ')}`);
  process.exit(1);
}
// Paths used to locate the Python forecast engine and its isolated
// dependency set. Everything lives beside this orchestrator script.
const SCRIPT_DIR = __dirname;
const PYTHON_SCRIPT = path.join(SCRIPT_DIR, 'forecast_engine.py'); // the actual forecast engine
const VENV_DIR = path.join(SCRIPT_DIR, 'venv'); // per-script virtualenv, created on first run
const REQUIREMENTS = path.join(SCRIPT_DIR, 'requirements.txt'); // Python deps installed into the venv
// Determine python binary (prefer venv if it exists)
function getPythonBin() {
  const venvPython = path.join(VENV_DIR, 'bin', 'python');
  // Use the venv interpreter when present; otherwise fall back to system python3.
  return fs.existsSync(venvPython) ? venvPython : 'python3';
}
// Ensure venv and dependencies are installed.
// NOTE(review): uses the POSIX 'bin/python' layout — presumably production
// is Linux-only; this path does not exist on Windows venvs.
async function ensureDependencies() {
  const venvPython = path.join(VENV_DIR, 'bin', 'python');
  if (!fs.existsSync(venvPython)) {
    console.log('Creating virtual environment...');
    await runCommand('python3', ['-m', 'venv', VENV_DIR]);
  }
  // Always run pip install — idempotent, fast when packages already present
  console.log('Checking dependencies...');
  await runCommand(venvPython, ['-m', 'pip', 'install', '--quiet', '-r', REQUIREMENTS]);
}
/**
 * Run a child process with inherited stdio and resolve when it exits
 * successfully.
 * @param {string} cmd - Executable to run.
 * @param {string[]} args - Argument vector.
 * @param {object} [options] - Extra options merged into spawn() (e.g. env).
 * @returns {Promise<void>} Resolves on exit code 0; rejects with an Error
 *   describing the exit code, the terminating signal, or the spawn failure.
 */
function runCommand(cmd, args, options = {}) {
  return new Promise((resolve, reject) => {
    const proc = spawn(cmd, args, {
      stdio: 'inherit', // stream child output straight to our stdout/stderr
      ...options,
    });
    proc.on('close', (code, signal) => {
      if (code === 0) {
        resolve();
      } else if (signal) {
        // code is null when the child was killed by a signal; the original
        // reported a misleading "exited with code null" here.
        reject(new Error(`${cmd} was terminated by signal ${signal}`));
      } else {
        reject(new Error(`${cmd} exited with code ${code}`));
      }
    });
    // 'error' fires when the process could not be spawned at all (e.g. ENOENT).
    proc.on('error', reject);
  });
}
/**
 * Entry point: ensure the Python environment is ready, then run the forecast
 * engine, logging a banner, the interpreter in use, and total duration.
 * Exits the process with code 1 if any step fails.
 */
async function main() {
  const startTime = Date.now();
  const banner = '='.repeat(60);
  console.log(banner);
  console.log(`Forecast Pipeline - ${new Date().toISOString()}`);
  console.log(banner);
  try {
    await ensureDependencies();
    const pythonBin = getPythonBin();
    console.log(`Using Python: ${pythonBin}`);
    console.log(`Running: ${PYTHON_SCRIPT}`);
    console.log('');
    await runCommand(pythonBin, [PYTHON_SCRIPT], {
      env: {
        ...process.env,
        PYTHONUNBUFFERED: '1', // Real-time output
      },
    });
    const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
    console.log('');
    console.log(banner);
    console.log(`Forecast pipeline completed in ${elapsed}s`);
    console.log(banner);
  } catch (err) {
    const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
    console.error(`Forecast pipeline FAILED after ${elapsed}s:`, err.message);
    process.exit(1);
  }
}
main();

View File

@@ -0,0 +1,51 @@
-- Forecasting Pipeline Tables
-- Run once to create the schema. Safe to re-run (IF NOT EXISTS).
-- Precomputed reference decay curves per brand (or brand x category at any hierarchy level)
CREATE TABLE IF NOT EXISTS brand_lifecycle_curves (
id SERIAL PRIMARY KEY,
brand TEXT NOT NULL,
root_category TEXT, -- NULL = brand-level fallback curve, else category name
cat_id BIGINT, -- NULL = brand-only; else category_hierarchy.cat_id for precise matching
category_level SMALLINT, -- NULL = brand-only; 0-3 = hierarchy depth
amplitude NUMERIC(10,4), -- A in: sales(t) = A * exp(-λt) + C
decay_rate NUMERIC(10,6), -- λ (higher = faster decay)
baseline NUMERIC(10,4), -- C (long-tail steady-state daily sales)
r_squared NUMERIC(6,4), -- goodness of fit
sample_size INT, -- number of products that informed this curve
median_first_week_sales NUMERIC(10,2), -- for scaling new launches
median_preorder_sales NUMERIC(10,2), -- for scaling pre-order products
median_preorder_days NUMERIC(10,2), -- median pre-order accumulation window (days)
computed_at TIMESTAMP DEFAULT NOW(),
UNIQUE(brand, cat_id)
);
-- Postgres UNIQUE constraints treat NULLs as distinct, so UNIQUE(brand, cat_id)
-- alone permits duplicate brand-level fallback rows (cat_id IS NULL) for the same
-- brand. Keep the constraint (callers may target it with ON CONFLICT) and close
-- the NULL gap with a partial unique index enforcing one fallback row per brand.
CREATE UNIQUE INDEX IF NOT EXISTS uq_blc_brand_fallback
ON brand_lifecycle_curves (brand) WHERE cat_id IS NULL;
-- Per-product daily forecasts (next 90 days, regenerated each run)
CREATE TABLE IF NOT EXISTS product_forecasts (
pid BIGINT NOT NULL,
forecast_date DATE NOT NULL,
forecast_units NUMERIC(10,2),
forecast_revenue NUMERIC(14,4),
lifecycle_phase TEXT, -- preorder, launch, decay, mature, slow_mover, dormant
forecast_method TEXT, -- lifecycle_curve, exp_smoothing, velocity, zero
confidence_lower NUMERIC(10,2), -- presumably lower bound on forecast_units — confirm with engine
confidence_upper NUMERIC(10,2), -- presumably upper bound on forecast_units — confirm with engine
generated_at TIMESTAMP DEFAULT NOW(),
PRIMARY KEY (pid, forecast_date) -- one row per product per day; natural upsert target
);
-- Indexes for date-range scans (daily series) and phase roll-ups.
CREATE INDEX IF NOT EXISTS idx_pf_date ON product_forecasts(forecast_date);
CREATE INDEX IF NOT EXISTS idx_pf_phase ON product_forecasts(lifecycle_phase);
-- Forecast run history (for monitoring)
-- One row per pipeline execution; status transitions running -> completed/failed.
CREATE TABLE IF NOT EXISTS forecast_runs (
id SERIAL PRIMARY KEY,
started_at TIMESTAMP NOT NULL,
finished_at TIMESTAMP, -- NULL while the run is still in progress
status TEXT DEFAULT 'running', -- running, completed, failed
products_forecast INT, -- products that received forecasts in this run
phase_counts JSONB, -- {"launch": 50, "decay": 200, ...}
curve_count INT, -- brand curves computed
error_message TEXT, -- presumably set when status = 'failed' — confirm with engine
duration_seconds NUMERIC(10,2)
);

View File

@@ -40,7 +40,7 @@ const sshConfig = {
password: process.env.PROD_DB_PASSWORD,
database: process.env.PROD_DB_NAME,
port: process.env.PROD_DB_PORT || 3306,
timezone: '-05:00', // Production DB always stores times in EST (UTC-5) regardless of DST
timezone: '-05:00', // mysql2 driver timezone — corrected at runtime via adjustDateForMySQL() in utils.js
},
localDbConfig: {
// PostgreSQL config for local

View File

@@ -58,8 +58,12 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
);
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
// Adjust for mysql2 driver timezone vs MySQL server timezone mismatch
const mysqlSyncTime = prodConnection.adjustDateForMySQL
? prodConnection.adjustDateForMySQL(lastSyncTime)
: lastSyncTime;
console.log('Orders: Using last sync time:', lastSyncTime);
console.log('Orders: Using last sync time:', lastSyncTime, '(adjusted:', mysqlSyncTime, ')');
// First get count of order items - Keep MySQL compatible for production
const [[{ total }]] = await prodConnection.query(`
@@ -82,7 +86,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
)
)
` : ''}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
totalOrderItems = total;
console.log('Orders: Found changes:', totalOrderItems);
@@ -116,7 +120,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
)
)
` : ''}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
console.log('Orders: Found', orderItems.length, 'order items to process');

View File

@@ -669,8 +669,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
// Setup temporary tables
await setupTemporaryTables(localConnection);
// Adjust sync time for mysql2 driver timezone vs MySQL server timezone mismatch
const mysqlSyncTime = prodConnection.adjustDateForMySQL
? prodConnection.adjustDateForMySQL(lastSyncTime)
: lastSyncTime;
// Materialize calculations into temp table
const materializeResult = await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime, startTime);
const materializeResult = await materializeCalculations(prodConnection, localConnection, incrementalUpdate, mysqlSyncTime, startTime);
// Get the list of products that need updating
const [products] = await localConnection.query(`

View File

@@ -65,8 +65,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
);
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
// Adjust for mysql2 driver timezone vs MySQL server timezone mismatch
const mysqlSyncTime = prodConnection.adjustDateForMySQL
? prodConnection.adjustDateForMySQL(lastSyncTime)
: lastSyncTime;
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
console.log('Purchase Orders: Using last sync time:', lastSyncTime, '(adjusted:', mysqlSyncTime, ')');
// Create temp tables for processing
await localConnection.query(`
@@ -254,7 +258,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
OR p.date_estin > ?
)
` : ''}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
const totalPOs = poCount[0].total;
console.log(`Found ${totalPOs} relevant purchase orders`);
@@ -291,7 +295,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
` : ''}
ORDER BY p.po_id
LIMIT ${PO_BATCH_SIZE} OFFSET ${offset}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
if (poList.length === 0) {
allPOsProcessed = true;
@@ -426,7 +430,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
OR r.date_created > ?
)
` : ''}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime] : []);
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime] : []);
const totalReceivings = receivingCount[0].total;
console.log(`Found ${totalReceivings} relevant receivings`);
@@ -463,7 +467,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
` : ''}
ORDER BY r.receiving_id
LIMIT ${PO_BATCH_SIZE} OFFSET ${offset}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime] : []);
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime] : []);
if (receivingList.length === 0) {
allReceivingsProcessed = true;

View File

@@ -48,6 +48,37 @@ async function setupConnections(sshConfig) {
stream: tunnel.stream,
});
// Detect MySQL server timezone and calculate correction for the driver timezone mismatch.
// The mysql2 driver is configured with timezone: '-05:00' (EST), but the MySQL server
// may be in a different timezone (e.g., America/Chicago = CST/CDT). When the driver
// formats a JS Date as EST and MySQL interprets it in its own timezone, DATETIME
// comparisons can be off. This correction adjusts Date objects before they're passed
// to MySQL queries so the formatted string matches the server's local time.
const [[{ utcDiffSec }]] = await prodConnection.query(
"SELECT TIMESTAMPDIFF(SECOND, NOW(), UTC_TIMESTAMP()) as utcDiffSec"
);
const mysqlOffsetMs = -utcDiffSec * 1000; // MySQL UTC offset in ms (e.g., -21600000 for CST)
const driverOffsetMs = -5 * 3600 * 1000; // Driver's -05:00 in ms (-18000000)
const tzCorrectionMs = driverOffsetMs - mysqlOffsetMs;
// CST (winter): -18000000 - (-21600000) = +3600000 (1 hour correction needed)
// CDT (summer): -18000000 - (-18000000) = 0 (no correction needed)
if (tzCorrectionMs !== 0) {
console.log(`MySQL timezone correction: ${tzCorrectionMs / 1000}s (server offset: ${utcDiffSec}s from UTC)`);
}
/**
 * Corrects a Date/timestamp for the mysql2 driver/server timezone mismatch
 * before it is used as a query parameter, so the string the driver formats
 * lines up with the timezone the server's DATETIME values are stored in.
 * Falsy inputs and a zero correction pass through unchanged.
 */
function adjustDateForMySQL(date) {
  if (!date || tzCorrectionMs === 0) return date;
  const millis = (date instanceof Date ? date : new Date(date)).getTime();
  return new Date(millis - tzCorrectionMs);
}
prodConnection.adjustDateForMySQL = adjustDateForMySQL;
// Setup PostgreSQL connection pool for local
const localPool = new Pool(sshConfig.localDbConfig);

View File

@@ -1,6 +1,7 @@
-- Description: Calculates and updates daily aggregated product data.
-- Self-healing: automatically detects and fills gaps in snapshot history.
-- Always reprocesses recent days to pick up new orders and data corrections.
-- Self-healing: detects gaps (missing snapshots), stale data (snapshot
-- aggregates that don't match source tables after backfills), and always
-- reprocesses recent days to pick up new orders and data corrections.
-- Dependencies: Core import tables (products, orders, purchase_orders), calculate_status table.
-- Frequency: Hourly (Run ~5-10 minutes after hourly data import completes).
@@ -18,28 +19,26 @@ DECLARE
BEGIN
RAISE NOTICE 'Running % script. Start Time: %', _module_name, _start_time;
-- Find the latest existing snapshot date to determine where gaps begin
-- Find the latest existing snapshot date (for logging only)
SELECT MAX(snapshot_date) INTO _latest_snapshot
FROM public.daily_product_snapshots;
-- Determine how far back to look for gaps, capped at _max_backfill_days
_backfill_start := GREATEST(
COALESCE(_latest_snapshot + 1, CURRENT_DATE - _max_backfill_days),
CURRENT_DATE - _max_backfill_days
);
-- Always scan the full backfill window to catch holes in the middle,
-- not just gaps at the end. The gap fill and stale detection queries
-- need to see the entire range to find missing or outdated snapshots.
_backfill_start := CURRENT_DATE - _max_backfill_days;
IF _latest_snapshot IS NULL THEN
RAISE NOTICE 'No existing snapshots found. Backfilling up to % days.', _max_backfill_days;
ELSIF _backfill_start > _latest_snapshot + 1 THEN
RAISE NOTICE 'Latest snapshot: %. Gap exceeds % day cap — backfilling from %. Use rebuild script for full history.',
_latest_snapshot, _max_backfill_days, _backfill_start;
ELSE
RAISE NOTICE 'Latest snapshot: %. Checking for gaps from %.', _latest_snapshot, _backfill_start;
RAISE NOTICE 'Latest snapshot: %. Scanning from % for gaps and stale data.', _latest_snapshot, _backfill_start;
END IF;
-- Process all dates that need snapshots:
-- 1. Gap fill: dates with orders/receivings but no snapshots (older than recent window)
-- 2. Recent recheck: last N days always reprocessed (picks up new orders, corrections)
-- 2. Stale detection: existing snapshots where aggregates don't match source data
-- (catches backfilled imports that arrived after snapshot was calculated)
-- 3. Recent recheck: last N days always reprocessed (picks up new orders, corrections)
FOR _target_date IN
SELECT d FROM (
-- Gap fill: find dates with activity but missing snapshots
@@ -55,6 +54,36 @@ BEGIN
SELECT 1 FROM public.daily_product_snapshots dps WHERE dps.snapshot_date = activity_dates.d
)
UNION
-- Stale detection: compare snapshot aggregates against source tables
SELECT snap_agg.snapshot_date AS d
FROM (
SELECT snapshot_date,
COALESCE(SUM(units_received), 0)::bigint AS snap_received,
COALESCE(SUM(units_sold), 0)::bigint AS snap_sold
FROM public.daily_product_snapshots
WHERE snapshot_date >= _backfill_start
AND snapshot_date < CURRENT_DATE - _recent_recheck_days
GROUP BY snapshot_date
) snap_agg
LEFT JOIN (
SELECT received_date::date AS d, SUM(qty_each)::bigint AS actual_received
FROM public.receivings
WHERE received_date::date >= _backfill_start
AND received_date::date < CURRENT_DATE - _recent_recheck_days
GROUP BY received_date::date
) recv_agg ON snap_agg.snapshot_date = recv_agg.d
LEFT JOIN (
SELECT date::date AS d,
SUM(CASE WHEN quantity > 0 AND COALESCE(status, 'pending') NOT IN ('canceled', 'returned')
THEN quantity ELSE 0 END)::bigint AS actual_sold
FROM public.orders
WHERE date::date >= _backfill_start
AND date::date < CURRENT_DATE - _recent_recheck_days
GROUP BY date::date
) orders_agg ON snap_agg.snapshot_date = orders_agg.d
WHERE snap_agg.snap_received != COALESCE(recv_agg.actual_received, 0)
OR snap_agg.snap_sold != COALESCE(orders_agg.actual_sold, 0)
UNION
-- Recent days: always reprocess
SELECT d::date
FROM generate_series(
@@ -66,11 +95,18 @@ BEGIN
ORDER BY d
LOOP
_days_processed := _days_processed + 1;
RAISE NOTICE 'Processing date: % [%/%]', _target_date, _days_processed,
_days_processed; -- count not known ahead of time, but shows progress
-- Classify why this date is being processed (for logging)
IF _target_date >= CURRENT_DATE - _recent_recheck_days THEN
RAISE NOTICE 'Processing date: % [recent recheck]', _target_date;
ELSIF NOT EXISTS (SELECT 1 FROM public.daily_product_snapshots WHERE snapshot_date = _target_date) THEN
RAISE NOTICE 'Processing date: % [gap fill — no existing snapshot]', _target_date;
ELSE
RAISE NOTICE 'Processing date: % [stale data — snapshot aggregates mismatch source]', _target_date;
END IF;
-- IMPORTANT: First delete any existing data for this date to prevent duplication
DELETE FROM public.daily_product_snapshots
DELETE FROM public.daily_product_snapshots
WHERE snapshot_date = _target_date;
-- Proceed with calculating daily metrics only for products with actual activity

View File

@@ -0,0 +1,131 @@
-- Description: Populates lifecycle forecast columns on product_metrics from product_forecasts.
-- Runs AFTER update_product_metrics.sql so that lead time / days of stock settings are available.
-- Dependencies: product_metrics (fully populated), product_forecasts, settings tables.
-- Frequency: After each metrics run and/or after forecast engine runs.
DO $$
DECLARE
_module_name TEXT := 'lifecycle_forecasts';
_start_time TIMESTAMPTZ := clock_timestamp();
_updated INT; -- ROW_COUNT scratch, reused by each step for progress logging
BEGIN
RAISE NOTICE 'Running % module. Start Time: %', _module_name, _start_time;
-- Step 1: Set lifecycle_phase from product_forecasts (one phase per product)
-- DISTINCT ON (pid) with ORDER BY pid, forecast_date keeps the phase from the
-- EARLIEST forecast row per product. NOTE(review): assumes forecasts start at
-- or near CURRENT_DATE so the earliest row reflects the current phase — confirm.
-- The IS DISTINCT FROM guard skips rows that would not change (NULL-safe,
-- avoids rewriting unchanged tuples).
UPDATE product_metrics pm
SET lifecycle_phase = sub.lifecycle_phase
FROM (
SELECT DISTINCT ON (pid) pid, lifecycle_phase
FROM product_forecasts
ORDER BY pid, forecast_date
) sub
WHERE pm.pid = sub.pid
AND (pm.lifecycle_phase IS DISTINCT FROM sub.lifecycle_phase);
GET DIAGNOSTICS _updated = ROW_COUNT;
RAISE NOTICE 'Updated lifecycle_phase for % products', _updated;
-- Step 2: Compute lifecycle-based lead time and planning period forecasts
-- Uses each product's configured lead time and days of stock
-- Settings resolve product-level -> vendor-level -> global setting -> hard-coded
-- fallback (14 days lead time / 30 days of stock). The FILTER clauses sum
-- forecast units within each product's own horizon.
WITH forecast_sums AS (
SELECT
pf.pid,
SUM(pf.forecast_units) FILTER (
WHERE pf.forecast_date <= CURRENT_DATE + s.effective_lead_time
) AS lt_forecast,
SUM(pf.forecast_units) FILTER (
WHERE pf.forecast_date <= CURRENT_DATE + s.effective_lead_time + s.effective_days_of_stock
) AS pp_forecast
FROM product_forecasts pf
JOIN (
SELECT
p.pid,
COALESCE(sp.lead_time_days, sv.default_lead_time_days,
(SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_lead_time_days'), 14
) AS effective_lead_time,
COALESCE(sp.days_of_stock, sv.default_days_of_stock,
(SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_days_of_stock'), 30
) AS effective_days_of_stock
FROM products p
LEFT JOIN settings_product sp ON p.pid = sp.pid
LEFT JOIN settings_vendor sv ON p.vendor = sv.vendor
) s ON s.pid = pf.pid
WHERE pf.forecast_date >= CURRENT_DATE
GROUP BY pf.pid
)
UPDATE product_metrics pm
SET
lifecycle_lead_time_forecast = COALESCE(fs.lt_forecast, 0),
lifecycle_planning_period_forecast = COALESCE(fs.pp_forecast, 0)
FROM forecast_sums fs
WHERE pm.pid = fs.pid
AND (pm.lifecycle_lead_time_forecast IS DISTINCT FROM COALESCE(fs.lt_forecast, 0)
OR pm.lifecycle_planning_period_forecast IS DISTINCT FROM COALESCE(fs.pp_forecast, 0));
GET DIAGNOSTICS _updated = ROW_COUNT;
RAISE NOTICE 'Updated lifecycle forecasts for % products', _updated;
-- Step 3: Reclassify demand_pattern using residual CV (de-trended)
-- For launch/decay products, raw CV is high because of expected lifecycle decay.
-- We subtract the expected brand curve value to get residuals, then compute CV on those.
-- Products that track their brand curve closely → low residual CV → "stable"
-- Products with erratic deviations from curve → higher residual CV → "variable"/"sporadic"
WITH product_curve AS (
-- Get each product's brand curve and age
-- Only the brand-level fallback curve (root_category IS NULL) is used here.
SELECT
pm.pid,
pm.lifecycle_phase,
pm.date_first_received,
blc.amplitude,
blc.decay_rate,
blc.baseline
FROM product_metrics pm
JOIN products p ON p.pid = pm.pid
LEFT JOIN brand_lifecycle_curves blc
ON blc.brand = pm.brand
AND blc.root_category IS NULL -- brand-only curve
WHERE pm.lifecycle_phase IN ('launch', 'decay')
AND pm.date_first_received IS NOT NULL
AND blc.amplitude IS NOT NULL
),
daily_residuals AS (
-- Compute residual = actual - expected for each snapshot day
-- Curve params are in WEEKLY units; divide by 7 to get daily expected
-- Precedence note: "/ 7.0" binds tighter than "-", so it divides only the
-- curve term — residual = units_sold - expected, as intended.
SELECT
dps.pid,
dps.units_sold,
(pc.amplitude * EXP(-pc.decay_rate * (dps.snapshot_date - pc.date_first_received)::numeric / 7.0) + pc.baseline) / 7.0 AS expected,
dps.units_sold - (pc.amplitude * EXP(-pc.decay_rate * (dps.snapshot_date - pc.date_first_received)::numeric / 7.0) + pc.baseline) / 7.0 AS residual
FROM daily_product_snapshots dps
JOIN product_curve pc ON pc.pid = dps.pid
WHERE dps.snapshot_date >= CURRENT_DATE - INTERVAL '29 days'
AND dps.snapshot_date <= CURRENT_DATE
),
residual_cv AS (
-- Require at least 7 snapshot days and a non-trivial expected level;
-- GREATEST(..., 0.1) floors the denominator so near-zero expected sales
-- do not blow the CV up.
SELECT
pid,
AVG(units_sold) AS avg_sales,
CASE WHEN COUNT(*) >= 7 AND AVG(ABS(expected)) > 0.01 THEN
STDDEV_POP(residual) / GREATEST(AVG(ABS(expected)), 0.1)
END AS res_cv
FROM daily_residuals
GROUP BY pid
)
-- classify_demand_pattern() is defined elsewhere in the schema — it maps
-- (avg sales, CV) to the demand_pattern labels.
UPDATE product_metrics pm
SET demand_pattern = classify_demand_pattern(rc.avg_sales, rc.res_cv)
FROM residual_cv rc
WHERE pm.pid = rc.pid
AND rc.res_cv IS NOT NULL
AND pm.demand_pattern IS DISTINCT FROM classify_demand_pattern(rc.avg_sales, rc.res_cv);
GET DIAGNOSTICS _updated = ROW_COUNT;
RAISE NOTICE 'Reclassified demand_pattern for % launch/decay products', _updated;
-- Update tracking
INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
VALUES (_module_name, clock_timestamp())
ON CONFLICT (module_name) DO UPDATE SET
last_calculation_timestamp = EXCLUDED.last_calculation_timestamp;
RAISE NOTICE '% module complete. Duration: %', _module_name, clock_timestamp() - _start_time;
END $$;

View File

@@ -67,6 +67,23 @@ router.get('/stock/metrics', async (req, res) => {
ORDER BY CASE WHEN brand = 'Other' THEN 1 ELSE 0 END, stock_cost DESC
`);
// Stock breakdown by lifecycle phase (lifecycle_phase populated by update_lifecycle_forecasts.sql)
const { rows: phaseStock } = await executeQuery(`
SELECT
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
COUNT(DISTINCT pm.pid)::integer AS products,
COALESCE(SUM(pm.current_stock), 0)::integer AS units,
ROUND(COALESCE(SUM(pm.current_stock_cost), 0)::numeric, 2) AS cost,
ROUND(COALESCE(SUM(pm.current_stock_retail), 0)::numeric, 2) AS retail
FROM product_metrics pm
WHERE pm.is_visible = true AND pm.current_stock > 0
AND COALESCE(pm.preorder_count, 0) = 0
GROUP BY pm.lifecycle_phase
ORDER BY cost DESC
`);
const phaseTotalCost = phaseStock.reduce((s, r) => s + (parseFloat(r.cost) || 0), 0);
// Format the response with explicit type conversion
const response = {
totalProducts: parseInt(stockMetrics.total_products) || 0,
@@ -80,7 +97,17 @@ router.get('/stock/metrics', async (req, res) => {
units: parseInt(v.stock_units) || 0,
cost: parseFloat(v.stock_cost) || 0,
retail: parseFloat(v.stock_retail) || 0
}))
})),
phaseStock: phaseStock.filter(r => parseFloat(r.cost) > 0).map(r => ({
phase: r.phase,
products: parseInt(r.products) || 0,
units: parseInt(r.units) || 0,
cost: parseFloat(r.cost) || 0,
retail: parseFloat(r.retail) || 0,
percentage: phaseTotalCost > 0
? parseFloat(((parseFloat(r.cost) / phaseTotalCost) * 100).toFixed(1))
: 0,
})),
};
res.json(response);
@@ -208,12 +235,39 @@ router.get('/replenishment/metrics', async (req, res) => {
LIMIT 5
`);
// Replenishment breakdown by lifecycle phase (lifecycle_phase on product_metrics)
const { rows: phaseReplenish } = await executeQuery(`
SELECT
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
COUNT(DISTINCT pm.pid)::integer AS products,
COALESCE(SUM(pm.replenishment_units), 0)::integer AS units,
ROUND(COALESCE(SUM(pm.replenishment_cost), 0)::numeric, 2) AS cost
FROM product_metrics pm
WHERE pm.is_visible = true
AND pm.is_replenishable = true
AND (pm.status IN ('Critical', 'Reorder') OR pm.current_stock < 0)
AND pm.replenishment_units > 0
GROUP BY pm.lifecycle_phase
ORDER BY cost DESC
`);
const replenishTotalCost = phaseReplenish.reduce((s, r) => s + (parseFloat(r.cost) || 0), 0);
// Format response
const response = {
productsToReplenish: parseInt(metrics.products_to_replenish) || 0,
unitsToReplenish: parseInt(metrics.total_units_needed) || 0,
replenishmentCost: parseFloat(metrics.total_cost) || 0,
replenishmentRetail: parseFloat(metrics.total_retail) || 0,
phaseBreakdown: phaseReplenish.filter(r => parseFloat(r.cost) > 0).map(r => ({
phase: r.phase,
products: parseInt(r.products) || 0,
units: parseInt(r.units) || 0,
cost: parseFloat(r.cost) || 0,
percentage: replenishTotalCost > 0
? parseFloat(((parseFloat(r.cost) / replenishTotalCost) * 100).toFixed(1))
: 0,
})),
topVariants: variants.map(v => ({
id: v.pid,
title: v.title,
@@ -234,165 +288,499 @@ router.get('/replenishment/metrics', async (req, res) => {
});
// GET /dashboard/forecast/metrics
// Returns sales forecasts for specified period
// Reads from product_forecasts table (lifecycle-aware forecasting pipeline).
// Falls back to velocity-based projection if forecast table is empty.
router.get('/forecast/metrics', async (req, res) => {
// Default to last 30 days if no date range provided
const today = new Date();
const thirtyDaysAgo = new Date(today);
thirtyDaysAgo.setDate(today.getDate() - 30);
const startDate = req.query.startDate || thirtyDaysAgo.toISOString();
const endDate = req.query.endDate || today.toISOString();
const thirtyDaysOut = new Date(today);
thirtyDaysOut.setDate(today.getDate() + 30);
const startDate = req.query.startDate ? new Date(req.query.startDate) : today;
const endDate = req.query.endDate ? new Date(req.query.endDate) : thirtyDaysOut;
const startISO = startDate.toISOString().split('T')[0];
const endISO = endDate.toISOString().split('T')[0];
const days = Math.max(1, Math.round((endDate - startDate) / (1000 * 60 * 60 * 24)));
try {
// Check if sales_forecasts table exists and has data
const { rows: tableCheck } = await executeQuery(`
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'sales_forecasts'
) as table_exists
`);
const tableExists = tableCheck[0].table_exists;
if (!tableExists) {
console.log('sales_forecasts table does not exist, returning dummy data');
// Generate dummy data for forecast
const days = 30;
const dummyData = [];
const startDateObj = new Date(startDate);
for (let i = 0; i < days; i++) {
const currentDate = new Date(startDateObj);
currentDate.setDate(startDateObj.getDate() + i);
// Use sales data with slight randomization
const baseValue = 500 + Math.random() * 200;
dummyData.push({
date: currentDate.toISOString().split('T')[0],
revenue: parseFloat((baseValue + Math.random() * 100).toFixed(2)),
confidence: parseFloat((0.7 + Math.random() * 0.2).toFixed(2))
});
// Check if product_forecasts has data
const { rows: [countRow] } = await executeQuery(
`SELECT COUNT(*) AS cnt FROM product_forecasts WHERE forecast_date >= $1 LIMIT 1`,
[startISO]
);
const hasForecastData = parseInt(countRow.cnt) > 0;
if (hasForecastData) {
// --- Read from lifecycle-aware forecast pipeline ---
// Find the last date covered by product_forecasts
const { rows: [horizonRow] } = await executeQuery(
`SELECT MAX(forecast_date) AS max_date FROM product_forecasts`
);
const forecastHorizonISO = horizonRow.max_date instanceof Date
? horizonRow.max_date.toISOString().split('T')[0]
: horizonRow.max_date;
const forecastHorizon = new Date(forecastHorizonISO + 'T00:00:00');
const clampedEndISO = endISO <= forecastHorizonISO ? endISO : forecastHorizonISO;
const needsExtrapolation = endISO > forecastHorizonISO;
// Totals from actual forecast data (clamped to horizon)
const { rows: [totals] } = await executeQuery(`
SELECT
COALESCE(SUM(pf.forecast_units), 0) AS total_units,
COALESCE(SUM(pf.forecast_revenue), 0) AS total_revenue,
COUNT(DISTINCT pf.pid) FILTER (
WHERE pf.lifecycle_phase IN ('launch','decay','mature','preorder','slow_mover')
) AS active_products,
COUNT(DISTINCT pf.pid) FILTER (
WHERE pf.forecast_method = 'lifecycle_curve'
) AS curve_products
FROM product_forecasts pf
JOIN product_metrics pm ON pm.pid = pf.pid
WHERE pm.is_visible = true
AND pf.forecast_date BETWEEN $1 AND $2
`, [startISO, clampedEndISO]);
const active = parseInt(totals.active_products) || 1;
const curveProducts = parseInt(totals.curve_products) || 0;
const confidenceLevel = parseFloat((curveProducts / active).toFixed(2));
// Daily series from actual forecast
const { rows: dailyRows } = await executeQuery(`
SELECT pf.forecast_date AS date,
SUM(pf.forecast_units) AS units,
SUM(pf.forecast_revenue) AS revenue
FROM product_forecasts pf
JOIN product_metrics pm ON pm.pid = pf.pid
WHERE pm.is_visible = true
AND pf.forecast_date BETWEEN $1 AND $2
GROUP BY pf.forecast_date
ORDER BY pf.forecast_date
`, [startISO, clampedEndISO]);
const dailyForecasts = dailyRows.map(d => ({
date: d.date instanceof Date ? d.date.toISOString().split('T')[0] : d.date,
units: parseFloat(d.units) || 0,
revenue: parseFloat(d.revenue) || 0,
confidence: confidenceLevel,
}));
// Daily forecast broken down by lifecycle phase (for stacked chart)
const { rows: dailyPhaseRows } = await executeQuery(`
SELECT pf.forecast_date AS date,
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'preorder'), 0) AS preorder,
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'launch'), 0) AS launch,
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'decay'), 0) AS decay,
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'mature'), 0) AS mature,
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'slow_mover'), 0) AS slow_mover,
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'dormant'), 0) AS dormant
FROM product_forecasts pf
JOIN product_metrics pm ON pm.pid = pf.pid
WHERE pm.is_visible = true
AND pf.forecast_date BETWEEN $1 AND $2
GROUP BY pf.forecast_date
ORDER BY pf.forecast_date
`, [startISO, clampedEndISO]);
// --- New product pipeline contribution ---
// Average daily revenue from new product introductions (last 12 months).
// Only used for EXTRAPOLATED days beyond the forecast horizon — within the
// 90-day horizon, preorder/launch products are already forecast by lifecycle curves.
const { rows: [pipeline] } = await executeQuery(`
SELECT
COALESCE(AVG(monthly_revenue), 0) AS avg_monthly_revenue
FROM (
SELECT DATE_TRUNC('month', pm.date_first_received) AS month,
COUNT(*) AS monthly_products,
SUM(pm.first_30_days_revenue) AS monthly_revenue
FROM product_metrics pm
WHERE pm.is_visible = true
AND pm.date_first_received >= NOW() - INTERVAL '12 months'
AND pm.date_first_received < DATE_TRUNC('month', NOW())
GROUP BY 1
) sub
`);
// Compute average product price for converting revenue to unit estimates
const { rows: [priceRow] } = await executeQuery(`
SELECT COALESCE(AVG(current_price) FILTER (WHERE current_price > 0 AND sales_30d > 0), 7) AS avg_price
FROM product_metrics
WHERE is_visible = true
`);
const avgPrice = parseFloat(priceRow.avg_price) || 7;
// Daily new-product revenue = (avg products/month × avg 30d revenue/product) / 30
const avgMonthlyRevenue = parseFloat(pipeline.avg_monthly_revenue) || 0;
const newProductDailyRevenue = avgMonthlyRevenue / 30;
const newProductDailyUnits = newProductDailyRevenue / avgPrice;
let totalRevenue = dailyForecasts.reduce((sum, d) => sum + d.revenue, 0);
let totalUnits = dailyForecasts.reduce((sum, d) => sum + d.units, 0);
// --- Extrapolation beyond forecast horizon (rest-of-year) ---
if (needsExtrapolation) {
// Monthly seasonal indices from last 12 months of actual revenue
const { rows: seasonalRows } = await executeQuery(`
SELECT EXTRACT(MONTH FROM o.date)::int AS month,
SUM(o.quantity * o.price) AS revenue
FROM orders o
WHERE o.canceled IS DISTINCT FROM TRUE
AND o.date >= NOW() - INTERVAL '12 months'
GROUP BY 1
`);
const monthlyRevenue = {};
let totalMonthlyRev = 0;
for (const r of seasonalRows) {
monthlyRevenue[r.month] = parseFloat(r.revenue) || 0;
totalMonthlyRev += monthlyRevenue[r.month];
}
const avgMonthRev = totalMonthlyRev / Math.max(Object.keys(monthlyRevenue).length, 1);
const seasonalIndex = {};
for (let m = 1; m <= 12; m++) {
seasonalIndex[m] = monthlyRevenue[m] ? monthlyRevenue[m] / avgMonthRev : 1.0;
}
// Baseline: avg daily revenue from last 7 days of forecast (mature tail)
const tailDays = dailyForecasts.slice(-7);
const baselineDaily = tailDays.length > 0
? tailDays.reduce((s, d) => s + d.revenue, 0) / tailDays.length
: 0;
// Generate estimated days beyond horizon
const extraStart = new Date(forecastHorizon);
extraStart.setDate(extraStart.getDate() + 1);
const extraEnd = new Date(endISO + 'T00:00:00');
for (let d = new Date(extraStart); d <= extraEnd; d.setDate(d.getDate() + 1)) {
const month = d.getMonth() + 1;
const seasonal = seasonalIndex[month] || 1.0;
// Beyond horizon: existing product tail + new product pipeline
const estRevenue = baselineDaily * seasonal + newProductDailyRevenue;
const estUnits = (baselineDaily * seasonal) / avgPrice + newProductDailyUnits;
dailyForecasts.push({
date: d.toISOString().split('T')[0],
units: parseFloat(estUnits.toFixed(1)),
revenue: parseFloat(estRevenue.toFixed(2)),
confidence: 0, // lower confidence for extrapolated data
estimated: true,
});
totalRevenue += estRevenue;
totalUnits += estUnits;
}
}
// Return dummy response
const response = {
forecastSales: 500,
forecastRevenue: 25000,
confidenceLevel: 0.85,
dailyForecasts: dummyData,
categoryForecasts: [
{ category: "Electronics", units: 120, revenue: 6000, confidence: 0.9 },
{ category: "Clothing", units: 80, revenue: 4000, confidence: 0.8 },
{ category: "Home Goods", units: 150, revenue: 7500, confidence: 0.75 },
{ category: "Others", units: 150, revenue: 7500, confidence: 0.7 }
]
};
return res.json(response);
}
// If the table exists, try to query it with proper error handling
try {
// Get summary metrics
const { rows: metrics } = await executeQuery(`
SELECT
COALESCE(SUM(forecast_units), 0) as total_forecast_units,
COALESCE(SUM(forecast_revenue), 0) as total_forecast_revenue,
COALESCE(AVG(confidence_level), 0) as overall_confidence
FROM sales_forecasts
WHERE forecast_date BETWEEN $1 AND $2
`, [startDate, endDate]);
// Get daily forecasts
const { rows: dailyForecasts } = await executeQuery(`
SELECT
DATE(forecast_date) as date,
COALESCE(SUM(forecast_revenue), 0) as revenue,
COALESCE(AVG(confidence_level), 0) as confidence
FROM sales_forecasts
WHERE forecast_date BETWEEN $1 AND $2
GROUP BY DATE(forecast_date)
ORDER BY date
`, [startDate, endDate]);
// Get category forecasts
const { rows: categoryForecasts } = await executeQuery(`
SELECT
c.name as category,
COALESCE(SUM(cf.forecast_units), 0) as units,
COALESCE(SUM(cf.forecast_revenue), 0) as revenue,
COALESCE(AVG(cf.confidence_level), 0) as confidence
FROM category_forecasts cf
JOIN categories c ON cf.category_id = c.cat_id
WHERE cf.forecast_date BETWEEN $1 AND $2
GROUP BY c.cat_id, c.name
// Lifecycle phase breakdown (from actual forecast data only)
const { rows: phaseRows } = await executeQuery(`
SELECT pf.lifecycle_phase AS phase,
COUNT(DISTINCT pf.pid) AS products,
COALESCE(SUM(pf.forecast_units), 0) AS units,
COALESCE(SUM(pf.forecast_revenue), 0) AS revenue
FROM product_forecasts pf
JOIN product_metrics pm ON pm.pid = pf.pid
WHERE pm.is_visible = true
AND pf.forecast_date BETWEEN $1 AND $2
GROUP BY pf.lifecycle_phase
ORDER BY revenue DESC
`, [startDate, endDate]);
`, [startISO, clampedEndISO]);
// Format response
const response = {
forecastSales: parseInt(metrics[0]?.total_forecast_units) || 0,
forecastRevenue: parseFloat(metrics[0]?.total_forecast_revenue) || 0,
confidenceLevel: parseFloat(metrics[0]?.overall_confidence) || 0,
dailyForecasts: dailyForecasts.map(d => ({
date: d.date,
revenue: parseFloat(d.revenue) || 0,
confidence: parseFloat(d.confidence) || 0
})),
categoryForecasts: categoryForecasts.map(c => ({
category: c.category,
units: parseInt(c.units) || 0,
revenue: parseFloat(c.revenue) || 0,
confidence: parseFloat(c.confidence) || 0
}))
};
const phaseTotal = phaseRows.reduce((s, r) => s + (parseFloat(r.revenue) || 0), 0);
const phaseBreakdown = phaseRows
.filter(r => parseFloat(r.revenue) > 0)
.map(r => ({
phase: r.phase,
products: parseInt(r.products) || 0,
units: Math.round(parseFloat(r.units) || 0),
revenue: parseFloat(parseFloat(r.revenue).toFixed(2)),
percentage: phaseTotal > 0
? parseFloat(((parseFloat(r.revenue) / phaseTotal) * 100).toFixed(1))
: 0,
}));
res.json(response);
} catch (err) {
console.error('Error with forecast tables structure, returning dummy data:', err);
// Generate dummy data for forecast as fallback
const days = 30;
const dummyData = [];
const startDateObj = new Date(startDate);
for (let i = 0; i < days; i++) {
const currentDate = new Date(startDateObj);
currentDate.setDate(startDateObj.getDate() + i);
const baseValue = 500 + Math.random() * 200;
dummyData.push({
date: currentDate.toISOString().split('T')[0],
revenue: parseFloat((baseValue + Math.random() * 100).toFixed(2)),
confidence: parseFloat((0.7 + Math.random() * 0.2).toFixed(2))
});
// Category breakdown (from actual forecast data only)
const { rows: categoryRows } = await executeQuery(`
WITH product_root_category AS (
SELECT DISTINCT ON (pf.pid)
pf.pid, ch.name AS category
FROM product_forecasts pf
JOIN product_metrics pm ON pm.pid = pf.pid
JOIN product_categories pc ON pc.pid = pf.pid
JOIN category_hierarchy ch ON ch.cat_id = pc.cat_id AND ch.level = 0
WHERE pm.is_visible = true
AND ch.name NOT IN ('Deals', 'Black Friday')
AND pf.forecast_date BETWEEN $1 AND $2
ORDER BY pf.pid, ch.name
)
SELECT prc.category,
SUM(pf.forecast_units) AS units,
SUM(pf.forecast_revenue) AS revenue
FROM product_forecasts pf
JOIN product_root_category prc ON prc.pid = pf.pid
WHERE pf.forecast_date BETWEEN $1 AND $2
GROUP BY prc.category
ORDER BY revenue DESC
LIMIT 8
`, [startISO, clampedEndISO]);
const dailyForecastsByPhase = dailyPhaseRows.map(d => ({
date: d.date instanceof Date ? d.date.toISOString().split('T')[0] : d.date,
preorder: parseFloat(d.preorder) || 0,
launch: parseFloat(d.launch) || 0,
decay: parseFloat(d.decay) || 0,
mature: parseFloat(d.mature) || 0,
slow_mover: parseFloat(d.slow_mover) || 0,
dormant: parseFloat(d.dormant) || 0,
}));
// Add extrapolated days to phase series (distribute proportionally using last phase ratios)
if (needsExtrapolation && dailyForecastsByPhase.length > 0) {
const lastPhaseDay = dailyForecastsByPhase[dailyForecastsByPhase.length - 1];
const phases = ['preorder', 'launch', 'decay', 'mature', 'slow_mover', 'dormant'];
const lastTotal = phases.reduce((s, p) => s + lastPhaseDay[p], 0);
const phaseRatios = {};
for (const p of phases) {
phaseRatios[p] = lastTotal > 0 ? lastPhaseDay[p] / lastTotal : 1 / phases.length;
}
// Match extrapolated days from dailyForecasts
for (let i = dailyForecastsByPhase.length; i < dailyForecasts.length; i++) {
const dayRev = dailyForecasts[i].revenue;
const entry = { date: dailyForecasts[i].date };
for (const p of phases) {
entry[p] = parseFloat((dayRev * phaseRatios[p]).toFixed(2));
}
dailyForecastsByPhase.push(entry);
}
}
// Return dummy response
const response = {
forecastSales: 500,
forecastRevenue: 25000,
confidenceLevel: 0.85,
dailyForecasts: dummyData,
categoryForecasts: [
{ category: "Electronics", units: 120, revenue: 6000, confidence: 0.9 },
{ category: "Clothing", units: 80, revenue: 4000, confidence: 0.8 },
{ category: "Home Goods", units: 150, revenue: 7500, confidence: 0.75 },
{ category: "Others", units: 150, revenue: 7500, confidence: 0.7 }
]
};
res.json(response);
return res.json({
forecastSales: Math.round(totalUnits),
forecastRevenue: totalRevenue.toFixed(2),
confidenceLevel,
dailyForecasts,
dailyForecastsByPhase,
phaseBreakdown,
categoryForecasts: categoryRows.map(c => ({
category: c.category,
units: Math.round(parseFloat(c.units)),
revenue: parseFloat(parseFloat(c.revenue).toFixed(2)),
})),
});
}
// --- Fallback: velocity-based projection (no forecast data yet) ---
const { rows: [totals] } = await executeQuery(`
SELECT
COALESCE(SUM(sales_velocity_daily), 0) AS daily_units,
COALESCE(SUM(sales_velocity_daily * current_price), 0) AS daily_revenue,
COUNT(*) FILTER (WHERE sales_velocity_daily > 0) AS active_products
FROM product_metrics
WHERE is_visible = true AND sales_velocity_daily > 0
`);
const dailyUnits = parseFloat(totals.daily_units) || 0;
const dailyRevenue = parseFloat(totals.daily_revenue) || 0;
const dailyForecasts = [];
for (let i = 0; i < days; i++) {
const d = new Date(startDate);
d.setDate(startDate.getDate() + i);
dailyForecasts.push({
date: d.toISOString().split('T')[0],
units: parseFloat(dailyUnits.toFixed(1)),
revenue: parseFloat(dailyRevenue.toFixed(2)),
confidence: 0,
});
}
const { rows: categoryRows } = await executeQuery(`
WITH product_root_category AS (
SELECT DISTINCT ON (pm.pid) pm.pid,
pm.sales_velocity_daily, pm.current_price,
ch.name AS category
FROM product_metrics pm
JOIN product_categories pc ON pc.pid = pm.pid
JOIN category_hierarchy ch ON ch.cat_id = pc.cat_id AND ch.level = 0
WHERE pm.is_visible = true AND pm.sales_velocity_daily > 0
AND ch.name NOT IN ('Deals', 'Black Friday')
ORDER BY pm.pid, ch.name
)
SELECT category,
ROUND(SUM(sales_velocity_daily)::numeric, 1) AS daily_units,
ROUND(SUM(sales_velocity_daily * current_price)::numeric, 2) AS daily_revenue
FROM product_root_category
GROUP BY category ORDER BY daily_revenue DESC LIMIT 8
`);
res.json({
forecastSales: Math.round(dailyUnits * days),
forecastRevenue: (dailyRevenue * days).toFixed(2),
confidenceLevel: 0,
dailyForecasts,
categoryForecasts: categoryRows.map(c => ({
category: c.category,
units: Math.round(parseFloat(c.daily_units) * days),
revenue: parseFloat((parseFloat(c.daily_revenue) * days).toFixed(2)),
})),
});
} catch (err) {
console.error('Error fetching forecast metrics:', err);
res.status(500).json({ error: 'Failed to fetch forecast metrics' });
}
});
// GET /dashboard/forecast/accuracy
// Returns forecast accuracy metrics computed by the forecast engine.
// Reads from the forecast_accuracy table (populated after each forecast run).
// Response shape: { hasData, computedAt, daysOfHistory, historyRange,
//                   overall, byPhase, byLeadTime, byMethod, dailyTrend, accuracyTrend }
router.get('/forecast/accuracy', async (req, res) => {
  // Shared formatters — pg returns NUMERIC columns as strings and nullable
  // metrics as null, so every field needs the same parse/round/null-preserve
  // treatment. Centralizing them removes ~20 duplicated expression chains.
  const round4 = (v) => (v != null ? parseFloat(parseFloat(v).toFixed(4)) : null);
  // Fractional value (e.g. 0.123) -> percentage with one decimal (12.3); preserves null.
  const toPct = (v) => (v != null ? parseFloat((parseFloat(v) * 100).toFixed(1)) : null);
  // DATE columns may arrive as JS Date objects; normalize to YYYY-MM-DD strings.
  const toIsoDate = (d) => (d instanceof Date ? d.toISOString().split('T')[0] : d);
  try {
    // Bail out early if the engine has never created the accuracy table.
    const { rows: [tableCheck] } = await executeQuery(`
      SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_name = 'forecast_accuracy'
      ) AS exists
    `);
    if (!tableCheck.exists) {
      return res.json({ hasData: false, message: 'Accuracy data not yet available' });
    }
    // Get the latest run that has accuracy data.
    const { rows: runRows } = await executeQuery(`
      SELECT DISTINCT fa.run_id, fr.finished_at
      FROM forecast_accuracy fa
      JOIN forecast_runs fr ON fr.id = fa.run_id
      ORDER BY fr.finished_at DESC
      LIMIT 1
    `);
    if (runRows.length === 0) {
      return res.json({ hasData: false, message: 'No accuracy data computed yet' });
    }
    const latestRunId = runRows[0].run_id;
    const computedAt = runRows[0].finished_at;
    // Count how many days of forecast history back the accuracy window.
    const { rows: [historyInfo] } = await executeQuery(`
      SELECT
        COUNT(DISTINCT forecast_date) AS days_of_history,
        MIN(forecast_date) AS earliest_date,
        MAX(forecast_date) AS latest_date
      FROM product_forecasts_history
    `);
    // Fetch all accuracy metrics for the latest run in one query; the rows are
    // then partitioned client-side by metric_type.
    const { rows: metrics } = await executeQuery(`
      SELECT metric_type, dimension_value, sample_size,
             total_actual_units, total_forecast_units,
             mae, wmape, bias, rmse
      FROM forecast_accuracy
      WHERE run_id = $1
      ORDER BY metric_type, dimension_value
    `, [latestRunId]);
    // Organize into response structure.
    const overall = metrics.find((m) => m.metric_type === 'overall');
    const byPhase = metrics
      .filter((m) => m.metric_type === 'by_phase')
      .map((m) => ({
        phase: m.dimension_value,
        sampleSize: parseInt(m.sample_size, 10),
        totalActual: parseFloat(m.total_actual_units) || 0,
        totalForecast: parseFloat(m.total_forecast_units) || 0,
        mae: round4(m.mae),
        wmape: toPct(m.wmape),
        bias: round4(m.bias),
        rmse: round4(m.rmse),
      }))
      // Highest-volume phases first.
      .sort((a, b) => (b.totalActual || 0) - (a.totalActual || 0));
    // Lead-time buckets render in fixed chronological order; unknown buckets sink to the end.
    const LEAD_TIME_ORDER = { '1-7d': 0, '8-14d': 1, '15-30d': 2, '31-60d': 3, '61-90d': 4 };
    const byLeadTime = metrics
      .filter((m) => m.metric_type === 'by_lead_time')
      .map((m) => ({
        bucket: m.dimension_value,
        sampleSize: parseInt(m.sample_size, 10),
        mae: round4(m.mae),
        wmape: toPct(m.wmape),
        bias: round4(m.bias),
        rmse: round4(m.rmse),
      }))
      .sort((a, b) => (LEAD_TIME_ORDER[a.bucket] ?? 99) - (LEAD_TIME_ORDER[b.bucket] ?? 99));
    const byMethod = metrics
      .filter((m) => m.metric_type === 'by_method')
      .map((m) => ({
        method: m.dimension_value,
        sampleSize: parseInt(m.sample_size, 10),
        mae: round4(m.mae),
        wmape: toPct(m.wmape),
        bias: round4(m.bias),
      }));
    const dailyTrend = metrics
      .filter((m) => m.metric_type === 'daily')
      .map((m) => ({
        // dimension_value holds the date string for 'daily' rows.
        date: m.dimension_value,
        mae: round4(m.mae),
        wmape: toPct(m.wmape),
        bias: round4(m.bias),
      }))
      .sort((a, b) => a.date.localeCompare(b.date));
    // Historical accuracy trend (overall metric across all runs).
    const { rows: trendRows } = await executeQuery(`
      SELECT fa.run_id, fr.finished_at::date AS run_date,
             fa.mae, fa.wmape, fa.bias, fa.rmse, fa.sample_size
      FROM forecast_accuracy fa
      JOIN forecast_runs fr ON fr.id = fa.run_id
      WHERE fa.metric_type = 'overall'
        AND fa.dimension_value = 'all'
      ORDER BY fr.finished_at
    `);
    const accuracyTrend = trendRows.map((r) => ({
      date: toIsoDate(r.run_date),
      mae: round4(r.mae),
      wmape: toPct(r.wmape),
      bias: round4(r.bias),
      sampleSize: parseInt(r.sample_size, 10),
    }));
    res.json({
      hasData: true,
      computedAt,
      daysOfHistory: parseInt(historyInfo.days_of_history, 10) || 0,
      historyRange: {
        from: toIsoDate(historyInfo.earliest_date),
        to: toIsoDate(historyInfo.latest_date),
      },
      overall: overall ? {
        sampleSize: parseInt(overall.sample_size, 10),
        totalActual: parseFloat(overall.total_actual_units) || 0,
        totalForecast: parseFloat(overall.total_forecast_units) || 0,
        mae: round4(overall.mae),
        wmape: toPct(overall.wmape),
        bias: round4(overall.bias),
        rmse: round4(overall.rmse),
      } : null,
      byPhase,
      byLeadTime,
      byMethod,
      dailyTrend,
      accuracyTrend,
    });
  } catch (err) {
    console.error('Error fetching forecast accuracy:', err);
    res.status(500).json({ error: 'Failed to fetch forecast accuracy' });
  }
});
// GET /dashboard/overstock/metrics
// Returns overstock metrics by category
router.get('/overstock/metrics', async (req, res) => {
@@ -427,7 +815,7 @@ router.get('/overstock/metrics', async (req, res) => {
// Get category breakdowns separately
const { rows: categoryData } = await executeQuery(`
SELECT
SELECT
c.name as category_name,
COUNT(DISTINCT pm.pid)::integer as overstocked_products,
SUM(pm.overstocked_units)::integer as total_excess_units,
@@ -443,6 +831,22 @@ router.get('/overstock/metrics', async (req, res) => {
LIMIT 8
`);
// Overstock breakdown by lifecycle phase
const { rows: phaseOverstock } = await executeQuery(`
SELECT
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
COUNT(DISTINCT pm.pid)::integer AS products,
COALESCE(SUM(pm.overstocked_units), 0)::integer AS units,
ROUND(COALESCE(SUM(pm.overstocked_cost), 0)::numeric, 2) AS cost,
ROUND(COALESCE(SUM(pm.overstocked_retail), 0)::numeric, 2) AS retail
FROM product_metrics pm
WHERE pm.status = 'Overstock' AND pm.is_visible = true
AND COALESCE(pm.preorder_count, 0) = 0
GROUP BY pm.lifecycle_phase
ORDER BY cost DESC
`);
const overstockPhaseTotalCost = phaseOverstock.reduce((s, r) => s + (parseFloat(r.cost) || 0), 0);
// Format response with explicit type conversion
const response = {
overstockedProducts: parseInt(summaryMetrics.total_overstocked) || 0,
@@ -455,7 +859,17 @@ router.get('/overstock/metrics', async (req, res) => {
units: parseInt(cat.total_excess_units) || 0,
cost: parseFloat(cat.total_excess_cost) || 0,
retail: parseFloat(cat.total_excess_retail) || 0
}))
})),
phaseBreakdown: phaseOverstock.filter(r => parseFloat(r.cost) > 0).map(r => ({
phase: r.phase,
products: parseInt(r.products) || 0,
units: parseInt(r.units) || 0,
cost: parseFloat(r.cost) || 0,
retail: parseFloat(r.retail) || 0,
percentage: overstockPhaseTotalCost > 0
? parseFloat(((parseFloat(r.cost) / overstockPhaseTotalCost) * 100).toFixed(1))
: 0,
})),
};
res.json(response);
@@ -600,7 +1014,7 @@ router.get('/sales/metrics', async (req, res) => {
// Get overall metrics for the period
const { rows: [metrics] } = await executeQuery(`
SELECT
SELECT
COUNT(DISTINCT order_number) as total_orders,
SUM(quantity) as total_units,
SUM(price * quantity) as total_revenue,
@@ -610,6 +1024,40 @@ router.get('/sales/metrics', async (req, res) => {
AND canceled = false
`, [startDate, endDate]);
// Sales breakdown by lifecycle phase
const { rows: phaseSales } = await executeQuery(`
SELECT
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
COUNT(DISTINCT o.order_number)::integer AS orders,
COALESCE(SUM(o.quantity), 0)::integer AS units,
ROUND(COALESCE(SUM(o.price * o.quantity), 0)::numeric, 2) AS revenue,
ROUND(COALESCE(SUM(o.costeach * o.quantity), 0)::numeric, 2) AS cogs
FROM orders o
LEFT JOIN product_metrics pm ON o.pid = pm.pid
WHERE o.date BETWEEN $1 AND $2 AND o.canceled = false
GROUP BY pm.lifecycle_phase
ORDER BY revenue DESC
`, [startDate, endDate]);
const salePhaseTotalRev = phaseSales.reduce((s, r) => s + (parseFloat(r.revenue) || 0), 0);
// Daily sales broken down by lifecycle phase (for stacked chart)
const { rows: dailyPhaseRows } = await executeQuery(`
SELECT
DATE(o.date) AS sale_date,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'preorder'), 0) AS preorder,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'launch'), 0) AS launch,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'decay'), 0) AS decay,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'mature'), 0) AS mature,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'slow_mover'), 0) AS slow_mover,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'dormant'), 0) AS dormant,
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE pm.lifecycle_phase IS NULL), 0) AS unknown
FROM orders o
LEFT JOIN product_metrics pm ON o.pid = pm.pid
WHERE o.date BETWEEN $1 AND $2 AND o.canceled = false
GROUP BY DATE(o.date)
ORDER BY sale_date
`, [startDate, endDate]);
const response = {
totalOrders: parseInt(metrics?.total_orders) || 0,
totalUnitsSold: parseInt(metrics?.total_units) || 0,
@@ -620,7 +1068,27 @@ router.get('/sales/metrics', async (req, res) => {
units: parseInt(day.total_units) || 0,
revenue: parseFloat(day.total_revenue) || 0,
cogs: parseFloat(day.total_cogs) || 0
}))
})),
dailySalesByPhase: dailyPhaseRows.map(d => ({
date: d.sale_date,
preorder: parseFloat(d.preorder) || 0,
launch: parseFloat(d.launch) || 0,
decay: parseFloat(d.decay) || 0,
mature: parseFloat(d.mature) || 0,
slow_mover: parseFloat(d.slow_mover) || 0,
dormant: parseFloat(d.dormant) || 0,
unknown: parseFloat(d.unknown) || 0,
})),
phaseBreakdown: phaseSales.filter(r => parseFloat(r.revenue) > 0).map(r => ({
phase: r.phase,
orders: parseInt(r.orders) || 0,
units: parseInt(r.units) || 0,
revenue: parseFloat(r.revenue) || 0,
cogs: parseFloat(r.cogs) || 0,
percentage: salePhaseTotalRev > 0
? parseFloat(((parseFloat(r.revenue) / salePhaseTotalRev) * 100).toFixed(1))
: 0,
})),
};
res.json(response);

View File

@@ -782,4 +782,49 @@ router.get('/:id/time-series', async (req, res) => {
}
});
// GET /products/:id/forecast
// Returns the 90-day daily forecast for a single product from product_forecasts
router.get('/:id/forecast', async (req, res) => {
  const { id } = req.params;
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        forecast_date AS date,
        forecast_units AS units,
        forecast_revenue AS revenue,
        lifecycle_phase AS phase,
        forecast_method AS method,
        confidence_lower,
        confidence_upper
      FROM product_forecasts
      WHERE pid = $1
      ORDER BY forecast_date
    `, [id]);
    // No stored forecast for this product yet — return an empty payload.
    if (rows.length === 0) {
      return res.json({ forecast: [], phase: null, method: null });
    }
    // Phase/method are read from the first (earliest-dated) row only —
    // presumably uniform across the product's horizon; verify against the engine.
    const { phase, method } = rows[0];
    // Map a pg row to the API shape; NUMERIC columns arrive as strings,
    // and NULL/NaN values collapse to 0.
    const toDailyPoint = (row) => ({
      date: row.date instanceof Date ? row.date.toISOString().split('T')[0] : row.date,
      units: parseFloat(row.units) || 0,
      revenue: parseFloat(row.revenue) || 0,
      confidenceLower: parseFloat(row.confidence_lower) || 0,
      confidenceUpper: parseFloat(row.confidence_upper) || 0,
    });
    res.json({
      phase,
      method,
      forecast: rows.map(toDailyPoint),
    });
  } catch (error) {
    console.error('Error fetching product forecast:', error);
    res.status(500).json({ error: 'Failed to fetch product forecast' });
  }
});
module.exports = router;