Add in forecasting, lifecycle phases, associated component and script changes
This commit is contained in:
@@ -67,6 +67,23 @@ router.get('/stock/metrics', async (req, res) => {
|
||||
ORDER BY CASE WHEN brand = 'Other' THEN 1 ELSE 0 END, stock_cost DESC
|
||||
`);
|
||||
|
||||
// Stock breakdown by lifecycle phase (lifecycle_phase populated by update_lifecycle_forecasts.sql)
|
||||
const { rows: phaseStock } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
|
||||
COUNT(DISTINCT pm.pid)::integer AS products,
|
||||
COALESCE(SUM(pm.current_stock), 0)::integer AS units,
|
||||
ROUND(COALESCE(SUM(pm.current_stock_cost), 0)::numeric, 2) AS cost,
|
||||
ROUND(COALESCE(SUM(pm.current_stock_retail), 0)::numeric, 2) AS retail
|
||||
FROM product_metrics pm
|
||||
WHERE pm.is_visible = true AND pm.current_stock > 0
|
||||
AND COALESCE(pm.preorder_count, 0) = 0
|
||||
GROUP BY pm.lifecycle_phase
|
||||
ORDER BY cost DESC
|
||||
`);
|
||||
|
||||
const phaseTotalCost = phaseStock.reduce((s, r) => s + (parseFloat(r.cost) || 0), 0);
|
||||
|
||||
// Format the response with explicit type conversion
|
||||
const response = {
|
||||
totalProducts: parseInt(stockMetrics.total_products) || 0,
|
||||
@@ -80,7 +97,17 @@ router.get('/stock/metrics', async (req, res) => {
|
||||
units: parseInt(v.stock_units) || 0,
|
||||
cost: parseFloat(v.stock_cost) || 0,
|
||||
retail: parseFloat(v.stock_retail) || 0
|
||||
}))
|
||||
})),
|
||||
phaseStock: phaseStock.filter(r => parseFloat(r.cost) > 0).map(r => ({
|
||||
phase: r.phase,
|
||||
products: parseInt(r.products) || 0,
|
||||
units: parseInt(r.units) || 0,
|
||||
cost: parseFloat(r.cost) || 0,
|
||||
retail: parseFloat(r.retail) || 0,
|
||||
percentage: phaseTotalCost > 0
|
||||
? parseFloat(((parseFloat(r.cost) / phaseTotalCost) * 100).toFixed(1))
|
||||
: 0,
|
||||
})),
|
||||
};
|
||||
|
||||
res.json(response);
|
||||
@@ -208,12 +235,39 @@ router.get('/replenishment/metrics', async (req, res) => {
|
||||
LIMIT 5
|
||||
`);
|
||||
|
||||
// Replenishment breakdown by lifecycle phase (lifecycle_phase on product_metrics)
|
||||
const { rows: phaseReplenish } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
|
||||
COUNT(DISTINCT pm.pid)::integer AS products,
|
||||
COALESCE(SUM(pm.replenishment_units), 0)::integer AS units,
|
||||
ROUND(COALESCE(SUM(pm.replenishment_cost), 0)::numeric, 2) AS cost
|
||||
FROM product_metrics pm
|
||||
WHERE pm.is_visible = true
|
||||
AND pm.is_replenishable = true
|
||||
AND (pm.status IN ('Critical', 'Reorder') OR pm.current_stock < 0)
|
||||
AND pm.replenishment_units > 0
|
||||
GROUP BY pm.lifecycle_phase
|
||||
ORDER BY cost DESC
|
||||
`);
|
||||
|
||||
const replenishTotalCost = phaseReplenish.reduce((s, r) => s + (parseFloat(r.cost) || 0), 0);
|
||||
|
||||
// Format response
|
||||
const response = {
|
||||
productsToReplenish: parseInt(metrics.products_to_replenish) || 0,
|
||||
unitsToReplenish: parseInt(metrics.total_units_needed) || 0,
|
||||
replenishmentCost: parseFloat(metrics.total_cost) || 0,
|
||||
replenishmentRetail: parseFloat(metrics.total_retail) || 0,
|
||||
phaseBreakdown: phaseReplenish.filter(r => parseFloat(r.cost) > 0).map(r => ({
|
||||
phase: r.phase,
|
||||
products: parseInt(r.products) || 0,
|
||||
units: parseInt(r.units) || 0,
|
||||
cost: parseFloat(r.cost) || 0,
|
||||
percentage: replenishTotalCost > 0
|
||||
? parseFloat(((parseFloat(r.cost) / replenishTotalCost) * 100).toFixed(1))
|
||||
: 0,
|
||||
})),
|
||||
topVariants: variants.map(v => ({
|
||||
id: v.pid,
|
||||
title: v.title,
|
||||
@@ -234,165 +288,499 @@ router.get('/replenishment/metrics', async (req, res) => {
|
||||
});
|
||||
|
||||
// GET /dashboard/forecast/metrics
|
||||
// Returns sales forecasts for specified period
|
||||
// Reads from product_forecasts table (lifecycle-aware forecasting pipeline).
|
||||
// Falls back to velocity-based projection if forecast table is empty.
|
||||
router.get('/forecast/metrics', async (req, res) => {
|
||||
// Default to last 30 days if no date range provided
|
||||
const today = new Date();
|
||||
const thirtyDaysAgo = new Date(today);
|
||||
thirtyDaysAgo.setDate(today.getDate() - 30);
|
||||
|
||||
const startDate = req.query.startDate || thirtyDaysAgo.toISOString();
|
||||
const endDate = req.query.endDate || today.toISOString();
|
||||
|
||||
const thirtyDaysOut = new Date(today);
|
||||
thirtyDaysOut.setDate(today.getDate() + 30);
|
||||
|
||||
const startDate = req.query.startDate ? new Date(req.query.startDate) : today;
|
||||
const endDate = req.query.endDate ? new Date(req.query.endDate) : thirtyDaysOut;
|
||||
const startISO = startDate.toISOString().split('T')[0];
|
||||
const endISO = endDate.toISOString().split('T')[0];
|
||||
const days = Math.max(1, Math.round((endDate - startDate) / (1000 * 60 * 60 * 24)));
|
||||
|
||||
try {
|
||||
// Check if sales_forecasts table exists and has data
|
||||
const { rows: tableCheck } = await executeQuery(`
|
||||
SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'sales_forecasts'
|
||||
) as table_exists
|
||||
`);
|
||||
|
||||
const tableExists = tableCheck[0].table_exists;
|
||||
|
||||
if (!tableExists) {
|
||||
console.log('sales_forecasts table does not exist, returning dummy data');
|
||||
|
||||
// Generate dummy data for forecast
|
||||
const days = 30;
|
||||
const dummyData = [];
|
||||
const startDateObj = new Date(startDate);
|
||||
|
||||
for (let i = 0; i < days; i++) {
|
||||
const currentDate = new Date(startDateObj);
|
||||
currentDate.setDate(startDateObj.getDate() + i);
|
||||
|
||||
// Use sales data with slight randomization
|
||||
const baseValue = 500 + Math.random() * 200;
|
||||
dummyData.push({
|
||||
date: currentDate.toISOString().split('T')[0],
|
||||
revenue: parseFloat((baseValue + Math.random() * 100).toFixed(2)),
|
||||
confidence: parseFloat((0.7 + Math.random() * 0.2).toFixed(2))
|
||||
});
|
||||
// Check if product_forecasts has data
|
||||
const { rows: [countRow] } = await executeQuery(
|
||||
`SELECT COUNT(*) AS cnt FROM product_forecasts WHERE forecast_date >= $1 LIMIT 1`,
|
||||
[startISO]
|
||||
);
|
||||
const hasForecastData = parseInt(countRow.cnt) > 0;
|
||||
|
||||
if (hasForecastData) {
|
||||
// --- Read from lifecycle-aware forecast pipeline ---
|
||||
|
||||
// Find the last date covered by product_forecasts
|
||||
const { rows: [horizonRow] } = await executeQuery(
|
||||
`SELECT MAX(forecast_date) AS max_date FROM product_forecasts`
|
||||
);
|
||||
const forecastHorizonISO = horizonRow.max_date instanceof Date
|
||||
? horizonRow.max_date.toISOString().split('T')[0]
|
||||
: horizonRow.max_date;
|
||||
const forecastHorizon = new Date(forecastHorizonISO + 'T00:00:00');
|
||||
const clampedEndISO = endISO <= forecastHorizonISO ? endISO : forecastHorizonISO;
|
||||
const needsExtrapolation = endISO > forecastHorizonISO;
|
||||
|
||||
// Totals from actual forecast data (clamped to horizon)
|
||||
const { rows: [totals] } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(SUM(pf.forecast_units), 0) AS total_units,
|
||||
COALESCE(SUM(pf.forecast_revenue), 0) AS total_revenue,
|
||||
COUNT(DISTINCT pf.pid) FILTER (
|
||||
WHERE pf.lifecycle_phase IN ('launch','decay','mature','preorder','slow_mover')
|
||||
) AS active_products,
|
||||
COUNT(DISTINCT pf.pid) FILTER (
|
||||
WHERE pf.forecast_method = 'lifecycle_curve'
|
||||
) AS curve_products
|
||||
FROM product_forecasts pf
|
||||
JOIN product_metrics pm ON pm.pid = pf.pid
|
||||
WHERE pm.is_visible = true
|
||||
AND pf.forecast_date BETWEEN $1 AND $2
|
||||
`, [startISO, clampedEndISO]);
|
||||
|
||||
const active = parseInt(totals.active_products) || 1;
|
||||
const curveProducts = parseInt(totals.curve_products) || 0;
|
||||
const confidenceLevel = parseFloat((curveProducts / active).toFixed(2));
|
||||
|
||||
// Daily series from actual forecast
|
||||
const { rows: dailyRows } = await executeQuery(`
|
||||
SELECT pf.forecast_date AS date,
|
||||
SUM(pf.forecast_units) AS units,
|
||||
SUM(pf.forecast_revenue) AS revenue
|
||||
FROM product_forecasts pf
|
||||
JOIN product_metrics pm ON pm.pid = pf.pid
|
||||
WHERE pm.is_visible = true
|
||||
AND pf.forecast_date BETWEEN $1 AND $2
|
||||
GROUP BY pf.forecast_date
|
||||
ORDER BY pf.forecast_date
|
||||
`, [startISO, clampedEndISO]);
|
||||
|
||||
const dailyForecasts = dailyRows.map(d => ({
|
||||
date: d.date instanceof Date ? d.date.toISOString().split('T')[0] : d.date,
|
||||
units: parseFloat(d.units) || 0,
|
||||
revenue: parseFloat(d.revenue) || 0,
|
||||
confidence: confidenceLevel,
|
||||
}));
|
||||
|
||||
// Daily forecast broken down by lifecycle phase (for stacked chart)
|
||||
const { rows: dailyPhaseRows } = await executeQuery(`
|
||||
SELECT pf.forecast_date AS date,
|
||||
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'preorder'), 0) AS preorder,
|
||||
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'launch'), 0) AS launch,
|
||||
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'decay'), 0) AS decay,
|
||||
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'mature'), 0) AS mature,
|
||||
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'slow_mover'), 0) AS slow_mover,
|
||||
COALESCE(SUM(pf.forecast_revenue) FILTER (WHERE pf.lifecycle_phase = 'dormant'), 0) AS dormant
|
||||
FROM product_forecasts pf
|
||||
JOIN product_metrics pm ON pm.pid = pf.pid
|
||||
WHERE pm.is_visible = true
|
||||
AND pf.forecast_date BETWEEN $1 AND $2
|
||||
GROUP BY pf.forecast_date
|
||||
ORDER BY pf.forecast_date
|
||||
`, [startISO, clampedEndISO]);
|
||||
|
||||
// --- New product pipeline contribution ---
|
||||
// Average daily revenue from new product introductions (last 12 months).
|
||||
// Only used for EXTRAPOLATED days beyond the forecast horizon — within the
|
||||
// 90-day horizon, preorder/launch products are already forecast by lifecycle curves.
|
||||
const { rows: [pipeline] } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(AVG(monthly_revenue), 0) AS avg_monthly_revenue
|
||||
FROM (
|
||||
SELECT DATE_TRUNC('month', pm.date_first_received) AS month,
|
||||
COUNT(*) AS monthly_products,
|
||||
SUM(pm.first_30_days_revenue) AS monthly_revenue
|
||||
FROM product_metrics pm
|
||||
WHERE pm.is_visible = true
|
||||
AND pm.date_first_received >= NOW() - INTERVAL '12 months'
|
||||
AND pm.date_first_received < DATE_TRUNC('month', NOW())
|
||||
GROUP BY 1
|
||||
) sub
|
||||
`);
|
||||
// Compute average product price for converting revenue to unit estimates
|
||||
const { rows: [priceRow] } = await executeQuery(`
|
||||
SELECT COALESCE(AVG(current_price) FILTER (WHERE current_price > 0 AND sales_30d > 0), 7) AS avg_price
|
||||
FROM product_metrics
|
||||
WHERE is_visible = true
|
||||
`);
|
||||
const avgPrice = parseFloat(priceRow.avg_price) || 7;
|
||||
|
||||
// Daily new-product revenue = (avg products/month × avg 30d revenue/product) / 30
|
||||
const avgMonthlyRevenue = parseFloat(pipeline.avg_monthly_revenue) || 0;
|
||||
const newProductDailyRevenue = avgMonthlyRevenue / 30;
|
||||
const newProductDailyUnits = newProductDailyRevenue / avgPrice;
|
||||
|
||||
let totalRevenue = dailyForecasts.reduce((sum, d) => sum + d.revenue, 0);
|
||||
let totalUnits = dailyForecasts.reduce((sum, d) => sum + d.units, 0);
|
||||
|
||||
// --- Extrapolation beyond forecast horizon (rest-of-year) ---
|
||||
if (needsExtrapolation) {
|
||||
// Monthly seasonal indices from last 12 months of actual revenue
|
||||
const { rows: seasonalRows } = await executeQuery(`
|
||||
SELECT EXTRACT(MONTH FROM o.date)::int AS month,
|
||||
SUM(o.quantity * o.price) AS revenue
|
||||
FROM orders o
|
||||
WHERE o.canceled IS DISTINCT FROM TRUE
|
||||
AND o.date >= NOW() - INTERVAL '12 months'
|
||||
GROUP BY 1
|
||||
`);
|
||||
const monthlyRevenue = {};
|
||||
let totalMonthlyRev = 0;
|
||||
for (const r of seasonalRows) {
|
||||
monthlyRevenue[r.month] = parseFloat(r.revenue) || 0;
|
||||
totalMonthlyRev += monthlyRevenue[r.month];
|
||||
}
|
||||
const avgMonthRev = totalMonthlyRev / Math.max(Object.keys(monthlyRevenue).length, 1);
|
||||
const seasonalIndex = {};
|
||||
for (let m = 1; m <= 12; m++) {
|
||||
seasonalIndex[m] = monthlyRevenue[m] ? monthlyRevenue[m] / avgMonthRev : 1.0;
|
||||
}
|
||||
|
||||
// Baseline: avg daily revenue from last 7 days of forecast (mature tail)
|
||||
const tailDays = dailyForecasts.slice(-7);
|
||||
const baselineDaily = tailDays.length > 0
|
||||
? tailDays.reduce((s, d) => s + d.revenue, 0) / tailDays.length
|
||||
: 0;
|
||||
|
||||
// Generate estimated days beyond horizon
|
||||
const extraStart = new Date(forecastHorizon);
|
||||
extraStart.setDate(extraStart.getDate() + 1);
|
||||
const extraEnd = new Date(endISO + 'T00:00:00');
|
||||
|
||||
for (let d = new Date(extraStart); d <= extraEnd; d.setDate(d.getDate() + 1)) {
|
||||
const month = d.getMonth() + 1;
|
||||
const seasonal = seasonalIndex[month] || 1.0;
|
||||
// Beyond horizon: existing product tail + new product pipeline
|
||||
const estRevenue = baselineDaily * seasonal + newProductDailyRevenue;
|
||||
const estUnits = (baselineDaily * seasonal) / avgPrice + newProductDailyUnits;
|
||||
|
||||
dailyForecasts.push({
|
||||
date: d.toISOString().split('T')[0],
|
||||
units: parseFloat(estUnits.toFixed(1)),
|
||||
revenue: parseFloat(estRevenue.toFixed(2)),
|
||||
confidence: 0, // lower confidence for extrapolated data
|
||||
estimated: true,
|
||||
});
|
||||
totalRevenue += estRevenue;
|
||||
totalUnits += estUnits;
|
||||
}
|
||||
}
|
||||
|
||||
// Return dummy response
|
||||
const response = {
|
||||
forecastSales: 500,
|
||||
forecastRevenue: 25000,
|
||||
confidenceLevel: 0.85,
|
||||
dailyForecasts: dummyData,
|
||||
categoryForecasts: [
|
||||
{ category: "Electronics", units: 120, revenue: 6000, confidence: 0.9 },
|
||||
{ category: "Clothing", units: 80, revenue: 4000, confidence: 0.8 },
|
||||
{ category: "Home Goods", units: 150, revenue: 7500, confidence: 0.75 },
|
||||
{ category: "Others", units: 150, revenue: 7500, confidence: 0.7 }
|
||||
]
|
||||
};
|
||||
|
||||
return res.json(response);
|
||||
}
|
||||
|
||||
// If the table exists, try to query it with proper error handling
|
||||
try {
|
||||
// Get summary metrics
|
||||
const { rows: metrics } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(SUM(forecast_units), 0) as total_forecast_units,
|
||||
COALESCE(SUM(forecast_revenue), 0) as total_forecast_revenue,
|
||||
COALESCE(AVG(confidence_level), 0) as overall_confidence
|
||||
FROM sales_forecasts
|
||||
WHERE forecast_date BETWEEN $1 AND $2
|
||||
`, [startDate, endDate]);
|
||||
|
||||
// Get daily forecasts
|
||||
const { rows: dailyForecasts } = await executeQuery(`
|
||||
SELECT
|
||||
DATE(forecast_date) as date,
|
||||
COALESCE(SUM(forecast_revenue), 0) as revenue,
|
||||
COALESCE(AVG(confidence_level), 0) as confidence
|
||||
FROM sales_forecasts
|
||||
WHERE forecast_date BETWEEN $1 AND $2
|
||||
GROUP BY DATE(forecast_date)
|
||||
ORDER BY date
|
||||
`, [startDate, endDate]);
|
||||
|
||||
// Get category forecasts
|
||||
const { rows: categoryForecasts } = await executeQuery(`
|
||||
SELECT
|
||||
c.name as category,
|
||||
COALESCE(SUM(cf.forecast_units), 0) as units,
|
||||
COALESCE(SUM(cf.forecast_revenue), 0) as revenue,
|
||||
COALESCE(AVG(cf.confidence_level), 0) as confidence
|
||||
FROM category_forecasts cf
|
||||
JOIN categories c ON cf.category_id = c.cat_id
|
||||
WHERE cf.forecast_date BETWEEN $1 AND $2
|
||||
GROUP BY c.cat_id, c.name
|
||||
// Lifecycle phase breakdown (from actual forecast data only)
|
||||
const { rows: phaseRows } = await executeQuery(`
|
||||
SELECT pf.lifecycle_phase AS phase,
|
||||
COUNT(DISTINCT pf.pid) AS products,
|
||||
COALESCE(SUM(pf.forecast_units), 0) AS units,
|
||||
COALESCE(SUM(pf.forecast_revenue), 0) AS revenue
|
||||
FROM product_forecasts pf
|
||||
JOIN product_metrics pm ON pm.pid = pf.pid
|
||||
WHERE pm.is_visible = true
|
||||
AND pf.forecast_date BETWEEN $1 AND $2
|
||||
GROUP BY pf.lifecycle_phase
|
||||
ORDER BY revenue DESC
|
||||
`, [startDate, endDate]);
|
||||
`, [startISO, clampedEndISO]);
|
||||
|
||||
// Format response
|
||||
const response = {
|
||||
forecastSales: parseInt(metrics[0]?.total_forecast_units) || 0,
|
||||
forecastRevenue: parseFloat(metrics[0]?.total_forecast_revenue) || 0,
|
||||
confidenceLevel: parseFloat(metrics[0]?.overall_confidence) || 0,
|
||||
dailyForecasts: dailyForecasts.map(d => ({
|
||||
date: d.date,
|
||||
revenue: parseFloat(d.revenue) || 0,
|
||||
confidence: parseFloat(d.confidence) || 0
|
||||
})),
|
||||
categoryForecasts: categoryForecasts.map(c => ({
|
||||
category: c.category,
|
||||
units: parseInt(c.units) || 0,
|
||||
revenue: parseFloat(c.revenue) || 0,
|
||||
confidence: parseFloat(c.confidence) || 0
|
||||
}))
|
||||
};
|
||||
const phaseTotal = phaseRows.reduce((s, r) => s + (parseFloat(r.revenue) || 0), 0);
|
||||
const phaseBreakdown = phaseRows
|
||||
.filter(r => parseFloat(r.revenue) > 0)
|
||||
.map(r => ({
|
||||
phase: r.phase,
|
||||
products: parseInt(r.products) || 0,
|
||||
units: Math.round(parseFloat(r.units) || 0),
|
||||
revenue: parseFloat(parseFloat(r.revenue).toFixed(2)),
|
||||
percentage: phaseTotal > 0
|
||||
? parseFloat(((parseFloat(r.revenue) / phaseTotal) * 100).toFixed(1))
|
||||
: 0,
|
||||
}));
|
||||
|
||||
res.json(response);
|
||||
} catch (err) {
|
||||
console.error('Error with forecast tables structure, returning dummy data:', err);
|
||||
|
||||
// Generate dummy data for forecast as fallback
|
||||
const days = 30;
|
||||
const dummyData = [];
|
||||
const startDateObj = new Date(startDate);
|
||||
|
||||
for (let i = 0; i < days; i++) {
|
||||
const currentDate = new Date(startDateObj);
|
||||
currentDate.setDate(startDateObj.getDate() + i);
|
||||
|
||||
const baseValue = 500 + Math.random() * 200;
|
||||
dummyData.push({
|
||||
date: currentDate.toISOString().split('T')[0],
|
||||
revenue: parseFloat((baseValue + Math.random() * 100).toFixed(2)),
|
||||
confidence: parseFloat((0.7 + Math.random() * 0.2).toFixed(2))
|
||||
});
|
||||
// Category breakdown (from actual forecast data only)
|
||||
const { rows: categoryRows } = await executeQuery(`
|
||||
WITH product_root_category AS (
|
||||
SELECT DISTINCT ON (pf.pid)
|
||||
pf.pid, ch.name AS category
|
||||
FROM product_forecasts pf
|
||||
JOIN product_metrics pm ON pm.pid = pf.pid
|
||||
JOIN product_categories pc ON pc.pid = pf.pid
|
||||
JOIN category_hierarchy ch ON ch.cat_id = pc.cat_id AND ch.level = 0
|
||||
WHERE pm.is_visible = true
|
||||
AND ch.name NOT IN ('Deals', 'Black Friday')
|
||||
AND pf.forecast_date BETWEEN $1 AND $2
|
||||
ORDER BY pf.pid, ch.name
|
||||
)
|
||||
SELECT prc.category,
|
||||
SUM(pf.forecast_units) AS units,
|
||||
SUM(pf.forecast_revenue) AS revenue
|
||||
FROM product_forecasts pf
|
||||
JOIN product_root_category prc ON prc.pid = pf.pid
|
||||
WHERE pf.forecast_date BETWEEN $1 AND $2
|
||||
GROUP BY prc.category
|
||||
ORDER BY revenue DESC
|
||||
LIMIT 8
|
||||
`, [startISO, clampedEndISO]);
|
||||
|
||||
const dailyForecastsByPhase = dailyPhaseRows.map(d => ({
|
||||
date: d.date instanceof Date ? d.date.toISOString().split('T')[0] : d.date,
|
||||
preorder: parseFloat(d.preorder) || 0,
|
||||
launch: parseFloat(d.launch) || 0,
|
||||
decay: parseFloat(d.decay) || 0,
|
||||
mature: parseFloat(d.mature) || 0,
|
||||
slow_mover: parseFloat(d.slow_mover) || 0,
|
||||
dormant: parseFloat(d.dormant) || 0,
|
||||
}));
|
||||
|
||||
// Add extrapolated days to phase series (distribute proportionally using last phase ratios)
|
||||
if (needsExtrapolation && dailyForecastsByPhase.length > 0) {
|
||||
const lastPhaseDay = dailyForecastsByPhase[dailyForecastsByPhase.length - 1];
|
||||
const phases = ['preorder', 'launch', 'decay', 'mature', 'slow_mover', 'dormant'];
|
||||
const lastTotal = phases.reduce((s, p) => s + lastPhaseDay[p], 0);
|
||||
const phaseRatios = {};
|
||||
for (const p of phases) {
|
||||
phaseRatios[p] = lastTotal > 0 ? lastPhaseDay[p] / lastTotal : 1 / phases.length;
|
||||
}
|
||||
// Match extrapolated days from dailyForecasts
|
||||
for (let i = dailyForecastsByPhase.length; i < dailyForecasts.length; i++) {
|
||||
const dayRev = dailyForecasts[i].revenue;
|
||||
const entry = { date: dailyForecasts[i].date };
|
||||
for (const p of phases) {
|
||||
entry[p] = parseFloat((dayRev * phaseRatios[p]).toFixed(2));
|
||||
}
|
||||
dailyForecastsByPhase.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
// Return dummy response
|
||||
const response = {
|
||||
forecastSales: 500,
|
||||
forecastRevenue: 25000,
|
||||
confidenceLevel: 0.85,
|
||||
dailyForecasts: dummyData,
|
||||
categoryForecasts: [
|
||||
{ category: "Electronics", units: 120, revenue: 6000, confidence: 0.9 },
|
||||
{ category: "Clothing", units: 80, revenue: 4000, confidence: 0.8 },
|
||||
{ category: "Home Goods", units: 150, revenue: 7500, confidence: 0.75 },
|
||||
{ category: "Others", units: 150, revenue: 7500, confidence: 0.7 }
|
||||
]
|
||||
};
|
||||
|
||||
res.json(response);
|
||||
|
||||
return res.json({
|
||||
forecastSales: Math.round(totalUnits),
|
||||
forecastRevenue: totalRevenue.toFixed(2),
|
||||
confidenceLevel,
|
||||
dailyForecasts,
|
||||
dailyForecastsByPhase,
|
||||
phaseBreakdown,
|
||||
categoryForecasts: categoryRows.map(c => ({
|
||||
category: c.category,
|
||||
units: Math.round(parseFloat(c.units)),
|
||||
revenue: parseFloat(parseFloat(c.revenue).toFixed(2)),
|
||||
})),
|
||||
});
|
||||
}
|
||||
|
||||
// --- Fallback: velocity-based projection (no forecast data yet) ---
|
||||
const { rows: [totals] } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(SUM(sales_velocity_daily), 0) AS daily_units,
|
||||
COALESCE(SUM(sales_velocity_daily * current_price), 0) AS daily_revenue,
|
||||
COUNT(*) FILTER (WHERE sales_velocity_daily > 0) AS active_products
|
||||
FROM product_metrics
|
||||
WHERE is_visible = true AND sales_velocity_daily > 0
|
||||
`);
|
||||
|
||||
const dailyUnits = parseFloat(totals.daily_units) || 0;
|
||||
const dailyRevenue = parseFloat(totals.daily_revenue) || 0;
|
||||
|
||||
const dailyForecasts = [];
|
||||
for (let i = 0; i < days; i++) {
|
||||
const d = new Date(startDate);
|
||||
d.setDate(startDate.getDate() + i);
|
||||
dailyForecasts.push({
|
||||
date: d.toISOString().split('T')[0],
|
||||
units: parseFloat(dailyUnits.toFixed(1)),
|
||||
revenue: parseFloat(dailyRevenue.toFixed(2)),
|
||||
confidence: 0,
|
||||
});
|
||||
}
|
||||
|
||||
const { rows: categoryRows } = await executeQuery(`
|
||||
WITH product_root_category AS (
|
||||
SELECT DISTINCT ON (pm.pid) pm.pid,
|
||||
pm.sales_velocity_daily, pm.current_price,
|
||||
ch.name AS category
|
||||
FROM product_metrics pm
|
||||
JOIN product_categories pc ON pc.pid = pm.pid
|
||||
JOIN category_hierarchy ch ON ch.cat_id = pc.cat_id AND ch.level = 0
|
||||
WHERE pm.is_visible = true AND pm.sales_velocity_daily > 0
|
||||
AND ch.name NOT IN ('Deals', 'Black Friday')
|
||||
ORDER BY pm.pid, ch.name
|
||||
)
|
||||
SELECT category,
|
||||
ROUND(SUM(sales_velocity_daily)::numeric, 1) AS daily_units,
|
||||
ROUND(SUM(sales_velocity_daily * current_price)::numeric, 2) AS daily_revenue
|
||||
FROM product_root_category
|
||||
GROUP BY category ORDER BY daily_revenue DESC LIMIT 8
|
||||
`);
|
||||
|
||||
res.json({
|
||||
forecastSales: Math.round(dailyUnits * days),
|
||||
forecastRevenue: (dailyRevenue * days).toFixed(2),
|
||||
confidenceLevel: 0,
|
||||
dailyForecasts,
|
||||
categoryForecasts: categoryRows.map(c => ({
|
||||
category: c.category,
|
||||
units: Math.round(parseFloat(c.daily_units) * days),
|
||||
revenue: parseFloat((parseFloat(c.daily_revenue) * days).toFixed(2)),
|
||||
})),
|
||||
});
|
||||
} catch (err) {
|
||||
console.error('Error fetching forecast metrics:', err);
|
||||
res.status(500).json({ error: 'Failed to fetch forecast metrics' });
|
||||
}
|
||||
});
|
||||
|
||||
// GET /dashboard/forecast/accuracy
|
||||
// Returns forecast accuracy metrics computed by the forecast engine.
|
||||
// Reads from forecast_accuracy table (populated after each forecast run).
|
||||
router.get('/forecast/accuracy', async (req, res) => {
|
||||
try {
|
||||
// Check if forecast_accuracy table exists and has data
|
||||
const { rows: [tableCheck] } = await executeQuery(`
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_name = 'forecast_accuracy'
|
||||
) AS exists
|
||||
`);
|
||||
|
||||
if (!tableCheck.exists) {
|
||||
return res.json({ hasData: false, message: 'Accuracy data not yet available' });
|
||||
}
|
||||
|
||||
// Get the latest run that has accuracy data
|
||||
const { rows: runRows } = await executeQuery(`
|
||||
SELECT DISTINCT fa.run_id, fr.finished_at
|
||||
FROM forecast_accuracy fa
|
||||
JOIN forecast_runs fr ON fr.id = fa.run_id
|
||||
ORDER BY fr.finished_at DESC
|
||||
LIMIT 1
|
||||
`);
|
||||
|
||||
if (runRows.length === 0) {
|
||||
return res.json({ hasData: false, message: 'No accuracy data computed yet' });
|
||||
}
|
||||
|
||||
const latestRunId = runRows[0].run_id;
|
||||
const computedAt = runRows[0].finished_at;
|
||||
|
||||
// Count days of history available
|
||||
const { rows: [historyInfo] } = await executeQuery(`
|
||||
SELECT
|
||||
COUNT(DISTINCT forecast_date) AS days_of_history,
|
||||
MIN(forecast_date) AS earliest_date,
|
||||
MAX(forecast_date) AS latest_date
|
||||
FROM product_forecasts_history
|
||||
`);
|
||||
|
||||
// Fetch all accuracy metrics for the latest run
|
||||
const { rows: metrics } = await executeQuery(`
|
||||
SELECT metric_type, dimension_value, sample_size,
|
||||
total_actual_units, total_forecast_units,
|
||||
mae, wmape, bias, rmse
|
||||
FROM forecast_accuracy
|
||||
WHERE run_id = $1
|
||||
ORDER BY metric_type, dimension_value
|
||||
`, [latestRunId]);
|
||||
|
||||
// Organize into response structure
|
||||
const overall = metrics.find(m => m.metric_type === 'overall');
|
||||
const byPhase = metrics
|
||||
.filter(m => m.metric_type === 'by_phase')
|
||||
.map(m => ({
|
||||
phase: m.dimension_value,
|
||||
sampleSize: parseInt(m.sample_size),
|
||||
totalActual: parseFloat(m.total_actual_units) || 0,
|
||||
totalForecast: parseFloat(m.total_forecast_units) || 0,
|
||||
mae: m.mae != null ? parseFloat(parseFloat(m.mae).toFixed(4)) : null,
|
||||
wmape: m.wmape != null ? parseFloat((parseFloat(m.wmape) * 100).toFixed(1)) : null,
|
||||
bias: m.bias != null ? parseFloat(parseFloat(m.bias).toFixed(4)) : null,
|
||||
rmse: m.rmse != null ? parseFloat(parseFloat(m.rmse).toFixed(4)) : null,
|
||||
}))
|
||||
.sort((a, b) => (b.totalActual || 0) - (a.totalActual || 0));
|
||||
|
||||
const byLeadTime = metrics
|
||||
.filter(m => m.metric_type === 'by_lead_time')
|
||||
.map(m => ({
|
||||
bucket: m.dimension_value,
|
||||
sampleSize: parseInt(m.sample_size),
|
||||
mae: m.mae != null ? parseFloat(parseFloat(m.mae).toFixed(4)) : null,
|
||||
wmape: m.wmape != null ? parseFloat((parseFloat(m.wmape) * 100).toFixed(1)) : null,
|
||||
bias: m.bias != null ? parseFloat(parseFloat(m.bias).toFixed(4)) : null,
|
||||
rmse: m.rmse != null ? parseFloat(parseFloat(m.rmse).toFixed(4)) : null,
|
||||
}))
|
||||
.sort((a, b) => {
|
||||
const order = { '1-7d': 0, '8-14d': 1, '15-30d': 2, '31-60d': 3, '61-90d': 4 };
|
||||
return (order[a.bucket] ?? 99) - (order[b.bucket] ?? 99);
|
||||
});
|
||||
|
||||
const byMethod = metrics
|
||||
.filter(m => m.metric_type === 'by_method')
|
||||
.map(m => ({
|
||||
method: m.dimension_value,
|
||||
sampleSize: parseInt(m.sample_size),
|
||||
mae: m.mae != null ? parseFloat(parseFloat(m.mae).toFixed(4)) : null,
|
||||
wmape: m.wmape != null ? parseFloat((parseFloat(m.wmape) * 100).toFixed(1)) : null,
|
||||
bias: m.bias != null ? parseFloat(parseFloat(m.bias).toFixed(4)) : null,
|
||||
}));
|
||||
|
||||
const dailyTrend = metrics
|
||||
.filter(m => m.metric_type === 'daily')
|
||||
.map(m => ({
|
||||
date: m.dimension_value,
|
||||
mae: m.mae != null ? parseFloat(parseFloat(m.mae).toFixed(4)) : null,
|
||||
wmape: m.wmape != null ? parseFloat((parseFloat(m.wmape) * 100).toFixed(1)) : null,
|
||||
bias: m.bias != null ? parseFloat(parseFloat(m.bias).toFixed(4)) : null,
|
||||
}))
|
||||
.sort((a, b) => a.date.localeCompare(b.date));
|
||||
|
||||
// Historical accuracy trend (across runs)
|
||||
const { rows: trendRows } = await executeQuery(`
|
||||
SELECT fa.run_id, fr.finished_at::date AS run_date,
|
||||
fa.mae, fa.wmape, fa.bias, fa.rmse, fa.sample_size
|
||||
FROM forecast_accuracy fa
|
||||
JOIN forecast_runs fr ON fr.id = fa.run_id
|
||||
WHERE fa.metric_type = 'overall'
|
||||
AND fa.dimension_value = 'all'
|
||||
ORDER BY fr.finished_at
|
||||
`);
|
||||
|
||||
const accuracyTrend = trendRows.map(r => ({
|
||||
date: r.run_date instanceof Date ? r.run_date.toISOString().split('T')[0] : r.run_date,
|
||||
mae: r.mae != null ? parseFloat(parseFloat(r.mae).toFixed(4)) : null,
|
||||
wmape: r.wmape != null ? parseFloat((parseFloat(r.wmape) * 100).toFixed(1)) : null,
|
||||
bias: r.bias != null ? parseFloat(parseFloat(r.bias).toFixed(4)) : null,
|
||||
sampleSize: parseInt(r.sample_size),
|
||||
}));
|
||||
|
||||
res.json({
|
||||
hasData: true,
|
||||
computedAt,
|
||||
daysOfHistory: parseInt(historyInfo.days_of_history) || 0,
|
||||
historyRange: {
|
||||
from: historyInfo.earliest_date instanceof Date
|
||||
? historyInfo.earliest_date.toISOString().split('T')[0]
|
||||
: historyInfo.earliest_date,
|
||||
to: historyInfo.latest_date instanceof Date
|
||||
? historyInfo.latest_date.toISOString().split('T')[0]
|
||||
: historyInfo.latest_date,
|
||||
},
|
||||
overall: overall ? {
|
||||
sampleSize: parseInt(overall.sample_size),
|
||||
totalActual: parseFloat(overall.total_actual_units) || 0,
|
||||
totalForecast: parseFloat(overall.total_forecast_units) || 0,
|
||||
mae: overall.mae != null ? parseFloat(parseFloat(overall.mae).toFixed(4)) : null,
|
||||
wmape: overall.wmape != null ? parseFloat((parseFloat(overall.wmape) * 100).toFixed(1)) : null,
|
||||
bias: overall.bias != null ? parseFloat(parseFloat(overall.bias).toFixed(4)) : null,
|
||||
rmse: overall.rmse != null ? parseFloat(parseFloat(overall.rmse).toFixed(4)) : null,
|
||||
} : null,
|
||||
byPhase,
|
||||
byLeadTime,
|
||||
byMethod,
|
||||
dailyTrend,
|
||||
accuracyTrend,
|
||||
});
|
||||
} catch (err) {
|
||||
console.error('Error fetching forecast accuracy:', err);
|
||||
res.status(500).json({ error: 'Failed to fetch forecast accuracy' });
|
||||
}
|
||||
});
|
||||
|
||||
// GET /dashboard/overstock/metrics
|
||||
// Returns overstock metrics by category
|
||||
router.get('/overstock/metrics', async (req, res) => {
|
||||
@@ -427,7 +815,7 @@ router.get('/overstock/metrics', async (req, res) => {
|
||||
|
||||
// Get category breakdowns separately
|
||||
const { rows: categoryData } = await executeQuery(`
|
||||
SELECT
|
||||
SELECT
|
||||
c.name as category_name,
|
||||
COUNT(DISTINCT pm.pid)::integer as overstocked_products,
|
||||
SUM(pm.overstocked_units)::integer as total_excess_units,
|
||||
@@ -443,6 +831,22 @@ router.get('/overstock/metrics', async (req, res) => {
|
||||
LIMIT 8
|
||||
`);
|
||||
|
||||
// Overstock breakdown by lifecycle phase
|
||||
const { rows: phaseOverstock } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
|
||||
COUNT(DISTINCT pm.pid)::integer AS products,
|
||||
COALESCE(SUM(pm.overstocked_units), 0)::integer AS units,
|
||||
ROUND(COALESCE(SUM(pm.overstocked_cost), 0)::numeric, 2) AS cost,
|
||||
ROUND(COALESCE(SUM(pm.overstocked_retail), 0)::numeric, 2) AS retail
|
||||
FROM product_metrics pm
|
||||
WHERE pm.status = 'Overstock' AND pm.is_visible = true
|
||||
AND COALESCE(pm.preorder_count, 0) = 0
|
||||
GROUP BY pm.lifecycle_phase
|
||||
ORDER BY cost DESC
|
||||
`);
|
||||
const overstockPhaseTotalCost = phaseOverstock.reduce((s, r) => s + (parseFloat(r.cost) || 0), 0);
|
||||
|
||||
// Format response with explicit type conversion
|
||||
const response = {
|
||||
overstockedProducts: parseInt(summaryMetrics.total_overstocked) || 0,
|
||||
@@ -455,7 +859,17 @@ router.get('/overstock/metrics', async (req, res) => {
|
||||
units: parseInt(cat.total_excess_units) || 0,
|
||||
cost: parseFloat(cat.total_excess_cost) || 0,
|
||||
retail: parseFloat(cat.total_excess_retail) || 0
|
||||
}))
|
||||
})),
|
||||
phaseBreakdown: phaseOverstock.filter(r => parseFloat(r.cost) > 0).map(r => ({
|
||||
phase: r.phase,
|
||||
products: parseInt(r.products) || 0,
|
||||
units: parseInt(r.units) || 0,
|
||||
cost: parseFloat(r.cost) || 0,
|
||||
retail: parseFloat(r.retail) || 0,
|
||||
percentage: overstockPhaseTotalCost > 0
|
||||
? parseFloat(((parseFloat(r.cost) / overstockPhaseTotalCost) * 100).toFixed(1))
|
||||
: 0,
|
||||
})),
|
||||
};
|
||||
|
||||
res.json(response);
|
||||
@@ -600,7 +1014,7 @@ router.get('/sales/metrics', async (req, res) => {
|
||||
|
||||
// Get overall metrics for the period
|
||||
const { rows: [metrics] } = await executeQuery(`
|
||||
SELECT
|
||||
SELECT
|
||||
COUNT(DISTINCT order_number) as total_orders,
|
||||
SUM(quantity) as total_units,
|
||||
SUM(price * quantity) as total_revenue,
|
||||
@@ -610,6 +1024,40 @@ router.get('/sales/metrics', async (req, res) => {
|
||||
AND canceled = false
|
||||
`, [startDate, endDate]);
|
||||
|
||||
// Sales breakdown by lifecycle phase
|
||||
const { rows: phaseSales } = await executeQuery(`
|
||||
SELECT
|
||||
COALESCE(pm.lifecycle_phase, 'unknown') AS phase,
|
||||
COUNT(DISTINCT o.order_number)::integer AS orders,
|
||||
COALESCE(SUM(o.quantity), 0)::integer AS units,
|
||||
ROUND(COALESCE(SUM(o.price * o.quantity), 0)::numeric, 2) AS revenue,
|
||||
ROUND(COALESCE(SUM(o.costeach * o.quantity), 0)::numeric, 2) AS cogs
|
||||
FROM orders o
|
||||
LEFT JOIN product_metrics pm ON o.pid = pm.pid
|
||||
WHERE o.date BETWEEN $1 AND $2 AND o.canceled = false
|
||||
GROUP BY pm.lifecycle_phase
|
||||
ORDER BY revenue DESC
|
||||
`, [startDate, endDate]);
|
||||
const salePhaseTotalRev = phaseSales.reduce((s, r) => s + (parseFloat(r.revenue) || 0), 0);
|
||||
|
||||
// Daily sales broken down by lifecycle phase (for stacked chart)
|
||||
const { rows: dailyPhaseRows } = await executeQuery(`
|
||||
SELECT
|
||||
DATE(o.date) AS sale_date,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'preorder'), 0) AS preorder,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'launch'), 0) AS launch,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'decay'), 0) AS decay,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'mature'), 0) AS mature,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'slow_mover'), 0) AS slow_mover,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE COALESCE(pm.lifecycle_phase, 'unknown') = 'dormant'), 0) AS dormant,
|
||||
COALESCE(SUM(o.price * o.quantity) FILTER (WHERE pm.lifecycle_phase IS NULL), 0) AS unknown
|
||||
FROM orders o
|
||||
LEFT JOIN product_metrics pm ON o.pid = pm.pid
|
||||
WHERE o.date BETWEEN $1 AND $2 AND o.canceled = false
|
||||
GROUP BY DATE(o.date)
|
||||
ORDER BY sale_date
|
||||
`, [startDate, endDate]);
|
||||
|
||||
const response = {
|
||||
totalOrders: parseInt(metrics?.total_orders) || 0,
|
||||
totalUnitsSold: parseInt(metrics?.total_units) || 0,
|
||||
@@ -620,7 +1068,27 @@ router.get('/sales/metrics', async (req, res) => {
|
||||
units: parseInt(day.total_units) || 0,
|
||||
revenue: parseFloat(day.total_revenue) || 0,
|
||||
cogs: parseFloat(day.total_cogs) || 0
|
||||
}))
|
||||
})),
|
||||
dailySalesByPhase: dailyPhaseRows.map(d => ({
|
||||
date: d.sale_date,
|
||||
preorder: parseFloat(d.preorder) || 0,
|
||||
launch: parseFloat(d.launch) || 0,
|
||||
decay: parseFloat(d.decay) || 0,
|
||||
mature: parseFloat(d.mature) || 0,
|
||||
slow_mover: parseFloat(d.slow_mover) || 0,
|
||||
dormant: parseFloat(d.dormant) || 0,
|
||||
unknown: parseFloat(d.unknown) || 0,
|
||||
})),
|
||||
phaseBreakdown: phaseSales.filter(r => parseFloat(r.revenue) > 0).map(r => ({
|
||||
phase: r.phase,
|
||||
orders: parseInt(r.orders) || 0,
|
||||
units: parseInt(r.units) || 0,
|
||||
revenue: parseFloat(r.revenue) || 0,
|
||||
cogs: parseFloat(r.cogs) || 0,
|
||||
percentage: salePhaseTotalRev > 0
|
||||
? parseFloat(((parseFloat(r.revenue) / salePhaseTotalRev) * 100).toFixed(1))
|
||||
: 0,
|
||||
})),
|
||||
};
|
||||
|
||||
res.json(response);
|
||||
|
||||
@@ -782,4 +782,49 @@ router.get('/:id/time-series', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// GET /products/:id/forecast
// Returns the 90-day daily forecast for a single product from product_forecasts
router.get('/:id/forecast', async (req, res) => {
  const { id } = req.params;
  try {
    const pool = req.app.locals.pool;

    // Fetch every stored forecast row for this product, oldest first.
    // The product id is bound as a query parameter ($1), never interpolated.
    const { rows } = await pool.query(`
      SELECT
        forecast_date AS date,
        forecast_units AS units,
        forecast_revenue AS revenue,
        lifecycle_phase AS phase,
        forecast_method AS method,
        confidence_lower,
        confidence_upper
      FROM product_forecasts
      WHERE pid = $1
      ORDER BY forecast_date
    `, [id]);

    // Nothing on record for this pid: reply with an empty, well-shaped payload.
    if (rows.length === 0) {
      return res.json({ forecast: [], phase: null, method: null });
    }

    // Phase/method are read off the first (earliest) row — assumes they are
    // uniform across a product's forecast rows (TODO confirm against the
    // forecasting job); this matches the original behavior exactly.
    const { phase, method } = rows[0];

    // Normalise one DB row into the API point shape: dates as YYYY-MM-DD
    // strings, numeric columns coerced to floats (0 when null/unparseable).
    const toPoint = (row) => ({
      date: row.date instanceof Date ? row.date.toISOString().split('T')[0] : row.date,
      units: parseFloat(row.units) || 0,
      revenue: parseFloat(row.revenue) || 0,
      confidenceLower: parseFloat(row.confidence_lower) || 0,
      confidenceUpper: parseFloat(row.confidence_upper) || 0,
    });

    res.json({ phase, method, forecast: rows.map(toPoint) });
  } catch (error) {
    console.error('Error fetching product forecast:', error);
    res.status(500).json({ error: 'Failed to fetch product forecast' });
  }
});
|
||||
|
||||
// Expose the configured router for mounting by the parent Express app.
module.exports = router;
|
||||
|
||||
Reference in New Issue
Block a user