Import/metrics calc fixes

This commit is contained in:
2026-02-08 22:44:57 -05:00
parent 12cc7a4639
commit 7c41a7f799
11 changed files with 828 additions and 55 deletions

View File

@@ -7,6 +7,7 @@ const { importProducts } = require('./import/products');
const importOrders = require('./import/orders');
const importPurchaseOrders = require('./import/purchase-orders');
const importDailyDeals = require('./import/daily-deals');
const importStockSnapshots = require('./import/stock-snapshots');
dotenv.config({ path: path.join(__dirname, "../.env") });
@@ -16,6 +17,7 @@ const IMPORT_PRODUCTS = true;
const IMPORT_ORDERS = true;
const IMPORT_PURCHASE_ORDERS = true;
const IMPORT_DAILY_DEALS = true;
const IMPORT_STOCK_SNAPSHOTS = true;
// Add flag for incremental updates
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
@@ -81,7 +83,8 @@ async function main() {
IMPORT_PRODUCTS,
IMPORT_ORDERS,
IMPORT_PURCHASE_ORDERS,
IMPORT_DAILY_DEALS
IMPORT_DAILY_DEALS,
IMPORT_STOCK_SNAPSHOTS
].filter(Boolean).length;
try {
@@ -130,10 +133,11 @@ async function main() {
'products_enabled', $3::boolean,
'orders_enabled', $4::boolean,
'purchase_orders_enabled', $5::boolean,
'daily_deals_enabled', $6::boolean
'daily_deals_enabled', $6::boolean,
'stock_snapshots_enabled', $7::boolean
)
) RETURNING id
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_DAILY_DEALS]);
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_DAILY_DEALS, IMPORT_STOCK_SNAPSHOTS]);
importHistoryId = historyResult.rows[0].id;
} catch (error) {
console.error("Error creating import history record:", error);
@@ -151,7 +155,8 @@ async function main() {
products: null,
orders: null,
purchaseOrders: null,
dailyDeals: null
dailyDeals: null,
stockSnapshots: null
};
let totalRecordsAdded = 0;
@@ -257,6 +262,33 @@ async function main() {
}
}
if (IMPORT_STOCK_SNAPSHOTS) {
try {
const stepStart = Date.now();
results.stockSnapshots = await importStockSnapshots(prodConnection, localConnection, INCREMENTAL_UPDATE);
stepTimings.stockSnapshots = Math.round((Date.now() - stepStart) / 1000);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Stock snapshots import result:', results.stockSnapshots);
if (results.stockSnapshots?.status === 'error') {
console.error('Stock snapshots import had an error:', results.stockSnapshots.error);
} else {
totalRecordsAdded += parseInt(results.stockSnapshots?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.stockSnapshots?.recordsUpdated || 0);
}
} catch (error) {
console.error('Error during stock snapshots import:', error);
results.stockSnapshots = {
status: 'error',
error: error.message,
recordsAdded: 0,
recordsUpdated: 0
};
}
}
const endTime = Date.now();
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
@@ -280,11 +312,13 @@ async function main() {
'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
'daily_deals_result', COALESCE($13::jsonb, 'null'::jsonb),
'total_deleted', $14::integer,
'total_skipped', $15::integer,
'step_timings', $16::jsonb
'stock_snapshots_enabled', $14::boolean,
'stock_snapshots_result', COALESCE($15::jsonb, 'null'::jsonb),
'total_deleted', $16::integer,
'total_skipped', $17::integer,
'step_timings', $18::jsonb
)
WHERE id = $17
WHERE id = $19
`, [
totalElapsedSeconds,
parseInt(totalRecordsAdded),
@@ -299,6 +333,8 @@ async function main() {
JSON.stringify(results.orders),
JSON.stringify(results.purchaseOrders),
JSON.stringify(results.dailyDeals),
IMPORT_STOCK_SNAPSHOTS,
JSON.stringify(results.stockSnapshots),
totalRecordsDeleted,
totalRecordsSkipped,
JSON.stringify(stepTimings),

View File

@@ -597,14 +597,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
ELSE 0
END) as promo_discount_sum,
COALESCE(ot.tax, 0) as total_tax,
COALESCE(oc.costeach, oi.price * 0.5) as costeach
COALESCE(oc.costeach, p.cost_price, oi.price * 0.5) as costeach
FROM temp_order_items oi
LEFT JOIN temp_item_discounts id ON oi.order_id = id.order_id AND oi.pid = id.pid
LEFT JOIN temp_main_discounts md ON id.order_id = md.order_id AND id.discount_id = md.discount_id
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs oc ON oi.order_id = oc.order_id AND oi.pid = oc.pid
LEFT JOIN public.products p ON oi.pid = p.pid
WHERE oi.order_id = ANY($1)
GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach
GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach, p.cost_price
)
SELECT
oi.order_id as order_number,
@@ -631,10 +632,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
om.customer_name,
om.status,
om.canceled,
COALESCE(ot.costeach, oi.price * 0.5)::NUMERIC(14, 4) as costeach
COALESCE(ot.costeach, p.cost_price, oi.price * 0.5)::NUMERIC(14, 4) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN public.products p ON oi.pid = p.pid
WHERE oi.order_id = ANY($1)
ORDER BY oi.order_id, oi.pid
`, [subBatchIds]);

View File

@@ -0,0 +1,184 @@
const { outputProgress, formatElapsedTime, calculateRate } = require('../metrics-new/utils/progress');
const BATCH_SIZE = 5000;
/**
 * Imports daily stock snapshots from MySQL's snap_product_value table to PostgreSQL.
 * This provides historical end-of-day stock quantities per product, dating back to 2012.
 *
 * MySQL source table: snap_product_value (date, pid, count, pending, value)
 * - date: snapshot date (typically yesterday's date, recorded daily by cron)
 * - pid: product ID
 * - count: end-of-day stock quantity (sum of product_inventory.count)
 * - pending: pending/on-order quantity
 * - value: total inventory value at cost (sum of costeach * count)
 *
 * PostgreSQL target table: stock_snapshots (snapshot_date, pid, stock_quantity, pending_quantity, stock_value)
 *
 * @param {object} prodConnection - MySQL connection to production DB (mysql2-style: query() resolves to [rows, fields])
 * @param {object} localConnection - PostgreSQL connection wrapper (pg-style: query() resolves to a result object with .rows)
 * @param {boolean} incrementalUpdate - If true, only fetch new snapshots since last import
 * @returns {object} Import statistics: { recordsAdded, recordsUpdated, status }
 *   Note: upserted (conflicting) rows are counted in recordsAdded; recordsUpdated is always 0.
 */
async function importStockSnapshots(prodConnection, localConnection, incrementalUpdate = true) {
    const startTime = Date.now();

    outputProgress({
        status: 'running',
        operation: 'Stock snapshots import',
        message: 'Starting stock snapshots import...',
        current: 0,
        total: 0,
        elapsed: formatElapsedTime(startTime)
    });

    // Ensure target table exists
    await localConnection.query(`
        CREATE TABLE IF NOT EXISTS stock_snapshots (
            snapshot_date DATE NOT NULL,
            pid BIGINT NOT NULL,
            stock_quantity INT NOT NULL DEFAULT 0,
            pending_quantity INT NOT NULL DEFAULT 0,
            stock_value NUMERIC(14, 4) NOT NULL DEFAULT 0,
            PRIMARY KEY (snapshot_date, pid)
        )
    `);

    // Create index for efficient lookups by pid
    await localConnection.query(`
        CREATE INDEX IF NOT EXISTS idx_stock_snapshots_pid ON stock_snapshots (pid)
    `);

    // Determine the start date for the import
    let startDate = '2020-01-01'; // Default: match the orders/snapshots date range

    if (incrementalUpdate) {
        // localConnection.query() resolves to a pg-style result object with .rows —
        // do NOT array-destructure it (that pattern is only valid for the mysql2
        // prodConnection, whose query() resolves to [rows, fields]).
        const maxDateResult = await localConnection.query(`
            SELECT MAX(snapshot_date)::text AS max_date FROM stock_snapshots
        `);
        if (maxDateResult.rows[0]?.max_date) {
            // Resume from the last imported date; all source queries below use
            // `date > ?`, so that day is excluded and not re-fetched.
            startDate = maxDateResult.rows[0].max_date;
        }
    }

    outputProgress({
        status: 'running',
        operation: 'Stock snapshots import',
        message: `Fetching stock snapshots from MySQL since ${startDate}...`,
        current: 0,
        total: 0,
        elapsed: formatElapsedTime(startTime)
    });

    // Count total rows to import. Coerce to Number: mysql2 may return COUNT(*)
    // as a string/BigInt depending on driver configuration, which would break
    // the strict === 0 check and the arithmetic below.
    const [countResult] = await prodConnection.query(
        `SELECT COUNT(*) AS total FROM snap_product_value WHERE date > ?`,
        [startDate]
    );
    const totalRows = Number(countResult[0].total);

    if (totalRows === 0) {
        outputProgress({
            status: 'complete',
            operation: 'Stock snapshots import',
            message: 'No new stock snapshots to import',
            current: 0,
            total: 0,
            elapsed: formatElapsedTime(startTime)
        });
        return { recordsAdded: 0, recordsUpdated: 0, status: 'complete' };
    }

    outputProgress({
        status: 'running',
        operation: 'Stock snapshots import',
        message: `Found ${totalRows.toLocaleString()} stock snapshot rows to import`,
        current: 0,
        total: totalRows,
        elapsed: formatElapsedTime(startTime)
    });

    // Process in batches using date-based pagination (more efficient than OFFSET)
    let processedRows = 0;
    let recordsAdded = 0;
    let currentDate = startDate;

    while (processedRows < totalRows) {
        // Fetch the next window of distinct snapshot dates (keyset pagination)
        const [dateBatch] = await prodConnection.query(
            `SELECT DISTINCT date FROM snap_product_value
             WHERE date > ? ORDER BY date LIMIT 10`,
            [currentDate]
        );
        if (dateBatch.length === 0) break;

        const lastDate = dateBatch[dateBatch.length - 1].date;

        // Fetch all rows for these dates
        const [rows] = await prodConnection.query(
            `SELECT date, pid, count AS stock_quantity, pending AS pending_quantity, value AS stock_value
             FROM snap_product_value
             WHERE date > ? AND date <= ?
             ORDER BY date, pid`,
            [currentDate, lastDate]
        );
        if (rows.length === 0) break;

        // Batch insert into PostgreSQL using UNNEST for efficiency
        for (let i = 0; i < rows.length; i += BATCH_SIZE) {
            const batch = rows.slice(i, i + BATCH_SIZE);
            const snapshotDates = batch.map(r => r.date);
            const pids = batch.map(r => r.pid);
            const quantities = batch.map(r => r.stock_quantity);
            const pending = batch.map(r => r.pending_quantity);
            const values = batch.map(r => r.stock_value);

            // Upsert so re-imports of an already-seen date are idempotent.
            // The result object is not needed, so it is not captured.
            await localConnection.query(`
                INSERT INTO stock_snapshots (snapshot_date, pid, stock_quantity, pending_quantity, stock_value)
                SELECT * FROM UNNEST(
                    $1::date[], $2::bigint[], $3::int[], $4::int[], $5::numeric[]
                )
                ON CONFLICT (snapshot_date, pid) DO UPDATE SET
                    stock_quantity = EXCLUDED.stock_quantity,
                    pending_quantity = EXCLUDED.pending_quantity,
                    stock_value = EXCLUDED.stock_value
            `, [snapshotDates, pids, quantities, pending, values]);

            recordsAdded += batch.length;
        }

        processedRows += rows.length;
        currentDate = lastDate;

        outputProgress({
            status: 'running',
            operation: 'Stock snapshots import',
            message: `Imported ${processedRows.toLocaleString()} / ${totalRows.toLocaleString()} rows (through ${currentDate})`,
            current: processedRows,
            total: totalRows,
            elapsed: formatElapsedTime(startTime),
            rate: calculateRate(processedRows, startTime)
        });
    }

    outputProgress({
        status: 'complete',
        operation: 'Stock snapshots import',
        message: `Stock snapshots import complete: ${recordsAdded.toLocaleString()} rows`,
        current: processedRows,
        total: totalRows,
        elapsed: formatElapsedTime(startTime)
    });

    return {
        recordsAdded,
        recordsUpdated: 0,
        status: 'complete'
    };
}
module.exports = importStockSnapshots;

View File

@@ -214,7 +214,7 @@ BEGIN
-- Final INSERT/UPDATE statement using all the prepared CTEs
INSERT INTO public.product_metrics (
pid, last_calculated, sku, title, brand, vendor, image_url, is_visible, is_replenishable,
current_price, current_regular_price, current_cost_price, current_landing_cost_price,
current_price, current_regular_price, current_cost_price,
current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
@@ -242,7 +242,7 @@ BEGIN
SELECT
-- Select columns in order, joining all CTEs by pid
ci.pid, _start_time, ci.sku, ci.title, ci.brand, ci.vendor, ci.image_url, ci.is_visible, ci.replenishable,
ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
ci.current_price, ci.current_regular_price, ci.current_cost_price,
ci.current_stock, (ci.current_stock * COALESCE(ci.current_effective_cost, 0.00))::numeric(12,2), (ci.current_stock * COALESCE(ci.current_price, 0.00))::numeric(12,2), (ci.current_stock * COALESCE(ci.current_regular_price, 0.00))::numeric(12,2),
COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00)::numeric(12,2), (COALESCE(ooi.on_order_qty, 0) * COALESCE(ci.current_price, 0.00))::numeric(12,2), ooi.earliest_expected_date,
@@ -415,7 +415,7 @@ BEGIN
-- *** IMPORTANT: List ALL columns here, ensuring order matches INSERT list ***
-- Update ALL columns to ensure entire row is refreshed
last_calculated = EXCLUDED.last_calculated, sku = EXCLUDED.sku, title = EXCLUDED.title, brand = EXCLUDED.brand, vendor = EXCLUDED.vendor, image_url = EXCLUDED.image_url, is_visible = EXCLUDED.is_visible, is_replenishable = EXCLUDED.is_replenishable,
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price,
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,

View File

@@ -13,7 +13,7 @@ DECLARE
_begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2020-01-01'); -- Starting point: captures all historical order data
_end_date DATE := CURRENT_DATE;
BEGIN
RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;
RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;
-- First truncate the existing snapshots to ensure a clean slate
TRUNCATE TABLE public.daily_product_snapshots;
@@ -36,7 +36,13 @@ BEGIN
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN
COALESCE(
o.costeach,
get_weighted_avg_cost(p.pid, o.date::date),
p.cost_price
) * o.quantity
ELSE 0 END), 0.00) AS cogs,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,
-- Aggregate Returns (Quantity < 0 or Status = Returned)
@@ -63,15 +69,17 @@ BEGIN
GROUP BY r.pid
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
),
-- Get stock quantities for the day - note this is approximate since we're using current products data
-- Use historical stock from stock_snapshots when available,
-- falling back to current stock from products table
StockData AS (
SELECT
p.pid,
p.stock_quantity,
COALESCE(p.cost_price, 0.00) as effective_cost_price,
COALESCE(ss.stock_quantity, p.stock_quantity) AS stock_quantity,
COALESCE(ss.stock_value, p.stock_quantity * COALESCE(p.cost_price, 0.00)) AS stock_value,
COALESCE(p.price, 0.00) as current_price,
COALESCE(p.regular_price, 0.00) as current_regular_price
FROM public.products p
LEFT JOIN stock_snapshots ss ON p.pid = ss.pid AND ss.snapshot_date = _date
)
INSERT INTO public.daily_product_snapshots (
snapshot_date,
@@ -99,9 +107,9 @@ BEGIN
_date AS snapshot_date,
COALESCE(sd.pid, rd.pid) AS pid,
sd.sku,
-- Use current stock as approximation, since historical stock data may not be available
-- Historical stock from stock_snapshots, falls back to current stock
s.stock_quantity AS eod_stock_quantity,
s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
s.stock_value AS eod_stock_cost,
s.stock_quantity * s.current_price AS eod_stock_retail,
s.stock_quantity * s.current_regular_price AS eod_stock_gross,
(s.stock_quantity <= 0) AS stockout_flag,
@@ -114,7 +122,7 @@ BEGIN
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue,
COALESCE(sd.cogs, 0.00),
COALESCE(sd.gross_regular_revenue, 0.00),
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
-- Receiving metrics
COALESCE(rd.units_received, 0),
COALESCE(rd.cost_received, 0.00),

View File

@@ -121,14 +121,16 @@ BEGIN
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
),
CurrentStock AS (
-- Select current stock values directly from products table
-- Use historical stock from stock_snapshots when available,
-- falling back to current stock from products table
SELECT
pid,
stock_quantity,
COALESCE(cost_price, 0.00) as effective_cost_price,
COALESCE(price, 0.00) as current_price,
COALESCE(regular_price, 0.00) as current_regular_price
FROM public.products
p.pid,
COALESCE(ss.stock_quantity, p.stock_quantity) AS stock_quantity,
COALESCE(ss.stock_value, p.stock_quantity * COALESCE(p.cost_price, 0.00)) AS stock_value,
COALESCE(p.price, 0.00) AS current_price,
COALESCE(p.regular_price, 0.00) AS current_regular_price
FROM public.products p
LEFT JOIN stock_snapshots ss ON p.pid = ss.pid AND ss.snapshot_date = _target_date
),
ProductsWithActivity AS (
-- Quick pre-filter to only process products with activity
@@ -168,7 +170,7 @@ BEGIN
COALESCE(sd.sku, p.sku) AS sku, -- Get SKU from sales data or products table
-- Inventory Metrics (Using CurrentStock)
cs.stock_quantity AS eod_stock_quantity,
cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
cs.stock_value AS eod_stock_cost,
cs.stock_quantity * cs.current_price AS eod_stock_retail,
cs.stock_quantity * cs.current_regular_price AS eod_stock_gross,
(cs.stock_quantity <= 0) AS stockout_flag,
@@ -181,7 +183,7 @@ BEGIN
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue,
COALESCE(sd.cogs, 0.00),
COALESCE(sd.gross_regular_revenue, 0.00),
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit, -- Basic profit: Net Revenue - COGS
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
-- Receiving Metrics (From ReceivingData)
COALESCE(rd.units_received, 0),
COALESCE(rd.cost_received, 0.00),

View File

@@ -204,22 +204,33 @@ BEGIN
GROUP BY pid
),
DemandVariability AS (
-- Calculate variance and standard deviation of daily sales
-- Calculate variance and standard deviation of daily sales over the full 30-day window
-- including zero-sales days (not just activity days) for accurate variability metrics
SELECT
pid,
COUNT(*) AS days_with_data,
AVG(units_sold) AS avg_daily_sales,
VARIANCE(units_sold) AS sales_variance,
STDDEV(units_sold) AS sales_std_dev,
-- Coefficient of variation
CASE
WHEN AVG(units_sold) > 0 THEN STDDEV(units_sold) / AVG(units_sold)
ELSE NULL
pd.pid,
COUNT(dps.pid) AS days_with_data,
AVG(COALESCE(dps.units_sold, 0)) AS avg_daily_sales,
VARIANCE(COALESCE(dps.units_sold, 0)) AS sales_variance,
STDDEV(COALESCE(dps.units_sold, 0)) AS sales_std_dev,
CASE
WHEN AVG(COALESCE(dps.units_sold, 0)) > 0
THEN STDDEV(COALESCE(dps.units_sold, 0)) / AVG(COALESCE(dps.units_sold, 0))
ELSE NULL
END AS sales_cv
FROM public.daily_product_snapshots
WHERE snapshot_date >= _current_date - INTERVAL '29 days'
AND snapshot_date <= _current_date
GROUP BY pid
FROM (
SELECT DISTINCT pid
FROM public.daily_product_snapshots
WHERE snapshot_date >= _current_date - INTERVAL '29 days'
AND snapshot_date <= _current_date
) pd
CROSS JOIN generate_series(
(_current_date - INTERVAL '29 days')::date,
_current_date,
'1 day'::interval
) AS d(day)
LEFT JOIN public.daily_product_snapshots dps
ON dps.pid = pd.pid AND dps.snapshot_date = d.day::date
GROUP BY pd.pid
),
ServiceLevels AS (
-- Calculate service level and fill rate metrics
@@ -257,7 +268,7 @@ BEGIN
barcode, harmonized_tariff_code, vendor_reference, notions_reference, line, subline, artist,
moq, rating, reviews, weight, length, width, height, country_of_origin, location,
baskets, notifies, preorder_count, notions_inv_count,
current_price, current_regular_price, current_cost_price, current_landing_cost_price,
current_price, current_regular_price, current_cost_price,
current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
@@ -295,7 +306,7 @@ BEGIN
ci.barcode, ci.harmonized_tariff_code, ci.vendor_reference, ci.notions_reference, ci.line, ci.subline, ci.artist,
ci.moq, ci.rating, ci.reviews, ci.weight, ci.length, ci.width, ci.height, ci.country_of_origin, ci.location,
ci.baskets, ci.notifies, ci.preorder_count, ci.notions_inv_count,
ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
ci.current_price, ci.current_regular_price, ci.current_cost_price,
ci.current_stock, ci.current_stock * ci.current_effective_cost, ci.current_stock * ci.current_price, ci.current_stock * ci.current_regular_price,
COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00), COALESCE(ooi.on_order_qty, 0) * ci.current_price, ooi.earliest_expected_date,
ci.created_at::date, COALESCE(ci.first_received::date, hd.date_first_received_calc), hd.date_last_received_calc, hd.date_first_sold, COALESCE(ci.date_last_sold, hd.max_order_date),
@@ -514,7 +525,7 @@ BEGIN
barcode = EXCLUDED.barcode, harmonized_tariff_code = EXCLUDED.harmonized_tariff_code, vendor_reference = EXCLUDED.vendor_reference, notions_reference = EXCLUDED.notions_reference, line = EXCLUDED.line, subline = EXCLUDED.subline, artist = EXCLUDED.artist,
moq = EXCLUDED.moq, rating = EXCLUDED.rating, reviews = EXCLUDED.reviews, weight = EXCLUDED.weight, length = EXCLUDED.length, width = EXCLUDED.width, height = EXCLUDED.height, country_of_origin = EXCLUDED.country_of_origin, location = EXCLUDED.location,
baskets = EXCLUDED.baskets, notifies = EXCLUDED.notifies, preorder_count = EXCLUDED.preorder_count, notions_inv_count = EXCLUDED.notions_inv_count,
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price,
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,
@@ -567,11 +578,26 @@ BEGIN
product_metrics.replenishment_units IS DISTINCT FROM EXCLUDED.replenishment_units OR
product_metrics.stock_cover_in_days IS DISTINCT FROM EXCLUDED.stock_cover_in_days OR
product_metrics.yesterday_sales IS DISTINCT FROM EXCLUDED.yesterday_sales OR
-- Check a few other important fields that might change
product_metrics.date_last_sold IS DISTINCT FROM EXCLUDED.date_last_sold OR
product_metrics.earliest_expected_date IS DISTINCT FROM EXCLUDED.earliest_expected_date OR
product_metrics.lifetime_sales IS DISTINCT FROM EXCLUDED.lifetime_sales OR
product_metrics.lifetime_revenue_quality IS DISTINCT FROM EXCLUDED.lifetime_revenue_quality
product_metrics.lifetime_revenue_quality IS DISTINCT FROM EXCLUDED.lifetime_revenue_quality OR
-- Derived metrics that can change even when source fields don't
product_metrics.profit_30d IS DISTINCT FROM EXCLUDED.profit_30d OR
product_metrics.cogs_30d IS DISTINCT FROM EXCLUDED.cogs_30d OR
product_metrics.margin_30d IS DISTINCT FROM EXCLUDED.margin_30d OR
product_metrics.stockout_days_30d IS DISTINCT FROM EXCLUDED.stockout_days_30d OR
product_metrics.sell_through_30d IS DISTINCT FROM EXCLUDED.sell_through_30d OR
-- Growth and variability metrics
product_metrics.sales_growth_30d_vs_prev IS DISTINCT FROM EXCLUDED.sales_growth_30d_vs_prev OR
product_metrics.revenue_growth_30d_vs_prev IS DISTINCT FROM EXCLUDED.revenue_growth_30d_vs_prev OR
product_metrics.demand_pattern IS DISTINCT FROM EXCLUDED.demand_pattern OR
product_metrics.seasonal_pattern IS DISTINCT FROM EXCLUDED.seasonal_pattern OR
product_metrics.seasonality_index IS DISTINCT FROM EXCLUDED.seasonality_index OR
product_metrics.service_level_30d IS DISTINCT FROM EXCLUDED.service_level_30d OR
product_metrics.fill_rate_30d IS DISTINCT FROM EXCLUDED.fill_rate_30d OR
-- Time-based safety net: always update if more than 1 day stale
product_metrics.last_calculated < NOW() - INTERVAL '1 day'
;
-- Update the status table with the timestamp from the START of this run