Import/metrics calc fixes
This commit is contained in:
241
inventory-server/db/functions.sql
Normal file
241
inventory-server/db/functions.sql
Normal file
@@ -0,0 +1,241 @@
|
||||
-- Custom PostgreSQL functions used by the metrics pipeline
|
||||
-- These must exist in the database before running calculate-metrics-new.js
|
||||
--
|
||||
-- To install/update: psql -d inventory_db -f functions.sql
|
||||
-- All functions use CREATE OR REPLACE so they are safe to re-run.
|
||||
|
||||
-- =============================================================================
-- safe_divide: NULL/zero-safe division.
-- Returns default_value (NULL unless specified) when the denominator is NULL
-- or zero; otherwise returns numerator / denominator.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.safe_divide(
    numerator numeric,
    denominator numeric,
    default_value numeric DEFAULT NULL::numeric
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- A single CASE keeps the guard and the division in one expression.
    -- Note: a NULL numerator still yields NULL, same as plain division.
    RETURN CASE
        WHEN denominator IS NULL OR denominator = 0 THEN default_value
        ELSE numerator / denominator
    END;
END;
$function$;
|
||||
|
||||
-- =============================================================================
-- std_numeric: Standardized rounding helper for consistent numeric precision.
-- Rounds to precision_digits decimal places (default 2); NULL in, NULL out.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.std_numeric(
    value numeric,
    precision_digits integer DEFAULT 2
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Guard clause: make the NULL pass-through explicit up front.
    IF value IS NULL THEN
        RETURN NULL;
    END IF;

    -- numeric ROUND uses round-half-away-from-zero semantics.
    RETURN ROUND(value, precision_digits);
END;
$function$;
|
||||
|
||||
-- =============================================================================
-- calculate_sales_velocity: Daily sales velocity adjusted for stockout days.
-- Ensures at least a 14-day denominator for products with sales to avoid
-- inflated velocity from short in-stock windows; products with zero sales use
-- the full 30-day period.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.calculate_sales_velocity(
    sales_30d integer,
    stockout_days_30d integer
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
DECLARE
    floor_days   numeric;  -- minimum denominator allowed for this product
    selling_days numeric;  -- effective in-stock days over the 30-day window
BEGIN
    -- With any sales, never divide by fewer than 14 days; with none, use 30.
    -- (A NULL sales_30d falls into the ELSE branch, matching CASE semantics.)
    IF sales_30d > 0 THEN
        floor_days := 14.0;
    ELSE
        floor_days := 30.0;
    END IF;

    -- GREATEST ignores a NULL operand, so a NULL stockout count degrades
    -- gracefully to the floor value rather than poisoning the result.
    selling_days := GREATEST(30.0 - stockout_days_30d, floor_days);

    -- NULLIF is defensive: selling_days cannot actually reach 0 given the
    -- floors above, but a zero denominator would otherwise raise an error.
    RETURN sales_30d / NULLIF(selling_days, 0);
END;
$function$;
|
||||
|
||||
-- =============================================================================
-- get_weighted_avg_cost: Weighted average cost from receivings up to a given date.
-- Uses all non-canceled receivings (no row limit) weighted by quantity.
--
-- Parameters:
--   p_pid  - product id to look up
--   p_date - inclusive upper bound on received_date
-- Returns the quantity-weighted average of cost_each, or NULL when the product
-- has no qualifying receivings (or total received quantity is not positive).
-- =============================================================================
CREATE OR REPLACE FUNCTION public.get_weighted_avg_cost(
    p_pid bigint,
    p_date date
)
RETURNS numeric
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
    weighted_cost NUMERIC;
BEGIN
    SELECT
        CASE
            WHEN SUM(qty_each) > 0 THEN SUM(cost_each * qty_each) / SUM(qty_each)
            ELSE NULL
        END INTO weighted_cost
    FROM receivings
    WHERE pid = p_pid
      AND received_date <= p_date
      -- IS DISTINCT FROM keeps rows whose status is NULL; the previous
      -- "status != 'canceled'" silently excluded them (NULL comparison is
      -- UNKNOWN), contradicting the "all non-canceled" intent.
      -- NOTE(review): assumes a NULL status means "not canceled" — confirm
      -- against how receivings.status is populated.
      AND status IS DISTINCT FROM 'canceled';

    RETURN weighted_cost;
END;
$function$;
|
||||
|
||||
-- =============================================================================
-- classify_demand_pattern: Classifies demand based on average demand and
-- coefficient of variation (CV). Standard inventory classification:
--   zero:     no demand
--   stable:   CV <= 0.2 (predictable, easy to forecast)
--   variable: CV <= 0.5 (some variability, still forecastable)
--   sporadic: low volume + high CV (intermittent demand)
--   lumpy:    high volume + high CV (unpredictable bursts)
-- Returns NULL when either input is NULL.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.classify_demand_pattern(
    avg_demand numeric,
    cv numeric
)
RETURNS character varying
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- A single CASE expression mirrors the decision table above; branches
    -- are evaluated top-down, so order is significant.
    RETURN CASE
        WHEN avg_demand IS NULL OR cv IS NULL THEN NULL
        WHEN avg_demand = 0                   THEN 'zero'
        WHEN cv <= 0.2                        THEN 'stable'
        WHEN cv <= 0.5                        THEN 'variable'
        WHEN avg_demand < 1.0                 THEN 'sporadic'
        ELSE                                       'lumpy'
    END;
END;
$function$;
|
||||
|
||||
-- =============================================================================
-- detect_seasonal_pattern: Detects seasonality by comparing monthly average
-- sales across the last 12 months. Uses coefficient of variation across months
-- and peak-to-average ratio to classify patterns.
--
-- Returns:
--   seasonal_pattern:  'none', 'moderate', or 'strong'
--   seasonality_index: peak month avg / overall avg * 100 (100 = no seasonality)
--   peak_season:       name of peak month (e.g. 'January'), or NULL if none
--
-- Thresholds: 'strong'   = monthly CV > 0.5 AND index > 150
--             'moderate' = monthly CV > 0.3 AND index > 120
-- Requires at least 3 distinct months of snapshot data; otherwise 'none'.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.detect_seasonal_pattern(p_pid bigint)
RETURNS TABLE(seasonal_pattern character varying, seasonality_index numeric, peak_season character varying)
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
    v_monthly_cv NUMERIC;         -- stddev/mean of the per-month averages
    v_max_month_avg NUMERIC;      -- highest per-month average (the peak)
    v_overall_avg NUMERIC;        -- mean of the per-month averages
    v_monthly_stddev NUMERIC;     -- stddev of the per-month averages
    v_peak_month_num INT;         -- calendar month number (1-12) of the peak
    v_data_months INT;            -- how many distinct months had snapshots
    v_seasonality_index NUMERIC;
    v_seasonal_pattern VARCHAR;
    v_peak_season VARCHAR;
BEGIN
    -- Gather monthly average sales over the last 12 months.
    -- Grouping is by calendar month number only; within a 365-day window each
    -- month number appears at most once, so no cross-year blending occurs.
    SELECT
        COUNT(*),
        AVG(month_avg),
        STDDEV(month_avg),
        MAX(month_avg)
    INTO v_data_months, v_overall_avg, v_monthly_stddev, v_max_month_avg
    FROM (
        SELECT EXTRACT(MONTH FROM snapshot_date) AS mo, AVG(units_sold) AS month_avg
        FROM daily_product_snapshots
        WHERE pid = p_pid AND snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
        GROUP BY EXTRACT(MONTH FROM snapshot_date)
    ) monthly;

    -- Need at least 3 months of data for meaningful seasonality detection;
    -- also bail out when there were no sales at all (avg 0) to avoid a
    -- division by zero below.
    IF v_data_months < 3 OR v_overall_avg IS NULL OR v_overall_avg = 0 THEN
        RETURN QUERY SELECT 'none'::VARCHAR, 100::NUMERIC, NULL::VARCHAR;
        RETURN;
    END IF;

    -- CV of monthly averages
    v_monthly_cv := v_monthly_stddev / v_overall_avg;

    -- Find peak month number (month with the highest average daily sales).
    -- Re-scans the same window as the aggregate query above.
    SELECT EXTRACT(MONTH FROM snapshot_date)::INT INTO v_peak_month_num
    FROM daily_product_snapshots
    WHERE pid = p_pid AND snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
    GROUP BY EXTRACT(MONTH FROM snapshot_date)
    ORDER BY AVG(units_sold) DESC
    LIMIT 1;

    -- Seasonality index: peak month avg / overall avg * 100
    v_seasonality_index := ROUND((v_max_month_avg / v_overall_avg * 100)::NUMERIC, 2);

    IF v_monthly_cv > 0.5 AND v_seasonality_index > 150 THEN
        v_seasonal_pattern := 'strong';
        -- TO_DATE(.., 'MM') builds a date in the peak month; TO_CHAR 'Month'
        -- is blank-padded to 9 chars, hence the TRIM.
        v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
    ELSIF v_monthly_cv > 0.3 AND v_seasonality_index > 120 THEN
        v_seasonal_pattern := 'moderate';
        v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
    ELSE
        -- Below both thresholds: report no seasonality and normalize the
        -- index back to the neutral value of 100.
        v_seasonal_pattern := 'none';
        v_peak_season := NULL;
        v_seasonality_index := 100;
    END IF;

    RETURN QUERY SELECT v_seasonal_pattern, v_seasonality_index, v_peak_season;
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- category_hierarchy: Materialized view providing a recursive category tree
|
||||
-- with ancestor paths for efficient rollup queries.
|
||||
--
|
||||
-- Refresh after category changes: REFRESH MATERIALIZED VIEW category_hierarchy;
|
||||
-- =============================================================================
|
||||
-- DROP MATERIALIZED VIEW IF EXISTS category_hierarchy;
|
||||
-- CREATE MATERIALIZED VIEW category_hierarchy AS
|
||||
-- WITH RECURSIVE cat_tree AS (
|
||||
-- SELECT cat_id, name, type, parent_id,
|
||||
-- cat_id AS root_id, 0 AS level, ARRAY[cat_id] AS path
|
||||
-- FROM categories
|
||||
-- WHERE parent_id IS NULL
|
||||
-- UNION ALL
|
||||
-- SELECT c.cat_id, c.name, c.type, c.parent_id,
|
||||
-- ct.root_id, ct.level + 1, ct.path || c.cat_id
|
||||
-- FROM categories c
|
||||
-- JOIN cat_tree ct ON c.parent_id = ct.cat_id
|
||||
-- )
|
||||
-- SELECT cat_id, name, type, parent_id, root_id, level, path,
|
||||
-- (SELECT array_agg(unnest ORDER BY unnest DESC)
|
||||
-- FROM unnest(cat_tree.path) unnest
|
||||
-- WHERE unnest <> cat_tree.cat_id) AS ancestor_ids
|
||||
-- FROM cat_tree;
|
||||
--
|
||||
-- CREATE UNIQUE INDEX ON category_hierarchy (cat_id);
|
||||
@@ -80,7 +80,6 @@ CREATE TABLE public.product_metrics (
|
||||
current_price NUMERIC(10, 2),
|
||||
current_regular_price NUMERIC(10, 2),
|
||||
current_cost_price NUMERIC(10, 4), -- Increased precision for cost
|
||||
current_landing_cost_price NUMERIC(10, 4), -- Increased precision for cost
|
||||
current_stock INT NOT NULL DEFAULT 0,
|
||||
current_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
|
||||
current_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
|
||||
@@ -156,9 +155,9 @@ CREATE TABLE public.product_metrics (
|
||||
days_of_stock_closing_stock NUMERIC(10, 2), -- lead_time_closing_stock - days_of_stock_forecast_units
|
||||
replenishment_needed_raw NUMERIC(10, 2), -- planning_period_forecast_units + config_safety_stock - current_stock - on_order_qty
|
||||
replenishment_units INT, -- CEILING(GREATEST(0, replenishment_needed_raw))
|
||||
replenishment_cost NUMERIC(14, 4), -- replenishment_units * COALESCE(current_landing_cost_price, current_cost_price)
|
||||
replenishment_cost NUMERIC(14, 4), -- replenishment_units * current_cost_price
|
||||
replenishment_retail NUMERIC(14, 4), -- replenishment_units * current_price
|
||||
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - COALESCE(current_landing_cost_price, current_cost_price))
|
||||
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - current_cost_price)
|
||||
to_order_units INT, -- Apply MOQ/UOM logic to replenishment_units
|
||||
forecast_lost_sales_units NUMERIC(10, 2), -- GREATEST(0, -lead_time_closing_stock)
|
||||
forecast_lost_revenue NUMERIC(14, 4), -- forecast_lost_sales_units * current_price
|
||||
@@ -167,7 +166,7 @@ CREATE TABLE public.product_metrics (
|
||||
sells_out_in_days NUMERIC(10, 1), -- (current_stock + on_order_qty) / sales_velocity_daily
|
||||
replenish_date DATE, -- Calc based on when stock hits safety stock minus lead time
|
||||
overstocked_units INT, -- GREATEST(0, current_stock - config_safety_stock - planning_period_forecast_units)
|
||||
overstocked_cost NUMERIC(14, 4), -- overstocked_units * COALESCE(current_landing_cost_price, current_cost_price)
|
||||
overstocked_cost NUMERIC(14, 4), -- overstocked_units * current_cost_price
|
||||
overstocked_retail NUMERIC(14, 4), -- overstocked_units * current_price
|
||||
is_old_stock BOOLEAN, -- Based on age, last sold, last received, on_order status
|
||||
|
||||
|
||||
@@ -29,7 +29,6 @@ CREATE TABLE products (
|
||||
price NUMERIC(14, 4) NOT NULL,
|
||||
regular_price NUMERIC(14, 4) NOT NULL,
|
||||
cost_price NUMERIC(14, 4),
|
||||
landing_cost_price NUMERIC(14, 4),
|
||||
barcode TEXT,
|
||||
harmonized_tariff_code TEXT,
|
||||
updated_at TIMESTAMP WITH TIME ZONE,
|
||||
|
||||
@@ -7,6 +7,7 @@ const { importProducts } = require('./import/products');
|
||||
const importOrders = require('./import/orders');
|
||||
const importPurchaseOrders = require('./import/purchase-orders');
|
||||
const importDailyDeals = require('./import/daily-deals');
|
||||
const importStockSnapshots = require('./import/stock-snapshots');
|
||||
|
||||
dotenv.config({ path: path.join(__dirname, "../.env") });
|
||||
|
||||
@@ -16,6 +17,7 @@ const IMPORT_PRODUCTS = true;
|
||||
const IMPORT_ORDERS = true;
|
||||
const IMPORT_PURCHASE_ORDERS = true;
|
||||
const IMPORT_DAILY_DEALS = true;
|
||||
const IMPORT_STOCK_SNAPSHOTS = true;
|
||||
|
||||
// Add flag for incremental updates
|
||||
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
|
||||
@@ -81,7 +83,8 @@ async function main() {
|
||||
IMPORT_PRODUCTS,
|
||||
IMPORT_ORDERS,
|
||||
IMPORT_PURCHASE_ORDERS,
|
||||
IMPORT_DAILY_DEALS
|
||||
IMPORT_DAILY_DEALS,
|
||||
IMPORT_STOCK_SNAPSHOTS
|
||||
].filter(Boolean).length;
|
||||
|
||||
try {
|
||||
@@ -130,10 +133,11 @@ async function main() {
|
||||
'products_enabled', $3::boolean,
|
||||
'orders_enabled', $4::boolean,
|
||||
'purchase_orders_enabled', $5::boolean,
|
||||
'daily_deals_enabled', $6::boolean
|
||||
'daily_deals_enabled', $6::boolean,
|
||||
'stock_snapshots_enabled', $7::boolean
|
||||
)
|
||||
) RETURNING id
|
||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_DAILY_DEALS]);
|
||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_DAILY_DEALS, IMPORT_STOCK_SNAPSHOTS]);
|
||||
importHistoryId = historyResult.rows[0].id;
|
||||
} catch (error) {
|
||||
console.error("Error creating import history record:", error);
|
||||
@@ -151,7 +155,8 @@ async function main() {
|
||||
products: null,
|
||||
orders: null,
|
||||
purchaseOrders: null,
|
||||
dailyDeals: null
|
||||
dailyDeals: null,
|
||||
stockSnapshots: null
|
||||
};
|
||||
|
||||
let totalRecordsAdded = 0;
|
||||
@@ -257,6 +262,33 @@ async function main() {
|
||||
}
|
||||
}
|
||||
|
||||
if (IMPORT_STOCK_SNAPSHOTS) {
|
||||
try {
|
||||
const stepStart = Date.now();
|
||||
results.stockSnapshots = await importStockSnapshots(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
||||
stepTimings.stockSnapshots = Math.round((Date.now() - stepStart) / 1000);
|
||||
|
||||
if (isImportCancelled) throw new Error("Import cancelled");
|
||||
completedSteps++;
|
||||
console.log('Stock snapshots import result:', results.stockSnapshots);
|
||||
|
||||
if (results.stockSnapshots?.status === 'error') {
|
||||
console.error('Stock snapshots import had an error:', results.stockSnapshots.error);
|
||||
} else {
|
||||
totalRecordsAdded += parseInt(results.stockSnapshots?.recordsAdded || 0);
|
||||
totalRecordsUpdated += parseInt(results.stockSnapshots?.recordsUpdated || 0);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during stock snapshots import:', error);
|
||||
results.stockSnapshots = {
|
||||
status: 'error',
|
||||
error: error.message,
|
||||
recordsAdded: 0,
|
||||
recordsUpdated: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const endTime = Date.now();
|
||||
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
|
||||
|
||||
@@ -280,11 +312,13 @@ async function main() {
|
||||
'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
|
||||
'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
|
||||
'daily_deals_result', COALESCE($13::jsonb, 'null'::jsonb),
|
||||
'total_deleted', $14::integer,
|
||||
'total_skipped', $15::integer,
|
||||
'step_timings', $16::jsonb
|
||||
'stock_snapshots_enabled', $14::boolean,
|
||||
'stock_snapshots_result', COALESCE($15::jsonb, 'null'::jsonb),
|
||||
'total_deleted', $16::integer,
|
||||
'total_skipped', $17::integer,
|
||||
'step_timings', $18::jsonb
|
||||
)
|
||||
WHERE id = $17
|
||||
WHERE id = $19
|
||||
`, [
|
||||
totalElapsedSeconds,
|
||||
parseInt(totalRecordsAdded),
|
||||
@@ -299,6 +333,8 @@ async function main() {
|
||||
JSON.stringify(results.orders),
|
||||
JSON.stringify(results.purchaseOrders),
|
||||
JSON.stringify(results.dailyDeals),
|
||||
IMPORT_STOCK_SNAPSHOTS,
|
||||
JSON.stringify(results.stockSnapshots),
|
||||
totalRecordsDeleted,
|
||||
totalRecordsSkipped,
|
||||
JSON.stringify(stepTimings),
|
||||
|
||||
@@ -597,14 +597,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
ELSE 0
|
||||
END) as promo_discount_sum,
|
||||
COALESCE(ot.tax, 0) as total_tax,
|
||||
COALESCE(oc.costeach, oi.price * 0.5) as costeach
|
||||
COALESCE(oc.costeach, p.cost_price, oi.price * 0.5) as costeach
|
||||
FROM temp_order_items oi
|
||||
LEFT JOIN temp_item_discounts id ON oi.order_id = id.order_id AND oi.pid = id.pid
|
||||
LEFT JOIN temp_main_discounts md ON id.order_id = md.order_id AND id.discount_id = md.discount_id
|
||||
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
|
||||
LEFT JOIN temp_order_costs oc ON oi.order_id = oc.order_id AND oi.pid = oc.pid
|
||||
LEFT JOIN public.products p ON oi.pid = p.pid
|
||||
WHERE oi.order_id = ANY($1)
|
||||
GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach
|
||||
GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach, p.cost_price
|
||||
)
|
||||
SELECT
|
||||
oi.order_id as order_number,
|
||||
@@ -631,10 +632,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
om.customer_name,
|
||||
om.status,
|
||||
om.canceled,
|
||||
COALESCE(ot.costeach, oi.price * 0.5)::NUMERIC(14, 4) as costeach
|
||||
COALESCE(ot.costeach, p.cost_price, oi.price * 0.5)::NUMERIC(14, 4) as costeach
|
||||
FROM temp_order_items oi
|
||||
JOIN temp_order_meta om ON oi.order_id = om.order_id
|
||||
LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
|
||||
LEFT JOIN public.products p ON oi.pid = p.pid
|
||||
WHERE oi.order_id = ANY($1)
|
||||
ORDER BY oi.order_id, oi.pid
|
||||
`, [subBatchIds]);
|
||||
|
||||
184
inventory-server/scripts/import/stock-snapshots.js
Normal file
184
inventory-server/scripts/import/stock-snapshots.js
Normal file
@@ -0,0 +1,184 @@
|
||||
const { outputProgress, formatElapsedTime, calculateRate } = require('../metrics-new/utils/progress');
|
||||
|
||||
const BATCH_SIZE = 5000;
|
||||
|
||||
/**
 * Imports daily stock snapshots from MySQL's snap_product_value table to PostgreSQL.
 * This provides historical end-of-day stock quantities per product, dating back to 2012.
 *
 * MySQL source table: snap_product_value (date, pid, count, pending, value)
 *   - date: snapshot date (typically yesterday's date, recorded daily by cron)
 *   - pid: product ID
 *   - count: end-of-day stock quantity (sum of product_inventory.count)
 *   - pending: pending/on-order quantity
 *   - value: total inventory value at cost (sum of costeach * count)
 *
 * PostgreSQL target table: stock_snapshots (snapshot_date, pid, stock_quantity, pending_quantity, stock_value)
 *
 * Rows are upserted in batches of BATCH_SIZE, paginated by date (at most 10
 * distinct dates per fetch) rather than OFFSET, so re-runs are idempotent.
 *
 * @param {object} prodConnection - MySQL connection to production DB
 * @param {object} localConnection - PostgreSQL connection wrapper
 * @param {boolean} incrementalUpdate - If true, only fetch new snapshots since last import
 * @returns {object} Import statistics ({ recordsAdded, recordsUpdated, status })
 */
async function importStockSnapshots(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();

  outputProgress({
    status: 'running',
    operation: 'Stock snapshots import',
    message: 'Starting stock snapshots import...',
    current: 0,
    total: 0,
    elapsed: formatElapsedTime(startTime)
  });

  // Ensure target table exists (idempotent; primary key doubles as the
  // upsert conflict target below)
  await localConnection.query(`
    CREATE TABLE IF NOT EXISTS stock_snapshots (
      snapshot_date DATE NOT NULL,
      pid BIGINT NOT NULL,
      stock_quantity INT NOT NULL DEFAULT 0,
      pending_quantity INT NOT NULL DEFAULT 0,
      stock_value NUMERIC(14, 4) NOT NULL DEFAULT 0,
      PRIMARY KEY (snapshot_date, pid)
    )
  `);

  // Create index for efficient lookups by pid
  await localConnection.query(`
    CREATE INDEX IF NOT EXISTS idx_stock_snapshots_pid ON stock_snapshots (pid)
  `);

  // Determine the start date for the import
  let startDate = '2020-01-01'; // Default: match the orders/snapshots date range
  if (incrementalUpdate) {
    const [result] = await localConnection.query(`
      SELECT MAX(snapshot_date)::text AS max_date FROM stock_snapshots
    `);
    if (result.rows[0]?.max_date) {
      // Start from the day after the last imported date
      // (the strict "date > ?" filters below exclude max_date itself)
      // NOTE(review): this assumes a day's snapshot rows are final once
      // written — the last imported day is never refreshed. Confirm.
      startDate = result.rows[0].max_date;
    }
  }

  outputProgress({
    status: 'running',
    operation: 'Stock snapshots import',
    message: `Fetching stock snapshots from MySQL since ${startDate}...`,
    current: 0,
    total: 0,
    elapsed: formatElapsedTime(startTime)
  });

  // Count total rows to import (used only for progress reporting and the
  // loop bound; new rows landing mid-import are picked up next run)
  const [countResult] = await prodConnection.query(
    `SELECT COUNT(*) AS total FROM snap_product_value WHERE date > ?`,
    [startDate]
  );
  const totalRows = countResult[0].total;

  if (totalRows === 0) {
    outputProgress({
      status: 'complete',
      operation: 'Stock snapshots import',
      message: 'No new stock snapshots to import',
      current: 0,
      total: 0,
      elapsed: formatElapsedTime(startTime)
    });
    return { recordsAdded: 0, recordsUpdated: 0, status: 'complete' };
  }

  outputProgress({
    status: 'running',
    operation: 'Stock snapshots import',
    message: `Found ${totalRows.toLocaleString()} stock snapshot rows to import`,
    current: 0,
    total: totalRows,
    elapsed: formatElapsedTime(startTime)
  });

  // Process in batches using date-based pagination (more efficient than OFFSET)
  let processedRows = 0;
  let recordsAdded = 0;
  let currentDate = startDate;

  while (processedRows < totalRows) {
    // Fetch a batch of dates (up to 10 distinct snapshot days per iteration)
    const [dateBatch] = await prodConnection.query(
      `SELECT DISTINCT date FROM snap_product_value
       WHERE date > ? ORDER BY date LIMIT 10`,
      [currentDate]
    );

    if (dateBatch.length === 0) break;

    const dates = dateBatch.map(r => r.date);
    const lastDate = dates[dates.length - 1];

    // Fetch all rows for these dates (half-open range: after currentDate,
    // through lastDate inclusive)
    const [rows] = await prodConnection.query(
      `SELECT date, pid, count AS stock_quantity, pending AS pending_quantity, value AS stock_value
       FROM snap_product_value
       WHERE date > ? AND date <= ?
       ORDER BY date, pid`,
      [currentDate, lastDate]
    );

    if (rows.length === 0) break;

    // Batch insert into PostgreSQL using UNNEST for efficiency
    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
      const batch = rows.slice(i, i + BATCH_SIZE);

      // Column-wise arrays for the UNNEST-based multi-row insert
      // (this inner `dates` shadows the outer one; different scope)
      const dates = batch.map(r => r.date);
      const pids = batch.map(r => r.pid);
      const quantities = batch.map(r => r.stock_quantity);
      const pending = batch.map(r => r.pending_quantity);
      const values = batch.map(r => r.stock_value);

      // NOTE(review): `result` is never read — insert/update split is not
      // derived from it
      const [result] = await localConnection.query(`
        INSERT INTO stock_snapshots (snapshot_date, pid, stock_quantity, pending_quantity, stock_value)
        SELECT * FROM UNNEST(
          $1::date[], $2::bigint[], $3::int[], $4::int[], $5::numeric[]
        )
        ON CONFLICT (snapshot_date, pid) DO UPDATE SET
          stock_quantity = EXCLUDED.stock_quantity,
          pending_quantity = EXCLUDED.pending_quantity,
          stock_value = EXCLUDED.stock_value
      `, [dates, pids, quantities, pending, values]);

      // NOTE(review): counts every upserted row as "added", even when the
      // ON CONFLICT branch updated an existing row — recordsUpdated stays 0
      recordsAdded += batch.length;
    }

    processedRows += rows.length;
    currentDate = lastDate;

    outputProgress({
      status: 'running',
      operation: 'Stock snapshots import',
      message: `Imported ${processedRows.toLocaleString()} / ${totalRows.toLocaleString()} rows (through ${currentDate})`,
      current: processedRows,
      total: totalRows,
      elapsed: formatElapsedTime(startTime),
      rate: calculateRate(processedRows, startTime)
    });
  }

  outputProgress({
    status: 'complete',
    operation: 'Stock snapshots import',
    message: `Stock snapshots import complete: ${recordsAdded.toLocaleString()} rows`,
    current: processedRows,
    total: totalRows,
    elapsed: formatElapsedTime(startTime)
  });

  return {
    recordsAdded,
    recordsUpdated: 0,
    status: 'complete'
  };
}
|
||||
|
||||
module.exports = importStockSnapshots;
|
||||
@@ -214,7 +214,7 @@ BEGIN
|
||||
-- Final INSERT/UPDATE statement using all the prepared CTEs
|
||||
INSERT INTO public.product_metrics (
|
||||
pid, last_calculated, sku, title, brand, vendor, image_url, is_visible, is_replenishable,
|
||||
current_price, current_regular_price, current_cost_price, current_landing_cost_price,
|
||||
current_price, current_regular_price, current_cost_price,
|
||||
current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
|
||||
on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
|
||||
date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
|
||||
@@ -242,7 +242,7 @@ BEGIN
|
||||
SELECT
|
||||
-- Select columns in order, joining all CTEs by pid
|
||||
ci.pid, _start_time, ci.sku, ci.title, ci.brand, ci.vendor, ci.image_url, ci.is_visible, ci.replenishable,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price,
|
||||
ci.current_stock, (ci.current_stock * COALESCE(ci.current_effective_cost, 0.00))::numeric(12,2), (ci.current_stock * COALESCE(ci.current_price, 0.00))::numeric(12,2), (ci.current_stock * COALESCE(ci.current_regular_price, 0.00))::numeric(12,2),
|
||||
COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00)::numeric(12,2), (COALESCE(ooi.on_order_qty, 0) * COALESCE(ci.current_price, 0.00))::numeric(12,2), ooi.earliest_expected_date,
|
||||
|
||||
@@ -415,7 +415,7 @@ BEGIN
|
||||
-- *** IMPORTANT: List ALL columns here, ensuring order matches INSERT list ***
|
||||
-- Update ALL columns to ensure entire row is refreshed
|
||||
last_calculated = EXCLUDED.last_calculated, sku = EXCLUDED.sku, title = EXCLUDED.title, brand = EXCLUDED.brand, vendor = EXCLUDED.vendor, image_url = EXCLUDED.image_url, is_visible = EXCLUDED.is_visible, is_replenishable = EXCLUDED.is_replenishable,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price,
|
||||
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
|
||||
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
|
||||
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,
|
||||
|
||||
@@ -13,7 +13,7 @@ DECLARE
|
||||
_begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2020-01-01'); -- Starting point: captures all historical order data
|
||||
_end_date DATE := CURRENT_DATE;
|
||||
BEGIN
|
||||
RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;
|
||||
RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;
|
||||
|
||||
-- First truncate the existing snapshots to ensure a clean slate
|
||||
TRUNCATE TABLE public.daily_product_snapshots;
|
||||
@@ -36,7 +36,13 @@ BEGIN
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN
|
||||
COALESCE(
|
||||
o.costeach,
|
||||
get_weighted_avg_cost(p.pid, o.date::date),
|
||||
p.cost_price
|
||||
) * o.quantity
|
||||
ELSE 0 END), 0.00) AS cogs,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,
|
||||
|
||||
-- Aggregate Returns (Quantity < 0 or Status = Returned)
|
||||
@@ -63,15 +69,17 @@ BEGIN
|
||||
GROUP BY r.pid
|
||||
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
|
||||
),
|
||||
-- Get stock quantities for the day - note this is approximate since we're using current products data
|
||||
-- Use historical stock from stock_snapshots when available,
|
||||
-- falling back to current stock from products table
|
||||
StockData AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
p.stock_quantity,
|
||||
COALESCE(p.cost_price, 0.00) as effective_cost_price,
|
||||
COALESCE(ss.stock_quantity, p.stock_quantity) AS stock_quantity,
|
||||
COALESCE(ss.stock_value, p.stock_quantity * COALESCE(p.cost_price, 0.00)) AS stock_value,
|
||||
COALESCE(p.price, 0.00) as current_price,
|
||||
COALESCE(p.regular_price, 0.00) as current_regular_price
|
||||
FROM public.products p
|
||||
LEFT JOIN stock_snapshots ss ON p.pid = ss.pid AND ss.snapshot_date = _date
|
||||
)
|
||||
INSERT INTO public.daily_product_snapshots (
|
||||
snapshot_date,
|
||||
@@ -99,9 +107,9 @@ BEGIN
|
||||
_date AS snapshot_date,
|
||||
COALESCE(sd.pid, rd.pid) AS pid,
|
||||
sd.sku,
|
||||
-- Use current stock as approximation, since historical stock data may not be available
|
||||
-- Historical stock from stock_snapshots, falls back to current stock
|
||||
s.stock_quantity AS eod_stock_quantity,
|
||||
s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
|
||||
s.stock_value AS eod_stock_cost,
|
||||
s.stock_quantity * s.current_price AS eod_stock_retail,
|
||||
s.stock_quantity * s.current_regular_price AS eod_stock_gross,
|
||||
(s.stock_quantity <= 0) AS stockout_flag,
|
||||
@@ -114,7 +122,7 @@ BEGIN
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue,
|
||||
COALESCE(sd.cogs, 0.00),
|
||||
COALESCE(sd.gross_regular_revenue, 0.00),
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||
-- Receiving metrics
|
||||
COALESCE(rd.units_received, 0),
|
||||
COALESCE(rd.cost_received, 0.00),
|
||||
|
||||
@@ -121,14 +121,16 @@ BEGIN
|
||||
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
|
||||
),
|
||||
CurrentStock AS (
|
||||
-- Select current stock values directly from products table
|
||||
-- Use historical stock from stock_snapshots when available,
|
||||
-- falling back to current stock from products table
|
||||
SELECT
|
||||
pid,
|
||||
stock_quantity,
|
||||
COALESCE(cost_price, 0.00) as effective_cost_price,
|
||||
COALESCE(price, 0.00) as current_price,
|
||||
COALESCE(regular_price, 0.00) as current_regular_price
|
||||
FROM public.products
|
||||
p.pid,
|
||||
COALESCE(ss.stock_quantity, p.stock_quantity) AS stock_quantity,
|
||||
COALESCE(ss.stock_value, p.stock_quantity * COALESCE(p.cost_price, 0.00)) AS stock_value,
|
||||
COALESCE(p.price, 0.00) AS current_price,
|
||||
COALESCE(p.regular_price, 0.00) AS current_regular_price
|
||||
FROM public.products p
|
||||
LEFT JOIN stock_snapshots ss ON p.pid = ss.pid AND ss.snapshot_date = _target_date
|
||||
),
|
||||
ProductsWithActivity AS (
|
||||
-- Quick pre-filter to only process products with activity
|
||||
@@ -168,7 +170,7 @@ BEGIN
|
||||
COALESCE(sd.sku, p.sku) AS sku, -- Get SKU from sales data or products table
|
||||
-- Inventory Metrics (Using CurrentStock)
|
||||
cs.stock_quantity AS eod_stock_quantity,
|
||||
cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
|
||||
cs.stock_value AS eod_stock_cost,
|
||||
cs.stock_quantity * cs.current_price AS eod_stock_retail,
|
||||
cs.stock_quantity * cs.current_regular_price AS eod_stock_gross,
|
||||
(cs.stock_quantity <= 0) AS stockout_flag,
|
||||
@@ -181,7 +183,7 @@ BEGIN
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue,
|
||||
COALESCE(sd.cogs, 0.00),
|
||||
COALESCE(sd.gross_regular_revenue, 0.00),
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit, -- Basic profit: Net Revenue - COGS
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||
-- Receiving Metrics (From ReceivingData)
|
||||
COALESCE(rd.units_received, 0),
|
||||
COALESCE(rd.cost_received, 0.00),
|
||||
|
||||
@@ -204,22 +204,33 @@ BEGIN
|
||||
GROUP BY pid
|
||||
),
|
||||
DemandVariability AS (
|
||||
-- Calculate variance and standard deviation of daily sales
|
||||
-- Calculate variance and standard deviation of daily sales over the full 30-day window
|
||||
-- including zero-sales days (not just activity days) for accurate variability metrics
|
||||
SELECT
|
||||
pid,
|
||||
COUNT(*) AS days_with_data,
|
||||
AVG(units_sold) AS avg_daily_sales,
|
||||
VARIANCE(units_sold) AS sales_variance,
|
||||
STDDEV(units_sold) AS sales_std_dev,
|
||||
-- Coefficient of variation
|
||||
CASE
|
||||
WHEN AVG(units_sold) > 0 THEN STDDEV(units_sold) / AVG(units_sold)
|
||||
ELSE NULL
|
||||
pd.pid,
|
||||
COUNT(dps.pid) AS days_with_data,
|
||||
AVG(COALESCE(dps.units_sold, 0)) AS avg_daily_sales,
|
||||
VARIANCE(COALESCE(dps.units_sold, 0)) AS sales_variance,
|
||||
STDDEV(COALESCE(dps.units_sold, 0)) AS sales_std_dev,
|
||||
CASE
|
||||
WHEN AVG(COALESCE(dps.units_sold, 0)) > 0
|
||||
THEN STDDEV(COALESCE(dps.units_sold, 0)) / AVG(COALESCE(dps.units_sold, 0))
|
||||
ELSE NULL
|
||||
END AS sales_cv
|
||||
FROM public.daily_product_snapshots
|
||||
WHERE snapshot_date >= _current_date - INTERVAL '29 days'
|
||||
AND snapshot_date <= _current_date
|
||||
GROUP BY pid
|
||||
FROM (
|
||||
SELECT DISTINCT pid
|
||||
FROM public.daily_product_snapshots
|
||||
WHERE snapshot_date >= _current_date - INTERVAL '29 days'
|
||||
AND snapshot_date <= _current_date
|
||||
) pd
|
||||
CROSS JOIN generate_series(
|
||||
(_current_date - INTERVAL '29 days')::date,
|
||||
_current_date,
|
||||
'1 day'::interval
|
||||
) AS d(day)
|
||||
LEFT JOIN public.daily_product_snapshots dps
|
||||
ON dps.pid = pd.pid AND dps.snapshot_date = d.day::date
|
||||
GROUP BY pd.pid
|
||||
),
|
||||
ServiceLevels AS (
|
||||
-- Calculate service level and fill rate metrics
|
||||
@@ -257,7 +268,7 @@ BEGIN
|
||||
barcode, harmonized_tariff_code, vendor_reference, notions_reference, line, subline, artist,
|
||||
moq, rating, reviews, weight, length, width, height, country_of_origin, location,
|
||||
baskets, notifies, preorder_count, notions_inv_count,
|
||||
current_price, current_regular_price, current_cost_price, current_landing_cost_price,
|
||||
current_price, current_regular_price, current_cost_price,
|
||||
current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
|
||||
on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
|
||||
date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
|
||||
@@ -295,7 +306,7 @@ BEGIN
|
||||
ci.barcode, ci.harmonized_tariff_code, ci.vendor_reference, ci.notions_reference, ci.line, ci.subline, ci.artist,
|
||||
ci.moq, ci.rating, ci.reviews, ci.weight, ci.length, ci.width, ci.height, ci.country_of_origin, ci.location,
|
||||
ci.baskets, ci.notifies, ci.preorder_count, ci.notions_inv_count,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price,
|
||||
ci.current_stock, ci.current_stock * ci.current_effective_cost, ci.current_stock * ci.current_price, ci.current_stock * ci.current_regular_price,
|
||||
COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00), COALESCE(ooi.on_order_qty, 0) * ci.current_price, ooi.earliest_expected_date,
|
||||
ci.created_at::date, COALESCE(ci.first_received::date, hd.date_first_received_calc), hd.date_last_received_calc, hd.date_first_sold, COALESCE(ci.date_last_sold, hd.max_order_date),
|
||||
@@ -514,7 +525,7 @@ BEGIN
|
||||
barcode = EXCLUDED.barcode, harmonized_tariff_code = EXCLUDED.harmonized_tariff_code, vendor_reference = EXCLUDED.vendor_reference, notions_reference = EXCLUDED.notions_reference, line = EXCLUDED.line, subline = EXCLUDED.subline, artist = EXCLUDED.artist,
|
||||
moq = EXCLUDED.moq, rating = EXCLUDED.rating, reviews = EXCLUDED.reviews, weight = EXCLUDED.weight, length = EXCLUDED.length, width = EXCLUDED.width, height = EXCLUDED.height, country_of_origin = EXCLUDED.country_of_origin, location = EXCLUDED.location,
|
||||
baskets = EXCLUDED.baskets, notifies = EXCLUDED.notifies, preorder_count = EXCLUDED.preorder_count, notions_inv_count = EXCLUDED.notions_inv_count,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price,
|
||||
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
|
||||
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
|
||||
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,
|
||||
@@ -567,11 +578,26 @@ BEGIN
|
||||
product_metrics.replenishment_units IS DISTINCT FROM EXCLUDED.replenishment_units OR
|
||||
product_metrics.stock_cover_in_days IS DISTINCT FROM EXCLUDED.stock_cover_in_days OR
|
||||
product_metrics.yesterday_sales IS DISTINCT FROM EXCLUDED.yesterday_sales OR
|
||||
-- Check a few other important fields that might change
|
||||
product_metrics.date_last_sold IS DISTINCT FROM EXCLUDED.date_last_sold OR
|
||||
product_metrics.earliest_expected_date IS DISTINCT FROM EXCLUDED.earliest_expected_date OR
|
||||
product_metrics.lifetime_sales IS DISTINCT FROM EXCLUDED.lifetime_sales OR
|
||||
product_metrics.lifetime_revenue_quality IS DISTINCT FROM EXCLUDED.lifetime_revenue_quality
|
||||
product_metrics.lifetime_revenue_quality IS DISTINCT FROM EXCLUDED.lifetime_revenue_quality OR
|
||||
-- Derived metrics that can change even when source fields don't
|
||||
product_metrics.profit_30d IS DISTINCT FROM EXCLUDED.profit_30d OR
|
||||
product_metrics.cogs_30d IS DISTINCT FROM EXCLUDED.cogs_30d OR
|
||||
product_metrics.margin_30d IS DISTINCT FROM EXCLUDED.margin_30d OR
|
||||
product_metrics.stockout_days_30d IS DISTINCT FROM EXCLUDED.stockout_days_30d OR
|
||||
product_metrics.sell_through_30d IS DISTINCT FROM EXCLUDED.sell_through_30d OR
|
||||
-- Growth and variability metrics
|
||||
product_metrics.sales_growth_30d_vs_prev IS DISTINCT FROM EXCLUDED.sales_growth_30d_vs_prev OR
|
||||
product_metrics.revenue_growth_30d_vs_prev IS DISTINCT FROM EXCLUDED.revenue_growth_30d_vs_prev OR
|
||||
product_metrics.demand_pattern IS DISTINCT FROM EXCLUDED.demand_pattern OR
|
||||
product_metrics.seasonal_pattern IS DISTINCT FROM EXCLUDED.seasonal_pattern OR
|
||||
product_metrics.seasonality_index IS DISTINCT FROM EXCLUDED.seasonality_index OR
|
||||
product_metrics.service_level_30d IS DISTINCT FROM EXCLUDED.service_level_30d OR
|
||||
product_metrics.fill_rate_30d IS DISTINCT FROM EXCLUDED.fill_rate_30d OR
|
||||
-- Time-based safety net: always update if more than 1 day stale
|
||||
product_metrics.last_calculated < NOW() - INTERVAL '1 day'
|
||||
;
|
||||
|
||||
-- Update the status table with the timestamp from the START of this run
|
||||
|
||||
Reference in New Issue
Block a user