Import/metrics calc fixes
This commit is contained in:
241
inventory-server/db/functions.sql
Normal file
241
inventory-server/db/functions.sql
Normal file
@@ -0,0 +1,241 @@
|
||||
-- Custom PostgreSQL functions used by the metrics pipeline.
-- These must exist in the database before running calculate-metrics-new.js.
--
-- To install/update: psql -d inventory_db -f functions.sql
-- All functions use CREATE OR REPLACE so they are safe to re-run.

-- =============================================================================
-- safe_divide: Division helper that never raises division-by-zero.
-- Returns default_value (NULL unless overridden) when the denominator is
-- NULL or zero; otherwise returns numerator / denominator.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.safe_divide(
    numerator numeric,
    denominator numeric,
    default_value numeric DEFAULT NULL::numeric
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Single CASE expression: fallback on NULL/zero denominator, plain
    -- division otherwise (a NULL numerator still propagates as NULL).
    RETURN CASE
               WHEN denominator IS NULL OR denominator = 0 THEN default_value
               ELSE numerator / denominator
           END;
END;
$function$;

-- =============================================================================
-- std_numeric: Standardized rounding helper for consistent numeric precision.
-- Rounds value to precision_digits decimal places (default 2).
-- =============================================================================
CREATE OR REPLACE FUNCTION public.std_numeric(
    value numeric,
    precision_digits integer DEFAULT 2
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- ROUND(numeric, int) is strict, so a NULL value (or NULL precision)
    -- propagates as NULL without an explicit branch.
    RETURN ROUND(value, precision_digits);
END;
$function$;

-- =============================================================================
-- calculate_sales_velocity: Daily sales velocity adjusted for stockout days.
-- Ensures at least a 14-day denominator for products with sales, so short
-- in-stock windows do not inflate the velocity; products without sales use
-- the full 30-day period.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.calculate_sales_velocity(
    sales_30d integer,
    stockout_days_30d integer
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
DECLARE
    -- Floor on the denominator: 14 days when there were sales, 30 otherwise.
    v_min_days NUMERIC;
    -- Days the product was actually sellable, never below the floor.
    v_effective_days NUMERIC;
BEGIN
    v_min_days := CASE WHEN sales_30d > 0 THEN 14.0 ELSE 30.0 END;
    -- GREATEST skips NULL arguments, so a NULL stockout count falls back
    -- to the floor rather than producing a NULL denominator.
    v_effective_days := GREATEST(30.0 - stockout_days_30d, v_min_days);
    -- NULLIF guards the (theoretical) zero denominator; result is NULL then.
    RETURN sales_30d / NULLIF(v_effective_days, 0);
END;
$function$;

-- =============================================================================
-- get_weighted_avg_cost: Weighted average cost from receivings up to a given
-- date. Uses all non-canceled receivings (no row limit) weighted by quantity;
-- returns NULL when no positive quantity has been received.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.get_weighted_avg_cost(
    p_pid bigint,
    p_date date
)
RETURNS numeric
LANGUAGE plpgsql
STABLE
AS $function$
BEGIN
    -- CASE has no ELSE: a non-positive total quantity yields NULL, as does
    -- an empty result set (aggregates over zero rows return NULL).
    -- NOTE(review): `status != 'canceled'` also drops rows whose status is
    -- NULL (NULL comparison is not true) — confirm status is NOT NULL, or
    -- switch to IS DISTINCT FROM if NULL should count as not-canceled.
    RETURN (
        SELECT CASE
                   WHEN SUM(r.qty_each) > 0
                       THEN SUM(r.cost_each * r.qty_each) / SUM(r.qty_each)
               END
        FROM receivings r
        WHERE r.pid = p_pid
          AND r.received_date <= p_date
          AND r.status != 'canceled'
    );
END;
$function$;

-- =============================================================================
-- classify_demand_pattern: Classifies demand based on average demand and
-- coefficient of variation (CV). Standard inventory classification:
--   zero:     no demand
--   stable:   CV <= 0.2 (predictable, easy to forecast)
--   variable: CV <= 0.5 (some variability, still forecastable)
--   sporadic: low volume + high CV (intermittent demand)
--   lumpy:    high volume + high CV (unpredictable bursts)
-- Returns NULL when either input is NULL.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.classify_demand_pattern(
    avg_demand numeric,
    cv numeric
)
RETURNS character varying
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Ordered CASE mirrors the classification ladder: NULL guard first,
    -- then the zero-demand shortcut, then increasing CV thresholds.
    RETURN CASE
               WHEN avg_demand IS NULL OR cv IS NULL THEN NULL
               WHEN avg_demand = 0                   THEN 'zero'
               WHEN cv <= 0.2                        THEN 'stable'
               WHEN cv <= 0.5                        THEN 'variable'
               WHEN avg_demand < 1.0                 THEN 'sporadic'
               ELSE                                       'lumpy'
           END;
END;
$function$;

-- =============================================================================
-- detect_seasonal_pattern: Detects seasonality by comparing monthly average
-- sales across the last 12 months. Uses coefficient of variation across months
-- and peak-to-average ratio to classify patterns.
--
-- Returns (single-row TABLE result):
--   seasonal_pattern:  'none', 'moderate', or 'strong'
--   seasonality_index: peak month avg / overall avg * 100 (100 = no seasonality)
--   peak_season:       name of peak month (e.g. 'January'), or NULL if none
-- =============================================================================
CREATE OR REPLACE FUNCTION public.detect_seasonal_pattern(p_pid bigint)
RETURNS TABLE(seasonal_pattern character varying, seasonality_index numeric, peak_season character varying)
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
    v_monthly_cv NUMERIC;        -- stddev/mean of the monthly averages
    v_max_month_avg NUMERIC;     -- best month's average daily units_sold
    v_overall_avg NUMERIC;       -- mean of the monthly averages
    v_monthly_stddev NUMERIC;
    v_peak_month_num INT;        -- calendar month number (1-12) of the peak
    v_data_months INT;           -- distinct calendar months with snapshot rows
    v_seasonality_index NUMERIC;
    v_seasonal_pattern VARCHAR;
    v_peak_season VARCHAR;
BEGIN
    -- Gather monthly average sales over the last 12 months.
    -- Months with no snapshot rows simply don't appear in the subquery, so
    -- COUNT(*) counts only months that have data. Note EXTRACT(MONTH ...)
    -- collapses years: with a 365-day window a calendar month can mix rows
    -- from this year and last year.
    SELECT
        COUNT(*),
        AVG(month_avg),
        STDDEV(month_avg),
        MAX(month_avg)
    INTO v_data_months, v_overall_avg, v_monthly_stddev, v_max_month_avg
    FROM (
        SELECT EXTRACT(MONTH FROM snapshot_date) AS mo, AVG(units_sold) AS month_avg
        FROM daily_product_snapshots
        WHERE pid = p_pid AND snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
        GROUP BY EXTRACT(MONTH FROM snapshot_date)
    ) monthly;

    -- Need at least 3 months of data for meaningful seasonality detection;
    -- also bail out when the mean is NULL/zero (CV would be undefined).
    IF v_data_months < 3 OR v_overall_avg IS NULL OR v_overall_avg = 0 THEN
        RETURN QUERY SELECT 'none'::VARCHAR, 100::NUMERIC, NULL::VARCHAR;
        RETURN;
    END IF;

    -- CV of monthly averages (safe: v_overall_avg proven nonzero above).
    v_monthly_cv := v_monthly_stddev / v_overall_avg;

    -- Find peak month number by re-aggregating over the same window.
    SELECT EXTRACT(MONTH FROM snapshot_date)::INT INTO v_peak_month_num
    FROM daily_product_snapshots
    WHERE pid = p_pid AND snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
    GROUP BY EXTRACT(MONTH FROM snapshot_date)
    ORDER BY AVG(units_sold) DESC
    LIMIT 1;

    -- Seasonality index: peak month avg / overall avg * 100.
    v_seasonality_index := ROUND((v_max_month_avg / v_overall_avg * 100)::NUMERIC, 2);

    -- Classification thresholds: 'strong' needs both high month-to-month
    -- variation (CV > 0.5) and a pronounced peak (index > 150); 'moderate'
    -- uses the looser 0.3 / 120 pair. Anything else reports no seasonality
    -- and resets the index to the neutral 100.
    IF v_monthly_cv > 0.5 AND v_seasonality_index > 150 THEN
        v_seasonal_pattern := 'strong';
        -- TO_DATE(month, 'MM') builds a date in that month; TO_CHAR 'Month'
        -- yields a blank-padded name, hence the TRIM.
        v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
    ELSIF v_monthly_cv > 0.3 AND v_seasonality_index > 120 THEN
        v_seasonal_pattern := 'moderate';
        v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
    ELSE
        v_seasonal_pattern := 'none';
        v_peak_season := NULL;
        v_seasonality_index := 100;
    END IF;

    RETURN QUERY SELECT v_seasonal_pattern, v_seasonality_index, v_peak_season;
END;
$function$;

-- =============================================================================
-- category_hierarchy: Materialized view providing a recursive category tree
-- with ancestor paths for efficient rollup queries.
--
-- Refresh after category changes: REFRESH MATERIALIZED VIEW category_hierarchy;
-- =============================================================================
-- DROP MATERIALIZED VIEW IF EXISTS category_hierarchy;
-- CREATE MATERIALIZED VIEW category_hierarchy AS
-- WITH RECURSIVE cat_tree AS (
--     SELECT cat_id, name, type, parent_id,
--            cat_id AS root_id, 0 AS level, ARRAY[cat_id] AS path
--     FROM categories
--     WHERE parent_id IS NULL
--     UNION ALL
--     SELECT c.cat_id, c.name, c.type, c.parent_id,
--            ct.root_id, ct.level + 1, ct.path || c.cat_id
--     FROM categories c
--     JOIN cat_tree ct ON c.parent_id = ct.cat_id
-- )
-- SELECT cat_id, name, type, parent_id, root_id, level, path,
--        (SELECT array_agg(unnest ORDER BY unnest DESC)
--         FROM unnest(cat_tree.path) unnest
--         WHERE unnest <> cat_tree.cat_id) AS ancestor_ids
-- FROM cat_tree;
--
-- CREATE UNIQUE INDEX ON category_hierarchy (cat_id);
@@ -80,7 +80,6 @@ CREATE TABLE public.product_metrics (
|
||||
current_price NUMERIC(10, 2),
|
||||
current_regular_price NUMERIC(10, 2),
|
||||
current_cost_price NUMERIC(10, 4), -- Increased precision for cost
|
||||
current_landing_cost_price NUMERIC(10, 4), -- Increased precision for cost
|
||||
current_stock INT NOT NULL DEFAULT 0,
|
||||
current_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
|
||||
current_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
|
||||
@@ -156,9 +155,9 @@ CREATE TABLE public.product_metrics (
|
||||
days_of_stock_closing_stock NUMERIC(10, 2), -- lead_time_closing_stock - days_of_stock_forecast_units
|
||||
replenishment_needed_raw NUMERIC(10, 2), -- planning_period_forecast_units + config_safety_stock - current_stock - on_order_qty
|
||||
replenishment_units INT, -- CEILING(GREATEST(0, replenishment_needed_raw))
|
||||
replenishment_cost NUMERIC(14, 4), -- replenishment_units * COALESCE(current_landing_cost_price, current_cost_price)
|
||||
replenishment_cost NUMERIC(14, 4), -- replenishment_units * current_cost_price
|
||||
replenishment_retail NUMERIC(14, 4), -- replenishment_units * current_price
|
||||
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - COALESCE(current_landing_cost_price, current_cost_price))
|
||||
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - current_cost_price)
|
||||
to_order_units INT, -- Apply MOQ/UOM logic to replenishment_units
|
||||
forecast_lost_sales_units NUMERIC(10, 2), -- GREATEST(0, -lead_time_closing_stock)
|
||||
forecast_lost_revenue NUMERIC(14, 4), -- forecast_lost_sales_units * current_price
|
||||
@@ -167,7 +166,7 @@ CREATE TABLE public.product_metrics (
|
||||
sells_out_in_days NUMERIC(10, 1), -- (current_stock + on_order_qty) / sales_velocity_daily
|
||||
replenish_date DATE, -- Calc based on when stock hits safety stock minus lead time
|
||||
overstocked_units INT, -- GREATEST(0, current_stock - config_safety_stock - planning_period_forecast_units)
|
||||
overstocked_cost NUMERIC(14, 4), -- overstocked_units * COALESCE(current_landing_cost_price, current_cost_price)
|
||||
overstocked_cost NUMERIC(14, 4), -- overstocked_units * current_cost_price
|
||||
overstocked_retail NUMERIC(14, 4), -- overstocked_units * current_price
|
||||
is_old_stock BOOLEAN, -- Based on age, last sold, last received, on_order status
|
||||
|
||||
|
||||
@@ -29,7 +29,6 @@ CREATE TABLE products (
|
||||
price NUMERIC(14, 4) NOT NULL,
|
||||
regular_price NUMERIC(14, 4) NOT NULL,
|
||||
cost_price NUMERIC(14, 4),
|
||||
landing_cost_price NUMERIC(14, 4),
|
||||
barcode TEXT,
|
||||
harmonized_tariff_code TEXT,
|
||||
updated_at TIMESTAMP WITH TIME ZONE,
|
||||
|
||||
Reference in New Issue
Block a user