Restore accidentally removed files, a few additional import/calculation fixes

This commit is contained in:
2026-02-09 10:19:35 -05:00
parent 6aefc1b40d
commit 38b12c188f
209 changed files with 69925 additions and 412 deletions

View File

@@ -0,0 +1,234 @@
-- Custom PostgreSQL functions used by the metrics pipeline
-- These must exist in the database before running calculate-metrics-new.js
--
-- To install/update: psql -d inventory_db -f functions.sql
-- All functions use CREATE OR REPLACE so they are safe to re-run.
-- =============================================================================
-- safe_divide: Division helper that returns a default value instead of erroring
-- on NULL or zero denominators.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.safe_divide(
    numerator numeric,
    denominator numeric,
    default_value numeric DEFAULT NULL::numeric
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Division helper: a NULL or zero denominator yields the caller-supplied
    -- default (NULL unless overridden) instead of raising division_by_zero.
    -- A NULL numerator with a valid denominator still returns NULL, as plain
    -- SQL division would.
    RETURN CASE
        WHEN denominator IS NULL OR denominator = 0 THEN default_value
        ELSE numerator / denominator
    END;
END;
$function$;
-- =============================================================================
-- std_numeric: Standardized rounding helper for consistent numeric precision.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.std_numeric(
    value numeric,
    precision_digits integer DEFAULT 2
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Standardized rounding for consistent numeric precision across the
    -- metrics pipeline (2 decimal places by default).
    -- ROUND propagates NULL, so a NULL input comes back as NULL without
    -- needing an explicit guard.
    RETURN ROUND(value, precision_digits);
END;
$function$;
-- =============================================================================
-- calculate_sales_velocity: Daily sales velocity adjusted for stockout days.
-- Ensures at least 14-day denominator for products with sales to avoid
-- inflated velocity from short windows.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.calculate_sales_velocity(
    sales_30d integer,
    stockout_days_30d integer
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
DECLARE
    -- Effective in-stock days used as the division denominator.
    effective_days NUMERIC;
BEGIN
    -- Daily sales velocity adjusted for stockout days. The denominator is
    -- floored so short in-stock windows do not inflate velocity:
    --   * products with any sales use at least a 14-day window
    --   * products with no sales always use the full 30-day window
    -- (GREATEST ignores a NULL operand, so a NULL stockout count falls back
    -- to the floor value, matching the inline-CASE formulation.)
    IF sales_30d > 0 THEN
        effective_days := GREATEST(30.0 - stockout_days_30d, 14.0);
    ELSE
        effective_days := GREATEST(30.0 - stockout_days_30d, 30.0);
    END IF;
    -- NULLIF guards the zero-denominator case by returning NULL instead of
    -- raising division_by_zero.
    RETURN sales_30d / NULLIF(GREATEST(effective_days, 0), 0);
END;
$function$;
-- =============================================================================
-- get_weighted_avg_cost: Weighted average cost from receivings up to a given date.
-- Uses all non-canceled receivings (no row limit) weighted by quantity.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.get_weighted_avg_cost(
    p_pid bigint,
    p_date date
)
RETURNS numeric
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
    v_result NUMERIC;
BEGIN
    -- Quantity-weighted average unit cost across every non-canceled
    -- receiving on or before p_date. Returns NULL when there are no
    -- matching receivings or the total received quantity is not positive.
    SELECT
        CASE
            WHEN SUM(qty_each) > 0 THEN SUM(cost_each * qty_each) / SUM(qty_each)
            ELSE NULL
        END
    INTO v_result
    FROM receivings
    WHERE pid = p_pid
        AND received_date <= p_date
        AND status <> 'canceled';
    RETURN v_result;
END;
$function$;
-- =============================================================================
-- classify_demand_pattern: Classifies demand based on average demand and
-- coefficient of variation (CV). Standard inventory classification:
-- zero: no demand
-- stable: CV <= 0.2 (predictable, easy to forecast)
-- variable: CV <= 0.5 (some variability, still forecastable)
-- sporadic: low volume + high CV (intermittent demand)
-- lumpy: high volume + high CV (unpredictable bursts)
-- =============================================================================
CREATE OR REPLACE FUNCTION public.classify_demand_pattern(
    avg_demand numeric,
    cv numeric
)
RETURNS character varying
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Standard inventory demand taxonomy, evaluated in priority order:
    --   NULL inputs          -> NULL (unclassifiable)
    --   no demand            -> 'zero'
    --   CV <= 0.2            -> 'stable'   (predictable, easy to forecast)
    --   CV <= 0.5            -> 'variable' (some variability, forecastable)
    --   high CV, low volume  -> 'sporadic' (intermittent demand)
    --   high CV, high volume -> 'lumpy'    (unpredictable bursts)
    RETURN CASE
        WHEN avg_demand IS NULL OR cv IS NULL THEN NULL
        WHEN avg_demand = 0 THEN 'zero'
        WHEN cv <= 0.2 THEN 'stable'
        WHEN cv <= 0.5 THEN 'variable'
        WHEN avg_demand < 1.0 THEN 'sporadic'
        ELSE 'lumpy'
    END;
END;
$function$;
-- =============================================================================
-- detect_seasonal_pattern: Detects seasonality by comparing monthly average
-- sales across the last 12 months. Uses coefficient of variation across months
-- and peak-to-average ratio to classify patterns.
--
-- Returns:
-- seasonal_pattern: 'none', 'moderate', or 'strong'
-- seasonality_index: peak month avg / overall avg * 100 (100 = no seasonality)
-- peak_season: name of peak month (e.g. 'January'), or NULL if none
-- =============================================================================
CREATE OR REPLACE FUNCTION public.detect_seasonal_pattern(p_pid bigint)
RETURNS TABLE(seasonal_pattern character varying, seasonality_index numeric, peak_season character varying)
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
v_monthly_cv NUMERIC;       -- coefficient of variation across monthly averages
v_max_month_avg NUMERIC;    -- average units sold per day in the busiest month
v_overall_avg NUMERIC;      -- mean of the monthly averages
v_monthly_stddev NUMERIC;   -- stddev of the monthly averages
v_peak_month_num INT;       -- calendar month number (1-12) of the peak month
v_data_months INT;          -- number of distinct months with snapshot data
v_seasonality_index NUMERIC;
v_seasonal_pattern VARCHAR;
v_peak_season VARCHAR;
BEGIN
-- Gather monthly average sales and peak month in a single query
SELECT
COUNT(*),
AVG(month_avg),
STDDEV(month_avg),
MAX(month_avg),
-- First element of months sorted by their average descending = peak month.
(ARRAY_AGG(mo ORDER BY month_avg DESC))[1]::INT
INTO v_data_months, v_overall_avg, v_monthly_stddev, v_max_month_avg, v_peak_month_num
FROM (
SELECT EXTRACT(MONTH FROM snapshot_date) AS mo, AVG(units_sold) AS month_avg
FROM daily_product_snapshots
WHERE pid = p_pid AND snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
GROUP BY EXTRACT(MONTH FROM snapshot_date)
) monthly;
-- Need at least 3 months of data for meaningful seasonality detection
-- (the zero/NULL overall-average guard also prevents division by zero below).
IF v_data_months < 3 OR v_overall_avg IS NULL OR v_overall_avg = 0 THEN
RETURN QUERY SELECT 'none'::VARCHAR, 100::NUMERIC, NULL::VARCHAR;
RETURN;
END IF;
-- CV of monthly averages
v_monthly_cv := v_monthly_stddev / v_overall_avg;
-- Seasonality index: peak month avg / overall avg * 100
v_seasonality_index := ROUND((v_max_month_avg / v_overall_avg * 100)::NUMERIC, 2);
-- 'strong' requires both high month-to-month spread (CV > 0.5) AND a peak
-- at least 50% above average; 'moderate' relaxes both thresholds.
IF v_monthly_cv > 0.5 AND v_seasonality_index > 150 THEN
v_seasonal_pattern := 'strong';
-- TO_DATE(month, 'MM') + TO_CHAR 'Month' renders the month name,
-- blank-padded by default, hence the TRIM.
v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
ELSIF v_monthly_cv > 0.3 AND v_seasonality_index > 120 THEN
v_seasonal_pattern := 'moderate';
v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
ELSE
v_seasonal_pattern := 'none';
v_peak_season := NULL;
-- Non-seasonal products report a neutral index of 100 regardless of the
-- computed peak ratio, so callers can treat 100 as "flat".
v_seasonality_index := 100;
END IF;
RETURN QUERY SELECT v_seasonal_pattern, v_seasonality_index, v_peak_season;
END;
$function$;
-- =============================================================================
-- category_hierarchy: Materialized view providing a recursive category tree
-- with ancestor paths for efficient rollup queries.
--
-- Refresh after category changes: REFRESH MATERIALIZED VIEW category_hierarchy;
-- =============================================================================
-- DROP MATERIALIZED VIEW IF EXISTS category_hierarchy;
-- CREATE MATERIALIZED VIEW category_hierarchy AS
-- WITH RECURSIVE cat_tree AS (
-- SELECT cat_id, name, type, parent_id,
-- cat_id AS root_id, 0 AS level, ARRAY[cat_id] AS path
-- FROM categories
-- WHERE parent_id IS NULL
-- UNION ALL
-- SELECT c.cat_id, c.name, c.type, c.parent_id,
-- ct.root_id, ct.level + 1, ct.path || c.cat_id
-- FROM categories c
-- JOIN cat_tree ct ON c.parent_id = ct.cat_id
-- )
-- SELECT cat_id, name, type, parent_id, root_id, level, path,
-- (SELECT array_agg(unnest ORDER BY unnest DESC)
-- FROM unnest(cat_tree.path) unnest
-- WHERE unnest <> cat_tree.cat_id) AS ancestor_ids
-- FROM cat_tree;
--
-- CREATE UNIQUE INDEX ON category_hierarchy (cat_id);