Consolidate old/new vendor and category routes, enhance new brands route, update frontend accordingly for all three pages, improve hierarchy on categories page, fix margin and daily-snapshot calculations

This commit is contained in:
2025-04-02 14:28:18 -04:00
parent dbd0232285
commit 6051b849d6
16 changed files with 2273 additions and 880 deletions

View File

@@ -5,9 +5,9 @@ const { Pool } = require('pg'); // Assuming you use 'pg'
// --- Configuration --- // --- Configuration ---
// Toggle these constants to enable/disable specific steps for testing // Toggle these constants to enable/disable specific steps for testing
const RUN_DAILY_SNAPSHOTS = true; const RUN_DAILY_SNAPSHOTS = false;
const RUN_PRODUCT_METRICS = true; const RUN_PRODUCT_METRICS = false;
const RUN_PERIODIC_METRICS = true; const RUN_PERIODIC_METRICS = false;
const RUN_BRAND_METRICS = true; const RUN_BRAND_METRICS = true;
const RUN_VENDOR_METRICS = true; const RUN_VENDOR_METRICS = true;
const RUN_CATEGORY_METRICS = true; const RUN_CATEGORY_METRICS = true;
@@ -401,27 +401,43 @@ async function executeSqlStep(config, progress) {
start_time TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, start_time TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE, end_time TIMESTAMP WITH TIME ZONE,
duration_seconds INTEGER, duration_seconds INTEGER,
status TEXT, -- 'running', 'completed', 'failed', 'cancelled' status TEXT, -- Will be altered to enum if needed below
error_message TEXT, error_message TEXT,
additional_info JSONB additional_info JSONB
); );
`); `);
// Ensure the calculation_status enum type exists if needed
await connection.query(`
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'calculation_status') THEN
CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled');
-- If needed, alter the existing table to use the enum
ALTER TABLE calculate_history
ALTER COLUMN status TYPE calculation_status
USING status::calculation_status;
END IF;
END
$$;
`);
// Mark previous runs of this type as cancelled // Mark previous runs of this type as cancelled
await connection.query(` await connection.query(`
UPDATE calculate_history UPDATE calculate_history
SET SET
status = 'cancelled', status = 'cancelled'::calculation_status,
end_time = NOW(), end_time = NOW(),
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER, duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
error_message = 'Previous calculation was not completed properly or was superseded.' error_message = 'Previous calculation was not completed properly or was superseded.'
WHERE status = 'running' AND additional_info->>'type' = $1::text; WHERE status = 'running'::calculation_status AND additional_info->>'type' = $1::text;
`, [config.historyType]); `, [config.historyType]);
// Create history record for this run // Create history record for this run
const historyResult = await connection.query(` const historyResult = await connection.query(`
INSERT INTO calculate_history (status, additional_info) INSERT INTO calculate_history (status, additional_info)
VALUES ('running', jsonb_build_object('type', $1::text, 'sql_file', $2::text)) VALUES ('running'::calculation_status, jsonb_build_object('type', $1::text, 'sql_file', $2::text))
RETURNING id; RETURNING id;
`, [config.historyType, config.sqlFile]); `, [config.historyType, config.sqlFile]);
calculateHistoryId = historyResult.rows[0].id; calculateHistoryId = historyResult.rows[0].id;
@@ -502,7 +518,7 @@ async function executeSqlStep(config, progress) {
SET SET
end_time = NOW(), end_time = NOW(),
duration_seconds = $1::integer, duration_seconds = $1::integer,
status = 'completed' status = 'completed'::calculation_status
WHERE id = $2::integer; WHERE id = $2::integer;
`, [stepDuration, calculateHistoryId]); `, [stepDuration, calculateHistoryId]);
@@ -551,7 +567,7 @@ async function executeSqlStep(config, progress) {
SET SET
end_time = NOW(), end_time = NOW(),
duration_seconds = $1::integer, duration_seconds = $1::integer,
status = $2::text, status = $2::calculation_status,
error_message = $3::text error_message = $3::text
WHERE id = $4::integer; WHERE id = $4::integer;
`, [errorDuration, finalStatus, errorMessage.substring(0, 1000), calculateHistoryId]); // Limit error message size `, [errorDuration, finalStatus, errorMessage.substring(0, 1000), calculateHistoryId]); // Limit error message size

View File

@@ -0,0 +1,200 @@
-- Description: Rebuilds daily product snapshots from scratch using real orders data.
-- Fixes issues with duplicated/inflated metrics.
-- Dependencies: Core import tables (products, orders, purchase_orders).
-- Frequency: One-time run to clear out problematic data.
DO $$
DECLARE
    _module_name TEXT := 'rebuild_daily_snapshots';
    _start_time TIMESTAMPTZ := clock_timestamp();
    _date DATE;
    _count INT;
    _total_records INT := 0;
    _begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2024-01-01'); -- Starting point for data rebuild
    _end_date DATE := CURRENT_DATE;
BEGIN
    RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;

    -- First truncate the existing snapshots to ensure a clean slate
    TRUNCATE TABLE public.daily_product_snapshots;
    RAISE NOTICE 'Cleared existing snapshot data';

    -- Now rebuild the snapshots day by day
    _date := _begin_date;
    WHILE _date <= _end_date LOOP
        RAISE NOTICE 'Processing date %...', _date;

        -- Create snapshots for this date
        WITH SalesData AS (
            SELECT
                p.pid,
                p.sku,
                -- Count orders to ensure we only include products with real activity
                COUNT(o.id) AS order_count,
                -- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.landing_cost_price, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
                COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,
                -- Aggregate Returns (Quantity < 0 or Status = Returned)
                COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN ABS(o.quantity) ELSE 0 END), 0) AS units_returned,
                COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN o.price * ABS(o.quantity) ELSE 0 END), 0.00) AS returns_revenue
            FROM public.products p
            LEFT JOIN public.orders o
                ON p.pid = o.pid
                AND o.date::date = _date
            GROUP BY p.pid, p.sku
            HAVING COUNT(o.id) > 0 -- Only include products with actual orders for this date
        ),
        ReceivingData AS (
            SELECT
                po.pid,
                -- Count only POs whose own date matches the snapshot day.
                -- NOTE(fix): a bare COUNT(po.po_id) is > 0 for every group that
                -- survives the WHERE clause, which made this counter (and the
                -- activity checks that rely on it) vacuous and re-created the
                -- zero-activity snapshot rows this rebuild is meant to remove.
                COUNT(po.po_id) FILTER (WHERE po.date::date = _date) AS po_count,
                -- Calculate received quantity for this day.
                -- NOTE(fix): the first SUM deliberately has no ELSE branch so it is
                -- NULL (not 0) when no PO is dated _date; only then does COALESCE
                -- fall through to the receiving_history JSON, as the comments
                -- intend. With "ELSE 0" the fallback was unreachable.
                COALESCE(
                    -- First try the received field from purchase_orders table (if received on this date)
                    SUM(CASE WHEN po.date::date = _date THEN po.received END),
                    -- Otherwise try receiving_history JSON
                    SUM(
                        CASE
                            WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
                            ELSE 0
                        END
                    ),
                    0
                ) AS units_received,
                COALESCE(
                    -- First try the actual cost_price from purchase_orders (NULL when no PO dated _date; see note above)
                    SUM(CASE WHEN po.date::date = _date THEN po.received * po.cost_price END),
                    -- Otherwise try receiving_history JSON
                    SUM(
                        CASE
                            WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
                            WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
                            ELSE 0
                        END
                        * COALESCE((rh.item->>'cost')::numeric, po.cost_price)
                    ),
                    0.00
                ) AS cost_received
            FROM public.purchase_orders po
            LEFT JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item) ON
                jsonb_typeof(po.receiving_history) = 'array' AND
                jsonb_array_length(po.receiving_history) > 0 AND
                (
                    (rh.item->>'date')::date = _date OR
                    (rh.item->>'received_at')::date = _date OR
                    (rh.item->>'receipt_date')::date = _date
                )
            -- Include POs with the current date or relevant receiving_history
            WHERE
                po.date::date = _date OR
                jsonb_typeof(po.receiving_history) = 'array' AND
                jsonb_array_length(po.receiving_history) > 0
            GROUP BY po.pid
            -- Only keep groups with real same-day activity: a PO dated _date,
            -- or a receiving_history entry for _date with a positive quantity.
            HAVING COUNT(po.po_id) FILTER (WHERE po.date::date = _date) > 0 OR SUM(
                CASE
                    WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
                    WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
                    WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
                    ELSE 0
                END
            ) > 0
        ),
        -- Get stock quantities for the day - note this is approximate since we're using current products data
        StockData AS (
            SELECT
                p.pid,
                p.sku, -- carried so receiving-only rows still get a SKU (matches the daily snapshot script)
                p.stock_quantity,
                COALESCE(p.landing_cost_price, p.cost_price, 0.00) AS effective_cost_price,
                COALESCE(p.price, 0.00) AS current_price,
                COALESCE(p.regular_price, 0.00) AS current_regular_price
            FROM public.products p
        )
        INSERT INTO public.daily_product_snapshots (
            snapshot_date,
            pid,
            sku,
            eod_stock_quantity,
            eod_stock_cost,
            eod_stock_retail,
            eod_stock_gross,
            stockout_flag,
            units_sold,
            units_returned,
            gross_revenue,
            discounts,
            returns_revenue,
            net_revenue,
            cogs,
            gross_regular_revenue,
            profit,
            units_received,
            cost_received,
            calculation_timestamp
        )
        SELECT
            _date AS snapshot_date,
            COALESCE(sd.pid, rd.pid) AS pid,
            COALESCE(sd.sku, s.sku) AS sku, -- fall back to products.sku for receiving-only rows
            -- Use current stock as approximation, since historical stock data may not be available
            s.stock_quantity AS eod_stock_quantity,
            s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
            s.stock_quantity * s.current_price AS eod_stock_retail,
            s.stock_quantity * s.current_regular_price AS eod_stock_gross,
            (s.stock_quantity <= 0) AS stockout_flag,
            -- Sales metrics
            COALESCE(sd.units_sold, 0),
            COALESCE(sd.units_returned, 0),
            COALESCE(sd.gross_revenue_unadjusted, 0.00),
            COALESCE(sd.discounts, 0.00),
            COALESCE(sd.returns_revenue, 0.00),
            COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
            COALESCE(sd.cogs, 0.00),
            COALESCE(sd.gross_regular_revenue, 0.00),
            (COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
            -- Receiving metrics
            COALESCE(rd.units_received, 0),
            COALESCE(rd.cost_received, 0.00),
            _start_time
        FROM SalesData sd
        FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
        LEFT JOIN StockData s ON COALESCE(sd.pid, rd.pid) = s.pid
        -- Keep rows with real sales activity, or any ReceivingData row (its
        -- HAVING clause already guarantees same-day receiving activity — a
        -- po_count test alone would drop JSON-only receipts).
        WHERE COALESCE(sd.order_count, 0) > 0 OR rd.pid IS NOT NULL;

        -- Get record count for this day
        GET DIAGNOSTICS _count = ROW_COUNT;
        _total_records := _total_records + _count;
        RAISE NOTICE 'Added % snapshot records for date %', _count, _date;

        -- Move to next day
        _date := _date + INTERVAL '1 day';
    END LOOP;

    RAISE NOTICE 'Rebuilding daily snapshots complete. Added % total records across % days.', _total_records, (_end_date - _begin_date)::integer + 1;

    -- Update the status table for daily_snapshots
    INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
    VALUES ('daily_snapshots', _start_time)
    ON CONFLICT (module_name) DO UPDATE SET last_calculation_timestamp = _start_time;

    -- Now update product_metrics based on the rebuilt snapshots
    RAISE NOTICE 'Triggering update of product_metrics table...';
    -- Call the update_product_metrics procedure directly
    -- Your system might use a different method to trigger this update
    PERFORM pg_notify('recalculate_metrics', 'product_metrics');

    RAISE NOTICE 'Rebuild complete. Duration: %', clock_timestamp() - _start_time;
END $$;

View File

@@ -6,6 +6,7 @@ DO $$
DECLARE DECLARE
_module_name VARCHAR := 'brand_metrics'; _module_name VARCHAR := 'brand_metrics';
_start_time TIMESTAMPTZ := clock_timestamp(); _start_time TIMESTAMPTZ := clock_timestamp();
_min_revenue NUMERIC := 50.00; -- Minimum revenue threshold for margin calculation
BEGIN BEGIN
RAISE NOTICE 'Running % calculation...', _module_name; RAISE NOTICE 'Running % calculation...', _module_name;
@@ -19,14 +20,26 @@ BEGIN
SUM(pm.current_stock) AS current_stock_units, SUM(pm.current_stock) AS current_stock_units,
SUM(pm.current_stock_cost) AS current_stock_cost, SUM(pm.current_stock_cost) AS current_stock_cost,
SUM(pm.current_stock_retail) AS current_stock_retail, SUM(pm.current_stock_retail) AS current_stock_retail,
SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d, -- Only include products with valid sales data in each time period
SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d, COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d, SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d, SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue
COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
FROM public.product_metrics pm FROM public.product_metrics pm
JOIN public.products p ON pm.pid = p.pid JOIN public.products p ON pm.pid = p.pid
-- WHERE p.visible = true -- Optional: filter only visible products for brand metrics?
GROUP BY brand_group GROUP BY brand_group
), ),
AllBrands AS ( AllBrands AS (
@@ -58,8 +71,14 @@ BEGIN
COALESCE(ba.profit_30d, 0.00), COALESCE(ba.cogs_30d, 0.00), COALESCE(ba.profit_30d, 0.00), COALESCE(ba.cogs_30d, 0.00),
COALESCE(ba.sales_365d, 0), COALESCE(ba.revenue_365d, 0.00), COALESCE(ba.sales_365d, 0), COALESCE(ba.revenue_365d, 0.00),
COALESCE(ba.lifetime_sales, 0), COALESCE(ba.lifetime_revenue, 0.00), COALESCE(ba.lifetime_sales, 0), COALESCE(ba.lifetime_revenue, 0.00),
-- KPIs -- KPIs - Calculate margin only for brands with significant revenue
(ba.profit_30d / NULLIF(ba.revenue_30d, 0)) * 100.0 CASE
WHEN COALESCE(ba.revenue_30d, 0) >= _min_revenue THEN
-- Directly calculate margin from revenue and cogs for consistency
-- This is mathematically equivalent to profit/revenue but more explicit
((COALESCE(ba.revenue_30d, 0) - COALESCE(ba.cogs_30d, 0)) / COALESCE(ba.revenue_30d, 1)) * 100.0
ELSE NULL -- No margin for low/no revenue brands
END
FROM AllBrands b FROM AllBrands b
LEFT JOIN BrandAggregates ba ON b.brand_group = ba.brand_group LEFT JOIN BrandAggregates ba ON b.brand_group = ba.brand_group

View File

@@ -9,9 +9,44 @@ DECLARE
BEGIN BEGIN
RAISE NOTICE 'Running % calculation...', _module_name; RAISE NOTICE 'Running % calculation...', _module_name;
WITH CategoryAggregates AS ( WITH
-- Identify the hierarchy depth for each category
CategoryDepth AS (
WITH RECURSIVE CategoryTree AS (
-- Base case: Start with categories without parents (root categories)
SELECT cat_id, name, parent_id, 0 AS depth
FROM public.categories
WHERE parent_id IS NULL
UNION ALL
-- Recursive step: Add child categories with incremented depth
SELECT c.cat_id, c.name, c.parent_id, ct.depth + 1
FROM public.categories c
JOIN CategoryTree ct ON c.parent_id = ct.cat_id
)
SELECT cat_id, depth
FROM CategoryTree
),
-- For each product, find the most specific (deepest) category it belongs to
ProductDeepestCategory AS (
SELECT SELECT
pc.cat_id, pc.pid,
pc.cat_id
FROM public.product_categories pc
JOIN CategoryDepth cd ON pc.cat_id = cd.cat_id
-- This is the key part: for each product, select only the category with maximum depth
WHERE (pc.pid, cd.depth) IN (
SELECT pc2.pid, MAX(cd2.depth)
FROM public.product_categories pc2
JOIN CategoryDepth cd2 ON pc2.cat_id = cd2.cat_id
GROUP BY pc2.pid
)
),
-- Calculate metrics only at the most specific category level for each product
CategoryAggregates AS (
SELECT
pdc.cat_id,
-- Counts -- Counts
COUNT(DISTINCT pm.pid) AS product_count, COUNT(DISTINCT pm.pid) AS product_count,
COUNT(DISTINCT CASE WHEN pm.is_visible THEN pm.pid END) AS active_product_count, COUNT(DISTINCT CASE WHEN pm.is_visible THEN pm.pid END) AS active_product_count,
@@ -20,19 +55,74 @@ BEGIN
SUM(pm.current_stock) AS current_stock_units, SUM(pm.current_stock) AS current_stock_units,
SUM(pm.current_stock_cost) AS current_stock_cost, SUM(pm.current_stock_cost) AS current_stock_cost,
SUM(pm.current_stock_retail) AS current_stock_retail, SUM(pm.current_stock_retail) AS current_stock_retail,
-- Rolling Periods (Sum directly from product_metrics) -- Rolling Periods - Only include products with actual sales in each period
SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d, SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d, SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d, SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d, SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue, SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
-- Data for KPIs SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
SUM(pm.avg_stock_units_30d) AS total_avg_stock_units_30d -- Sum of averages (use cautiously) SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue,
-- Data for KPIs - Only average stock for products with stock
SUM(CASE WHEN pm.avg_stock_units_30d > 0 THEN pm.avg_stock_units_30d ELSE 0 END) AS total_avg_stock_units_30d
FROM public.product_metrics pm FROM public.product_metrics pm
JOIN public.product_categories pc ON pm.pid = pc.pid JOIN ProductDeepestCategory pdc ON pm.pid = pdc.pid
-- Optional: JOIN products p ON pm.pid = p.pid if needed for filtering (e.g., only visible products) GROUP BY pdc.cat_id
-- WHERE p.visible = true -- Example filter ),
GROUP BY pc.cat_id -- Use a flat approach to build the complete category tree with aggregate values
CategoryTree AS (
WITH RECURSIVE CategoryHierarchy AS (
SELECT
c.cat_id,
c.name,
c.parent_id,
c.cat_id as leaf_id, -- Track original leaf category
ARRAY[c.cat_id] as path
FROM public.categories c
UNION ALL
SELECT
p.cat_id,
p.name,
p.parent_id,
ch.leaf_id, -- Keep track of the original leaf
p.cat_id || ch.path
FROM public.categories p
JOIN CategoryHierarchy ch ON p.cat_id = ch.parent_id
)
SELECT
ch.cat_id,
ch.leaf_id
FROM CategoryHierarchy ch
),
-- Now aggregate by maintaining the link between leaf categories and ancestors
RollupMetrics AS (
SELECT
ct.cat_id,
SUM(ca.product_count) AS product_count,
SUM(ca.active_product_count) AS active_product_count,
SUM(ca.replenishable_product_count) AS replenishable_product_count,
SUM(ca.current_stock_units) AS current_stock_units,
SUM(ca.current_stock_cost) AS current_stock_cost,
SUM(ca.current_stock_retail) AS current_stock_retail,
SUM(ca.sales_7d) AS sales_7d,
SUM(ca.revenue_7d) AS revenue_7d,
SUM(ca.sales_30d) AS sales_30d,
SUM(ca.revenue_30d) AS revenue_30d,
SUM(ca.cogs_30d) AS cogs_30d,
SUM(ca.profit_30d) AS profit_30d,
SUM(ca.sales_365d) AS sales_365d,
SUM(ca.revenue_365d) AS revenue_365d,
SUM(ca.lifetime_sales) AS lifetime_sales,
SUM(ca.lifetime_revenue) AS lifetime_revenue,
SUM(ca.total_avg_stock_units_30d) AS total_avg_stock_units_30d
FROM CategoryTree ct
JOIN CategoryAggregates ca ON ct.leaf_id = ca.cat_id
GROUP BY ct.cat_id
) )
INSERT INTO public.category_metrics ( INSERT INTO public.category_metrics (
category_id, category_name, category_type, parent_id, last_calculated, category_id, category_name, category_type, parent_id, last_calculated,
@@ -49,22 +139,22 @@ BEGIN
c.parent_id, c.parent_id,
_start_time, _start_time,
-- Base Aggregates -- Base Aggregates
COALESCE(ca.product_count, 0), COALESCE(rm.product_count, 0),
COALESCE(ca.active_product_count, 0), COALESCE(rm.active_product_count, 0),
COALESCE(ca.replenishable_product_count, 0), COALESCE(rm.replenishable_product_count, 0),
COALESCE(ca.current_stock_units, 0), COALESCE(rm.current_stock_units, 0),
COALESCE(ca.current_stock_cost, 0.00), COALESCE(rm.current_stock_cost, 0.00),
COALESCE(ca.current_stock_retail, 0.00), COALESCE(rm.current_stock_retail, 0.00),
COALESCE(ca.sales_7d, 0), COALESCE(ca.revenue_7d, 0.00), COALESCE(rm.sales_7d, 0), COALESCE(rm.revenue_7d, 0.00),
COALESCE(ca.sales_30d, 0), COALESCE(ca.revenue_30d, 0.00), COALESCE(rm.sales_30d, 0), COALESCE(rm.revenue_30d, 0.00),
COALESCE(ca.profit_30d, 0.00), COALESCE(ca.cogs_30d, 0.00), COALESCE(rm.profit_30d, 0.00), COALESCE(rm.cogs_30d, 0.00),
COALESCE(ca.sales_365d, 0), COALESCE(ca.revenue_365d, 0.00), COALESCE(rm.sales_365d, 0), COALESCE(rm.revenue_365d, 0.00),
COALESCE(ca.lifetime_sales, 0), COALESCE(ca.lifetime_revenue, 0.00), COALESCE(rm.lifetime_sales, 0), COALESCE(rm.lifetime_revenue, 0.00),
-- KPIs -- KPIs
(ca.profit_30d / NULLIF(ca.revenue_30d, 0)) * 100.0, (rm.profit_30d / NULLIF(rm.revenue_30d, 0)) * 100.0,
ca.sales_30d / NULLIF(ca.total_avg_stock_units_30d, 0) -- Simple unit-based turnover rm.sales_30d / NULLIF(rm.total_avg_stock_units_30d, 0) -- Simple unit-based turnover
FROM public.categories c -- Start from categories to include those with no products yet FROM public.categories c -- Start from categories to include those with no products yet
LEFT JOIN CategoryAggregates ca ON c.cat_id = ca.cat_id LEFT JOIN RollupMetrics rm ON c.cat_id = rm.cat_id
ON CONFLICT (category_id) DO UPDATE SET ON CONFLICT (category_id) DO UPDATE SET
category_name = EXCLUDED.category_name, category_name = EXCLUDED.category_name,

View File

@@ -21,11 +21,24 @@ BEGIN
SUM(pm.current_stock_retail) AS current_stock_retail, SUM(pm.current_stock_retail) AS current_stock_retail,
SUM(pm.on_order_qty) AS on_order_units, SUM(pm.on_order_qty) AS on_order_units,
SUM(pm.on_order_cost) AS on_order_cost, SUM(pm.on_order_cost) AS on_order_cost,
SUM(pm.sales_7d) AS sales_7d, SUM(pm.revenue_7d) AS revenue_7d, -- Only include products with valid sales data in each time period
SUM(pm.sales_30d) AS sales_30d, SUM(pm.revenue_30d) AS revenue_30d, COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
SUM(pm.profit_30d) AS profit_30d, SUM(pm.cogs_30d) AS cogs_30d, SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
SUM(pm.sales_365d) AS sales_365d, SUM(pm.revenue_365d) AS revenue_365d, SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
SUM(pm.lifetime_sales) AS lifetime_sales, SUM(pm.lifetime_revenue) AS lifetime_revenue
COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
FROM public.product_metrics pm FROM public.product_metrics pm
JOIN public.products p ON pm.pid = p.pid JOIN public.products p ON pm.pid = p.pid
WHERE p.vendor IS NOT NULL AND p.vendor <> '' WHERE p.vendor IS NOT NULL AND p.vendor <> ''

View File

@@ -9,6 +9,8 @@ DECLARE
_start_time TIMESTAMPTZ := clock_timestamp(); -- Time execution started _start_time TIMESTAMPTZ := clock_timestamp(); -- Time execution started
_last_calc_time TIMESTAMPTZ; _last_calc_time TIMESTAMPTZ;
_target_date DATE := CURRENT_DATE; -- Always recalculate today for simplicity with hourly runs _target_date DATE := CURRENT_DATE; -- Always recalculate today for simplicity with hourly runs
_total_records INT := 0;
_has_orders BOOLEAN := FALSE;
BEGIN BEGIN
-- Get the timestamp before the last successful run of this module -- Get the timestamp before the last successful run of this module
SELECT last_calculation_timestamp INTO _last_calc_time SELECT last_calculation_timestamp INTO _last_calc_time
@@ -17,11 +19,48 @@ BEGIN
RAISE NOTICE 'Running % for date %. Start Time: %', _module_name, _target_date, _start_time; RAISE NOTICE 'Running % for date %. Start Time: %', _module_name, _target_date, _start_time;
-- Use CTEs to aggregate data for the target date -- CRITICAL FIX: Check if we have any orders or receiving activity for today
-- to prevent creating artificial records when no real activity exists
SELECT EXISTS (
SELECT 1 FROM public.orders WHERE date::date = _target_date
UNION
SELECT 1 FROM public.purchase_orders
WHERE date::date = _target_date
OR EXISTS (
SELECT 1 FROM jsonb_array_elements(receiving_history) AS rh
WHERE jsonb_typeof(receiving_history) = 'array'
AND (
(rh->>'date')::date = _target_date OR
(rh->>'received_at')::date = _target_date OR
(rh->>'receipt_date')::date = _target_date
)
)
LIMIT 1
) INTO _has_orders;
-- If no orders or receiving activity found for today, log and exit
IF NOT _has_orders THEN
RAISE NOTICE 'No orders or receiving activity found for % - skipping daily snapshot creation', _target_date;
-- Still update the calculate_status to prevent repeated attempts
UPDATE public.calculate_status
SET last_calculation_timestamp = _start_time
WHERE module_name = _module_name;
RETURN; -- Exit without creating snapshots
END IF;
-- IMPORTANT: First delete any existing data for this date to prevent duplication
DELETE FROM public.daily_product_snapshots
WHERE snapshot_date = _target_date;
-- Proceed with calculating daily metrics only for products with actual activity
WITH SalesData AS ( WITH SalesData AS (
SELECT SELECT
p.pid, p.pid,
p.sku, p.sku,
-- Track number of orders to ensure we have real data
COUNT(o.id) as order_count,
-- Aggregate Sales (Quantity > 0, Status not Canceled/Returned) -- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold, COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted, -- Before discount COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted, -- Before discount
@@ -37,10 +76,13 @@ BEGIN
ON p.pid = o.pid ON p.pid = o.pid
AND o.date::date = _target_date -- Cast to date to ensure compatibility regardless of original type AND o.date::date = _target_date -- Cast to date to ensure compatibility regardless of original type
GROUP BY p.pid, p.sku GROUP BY p.pid, p.sku
HAVING COUNT(o.id) > 0 -- CRITICAL: Only include products with actual orders
), ),
ReceivingData AS ( ReceivingData AS (
SELECT SELECT
po.pid, po.pid,
-- Track number of POs to ensure we have real data
COUNT(po.po_id) as po_count,
-- Prioritize the actual table fields over the JSON data -- Prioritize the actual table fields over the JSON data
COALESCE( COALESCE(
-- First try the received field from purchase_orders table -- First try the received field from purchase_orders table
@@ -89,6 +131,15 @@ BEGIN
jsonb_typeof(po.receiving_history) = 'array' AND jsonb_typeof(po.receiving_history) = 'array' AND
jsonb_array_length(po.receiving_history) > 0 jsonb_array_length(po.receiving_history) > 0
GROUP BY po.pid GROUP BY po.pid
-- CRITICAL: Only include products with actual receiving activity
HAVING COUNT(po.po_id) > 0 OR SUM(
CASE
WHEN (rh.item->>'date')::date = _target_date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'received_at')::date = _target_date THEN (rh.item->>'qty')::numeric
WHEN (rh.item->>'receipt_date')::date = _target_date THEN (rh.item->>'qty')::numeric
ELSE 0
END
) > 0
), ),
CurrentStock AS ( CurrentStock AS (
-- Select current stock values directly from products table -- Select current stock values directly from products table
@@ -100,7 +151,7 @@ BEGIN
COALESCE(regular_price, 0.00) as current_regular_price COALESCE(regular_price, 0.00) as current_regular_price
FROM public.products FROM public.products
) )
-- Upsert into the daily snapshots table -- Now insert records, but ONLY for products with actual activity
INSERT INTO public.daily_product_snapshots ( INSERT INTO public.daily_product_snapshots (
snapshot_date, snapshot_date,
pid, pid,
@@ -125,8 +176,8 @@ BEGIN
) )
SELECT SELECT
_target_date AS snapshot_date, _target_date AS snapshot_date,
p.pid, COALESCE(sd.pid, rd.pid) AS pid, -- Use sales or receiving PID
p.sku, COALESCE(sd.sku, p.sku) AS sku, -- Get SKU from sales data or products table
-- Inventory Metrics (Using CurrentStock) -- Inventory Metrics (Using CurrentStock)
cs.stock_quantity AS eod_stock_quantity, cs.stock_quantity AS eod_stock_quantity,
cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost, cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
@@ -147,31 +198,15 @@ BEGIN
COALESCE(rd.units_received, 0), COALESCE(rd.units_received, 0),
COALESCE(rd.cost_received, 0.00), COALESCE(rd.cost_received, 0.00),
_start_time -- Timestamp of this calculation run _start_time -- Timestamp of this calculation run
FROM public.products p FROM SalesData sd
LEFT JOIN CurrentStock cs ON p.pid = cs.pid FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
LEFT JOIN SalesData sd ON p.pid = sd.pid LEFT JOIN public.products p ON COALESCE(sd.pid, rd.pid) = p.pid
LEFT JOIN ReceivingData rd ON p.pid = rd.pid LEFT JOIN CurrentStock cs ON COALESCE(sd.pid, rd.pid) = cs.pid
WHERE p.pid IS NOT NULL -- Ensure we only insert for existing products WHERE p.pid IS NOT NULL; -- Ensure we only insert for existing products
ON CONFLICT (snapshot_date, pid) DO UPDATE SET -- Get the total number of records inserted
sku = EXCLUDED.sku, GET DIAGNOSTICS _total_records = ROW_COUNT;
eod_stock_quantity = EXCLUDED.eod_stock_quantity, RAISE NOTICE 'Created % daily snapshot records for % with sales/receiving activity', _total_records, _target_date;
eod_stock_cost = EXCLUDED.eod_stock_cost,
eod_stock_retail = EXCLUDED.eod_stock_retail,
eod_stock_gross = EXCLUDED.eod_stock_gross,
stockout_flag = EXCLUDED.stockout_flag,
units_sold = EXCLUDED.units_sold,
units_returned = EXCLUDED.units_returned,
gross_revenue = EXCLUDED.gross_revenue,
discounts = EXCLUDED.discounts,
returns_revenue = EXCLUDED.returns_revenue,
net_revenue = EXCLUDED.net_revenue,
cogs = EXCLUDED.cogs,
gross_regular_revenue = EXCLUDED.gross_regular_revenue,
profit = EXCLUDED.profit,
units_received = EXCLUDED.units_received,
cost_received = EXCLUDED.cost_received,
calculation_timestamp = EXCLUDED.calculation_timestamp; -- Use the timestamp from this run
-- Update the status table with the timestamp from the START of this run -- Update the status table with the timestamp from the START of this run
UPDATE public.calculate_status UPDATE public.calculate_status

View File

@@ -110,31 +110,37 @@ BEGIN
SUM(units_sold) AS total_units_sold, SUM(units_sold) AS total_units_sold,
SUM(net_revenue) AS total_net_revenue, SUM(net_revenue) AS total_net_revenue,
-- Specific time windows if we have enough data -- Specific time windows using date range boundaries precisely
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN units_sold ELSE 0 END) AS sales_7d, -- Use _current_date - INTERVAL '6 days' to include 7 days (today + 6 previous days)
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS revenue_7d, -- This ensures we count exactly the right number of days in each period
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN units_sold ELSE 0 END) AS sales_14d, SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_7d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN net_revenue ELSE 0 END) AS revenue_14d, SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_7d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_sold ELSE 0 END) AS sales_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cogs ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN profit ELSE 0 END) AS profit_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_returned ELSE 0 END) AS returns_units_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN discounts ELSE 0 END) AS discounts_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN units_sold ELSE 0 END) AS sales_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN net_revenue ELSE 0 END) AS revenue_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_received ELSE 0 END) AS received_qty_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cost_received ELSE 0 END) AS received_cost_30d,
-- Averages (check for NULLIF 0 days in period if filtering dates) SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_14d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_quantity END) AS avg_stock_units_30d, SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_14d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_cost END) AS avg_stock_cost_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_retail END) AS avg_stock_retail_30d, SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_gross END) AS avg_stock_gross_30d, SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN cogs ELSE 0 END) AS cogs_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN profit ELSE 0 END) AS profit_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_returned ELSE 0 END) AS returns_units_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN discounts ELSE 0 END) AS discounts_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' AND snapshot_date <= _current_date THEN units_sold ELSE 0 END) AS sales_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' AND snapshot_date <= _current_date THEN net_revenue ELSE 0 END) AS revenue_365d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN units_received ELSE 0 END) AS received_qty_30d,
SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN cost_received ELSE 0 END) AS received_cost_30d,
-- Averages for stock levels - only include dates within the specified period
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_quantity END) AS avg_stock_units_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_cost END) AS avg_stock_cost_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_retail END) AS avg_stock_retail_30d,
AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND snapshot_date <= _current_date THEN eod_stock_gross END) AS avg_stock_gross_30d,
-- Lifetime - should match total values above -- Lifetime - should match total values above
SUM(units_sold) AS lifetime_sales, SUM(units_sold) AS lifetime_sales,
@@ -150,14 +156,14 @@ BEGIN
SELECT SELECT
pid, pid,
date_first_sold, date_first_sold,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales, SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales,
SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue SUM(CASE WHEN snapshot_date >= date_first_sold AND snapshot_date <= date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue
FROM public.daily_product_snapshots ds FROM public.daily_product_snapshots ds
JOIN HistoricalDates hd USING(pid) JOIN HistoricalDates hd USING(pid)
WHERE date_first_sold IS NOT NULL WHERE date_first_sold IS NOT NULL
@@ -246,45 +252,314 @@ BEGIN
(sa.sales_30d / NULLIF(ci.current_stock + sa.sales_30d, 0)) * 100 AS sell_through_30d, (sa.sales_30d / NULLIF(ci.current_stock + sa.sales_30d, 0)) * 100 AS sell_through_30d,
-- Forecasting intermediate values -- Forecasting intermediate values
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) AS sales_velocity_daily, -- CRITICAL FIX: Use safer velocity calculation to prevent extreme values
-- Original problematic calculation: (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0))
-- Use available days (not stockout days) as denominator with a minimum safety value
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d, -- Standard calculation
CASE
WHEN sa.sales_30d > 0 THEN 14.0 -- If we have sales, ensure at least 14 days denominator
ELSE 30.0 -- If no sales, use full period
END
),
0
)
) AS sales_velocity_daily,
s.effective_lead_time AS config_lead_time, s.effective_lead_time AS config_lead_time,
s.effective_days_of_stock AS config_days_of_stock, s.effective_days_of_stock AS config_days_of_stock,
s.effective_safety_stock AS config_safety_stock, s.effective_safety_stock AS config_safety_stock,
(s.effective_lead_time + s.effective_days_of_stock) AS planning_period_days, (s.effective_lead_time + s.effective_days_of_stock) AS planning_period_days,
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time AS lead_time_forecast_units,
(sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock AS days_of_stock_forecast_units, -- Apply the same fix to all derived calculations
((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS planning_period_forecast_units, (sa.sales_30d /
(ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time)) AS lead_time_closing_stock, NULLIF(
((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS days_of_stock_closing_stock, GREATEST(
(((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw, 30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time AS lead_time_forecast_units,
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock AS days_of_stock_forecast_units,
(sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * (s.effective_lead_time + s.effective_days_of_stock) AS planning_period_forecast_units,
(ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time)) AS lead_time_closing_stock,
((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time))) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock) AS days_of_stock_closing_stock,
(((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw,
-- Final Forecasting / Replenishment Metrics (apply CEILING/GREATEST/etc.) -- Final Forecasting / Replenishment Metrics (apply CEILING/GREATEST/etc.)
-- Note: These calculations are nested for clarity, can be simplified in prod -- Note: These calculations are nested for clarity, can be simplified in prod
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units, CEILING(GREATEST(0, ((((sa.sales_30d /
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost, NULLIF(
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail, GREATEST(
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit, 30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units,
(CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost,
(CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail,
(CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit,
-- Placeholder for To Order (Apply MOQ/UOM logic here if needed, otherwise equals replenishment) -- Placeholder for To Order (Apply MOQ/UOM logic here if needed, otherwise equals replenishment)
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units, CEILING(GREATEST(0, ((((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units,
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) AS forecast_lost_sales_units, GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue, NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time))) AS forecast_lost_sales_units,
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue,
ci.current_stock / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS stock_cover_in_days, ci.current_stock / NULLIF((sa.sales_30d /
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS po_cover_in_days, NULLIF(
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS sells_out_in_days, GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
), 0) AS stock_cover_in_days,
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
), 0) AS po_cover_in_days,
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
), 0) AS sells_out_in_days,
-- Replenish Date: Date when stock is projected to hit safety stock, minus lead time -- Replenish Date: Date when stock is projected to hit safety stock, minus lead time
CASE CASE
WHEN (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) > 0 WHEN (sa.sales_30d /
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)))::int - s.effective_lead_time NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) > 0
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
))::int - s.effective_lead_time
ELSE NULL ELSE NULL
END AS replenish_date, END AS replenish_date,
GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))::int AS overstocked_units, GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost, NULLIF(
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail, GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)))::int AS overstocked_units,
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost,
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_lead_time) + ((sa.sales_30d /
NULLIF(
GREATEST(
30.0 - sa.stockout_days_30d,
CASE WHEN sa.sales_30d > 0 THEN 14.0 ELSE 30.0 END
),
0
)
) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail,
-- Old Stock Flag -- Old Stock Flag
(ci.created_at::date < _current_date - INTERVAL '60 day') AND (ci.created_at::date < _current_date - INTERVAL '60 day') AND

View File

@@ -28,6 +28,8 @@ const COLUMN_MAP = {
avgMargin30d: { dbCol: 'bm.avg_margin_30d', type: 'number' }, avgMargin30d: { dbCol: 'bm.avg_margin_30d', type: 'number' },
// Add aliases if needed // Add aliases if needed
name: { dbCol: 'bm.brand_name', type: 'string' }, name: { dbCol: 'bm.brand_name', type: 'string' },
// Add status for filtering
status: { dbCol: 'brand_status', type: 'string' },
}; };
function getSafeColumnInfo(queryParamKey) { function getSafeColumnInfo(queryParamKey) {
@@ -41,11 +43,26 @@ router.get('/filter-options', async (req, res) => {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
console.log('GET /brands-aggregate/filter-options'); console.log('GET /brands-aggregate/filter-options');
try { try {
// Get brand names
const { rows: brandRows } = await pool.query(` const { rows: brandRows } = await pool.query(`
SELECT DISTINCT brand_name FROM public.brand_metrics ORDER BY brand_name SELECT DISTINCT brand_name FROM public.brand_metrics ORDER BY brand_name
`); `);
// Get status values - calculate them since they're derived
const { rows: statusRows } = await pool.query(`
SELECT DISTINCT
CASE
WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
WHEN active_product_count > 0 THEN 'inactive'
ELSE 'pending'
END as status
FROM public.brand_metrics
ORDER BY status
`);
res.json({ res.json({
brands: brandRows.map(r => r.brand_name), brands: brandRows.map(r => r.brand_name),
statuses: statusRows.map(r => r.status)
}); });
} catch(error) { } catch(error) {
console.error('Error fetching brand filter options:', error); console.error('Error fetching brand filter options:', error);
@@ -61,16 +78,17 @@ router.get('/stats', async (req, res) => {
const { rows: [stats] } = await pool.query(` const { rows: [stats] } = await pool.query(`
SELECT SELECT
COUNT(*) AS total_brands, COUNT(*) AS total_brands,
COUNT(CASE WHEN active_product_count > 0 THEN 1 END) AS active_brands,
SUM(active_product_count) AS total_active_products, SUM(active_product_count) AS total_active_products,
SUM(current_stock_cost) AS total_stock_value, SUM(current_stock_cost) AS total_stock_value,
-- Weighted Average Margin -- Weighted Average Margin
SUM(profit_30d) * 100.0 / NULLIF(SUM(revenue_30d), 0) AS overall_avg_margin_weighted SUM(profit_30d) * 100.0 / NULLIF(SUM(revenue_30d), 0) AS overall_avg_margin_weighted
-- Add other stats
FROM public.brand_metrics bm FROM public.brand_metrics bm
`); `);
res.json({ res.json({
totalBrands: parseInt(stats?.total_brands || 0), totalBrands: parseInt(stats?.total_brands || 0),
activeBrands: parseInt(stats?.active_brands || 0),
totalActiveProducts: parseInt(stats?.total_active_products || 0), totalActiveProducts: parseInt(stats?.total_active_products || 0),
totalValue: parseFloat(stats?.total_stock_value || 0), totalValue: parseFloat(stats?.total_stock_value || 0),
avgMargin: parseFloat(stats?.overall_avg_margin_weighted || 0), avgMargin: parseFloat(stats?.overall_avg_margin_weighted || 0),
@@ -177,10 +195,40 @@ router.get('/', async (req, res) => {
// --- Execute Queries --- // --- Execute Queries ---
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : ''; const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
const baseSql = `FROM public.brand_metrics bm ${whereClause}`;
// Status calculation similar to vendors
const statusCase = `
CASE
WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
WHEN active_product_count > 0 THEN 'inactive'
ELSE 'pending'
END as brand_status
`;
const baseSql = `
FROM (
SELECT
bm.*,
${statusCase}
FROM public.brand_metrics bm
) bm
${whereClause}
`;
const countSql = `SELECT COUNT(*) AS total ${baseSql}`; const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
const dataSql = `SELECT bm.* ${baseSql} ${sortClause} LIMIT $${paramCounter} OFFSET $${paramCounter + 1}`; const dataSql = `
WITH brand_data AS (
SELECT
bm.*,
${statusCase}
FROM public.brand_metrics bm
)
SELECT bm.*
FROM brand_data bm
${whereClause}
${sortClause}
LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
`;
const dataParams = [...params, limit, offset]; const dataParams = [...params, limit, offset];
console.log("Count SQL:", countSql, params); console.log("Count SQL:", countSql, params);

View File

@@ -1,100 +0,0 @@
const express = require('express');
const router = express.Router();

/**
 * GET /
 * Returns all categories with hierarchy info (parent name/type) and
 * per-category metrics, plus per-type counts for filtering and overall
 * summary stats.
 *
 * Response shape: { categories: [...], typeCounts: [...], stats: {...} }
 * On failure: 500 with { error: 'Failed to fetch categories' }.
 */
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // The three queries are independent of one another, so run them in
    // parallel instead of awaiting each sequentially.
    const [categoriesResult, statsResult, typeCountsResult] = await Promise.all([
      // All categories with metrics and hierarchy info.
      pool.query(`
        SELECT
          c.cat_id,
          c.name,
          c.type,
          c.parent_id,
          c.description,
          c.status,
          p.name as parent_name,
          p.type as parent_type,
          COALESCE(cm.product_count, 0) as product_count,
          COALESCE(cm.active_products, 0) as active_products,
          ROUND(COALESCE(cm.total_value, 0)::numeric, 3) as total_value,
          COALESCE(cm.avg_margin, 0) as avg_margin,
          COALESCE(cm.turnover_rate, 0) as turnover_rate,
          COALESCE(cm.growth_rate, 0) as growth_rate
        FROM categories c
        LEFT JOIN categories p ON c.parent_id = p.cat_id
        LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
        ORDER BY
          CASE
            WHEN c.type = 10 THEN 1 -- sections first
            WHEN c.type = 11 THEN 2 -- categories second
            WHEN c.type = 12 THEN 3 -- subcategories third
            WHEN c.type = 13 THEN 4 -- subsubcategories fourth
            WHEN c.type = 20 THEN 5 -- themes fifth
            WHEN c.type = 21 THEN 6 -- subthemes last
            ELSE 7
          END,
          c.name ASC
      `),
      // Overall stats across all categories.
      pool.query(`
        SELECT
          COUNT(DISTINCT c.cat_id) as totalCategories,
          COUNT(DISTINCT CASE WHEN c.status = 'active' THEN c.cat_id END) as activeCategories,
          ROUND(COALESCE(SUM(cm.total_value), 0)::numeric, 3) as totalValue,
          COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0))::numeric, 1), 0) as avgMargin,
          COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0))::numeric, 1), 0) as avgGrowth
        FROM categories c
        LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
      `),
      // Per-type counts for the filter UI.
      pool.query(`
        SELECT
          type,
          COUNT(*)::integer as count
        FROM categories
        GROUP BY type
        ORDER BY type
      `),
    ]);

    const categories = categoriesResult.rows;
    // Postgres lower-cases unquoted aliases, so the stats row keys are
    // totalcategories / activecategories / etc.
    const [stats] = statsResult.rows;
    const typeCounts = typeCountsResult.rows;

    res.json({
      categories: categories.map(cat => ({
        cat_id: cat.cat_id,
        name: cat.name,
        type: cat.type,
        parent_id: cat.parent_id,
        parent_name: cat.parent_name,
        parent_type: cat.parent_type,
        description: cat.description,
        status: cat.status,
        metrics: {
          // Explicit radix: parseInt without one is an idiom hazard.
          product_count: parseInt(cat.product_count, 10),
          active_products: parseInt(cat.active_products, 10),
          total_value: parseFloat(cat.total_value),
          avg_margin: parseFloat(cat.avg_margin),
          turnover_rate: parseFloat(cat.turnover_rate),
          growth_rate: parseFloat(cat.growth_rate)
        }
      })),
      typeCounts: typeCounts.map(tc => ({
        type: tc.type,
        count: tc.count // Already cast to integer in the query
      })),
      stats: {
        totalCategories: parseInt(stats.totalcategories, 10),
        activeCategories: parseInt(stats.activecategories, 10),
        totalValue: parseFloat(stats.totalvalue),
        avgMargin: parseFloat(stats.avgmargin),
        avgGrowth: parseFloat(stats.avggrowth)
      }
    });
  } catch (error) {
    console.error('Error fetching categories:', error);
    res.status(500).json({ error: 'Failed to fetch categories' });
  }
});

module.exports = router;

View File

@@ -4,7 +4,7 @@ const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
// --- Configuration & Helpers --- // --- Configuration & Helpers ---
const DEFAULT_PAGE_LIMIT = 50; const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200; const MAX_PAGE_LIMIT = 5000; // Increase this to allow retrieving all categories in one request
// Maps query keys to DB columns in category_metrics and categories tables // Maps query keys to DB columns in category_metrics and categories tables
const COLUMN_MAP = { const COLUMN_MAP = {
@@ -31,8 +31,8 @@ const COLUMN_MAP = {
lifetimeRevenue: { dbCol: 'cm.lifetime_revenue', type: 'number' }, lifetimeRevenue: { dbCol: 'cm.lifetime_revenue', type: 'number' },
avgMargin30d: { dbCol: 'cm.avg_margin_30d', type: 'number' }, avgMargin30d: { dbCol: 'cm.avg_margin_30d', type: 'number' },
stockTurn30d: { dbCol: 'cm.stock_turn_30d', type: 'number' }, stockTurn30d: { dbCol: 'cm.stock_turn_30d', type: 'number' },
// Add 'status' if filtering by category status needed (requires JOIN) // Add status from the categories table for filtering
// status: { dbCol: 'c.status', type: 'string' }, status: { dbCol: 'c.status', type: 'string' },
}; };
function getSafeColumnInfo(queryParamKey) { function getSafeColumnInfo(queryParamKey) {
@@ -43,7 +43,7 @@ function getSafeColumnInfo(queryParamKey) {
const TYPE_LABELS = { const TYPE_LABELS = {
10: 'Section', 11: 'Category', 12: 'Subcategory', 13: 'Sub-subcategory', 10: 'Section', 11: 'Category', 12: 'Subcategory', 13: 'Sub-subcategory',
1: 'Company', 2: 'Line', 3: 'Subline', 40: 'Artist', // From old schema comments 1: 'Company', 2: 'Line', 3: 'Subline', 40: 'Artist', // From old schema comments
// Add other types if needed 20: 'Theme', 21: 'Subtheme' // Additional types from categories.js
}; };
// --- Route Handlers --- // --- Route Handlers ---
@@ -66,12 +66,28 @@ router.get('/filter-options', async (req, res) => {
label: TYPE_LABELS[r.category_type] || `Type ${r.category_type}` // Add labels label: TYPE_LABELS[r.category_type] || `Type ${r.category_type}` // Add labels
})); }));
// Add other filter options like status if needed // Add status options for filtering (from categories.js)
// const { rows: statusRows } = await pool.query(`SELECT DISTINCT status FROM public.categories ORDER BY status`); const { rows: statusRows } = await pool.query(`
SELECT DISTINCT status FROM public.categories ORDER BY status
`);
// Get type counts (from categories.js)
const { rows: typeCounts } = await pool.query(`
SELECT
type,
COUNT(*)::integer as count
FROM categories
GROUP BY type
ORDER BY type
`);
res.json({ res.json({
types: typeOptions, types: typeOptions,
// statuses: statusRows.map(r => r.status) statuses: statusRows.map(r => r.status),
typeCounts: typeCounts.map(tc => ({
type: tc.type,
count: tc.count
}))
}); });
} catch (error) { } catch (error) {
console.error('Error fetching category filter options:', error); console.error('Error fetching category filter options:', error);
@@ -96,7 +112,7 @@ router.get('/stats', async (req, res) => {
SUM(cm.profit_30d) * 100.0 / NULLIF(SUM(cm.revenue_30d), 0) AS overall_avg_margin_weighted, SUM(cm.profit_30d) * 100.0 / NULLIF(SUM(cm.revenue_30d), 0) AS overall_avg_margin_weighted,
-- Simple Average Margin (less accurate if categories vary greatly in size) -- Simple Average Margin (less accurate if categories vary greatly in size)
AVG(NULLIF(cm.avg_margin_30d, 0)) AS overall_avg_margin_simple AVG(NULLIF(cm.avg_margin_30d, 0)) AS overall_avg_margin_simple
-- Add SUM(revenue_30d) / SUM(revenue_30d_previous) for growth if needed -- Growth rate can be calculated from 30d vs previous 30d revenue if needed
FROM public.category_metrics cm FROM public.category_metrics cm
JOIN public.categories c ON cm.category_id = c.cat_id -- Join to check category status JOIN public.categories c ON cm.category_id = c.cat_id -- Join to check category status
`); `);
@@ -107,8 +123,8 @@ router.get('/stats', async (req, res) => {
totalActiveProducts: parseInt(stats?.total_active_products || 0), totalActiveProducts: parseInt(stats?.total_active_products || 0),
totalValue: parseFloat(stats?.total_stock_value || 0), totalValue: parseFloat(stats?.total_stock_value || 0),
// Choose which avg margin calculation to expose // Choose which avg margin calculation to expose
avgMargin: parseFloat(stats?.overall_avg_margin_weighted || stats?.overall_avg_margin_simple || 0), avgMargin: parseFloat(stats?.overall_avg_margin_weighted || stats?.overall_avg_margin_simple || 0)
// avgGrowth: ... // Calculate if needed // Growth rate could be added if we implement the calculation
}); });
} catch (error) { } catch (error) {
console.error('Error fetching category stats:', error); console.error('Error fetching category stats:', error);
@@ -130,10 +146,27 @@ router.get('/', async (req, res) => {
// --- Sorting --- // --- Sorting ---
const sortQueryKey = req.query.sort || 'categoryName'; const sortQueryKey = req.query.sort || 'categoryName';
const sortColumnInfo = getSafeColumnInfo(sortQueryKey); const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
// Default sort order: Type then Name
const defaultSortOrder = 'ORDER BY cm.category_type ASC, cm.category_name ASC'; // Hierarchical sorting logic from categories.js
let sortClause = defaultSortOrder; const hierarchicalSortOrder = `
if (sortColumnInfo) { ORDER BY
CASE
WHEN cm.category_type = 10 THEN 1 -- sections first
WHEN cm.category_type = 11 THEN 2 -- categories second
WHEN cm.category_type = 12 THEN 3 -- subcategories third
WHEN cm.category_type = 13 THEN 4 -- subsubcategories fourth
WHEN cm.category_type = 20 THEN 5 -- themes fifth
WHEN cm.category_type = 21 THEN 6 -- subthemes last
ELSE 7
END,
cm.category_name ASC
`;
// Use hierarchical sort as default
let sortClause = hierarchicalSortOrder;
// Override with custom sort if specified
if (sortColumnInfo && sortQueryKey !== 'categoryName') {
const sortColumn = sortColumnInfo.dbCol; const sortColumn = sortColumnInfo.dbCol;
const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC'; const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST'); const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
@@ -224,18 +257,26 @@ router.get('/', async (req, res) => {
// Need JOIN for parent_name if sorting/filtering by it, or always include for display // Need JOIN for parent_name if sorting/filtering by it, or always include for display
const sortColumn = sortColumnInfo?.dbCol; const sortColumn = sortColumnInfo?.dbCol;
// Always include the parent join for consistency // Always include the category and parent joins for status and parent_name
const parentJoinSql = 'LEFT JOIN public.categories p ON cm.parent_id = p.cat_id'; const joinSql = `
JOIN public.categories c ON cm.category_id = c.cat_id
LEFT JOIN public.categories p ON cm.parent_id = p.cat_id
`;
const baseSql = ` const baseSql = `
FROM public.category_metrics cm FROM public.category_metrics cm
${parentJoinSql} ${joinSql}
${whereClause} ${whereClause}
`; `;
const countSql = `SELECT COUNT(*) AS total ${baseSql}`; const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
const dataSql = ` const dataSql = `
SELECT cm.*, p.name as parent_name SELECT
cm.*,
c.status,
c.description,
p.name as parent_name,
p.type as parent_type
${baseSql} ${baseSql}
${sortClause} ${sortClause}
LIMIT $${paramCounter} OFFSET $${paramCounter + 1} LIMIT $${paramCounter} OFFSET $${paramCounter + 1}

View File

@@ -1,108 +0,0 @@
const express = require('express');
const router = express.Router();
// Get all vendors with derived status, per-vendor cost metrics, and overall stats.
// Response shape: { vendors: [...], stats: {...} } — unchanged for existing callers.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;

  // pg returns NUMERIC/ROUND results as strings and aggregates over empty sets as
  // NULL; coerce defensively so the JSON payload never contains NaN.
  const toNumber = (value) => {
    const parsed = parseFloat(value);
    return Number.isFinite(parsed) ? parsed : 0;
  };
  const toInteger = (value) => {
    const parsed = parseInt(value, 10);
    return Number.isFinite(parsed) ? parsed : 0;
  };

  try {
    // These three queries are independent of each other — run them in parallel.
    const [vendorsResult, statsResult, overallCostResult] = await Promise.all([
      // All vendors with metrics; status is derived from order activity + fill rate.
      pool.query(`
        SELECT DISTINCT
          p.vendor as name,
          COALESCE(vm.active_products, 0) as active_products,
          COALESCE(vm.total_orders, 0) as total_orders,
          COALESCE(vm.avg_lead_time_days, 0) as avg_lead_time_days,
          COALESCE(vm.on_time_delivery_rate, 0) as on_time_delivery_rate,
          COALESCE(vm.order_fill_rate, 0) as order_fill_rate,
          CASE
            WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75 THEN 'active'
            WHEN COALESCE(vm.total_orders, 0) > 0 THEN 'inactive'
            ELSE 'pending'
          END as status
        FROM products p
        LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor
        WHERE p.vendor IS NOT NULL AND p.vendor != ''
      `),
      // Overall vendor stats (counts and simple averages).
      pool.query(`
        SELECT
          COUNT(DISTINCT p.vendor) as totalVendors,
          COUNT(DISTINCT CASE
            WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75
            THEN p.vendor
          END) as activeVendors,
          COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0))::numeric, 1), 0) as avgLeadTime,
          COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0))::numeric, 1), 0) as avgFillRate,
          COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0))::numeric, 1), 0) as avgOnTimeDelivery
        FROM products p
        LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor
        WHERE p.vendor IS NOT NULL AND p.vendor != ''
      `),
      // Overall cost metrics across all completed purchase orders (status = 2).
      pool.query(`
        SELECT
          ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
          ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
        FROM purchase_orders
        WHERE status = 2
          AND cost_price IS NOT NULL
          AND ordered > 0
          AND vendor IS NOT NULL AND vendor != ''
      `),
    ]);

    const vendors = vendorsResult.rows;
    const stats = statsResult.rows[0] ?? {};
    const overallCostMetrics = overallCostResult.rows[0] ?? {};

    // Per-vendor cost metrics depend on the vendor list; skip the round trip
    // entirely when there are no vendors.
    const vendorNames = vendors.map((v) => v.name);
    let costMetricsMap = {};
    if (vendorNames.length > 0) {
      const { rows: costMetrics } = await pool.query(`
        SELECT
          vendor,
          ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
          ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
        FROM purchase_orders
        WHERE status = 2
          AND cost_price IS NOT NULL
          AND ordered > 0
          AND vendor = ANY($1)
        GROUP BY vendor
      `, [vendorNames]);
      costMetricsMap = costMetrics.reduce((acc, row) => {
        acc[row.vendor] = {
          avg_unit_cost: row.avg_unit_cost,
          total_spend: row.total_spend,
        };
        return acc;
      }, {});
    }

    res.json({
      vendors: vendors.map((vendor) => ({
        // Vendor name doubles as the identifier (no separate vendor id exists here).
        vendor_id: vendor.name,
        name: vendor.name,
        status: vendor.status,
        avg_lead_time_days: toNumber(vendor.avg_lead_time_days),
        on_time_delivery_rate: toNumber(vendor.on_time_delivery_rate),
        order_fill_rate: toNumber(vendor.order_fill_rate),
        total_orders: toInteger(vendor.total_orders),
        active_products: toInteger(vendor.active_products),
        avg_unit_cost: toNumber(costMetricsMap[vendor.name]?.avg_unit_cost),
        total_spend: toNumber(costMetricsMap[vendor.name]?.total_spend),
      })),
      stats: {
        // PostgreSQL lower-cases unquoted column aliases, hence the lowercase keys.
        totalVendors: toInteger(stats.totalvendors),
        activeVendors: toInteger(stats.activevendors),
        avgLeadTime: toNumber(stats.avgleadtime),
        avgFillRate: toNumber(stats.avgfillrate),
        avgOnTimeDelivery: toNumber(stats.avgontimedelivery),
        // Fix: previously parseFloat(NULL) could emit NaN in the JSON when no
        // completed purchase orders existed; now it falls back to 0.
        avgUnitCost: toNumber(overallCostMetrics.avg_unit_cost),
        totalSpend: toNumber(overallCostMetrics.total_spend),
      },
    });
  } catch (error) {
    console.error('Error fetching vendors:', error);
    res.status(500).json({ error: 'Failed to fetch vendors' });
  }
});

// Export this router so the main application can mount it.
module.exports = router;

View File

@@ -33,6 +33,8 @@ const COLUMN_MAP = {
// Add aliases if needed for frontend compatibility // Add aliases if needed for frontend compatibility
name: { dbCol: 'vm.vendor_name', type: 'string' }, name: { dbCol: 'vm.vendor_name', type: 'string' },
leadTime: { dbCol: 'vm.avg_lead_time_days', type: 'number' }, leadTime: { dbCol: 'vm.avg_lead_time_days', type: 'number' },
// Add status for filtering
status: { dbCol: 'vendor_status', type: 'string' },
}; };
function getSafeColumnInfo(queryParamKey) { function getSafeColumnInfo(queryParamKey) {
@@ -46,11 +48,26 @@ router.get('/filter-options', async (req, res) => {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
console.log('GET /vendors-aggregate/filter-options'); console.log('GET /vendors-aggregate/filter-options');
try { try {
// Get vendor names
const { rows: vendorRows } = await pool.query(` const { rows: vendorRows } = await pool.query(`
SELECT DISTINCT vendor_name FROM public.vendor_metrics ORDER BY vendor_name SELECT DISTINCT vendor_name FROM public.vendor_metrics ORDER BY vendor_name
`); `);
// Get status values - calculate them since they're derived
const { rows: statusRows } = await pool.query(`
SELECT DISTINCT
CASE
WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
WHEN po_count_365d > 0 THEN 'inactive'
ELSE 'pending'
END as status
FROM public.vendor_metrics
ORDER BY status
`);
res.json({ res.json({
vendors: vendorRows.map(r => r.vendor_name), vendors: vendorRows.map(r => r.vendor_name),
statuses: statusRows.map(r => r.status)
}); });
} catch(error) { } catch(error) {
console.error('Error fetching vendor filter options:', error); console.error('Error fetching vendor filter options:', error);
@@ -63,23 +80,47 @@ router.get('/stats', async (req, res) => {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
console.log('GET /vendors-aggregate/stats'); console.log('GET /vendors-aggregate/stats');
try { try {
// Get basic vendor stats from aggregate table
const { rows: [stats] } = await pool.query(` const { rows: [stats] } = await pool.query(`
SELECT SELECT
COUNT(*) AS total_vendors, COUNT(*) AS total_vendors,
SUM(active_product_count) AS total_active_products, SUM(active_product_count) AS total_active_products,
SUM(current_stock_cost) AS total_stock_value, SUM(current_stock_cost) AS total_stock_value,
SUM(on_order_cost) AS total_on_order_value, SUM(on_order_cost) AS total_on_order_value,
AVG(NULLIF(avg_lead_time_days, 0)) AS overall_avg_lead_time -- Simple average AVG(NULLIF(avg_lead_time_days, 0)) AS overall_avg_lead_time
-- Add more overall stats: weighted margin, total POs etc.
FROM public.vendor_metrics vm FROM public.vendor_metrics vm
`); `);
// Count active vendors based on criteria (from old vendors.js)
const { rows: [activeStats] } = await pool.query(`
SELECT
COUNT(DISTINCT CASE
WHEN po_count_365d > 0
THEN vendor_name
END) as active_vendors
FROM public.vendor_metrics
`);
// Get overall cost metrics from purchase orders
const { rows: [overallCostMetrics] } = await pool.query(`
SELECT
ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
FROM purchase_orders
WHERE cost_price IS NOT NULL
AND ordered > 0
AND vendor IS NOT NULL AND vendor != ''
`);
res.json({ res.json({
totalVendors: parseInt(stats?.total_vendors || 0), totalVendors: parseInt(stats?.total_vendors || 0),
activeVendors: parseInt(activeStats?.active_vendors || 0),
totalActiveProducts: parseInt(stats?.total_active_products || 0), totalActiveProducts: parseInt(stats?.total_active_products || 0),
totalValue: parseFloat(stats?.total_stock_value || 0), totalValue: parseFloat(stats?.total_stock_value || 0),
totalOnOrderValue: parseFloat(stats?.total_on_order_value || 0), totalOnOrderValue: parseFloat(stats?.total_on_order_value || 0),
avgLeadTime: parseFloat(stats?.overall_avg_lead_time || 0) avgLeadTime: parseFloat(stats?.overall_avg_lead_time || 0),
avgUnitCost: parseFloat(overallCostMetrics?.avg_unit_cost || 0),
totalSpend: parseFloat(overallCostMetrics?.total_spend || 0)
}); });
} catch (error) { } catch (error) {
console.error('Error fetching vendor stats:', error); console.error('Error fetching vendor stats:', error);
@@ -181,10 +222,52 @@ router.get('/', async (req, res) => {
// --- Execute Queries --- // --- Execute Queries ---
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : ''; const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
const baseSql = `FROM public.vendor_metrics vm ${whereClause}`;
// Status calculation from vendors.js
const statusCase = `
CASE
WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
WHEN po_count_365d > 0 THEN 'inactive'
ELSE 'pending'
END as vendor_status
`;
const baseSql = `
FROM (
SELECT
vm.*,
${statusCase}
FROM public.vendor_metrics vm
) vm
${whereClause}
`;
const countSql = `SELECT COUNT(*) AS total ${baseSql}`; const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
const dataSql = `SELECT vm.* ${baseSql} ${sortClause} LIMIT $${paramCounter} OFFSET $${paramCounter + 1}`; const dataSql = `
WITH vendor_data AS (
SELECT
vm.*,
${statusCase}
FROM public.vendor_metrics vm
)
SELECT
vm.*,
COALESCE(po.avg_unit_cost, 0) as avg_unit_cost,
COALESCE(po.total_spend, 0) as total_spend
FROM vendor_data vm
LEFT JOIN (
SELECT
vendor,
ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
FROM purchase_orders
WHERE cost_price IS NOT NULL AND ordered > 0
GROUP BY vendor
) po ON vm.vendor_name = po.vendor
${whereClause}
${sortClause}
LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
`;
const dataParams = [...params, limit, offset]; const dataParams = [...params, limit, offset];
console.log("Count SQL:", countSql, params); console.log("Count SQL:", countSql, params);

View File

@@ -13,8 +13,6 @@ const analyticsRouter = require('./routes/analytics');
const purchaseOrdersRouter = require('./routes/purchase-orders'); const purchaseOrdersRouter = require('./routes/purchase-orders');
const configRouter = require('./routes/config'); const configRouter = require('./routes/config');
const metricsRouter = require('./routes/metrics'); const metricsRouter = require('./routes/metrics');
const vendorsRouter = require('./routes/vendors');
const categoriesRouter = require('./routes/categories');
const importRouter = require('./routes/import'); const importRouter = require('./routes/import');
const aiValidationRouter = require('./routes/ai-validation'); const aiValidationRouter = require('./routes/ai-validation');
const templatesRouter = require('./routes/templates'); const templatesRouter = require('./routes/templates');
@@ -103,16 +101,18 @@ async function startServer() {
app.use('/api/purchase-orders', purchaseOrdersRouter); app.use('/api/purchase-orders', purchaseOrdersRouter);
app.use('/api/config', configRouter); app.use('/api/config', configRouter);
app.use('/api/metrics', metricsRouter); app.use('/api/metrics', metricsRouter);
app.use('/api/vendors', vendorsRouter); // Use only the aggregate routes for vendors and categories
app.use('/api/categories', categoriesRouter); app.use('/api/vendors', vendorsAggregateRouter);
app.use('/api/categories', categoriesAggregateRouter);
// Keep the aggregate-specific endpoints for backward compatibility
app.use('/api/categories-aggregate', categoriesAggregateRouter);
app.use('/api/vendors-aggregate', vendorsAggregateRouter);
app.use('/api/brands-aggregate', brandsAggregateRouter);
app.use('/api/import', importRouter); app.use('/api/import', importRouter);
app.use('/api/ai-validation', aiValidationRouter); app.use('/api/ai-validation', aiValidationRouter);
app.use('/api/templates', templatesRouter); app.use('/api/templates', templatesRouter);
app.use('/api/ai-prompts', aiPromptsRouter); app.use('/api/ai-prompts', aiPromptsRouter);
app.use('/api/reusable-images', reusableImagesRouter); app.use('/api/reusable-images', reusableImagesRouter);
app.use('/api/categories-aggregate', categoriesAggregateRouter);
app.use('/api/vendors-aggregate', vendorsAggregateRouter);
app.use('/api/brands-aggregate', brandsAggregateRouter);
// Basic health check route // Basic health check route
app.get('/health', (req, res) => { app.get('/health', (req, res) => {

View File

@@ -3,21 +3,23 @@ import { useQuery } from "@tanstack/react-query";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table"; import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table";
import { Pagination, PaginationContent, PaginationItem, PaginationLink, PaginationNext, PaginationPrevious } from "@/components/ui/pagination"; import { Pagination, PaginationContent, PaginationItem, PaginationLink, PaginationNext, PaginationPrevious } from "@/components/ui/pagination";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { motion } from "framer-motion"; import { motion } from "framer-motion";
import { Input } from "@/components/ui/input"; import { Input } from "@/components/ui/input";
import config from "../config"; import config from "../config";
import { Skeleton } from "@/components/ui/skeleton"; import { Skeleton } from "@/components/ui/skeleton";
import { Switch } from "@/components/ui/switch"; import { Switch } from "@/components/ui/switch";
import { Label } from "@/components/ui/label"; import { Label } from "@/components/ui/label";
import { Badge } from "@/components/ui/badge";
// Matches backend COLUMN_MAP keys for sorting // Matches backend COLUMN_MAP keys for sorting
type BrandSortableColumns = type BrandSortableColumns =
| 'brandName' | 'productCount' | 'activeProductCount' | 'currentStockUnits' | 'brandName' | 'productCount' | 'activeProductCount' | 'currentStockUnits'
| 'currentStockCost' | 'currentStockRetail' | 'revenue_7d' | 'revenue_30d' | 'currentStockCost' | 'currentStockRetail' | 'revenue_7d' | 'revenue_30d'
| 'profit_30d' | 'sales_30d' | 'avg_margin_30d'; // Add more as needed | 'profit_30d' | 'sales_30d' | 'avg_margin_30d' | 'stock_turn_30d' | 'status'; // Add more as needed
interface BrandMetric { interface BrandMetric {
// Assuming brand_name is unique primary identifier in brand_metrics brand_id: string | number;
brand_name: string; brand_name: string;
last_calculated: string; last_calculated: string;
product_count: number; product_count: number;
@@ -37,7 +39,12 @@ interface BrandMetric {
lifetime_sales: number; lifetime_sales: number;
lifetime_revenue: string | number; lifetime_revenue: string | number;
avg_margin_30d: string | number | null; avg_margin_30d: string | number | null;
stock_turn_30d: string | number | null;
status: string;
brand_status: string;
description: string;
// Camel case versions // Camel case versions
brandId: string | number;
brandName: string; brandName: string;
lastCalculated: string; lastCalculated: string;
productCount: number; productCount: number;
@@ -49,6 +56,7 @@ interface BrandMetric {
lifetimeSales: number; lifetimeSales: number;
lifetimeRevenue: string | number; lifetimeRevenue: string | number;
avgMargin_30d: string | number | null; avgMargin_30d: string | number | null;
stockTurn_30d: string | number | null;
} }
// Define response type to avoid type errors // Define response type to avoid type errors
@@ -62,11 +70,13 @@ interface BrandResponse {
}; };
} }
// Filter options are just a list of names, not useful for dropdowns here interface BrandFilterOptions {
// interface BrandFilterOptions { brands: string[]; } statuses: string[];
}
interface BrandStats { interface BrandStats {
totalBrands: number; totalBrands: number;
activeBrands: number;
totalActiveProducts: number; // SUM(active_product_count) totalActiveProducts: number; // SUM(active_product_count)
totalValue: number; // SUM(current_stock_cost) totalValue: number; // SUM(current_stock_cost)
avgMargin: number; // Weighted avg margin 30d avgMargin: number; // Weighted avg margin 30d
@@ -74,7 +84,8 @@ interface BrandStats {
interface BrandFilters { interface BrandFilters {
search: string; search: string;
showInactive: boolean; // New filter for showing brands with 0 active products status: string;
showInactive: boolean; // Show brands with 0 active products
} }
const ITEMS_PER_PAGE = 50; const ITEMS_PER_PAGE = 50;
@@ -129,6 +140,19 @@ const formatPercentage = (value: number | string | null | undefined, digits = 1)
return `${value.toFixed(digits)}%`; return `${value.toFixed(digits)}%`;
}; };
const getStatusVariant = (status: string): "default" | "secondary" | "outline" | "destructive" => {
switch (status) {
case 'active':
return 'default';
case 'inactive':
return 'secondary';
case 'discontinued':
return 'destructive';
default:
return 'outline';
}
};
export function Brands() { export function Brands() {
const [page, setPage] = useState(1); const [page, setPage] = useState(1);
const [limit] = useState(ITEMS_PER_PAGE); const [limit] = useState(ITEMS_PER_PAGE);
@@ -136,6 +160,7 @@ export function Brands() {
const [sortDirection, setSortDirection] = useState<"asc" | "desc">("asc"); const [sortDirection, setSortDirection] = useState<"asc" | "desc">("asc");
const [filters, setFilters] = useState<BrandFilters>({ const [filters, setFilters] = useState<BrandFilters>({
search: "", search: "",
status: "all",
showInactive: false, // Default to hiding brands with 0 active products showInactive: false, // Default to hiding brands with 0 active products
}); });
@@ -151,6 +176,9 @@ export function Brands() {
if (filters.search) { if (filters.search) {
params.set('brandName_ilike', filters.search); // Filter by name params.set('brandName_ilike', filters.search); // Filter by name
} }
if (filters.status !== 'all') {
params.set('status', filters.status); // Filter by status
}
if (!filters.showInactive) { if (!filters.showInactive) {
params.set('activeProductCount_gt', '0'); // Only show brands with active products params.set('activeProductCount_gt', '0'); // Only show brands with active products
} }
@@ -184,8 +212,17 @@ export function Brands() {
}, },
}); });
// Filter options query might not be needed if only search is used // Fetch filter options
// const { data: filterOptions, isLoading: isLoadingFilterOptions } = useQuery<BrandFilterOptions, Error>({ ... }); const { data: filterOptions } = useQuery<BrandFilterOptions, Error>({
queryKey: ['brandsFilterOptions'],
queryFn: async () => {
const response = await fetch(`${config.apiUrl}/brands-aggregate/filter-options`, {
credentials: 'include'
});
if (!response.ok) throw new Error("Failed to fetch filter options");
return response.json();
},
});
// --- Event Handlers --- // --- Event Handlers ---
@@ -236,7 +273,8 @@ export function Brands() {
<CardContent> <CardContent>
{isLoadingStats ? <Skeleton className="h-8 w-24" /> : <div className="text-2xl font-bold">{formatNumber(statsData?.totalBrands)}</div>} {isLoadingStats ? <Skeleton className="h-8 w-24" /> : <div className="text-2xl font-bold">{formatNumber(statsData?.totalBrands)}</div>}
<p className="text-xs text-muted-foreground"> <p className="text-xs text-muted-foreground">
All brands with metrics {isLoadingStats ? <Skeleton className="h-4 w-28" /> :
`${formatNumber(statsData?.activeBrands)} active`}
</p> </p>
</CardContent> </CardContent>
</Card> </Card>
@@ -276,15 +314,30 @@ export function Brands() {
</motion.div> </motion.div>
{/* Filter Controls */} {/* Filter Controls */}
<div className="flex items-center justify-between"> <div className="flex flex-wrap items-center space-y-2 sm:space-y-0 sm:space-x-2">
<div className="flex flex-1 items-center justify-between space-x-2">
<Input <Input
placeholder="Search brands..." placeholder="Search brands..."
value={filters.search} value={filters.search}
onChange={(e) => handleFilterChange('search', e.target.value)} onChange={(e) => handleFilterChange('search', e.target.value)}
className="w-[150px] lg:w-[250px]" className="w-full sm:w-[250px]"
/> />
<div className="flex items-center space-x-2"> <Select
value={filters.status}
onValueChange={(value) => handleFilterChange('status', value)}
>
<SelectTrigger className="w-full sm:w-[180px]">
<SelectValue placeholder="Status" />
</SelectTrigger>
<SelectContent>
<SelectItem value="all">All Statuses</SelectItem>
{filterOptions?.statuses?.map((status) => (
<SelectItem key={status} value={status}>
{status.charAt(0).toUpperCase() + status.slice(1)}
</SelectItem>
))}
</SelectContent>
</Select>
<div className="flex items-center space-x-2 ml-auto">
<Switch <Switch
id="show-inactive-brands" id="show-inactive-brands"
checked={filters.showInactive} checked={filters.showInactive}
@@ -293,7 +346,6 @@ export function Brands() {
<Label htmlFor="show-inactive-brands">Show brands with no active products</Label> <Label htmlFor="show-inactive-brands">Show brands with no active products</Label>
</div> </div>
</div> </div>
</div>
{/* Data Table */} {/* Data Table */}
<div className="rounded-md border"> <div className="rounded-md border">
@@ -308,6 +360,8 @@ export function Brands() {
<TableHead onClick={() => handleSort("revenue_30d")} className="cursor-pointer text-right">Revenue (30d)</TableHead> <TableHead onClick={() => handleSort("revenue_30d")} className="cursor-pointer text-right">Revenue (30d)</TableHead>
<TableHead onClick={() => handleSort("profit_30d")} className="cursor-pointer text-right">Profit (30d)</TableHead> <TableHead onClick={() => handleSort("profit_30d")} className="cursor-pointer text-right">Profit (30d)</TableHead>
<TableHead onClick={() => handleSort("avg_margin_30d")} className="cursor-pointer text-right">Margin (30d)</TableHead> <TableHead onClick={() => handleSort("avg_margin_30d")} className="cursor-pointer text-right">Margin (30d)</TableHead>
<TableHead onClick={() => handleSort("stock_turn_30d")} className="cursor-pointer text-right">Stock Turn (30d)</TableHead>
<TableHead onClick={() => handleSort("status")} className="cursor-pointer text-right">Status</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
@@ -322,23 +376,25 @@ export function Brands() {
<TableCell className="text-right"><Skeleton className="h-5 w-20 ml-auto" /></TableCell> <TableCell className="text-right"><Skeleton className="h-5 w-20 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-20 ml-auto" /></TableCell> <TableCell className="text-right"><Skeleton className="h-5 w-20 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell> <TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell>
</TableRow> </TableRow>
)) ))
) : listError ? ( ) : listError ? (
<TableRow> <TableRow>
<TableCell colSpan={8} className="text-center py-8 text-destructive"> <TableCell colSpan={9} className="text-center py-8 text-destructive">
Error loading brands: {listError.message} Error loading brands: {listError.message}
</TableCell> </TableCell>
</TableRow> </TableRow>
) : brands.length === 0 ? ( ) : brands.length === 0 ? (
<TableRow> <TableRow>
<TableCell colSpan={8} className="text-center py-8 text-muted-foreground"> <TableCell colSpan={9} className="text-center py-8 text-muted-foreground">
No brands found matching your criteria. No brands found matching your criteria.
</TableCell> </TableCell>
</TableRow> </TableRow>
) : ( ) : (
brands.map((brand: BrandMetric) => ( brands.map((brand: BrandMetric) => (
<TableRow key={brand.brand_name}> {/* Use brand_name as key */} <TableRow key={brand.brand_id} className={brand.active_product_count === 0 ? "opacity-60" : ""}>
<TableCell className="font-medium">{brand.brand_name}</TableCell> <TableCell className="font-medium">{brand.brand_name}</TableCell>
<TableCell className="text-right">{formatNumber(brand.active_product_count || brand.activeProductCount)}</TableCell> <TableCell className="text-right">{formatNumber(brand.active_product_count || brand.activeProductCount)}</TableCell>
<TableCell className="text-right">{formatNumber(brand.current_stock_units || brand.currentStockUnits)}</TableCell> <TableCell className="text-right">{formatNumber(brand.current_stock_units || brand.currentStockUnits)}</TableCell>
@@ -347,6 +403,12 @@ export function Brands() {
<TableCell className="text-right">{formatCurrency(brand.revenue_30d as number)}</TableCell> <TableCell className="text-right">{formatCurrency(brand.revenue_30d as number)}</TableCell>
<TableCell className="text-right">{formatCurrency(brand.profit_30d as number)}</TableCell> <TableCell className="text-right">{formatCurrency(brand.profit_30d as number)}</TableCell>
<TableCell className="text-right">{formatPercentage(brand.avg_margin_30d as number)}</TableCell> <TableCell className="text-right">{formatPercentage(brand.avg_margin_30d as number)}</TableCell>
<TableCell className="text-right">{formatNumber(brand.stock_turn_30d, 2)}</TableCell>
<TableCell className="text-right">
<Badge variant={getStatusVariant(brand.status)}>
{brand.status || 'Unknown'}
</Badge>
</TableCell>
</TableRow> </TableRow>
)) ))
)} )}

File diff suppressed because it is too large Load Diff

View File

@@ -2,8 +2,8 @@ import { useState, useMemo, useCallback } from "react";
import { useQuery } from "@tanstack/react-query"; import { useQuery } from "@tanstack/react-query";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table"; import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table";
// import { Badge } from "@/components/ui/badge"; // Badge removed as status/performance filters are gone import { Badge } from "@/components/ui/badge";
// import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; // Select removed as filters are gone import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Pagination, PaginationContent, PaginationItem, PaginationLink, PaginationNext, PaginationPrevious } from "@/components/ui/pagination"; import { Pagination, PaginationContent, PaginationItem, PaginationLink, PaginationNext, PaginationPrevious } from "@/components/ui/pagination";
import { motion } from "framer-motion"; import { motion } from "framer-motion";
import config from "../config"; import config from "../config";
@@ -16,10 +16,10 @@ import { Label } from "@/components/ui/label";
type VendorSortableColumns = type VendorSortableColumns =
| 'vendorName' | 'productCount' | 'activeProductCount' | 'currentStockUnits' | 'vendorName' | 'productCount' | 'activeProductCount' | 'currentStockUnits'
| 'currentStockCost' | 'onOrderUnits' | 'onOrderCost' | 'avgLeadTimeDays' | 'currentStockCost' | 'onOrderUnits' | 'onOrderCost' | 'avgLeadTimeDays'
| 'revenue_30d' | 'profit_30d' | 'avg_margin_30d' | 'po_count_365d'; | 'revenue_30d' | 'profit_30d' | 'avg_margin_30d' | 'po_count_365d' | 'status';
interface VendorMetric { interface VendorMetric {
// Assuming vendor_name is unique primary identifier vendor_id: string | number;
vendor_name: string; vendor_name: string;
last_calculated: string; last_calculated: string;
product_count: number; product_count: number;
@@ -43,7 +43,16 @@ interface VendorMetric {
lifetime_sales: number; lifetime_sales: number;
lifetime_revenue: string | number; lifetime_revenue: string | number;
avg_margin_30d: string | number | null; avg_margin_30d: string | number | null;
// New fields added by vendorsAggregate
status: string;
vendor_status: string;
cost_metrics_30d: {
avg_unit_cost: number;
total_spend: number;
order_count: number;
};
// Camel case versions // Camel case versions
vendorId: string | number;
vendorName: string; vendorName: string;
lastCalculated: string; lastCalculated: string;
productCount: number; productCount: number;
@@ -72,26 +81,27 @@ interface VendorResponse {
}; };
} }
// Filter options are just a list of names, not useful for dropdowns here interface VendorFilterOptions {
// interface VendorFilterOptions { vendors: string[]; } statuses: string[];
}
interface VendorStats { interface VendorStats {
totalVendors: number; totalVendors: number;
totalActiveProducts: number; // This seems to be SUM(active_product_count) per vendor activeVendors: number;
totalValue: number; // SUM(current_stock_cost) totalActiveProducts: number;
totalOnOrderValue: number; // SUM(on_order_cost) totalValue: number;
avgLeadTime: number; // AVG(avg_lead_time_days) totalOnOrderValue: number;
avgLeadTime: number;
} }
interface VendorFilters { interface VendorFilters {
search: string; search: string;
showInactive: boolean; // New filter for showing vendors with 0 active products status: string;
// Status and Performance filters removed showInactive: boolean;
} }
const ITEMS_PER_PAGE = 50; const ITEMS_PER_PAGE = 50;
// Re-use formatting helpers from Categories or define here
const formatCurrency = (value: number | string | null | undefined, digits = 0): string => { const formatCurrency = (value: number | string | null | undefined, digits = 0): string => {
if (value == null) return 'N/A'; if (value == null) return 'N/A';
if (typeof value === 'string') { if (typeof value === 'string') {
@@ -152,6 +162,19 @@ const formatDays = (value: number | string | null | undefined, digits = 1): stri
return `${value.toFixed(digits)} days`; return `${value.toFixed(digits)} days`;
}; };
const getStatusVariant = (status: string): "default" | "secondary" | "outline" | "destructive" => {
switch (status) {
case 'active':
return 'default';
case 'inactive':
return 'secondary';
case 'discontinued':
return 'destructive';
default:
return 'outline';
}
};
export function Vendors() { export function Vendors() {
const [page, setPage] = useState(1); const [page, setPage] = useState(1);
const [limit] = useState(ITEMS_PER_PAGE); const [limit] = useState(ITEMS_PER_PAGE);
@@ -159,6 +182,7 @@ export function Vendors() {
const [sortDirection, setSortDirection] = useState<"asc" | "desc">("asc"); const [sortDirection, setSortDirection] = useState<"asc" | "desc">("asc");
const [filters, setFilters] = useState<VendorFilters>({ const [filters, setFilters] = useState<VendorFilters>({
search: "", search: "",
status: "all",
showInactive: false, // Default to hiding vendors with 0 active products showInactive: false, // Default to hiding vendors with 0 active products
}); });
@@ -174,10 +198,12 @@ export function Vendors() {
if (filters.search) { if (filters.search) {
params.set('vendorName_ilike', filters.search); // Filter by name params.set('vendorName_ilike', filters.search); // Filter by name
} }
if (filters.status !== 'all') {
params.set('status', filters.status); // Filter by status
}
if (!filters.showInactive) { if (!filters.showInactive) {
params.set('activeProductCount_gt', '0'); // Only show vendors with active products params.set('activeProductCount_gt', '0'); // Only show vendors with active products
} }
// Add more filters here if needed (e.g., avgLeadTimeDays_lte=10)
return params; return params;
}, [page, limit, sortColumn, sortDirection, filters]); }, [page, limit, sortColumn, sortDirection, filters]);
@@ -189,9 +215,7 @@ export function Vendors() {
credentials: 'include' credentials: 'include'
}); });
if (!response.ok) throw new Error(`Network response was not ok (${response.status})`); if (!response.ok) throw new Error(`Network response was not ok (${response.status})`);
const data = await response.json(); return response.json();
console.log('Vendors data:', JSON.stringify(data, null, 2));
return data;
}, },
placeholderData: (prev) => prev, // Modern replacement for keepPreviousData placeholderData: (prev) => prev, // Modern replacement for keepPreviousData
}); });
@@ -207,8 +231,17 @@ export function Vendors() {
}, },
}); });
// Filter options query might not be needed if only search is used // Fetch filter options
// const { data: filterOptions, isLoading: isLoadingFilterOptions } = useQuery<VendorFilterOptions, Error>({ ... }); const { data: filterOptions, isLoading: isLoadingFilterOptions } = useQuery<VendorFilterOptions, Error>({
queryKey: ['vendorsFilterOptions'],
queryFn: async () => {
const response = await fetch(`${config.apiUrl}/vendors-aggregate/filter-options`, {
credentials: 'include'
});
if (!response.ok) throw new Error("Failed to fetch filter options");
return response.json();
},
});
// --- Event Handlers --- // --- Event Handlers ---
@@ -259,8 +292,8 @@ export function Vendors() {
<CardContent> <CardContent>
{isLoadingStats ? <Skeleton className="h-8 w-24" /> : <div className="text-2xl font-bold">{formatNumber(statsData?.totalVendors)}</div>} {isLoadingStats ? <Skeleton className="h-8 w-24" /> : <div className="text-2xl font-bold">{formatNumber(statsData?.totalVendors)}</div>}
<p className="text-xs text-muted-foreground"> <p className="text-xs text-muted-foreground">
{/* Active vendor count not directly available, showing total */} {isLoadingStats ? <Skeleton className="h-4 w-28" /> :
All vendors with metrics `${formatNumber(statsData?.activeVendors)} active`}
</p> </p>
</CardContent> </CardContent>
</Card> </Card>
@@ -297,19 +330,33 @@ export function Vendors() {
</p> </p>
</CardContent> </CardContent>
</Card> </Card>
{/* Note: Total Spend and Performance cards removed */}
</motion.div> </motion.div>
{/* Filter Controls */} {/* Filter Controls */}
<div className="flex items-center justify-between"> <div className="flex flex-wrap items-center space-y-2 sm:space-y-0 sm:space-x-2">
<div className="flex flex-1 items-center justify-between space-x-2">
<Input <Input
placeholder="Search vendors..." placeholder="Search vendors..."
value={filters.search} value={filters.search}
onChange={(e) => handleFilterChange('search', e.target.value)} onChange={(e) => handleFilterChange('search', e.target.value)}
className="w-[150px] lg:w-[250px]" className="w-full sm:w-[250px]"
/> />
<div className="flex items-center space-x-2"> <Select
value={filters.status}
onValueChange={(value) => handleFilterChange('status', value)}
>
<SelectTrigger className="w-full sm:w-[180px]">
<SelectValue placeholder="Status" />
</SelectTrigger>
<SelectContent>
<SelectItem value="all">All Statuses</SelectItem>
{filterOptions?.statuses?.map((status) => (
<SelectItem key={status} value={status}>
{status.charAt(0).toUpperCase() + status.slice(1)}
</SelectItem>
))}
</SelectContent>
</Select>
<div className="flex items-center space-x-2 ml-auto">
<Switch <Switch
id="show-inactive-vendors" id="show-inactive-vendors"
checked={filters.showInactive} checked={filters.showInactive}
@@ -317,8 +364,6 @@ export function Vendors() {
/> />
<Label htmlFor="show-inactive-vendors">Show vendors with no active products</Label> <Label htmlFor="show-inactive-vendors">Show vendors with no active products</Label>
</div> </div>
{/* Note: Status and Performance Select dropdowns removed */}
</div>
</div> </div>
{/* Data Table */} {/* Data Table */}
@@ -336,7 +381,7 @@ export function Vendors() {
<TableHead onClick={() => handleSort("profit_30d")} className="cursor-pointer text-right">Profit (30d)</TableHead> <TableHead onClick={() => handleSort("profit_30d")} className="cursor-pointer text-right">Profit (30d)</TableHead>
<TableHead onClick={() => handleSort("avg_margin_30d")} className="cursor-pointer text-right">Margin (30d)</TableHead> <TableHead onClick={() => handleSort("avg_margin_30d")} className="cursor-pointer text-right">Margin (30d)</TableHead>
<TableHead onClick={() => handleSort("po_count_365d")} className="cursor-pointer text-right">POs (365d)</TableHead> <TableHead onClick={() => handleSort("po_count_365d")} className="cursor-pointer text-right">POs (365d)</TableHead>
{/* Removed: Status, On-Time %, Fill Rate, Avg Unit Cost, Total Spend, Orders */} <TableHead onClick={() => handleSort("status")} className="cursor-pointer text-right">Status</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
@@ -353,23 +398,24 @@ export function Vendors() {
<TableCell className="text-right"><Skeleton className="h-5 w-20 ml-auto" /></TableCell> <TableCell className="text-right"><Skeleton className="h-5 w-20 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell> <TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell> <TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell>
<TableCell className="text-right"><Skeleton className="h-5 w-16 ml-auto" /></TableCell>
</TableRow> </TableRow>
)) ))
) : listError ? ( ) : listError ? (
<TableRow> <TableRow>
<TableCell colSpan={10} className="text-center py-8 text-destructive"> <TableCell colSpan={11} className="text-center py-8 text-destructive">
Error loading vendors: {listError.message} Error loading vendors: {listError.message}
</TableCell> </TableCell>
</TableRow> </TableRow>
) : vendors.length === 0 ? ( ) : vendors.length === 0 ? (
<TableRow> <TableRow>
<TableCell colSpan={10} className="text-center py-8 text-muted-foreground"> <TableCell colSpan={11} className="text-center py-8 text-muted-foreground">
No vendors found matching your criteria. No vendors found matching your criteria.
</TableCell> </TableCell>
</TableRow> </TableRow>
) : ( ) : (
vendors.map((vendor: VendorMetric) => ( vendors.map((vendor: VendorMetric) => (
<TableRow key={vendor.vendor_name}> {/* Use vendor_name as key assuming it's unique */} <TableRow key={vendor.vendor_id} className={vendor.active_product_count === 0 ? "opacity-60" : ""}>
<TableCell className="font-medium">{vendor.vendor_name}</TableCell> <TableCell className="font-medium">{vendor.vendor_name}</TableCell>
<TableCell className="text-right">{formatNumber(vendor.active_product_count || vendor.activeProductCount)}</TableCell> <TableCell className="text-right">{formatNumber(vendor.active_product_count || vendor.activeProductCount)}</TableCell>
<TableCell className="text-right">{formatCurrency(vendor.current_stock_cost as number)}</TableCell> <TableCell className="text-right">{formatCurrency(vendor.current_stock_cost as number)}</TableCell>
@@ -380,6 +426,11 @@ export function Vendors() {
<TableCell className="text-right">{formatCurrency(vendor.profit_30d as number)}</TableCell> <TableCell className="text-right">{formatCurrency(vendor.profit_30d as number)}</TableCell>
<TableCell className="text-right">{formatPercentage(vendor.avg_margin_30d as number)}</TableCell> <TableCell className="text-right">{formatPercentage(vendor.avg_margin_30d as number)}</TableCell>
<TableCell className="text-right">{formatNumber(vendor.po_count_365d || vendor.poCount_365d)}</TableCell> <TableCell className="text-right">{formatNumber(vendor.po_count_365d || vendor.poCount_365d)}</TableCell>
<TableCell className="text-right">
<Badge variant={getStatusVariant(vendor.status)}>
{vendor.status || 'Unknown'}
</Badge>
</TableCell>
</TableRow> </TableRow>
)) ))
)} )}
@@ -389,7 +440,7 @@ export function Vendors() {
{/* Pagination Controls */} {/* Pagination Controls */}
{totalPages > 1 && pagination && ( {totalPages > 1 && pagination && (
<motion.div layout="position" transition={{ duration: 0.15 }} className="flex justify-center"> <div className="flex justify-center">
<Pagination> <Pagination>
<PaginationContent> <PaginationContent>
<PaginationItem> <PaginationItem>
@@ -421,7 +472,7 @@ export function Vendors() {
</PaginationItem> </PaginationItem>
</PaginationContent> </PaginationContent>
</Pagination> </Pagination>
</motion.div> </div>
)} )}
</motion.div> </motion.div>
); );