2 Commits

21 changed files with 2232 additions and 508 deletions

File diff suppressed because it is too large


@@ -154,6 +154,24 @@ CREATE TRIGGER update_sales_seasonality_updated
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Create table for financial calculation parameters
CREATE TABLE financial_calc_config (
id INTEGER NOT NULL PRIMARY KEY,
order_cost DECIMAL(10,2) NOT NULL DEFAULT 25.00, -- The fixed cost per purchase order (used in EOQ)
holding_rate DECIMAL(10,4) NOT NULL DEFAULT 0.25, -- The annual inventory holding cost as a percentage of unit cost (used in EOQ)
service_level_z_score DECIMAL(10,4) NOT NULL DEFAULT 1.96, -- Z-score for ~95% service level (used in Safety Stock)
min_reorder_qty INTEGER NOT NULL DEFAULT 1, -- Minimum reorder quantity
default_reorder_qty INTEGER NOT NULL DEFAULT 5, -- Default reorder quantity when sales data is insufficient
default_safety_stock INTEGER NOT NULL DEFAULT 5, -- Default safety stock when sales data is insufficient
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE TRIGGER update_financial_calc_config_updated
BEFORE UPDATE ON financial_calc_config
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Insert default global thresholds
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (1, NULL, NULL, 7, 14, 90)
@@ -203,6 +221,17 @@ VALUES
ON CONFLICT (month) DO UPDATE SET
last_updated = CURRENT_TIMESTAMP;
-- Insert default values
INSERT INTO financial_calc_config (id, order_cost, holding_rate, service_level_z_score, min_reorder_qty, default_reorder_qty, default_safety_stock)
VALUES (1, 25.00, 0.25, 1.96, 1, 5, 5)
ON CONFLICT (id) DO UPDATE SET
order_cost = EXCLUDED.order_cost,
holding_rate = EXCLUDED.holding_rate,
service_level_z_score = EXCLUDED.service_level_z_score,
min_reorder_qty = EXCLUDED.min_reorder_qty,
default_reorder_qty = EXCLUDED.default_reorder_qty,
default_safety_stock = EXCLUDED.default_safety_stock;
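For reference, a minimal sketch (plain Node.js, illustrative numbers, hypothetical function names) of how parameters like `order_cost`, `holding_rate`, and `service_level_z_score` typically feed the EOQ and safety-stock formulas this config table parameterizes:

```js
// Illustrative only: standard EOQ and a simple safety-stock formula using the
// kinds of parameters stored in financial_calc_config. Names/values are examples.
function economicOrderQuantity(annualDemandUnits, orderCost, unitCost, holdingRate) {
  // EOQ = sqrt(2 * D * S / H), where H = unit cost * annual holding rate
  const holdingCostPerUnit = unitCost * holdingRate;
  if (holdingCostPerUnit <= 0) return 0;
  return Math.ceil(Math.sqrt((2 * annualDemandUnits * orderCost) / holdingCostPerUnit));
}

function safetyStock(zScore, avgDailyDemand, leadTimeDays) {
  // Simple form: z * d * sqrt(LT); the calculation module uses a fuller version
  // that also accounts for demand and lead-time variability.
  return Math.ceil(zScore * avgDailyDemand * Math.sqrt(leadTimeDays));
}

// With the defaults inserted above (order_cost 25.00, holding_rate 0.25, z 1.96):
console.log(economicOrderQuantity(3 * 365, 25.0, 12.5, 0.25)); // EOQ for ~3 units/day at $12.50 unit cost
console.log(safetyStock(1.96, 3, 14));                         // buffer for 3 units/day over a 14-day lead time
```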
-- View to show thresholds with category names
CREATE OR REPLACE VIEW stock_thresholds_view AS
SELECT


@@ -11,15 +11,17 @@ CREATE TABLE temp_sales_metrics (
avg_margin_percent DECIMAL(10,3),
first_sale_date DATE,
last_sale_date DATE,
stddev_daily_sales DECIMAL(10,3),
PRIMARY KEY (pid)
);
CREATE TABLE temp_purchase_metrics (
pid BIGINT NOT NULL,
-avg_lead_time_days INTEGER,
+avg_lead_time_days DECIMAL(10,2),
last_purchase_date DATE,
first_received_date DATE,
last_received_date DATE,
stddev_lead_time_days DECIMAL(10,2),
PRIMARY KEY (pid)
);
@@ -50,7 +52,7 @@ CREATE TABLE product_metrics (
gross_profit DECIMAL(10,3),
gmroi DECIMAL(10,3),
-- Purchase metrics
-avg_lead_time_days INTEGER,
+avg_lead_time_days DECIMAL(10,2),
last_purchase_date DATE,
first_received_date DATE,
last_received_date DATE,


@@ -7,7 +7,7 @@ BEGIN
-- Check which table is being updated and use the appropriate column
IF TG_TABLE_NAME = 'categories' THEN
NEW.updated_at = CURRENT_TIMESTAMP;
-ELSE
+ELSIF TG_TABLE_NAME IN ('products', 'orders', 'purchase_orders') THEN
NEW.updated = CURRENT_TIMESTAMP;
END IF;
RETURN NEW;
@@ -91,6 +91,7 @@ CREATE TABLE categories (
description TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
status VARCHAR(20) DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
);


@@ -57,25 +57,16 @@ const TEMP_TABLES = [
'temp_daily_sales',
'temp_product_stats',
'temp_category_sales',
-'temp_category_stats'
+'temp_category_stats',
'temp_beginning_inventory',
'temp_monthly_inventory'
];
// Add cleanup function for temporary tables
async function cleanupTemporaryTables(connection) {
// List of possible temporary tables that might exist
const tempTables = [
'temp_sales_metrics',
'temp_purchase_metrics',
'temp_forecast_dates',
'temp_daily_sales',
'temp_product_stats',
'temp_category_sales',
'temp_category_stats'
];
try {
// Drop each temporary table if it exists
-for (const table of tempTables) {
+for (const table of TEMP_TABLES) {
await connection.query(`DROP TABLE IF EXISTS ${table}`);
}
} catch (err) {
@@ -534,7 +525,7 @@ async function calculateMetrics() {
await connection.query(`
UPDATE calculate_history
SET
-status = 'error',
+status = 'failed',
end_time = NOW(),
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
error_message = $1


@@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders');
dotenv.config({ path: path.join(__dirname, "../.env") });
// Constants to control which imports run
-const IMPORT_CATEGORIES = false;
+const IMPORT_CATEGORIES = true;
-const IMPORT_PRODUCTS = false;
+const IMPORT_PRODUCTS = true;
-const IMPORT_ORDERS = false;
+const IMPORT_ORDERS = true;
const IMPORT_PURCHASE_ORDERS = true;
// Add flag for incremental updates
@@ -169,8 +169,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Categories import result:', results.categories);
-totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0) || 0;
+totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0);
-totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0) || 0;
+totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0);
}
if (IMPORT_PRODUCTS) {
@@ -178,8 +178,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Products import result:', results.products);
-totalRecordsAdded += parseInt(results.products?.recordsAdded || 0) || 0;
+totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
-totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0) || 0;
+totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
}
if (IMPORT_ORDERS) {
@@ -187,8 +187,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Orders import result:', results.orders);
-totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0) || 0;
+totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
-totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0) || 0;
+totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
}
if (IMPORT_PURCHASE_ORDERS) {
@@ -202,8 +202,8 @@ async function main() {
if (results.purchaseOrders?.status === 'error') {
console.error('Purchase orders import had an error:', results.purchaseOrders.error);
} else {
-totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0) || 0;
+totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
-totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0) || 0;
+totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
}
} catch (error) {
console.error('Error during purchase orders import:', error);
@@ -242,8 +242,8 @@ async function main() {
WHERE id = $12
`, [
totalElapsedSeconds,
-parseInt(totalRecordsAdded) || 0,
+parseInt(totalRecordsAdded),
-parseInt(totalRecordsUpdated) || 0,
+parseInt(totalRecordsUpdated),
IMPORT_CATEGORIES,
IMPORT_PRODUCTS,
IMPORT_ORDERS,
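The totals above lean on `parseInt(... || 0)`; a hedged sketch (hypothetical helper, illustrative values only) of how a missing or non-numeric count can be coerced to a safe integer so it never poisons a running total:

```js
// Hypothetical helper: parseInt(undefined || 0) is 0, but parseInt('n/a') is NaN,
// and NaN would silently propagate through `+=`, so a final fallback still helps.
function toCount(value) {
  const n = parseInt(value ?? 0, 10);
  return Number.isNaN(n) ? 0 : n;
}

let totalRecordsAdded = 0;
totalRecordsAdded += toCount(undefined); // 0
totalRecordsAdded += toCount('42');      // 42
totalRecordsAdded += toCount('n/a');     // 0 instead of NaN
console.log(totalRecordsAdded);          // 42
```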


@@ -16,6 +16,9 @@ async function importCategories(prodConnection, localConnection) {
// Start a single transaction for the entire import
await localConnection.query('BEGIN');
// Temporarily disable the trigger that's causing problems
await localConnection.query('ALTER TABLE categories DISABLE TRIGGER update_categories_updated_at');
// Process each type in order with its own savepoint
for (const type of typeOrder) {
try {
@@ -150,6 +153,9 @@ async function importCategories(prodConnection, localConnection) {
last_sync_timestamp = NOW()
`);
// Re-enable the trigger
await localConnection.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
outputProgress({
status: "complete",
operation: "Categories import completed",
@@ -178,6 +184,9 @@ async function importCategories(prodConnection, localConnection) {
// Only rollback if we haven't committed yet
try {
await localConnection.query('ROLLBACK');
// Make sure we re-enable the trigger even if there was an error
await localConnection.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
} catch (rollbackError) {
console.error("Error during rollback:", rollbackError);
}
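A minimal sketch of the disable-trigger-around-bulk-import pattern used here, assuming a `pg`-style client with a `query` method; the table and trigger names mirror the diff, everything else is illustrative:

```js
// Sketch only: run a bulk import with the categories trigger disabled for the
// duration, and re-enable it even when the import fails and rolls back.
async function withTriggerDisabled(client, importFn) {
  await client.query('BEGIN');
  await client.query('ALTER TABLE categories DISABLE TRIGGER update_categories_updated_at');
  try {
    await importFn(client);         // bulk inserts/updates go here
    await client.query('COMMIT');
  } catch (err) {
    await client.query('ROLLBACK'); // undo partial work
    throw err;
  } finally {
    // Re-enable no matter what, mirroring the error path in the diff.
    await client.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
  }
}
```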


@@ -590,7 +590,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
ordered, po_cost_price, supplier_id, date_created, date_ordered
)
SELECT
-'R' || r.receiving_id as po_id,
+r.receiving_id::text as po_id,
r.pid,
COALESCE(p.sku, 'NO-SKU') as sku,
COALESCE(p.name, 'Unknown Product') as name,
@@ -626,7 +626,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po_id, pid, receiving_id, allocated_qty, cost_each, received_date, received_by
)
SELECT
-'R' || r.receiving_id as po_id,
+r.receiving_id::text as po_id,
r.pid,
r.receiving_id,
r.qty_each as allocated_qty,


@@ -56,36 +56,94 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
}
});
-// Calculate financial metrics with optimized query
+// First, calculate beginning inventory values (12 months ago)
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_beginning_inventory AS
WITH beginning_inventory_calc AS (
SELECT
p.pid,
p.stock_quantity as current_quantity,
COALESCE(SUM(o.quantity), 0) as sold_quantity,
COALESCE(SUM(po.received), 0) as received_quantity,
GREATEST(0, (p.stock_quantity + COALESCE(SUM(o.quantity), 0) - COALESCE(SUM(po.received), 0))) as beginning_quantity,
p.cost_price
FROM
products p
LEFT JOIN
orders o ON p.pid = o.pid
AND o.canceled = false
AND o.date >= CURRENT_DATE - INTERVAL '12 months'::interval
LEFT JOIN
purchase_orders po ON p.pid = po.pid
AND po.received_date IS NOT NULL
AND po.received_date >= CURRENT_DATE - INTERVAL '12 months'::interval
GROUP BY
p.pid, p.stock_quantity, p.cost_price
)
SELECT
pid,
beginning_quantity,
beginning_quantity * cost_price as beginning_value,
current_quantity * cost_price as current_value,
((beginning_quantity * cost_price) + (current_quantity * cost_price)) / 2 as average_inventory_value
FROM
beginning_inventory_calc
`);
processedCount = Math.floor(totalProducts * 0.60);
outputProgress({
status: 'running',
operation: 'Beginning inventory values calculated, computing financial metrics',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// Calculate financial metrics with optimized query and standard formulas
await connection.query(`
WITH product_financials AS (
SELECT
p.pid,
-p.cost_price * p.stock_quantity as inventory_value,
+COALESCE(bi.average_inventory_value, p.cost_price * p.stock_quantity) as avg_inventory_value,
-SUM(o.quantity * o.price) as total_revenue,
+p.cost_price * p.stock_quantity as current_inventory_value,
-SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
+SUM(o.quantity * (o.price - COALESCE(o.discount, 0))) as total_revenue,
-SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
+SUM(o.quantity * COALESCE(o.costeach, 0)) as cost_of_goods_sold,
SUM(o.quantity * (o.price - COALESCE(o.discount, 0) - COALESCE(o.costeach, 0))) as gross_profit,
MIN(o.date) as first_sale_date,
MAX(o.date) as last_sale_date,
EXTRACT(DAY FROM (MAX(o.date)::timestamp with time zone - MIN(o.date)::timestamp with time zone)) + 1 as calculation_period_days,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
LEFT JOIN temp_beginning_inventory bi ON p.pid = bi.pid
WHERE o.canceled = false
-AND DATE(o.date) >= CURRENT_DATE - INTERVAL '12 months'
+AND DATE(o.date) >= CURRENT_DATE - INTERVAL '12 months'::interval
-GROUP BY p.pid, p.cost_price, p.stock_quantity
+GROUP BY p.pid, p.cost_price, p.stock_quantity, bi.average_inventory_value
)
UPDATE product_metrics pm
SET
-inventory_value = COALESCE(pf.inventory_value, 0),
+inventory_value = COALESCE(pf.current_inventory_value, 0)::decimal(10,3),
-total_revenue = COALESCE(pf.total_revenue, 0),
+total_revenue = COALESCE(pf.total_revenue, 0)::decimal(10,3),
-cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0),
+cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0)::decimal(10,3),
-gross_profit = COALESCE(pf.gross_profit, 0),
+gross_profit = COALESCE(pf.gross_profit, 0)::decimal(10,3),
-gmroi = CASE
+turnover_rate = CASE
-WHEN COALESCE(pf.inventory_value, 0) > 0 AND pf.active_days > 0 THEN
+WHEN COALESCE(pf.avg_inventory_value, 0) > 0 THEN
-(COALESCE(pf.gross_profit, 0) * (365.0 / pf.active_days)) / COALESCE(pf.inventory_value, 0)
+COALESCE(pf.cost_of_goods_sold, 0) / NULLIF(pf.avg_inventory_value, 0)
ELSE 0
-END,
+END::decimal(12,3),
gmroi = CASE
WHEN COALESCE(pf.avg_inventory_value, 0) > 0 THEN
COALESCE(pf.gross_profit, 0) / NULLIF(pf.avg_inventory_value, 0)
ELSE 0
END::decimal(10,3),
last_calculated_at = CURRENT_TIMESTAMP
FROM product_financials pf
WHERE pm.pid = pf.pid
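The formulas behind this block, as a small hedged sketch in plain JavaScript (all numbers are made up): average inventory is the mean of the reconstructed beginning value and the current value, turnover is cost of goods sold over average inventory, and GMROI is gross profit over average inventory.

```js
// Sketch of the standard ratios used in the UPDATE above; illustrative values only.
function inventoryFinancials({ beginningValue, currentValue, costOfGoodsSold, grossProfit }) {
  const avgInventoryValue = (beginningValue + currentValue) / 2;
  return {
    avgInventoryValue,
    turnoverRate: avgInventoryValue > 0 ? costOfGoodsSold / avgInventoryValue : 0,
    gmroi: avgInventoryValue > 0 ? grossProfit / avgInventoryValue : 0,
  };
}

console.log(inventoryFinancials({
  beginningValue: 10000,   // value 12 months ago, back-calculated from sales and receipts
  currentValue: 8000,      // stock_quantity * cost_price today
  costOfGoodsSold: 27000,  // 12-month COGS
  grossProfit: 13500,      // 12-month revenue minus COGS
}));
// -> average inventory 9000, turnover 3.0, GMROI 1.5
```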
@@ -115,53 +173,8 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
success
};
-// Update time-based aggregates with optimized query
+// Clean up temporary tables
-await connection.query(`
+await connection.query('DROP TABLE IF EXISTS temp_beginning_inventory');
WITH monthly_financials AS (
SELECT
p.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as active_days,
MIN(o.date) as period_start,
MAX(o.date) as period_end
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
)
UPDATE product_time_aggregates pta
SET
inventory_value = COALESCE(mf.inventory_value, 0),
gmroi = CASE
WHEN COALESCE(mf.inventory_value, 0) > 0 AND mf.active_days > 0 THEN
(COALESCE(mf.gross_profit, 0) * (365.0 / mf.active_days)) / COALESCE(mf.inventory_value, 0)
ELSE 0
END
FROM monthly_financials mf
WHERE pta.pid = mf.pid
AND pta.year = mf.year
AND pta.month = mf.month
`);
processedCount = Math.floor(totalProducts * 0.70);
outputProgress({
status: 'running',
operation: 'Time-based aggregates updated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// If we get here, everything completed successfully
success = true;
@@ -187,6 +200,12 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
throw error;
} finally {
if (connection) {
try {
// Make sure temporary tables are always cleaned up
await connection.query('DROP TABLE IF EXISTS temp_beginning_inventory');
} catch (err) {
console.error('Error cleaning up temp tables:', err);
}
connection.release();
}
}


@@ -66,8 +66,36 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
WHERE category_id IS NULL AND vendor IS NULL
LIMIT 1
`);
// Check if threshold data was returned
if (!thresholds.rows || thresholds.rows.length === 0) {
console.warn('No default thresholds found in the database. Using explicit type casting in the query.');
}
const defaultThresholds = thresholds.rows[0];
// Get financial calculation configuration parameters
const financialConfig = await connection.query(`
SELECT
order_cost,
holding_rate,
service_level_z_score,
min_reorder_qty,
default_reorder_qty,
default_safety_stock
FROM financial_calc_config
WHERE id = 1
LIMIT 1
`);
const finConfig = financialConfig.rows[0] || {
order_cost: 25.00,
holding_rate: 0.25,
service_level_z_score: 1.96,
min_reorder_qty: 1,
default_reorder_qty: 5,
default_safety_stock: 5
};
// Calculate base product metrics
if (!SKIP_PRODUCT_BASE_METRICS) {
outputProgress({
@@ -109,6 +137,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
avg_margin_percent DECIMAL(10,3),
first_sale_date DATE,
last_sale_date DATE,
stddev_daily_sales DECIMAL(10,3),
PRIMARY KEY (pid)
)
`);
@@ -117,10 +146,11 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
await connection.query(`
CREATE TEMPORARY TABLE temp_purchase_metrics (
pid BIGINT NOT NULL,
-avg_lead_time_days DOUBLE PRECISION,
+avg_lead_time_days DECIMAL(10,2),
last_purchase_date DATE,
first_received_date DATE,
last_received_date DATE,
stddev_lead_time_days DECIMAL(10,2),
PRIMARY KEY (pid)
)
`);
@@ -140,11 +170,22 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
ELSE 0
END as avg_margin_percent,
MIN(o.date) as first_sale_date,
-MAX(o.date) as last_sale_date
+MAX(o.date) as last_sale_date,
COALESCE(STDDEV_SAMP(daily_qty.quantity), 0) as stddev_daily_sales
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
AND o.canceled = false
AND o.date >= CURRENT_DATE - INTERVAL '90 days'
LEFT JOIN (
SELECT
pid,
DATE(date) as sale_date,
SUM(quantity) as quantity
FROM orders
WHERE canceled = false
AND date >= CURRENT_DATE - INTERVAL '90 days'
GROUP BY pid, DATE(date)
) daily_qty ON p.pid = daily_qty.pid
GROUP BY p.pid
`);
@@ -163,7 +204,14 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
) as avg_lead_time_days,
MAX(po.date) as last_purchase_date,
MIN(po.received_date) as first_received_date,
-MAX(po.received_date) as last_received_date
+MAX(po.received_date) as last_received_date,
STDDEV_SAMP(
CASE
WHEN po.received_date IS NOT NULL AND po.date IS NOT NULL
THEN EXTRACT(EPOCH FROM (po.received_date::timestamp with time zone - po.date::timestamp with time zone)) / 86400.0
ELSE NULL
END
) as stddev_lead_time_days
FROM products p
LEFT JOIN purchase_orders po ON p.pid = po.pid
AND po.received_date IS NOT NULL
@@ -184,7 +232,8 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
30.0 as avg_lead_time_days,
NULL as last_purchase_date,
NULL as first_received_date,
-NULL as last_received_date
+NULL as last_received_date,
0.0 as stddev_lead_time_days
FROM products p
LEFT JOIN temp_purchase_metrics tpm ON p.pid = tpm.pid
WHERE tpm.pid IS NULL
@@ -208,6 +257,17 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
if (batch.rows.length === 0) break;
// Process the entire batch in a single efficient query
const lowStockThreshold = parseInt(defaultThresholds?.low_stock_threshold) || 5;
const criticalDays = parseInt(defaultThresholds?.critical_days) || 7;
const reorderDays = parseInt(defaultThresholds?.reorder_days) || 14;
const overstockDays = parseInt(defaultThresholds?.overstock_days) || 90;
const serviceLevel = parseFloat(finConfig?.service_level_z_score) || 1.96;
const defaultSafetyStock = parseInt(finConfig?.default_safety_stock) || 5;
const defaultReorderQty = parseInt(finConfig?.default_reorder_qty) || 5;
const orderCost = parseFloat(finConfig?.order_cost) || 25.00;
const holdingRate = parseFloat(finConfig?.holding_rate) || 0.25;
const minReorderQty = parseInt(finConfig?.min_reorder_qty) || 1;
await connection.query(`
UPDATE product_metrics pm
SET
@@ -219,7 +279,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
avg_margin_percent = COALESCE(sm.avg_margin_percent, 0),
first_sale_date = sm.first_sale_date,
last_sale_date = sm.last_sale_date,
-avg_lead_time_days = COALESCE(lm.avg_lead_time_days, 30),
+avg_lead_time_days = COALESCE(lm.avg_lead_time_days, 30.0),
days_of_inventory = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0
THEN FLOOR(p.stock_quantity / NULLIF(sm.daily_sales_avg, 0))
@@ -232,57 +292,61 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
END,
stock_status = CASE
WHEN p.stock_quantity <= 0 THEN 'Out of Stock'
-WHEN COALESCE(sm.daily_sales_avg, 0) = 0 AND p.stock_quantity <= $1 THEN 'Low Stock'
+WHEN COALESCE(sm.daily_sales_avg, 0) = 0 AND p.stock_quantity <= ${lowStockThreshold} THEN 'Low Stock'
WHEN COALESCE(sm.daily_sales_avg, 0) = 0 THEN 'In Stock'
-WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= $2 THEN 'Critical'
+WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= ${criticalDays} THEN 'Critical'
-WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= $3 THEN 'Reorder'
+WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= ${reorderDays} THEN 'Reorder'
-WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > $4 THEN 'Overstocked'
+WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > ${overstockDays} THEN 'Overstocked'
ELSE 'Healthy'
END,
safety_stock = CASE
-WHEN COALESCE(sm.daily_sales_avg, 0) > 0 THEN
+WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND COALESCE(lm.avg_lead_time_days, 0) > 0 THEN
-CEIL(sm.daily_sales_avg * SQRT(ABS(COALESCE(lm.avg_lead_time_days, 30))) * 1.96)
+CEIL(
-ELSE $5
+${serviceLevel} * SQRT(
GREATEST(0, COALESCE(lm.avg_lead_time_days, 0)) * POWER(COALESCE(sm.stddev_daily_sales, 0), 2) +
POWER(COALESCE(sm.daily_sales_avg, 0), 2) * POWER(COALESCE(lm.stddev_lead_time_days, 0), 2)
)
)
ELSE ${defaultSafetyStock}
END,
reorder_point = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 THEN
-CEIL(sm.daily_sales_avg * COALESCE(lm.avg_lead_time_days, 30)) +
+CEIL(sm.daily_sales_avg * GREATEST(0, COALESCE(lm.avg_lead_time_days, 30.0))) +
-CEIL(sm.daily_sales_avg * SQRT(ABS(COALESCE(lm.avg_lead_time_days, 30))) * 1.96)
+(CASE
-ELSE $6
+WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND COALESCE(lm.avg_lead_time_days, 0) > 0 THEN
CEIL(
${serviceLevel} * SQRT(
GREATEST(0, COALESCE(lm.avg_lead_time_days, 0)) * POWER(COALESCE(sm.stddev_daily_sales, 0), 2) +
POWER(COALESCE(sm.daily_sales_avg, 0), 2) * POWER(COALESCE(lm.stddev_lead_time_days, 0), 2)
)
)
ELSE ${defaultSafetyStock}
END)
ELSE ${lowStockThreshold}
END,
reorder_qty = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND NULLIF(p.cost_price, 0) IS NOT NULL AND NULLIF(p.cost_price, 0) > 0 THEN
GREATEST(
-CEIL(SQRT(ABS((2 * (sm.daily_sales_avg * 365) * 25) / (NULLIF(p.cost_price, 0) * 0.25)))),
+CEIL(SQRT(
-$7
+(2 * (sm.daily_sales_avg * 365) * ${orderCost}) /
NULLIF(p.cost_price * ${holdingRate}, 0)
)),
${minReorderQty}
)
-ELSE $8
+ELSE ${defaultReorderQty}
END,
overstocked_amt = CASE
-WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > $9
+WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > ${overstockDays}
-THEN GREATEST(0, p.stock_quantity - CEIL(sm.daily_sales_avg * $10))
+THEN GREATEST(0, p.stock_quantity - CEIL(sm.daily_sales_avg * ${overstockDays}))
ELSE 0
END,
last_calculated_at = NOW()
FROM products p
LEFT JOIN temp_sales_metrics sm ON p.pid = sm.pid
LEFT JOIN temp_purchase_metrics lm ON p.pid = lm.pid
-WHERE p.pid = ANY($11::bigint[])
+WHERE p.pid = ANY($1::BIGINT[])
AND pm.pid = p.pid
-`,
+`, [batch.rows.map(row => row.pid)]);
[
defaultThresholds.low_stock_threshold,
defaultThresholds.critical_days,
defaultThresholds.reorder_days,
defaultThresholds.overstock_days,
defaultThresholds.low_stock_threshold,
defaultThresholds.low_stock_threshold,
defaultThresholds.low_stock_threshold,
defaultThresholds.low_stock_threshold,
defaultThresholds.overstock_days,
defaultThresholds.overstock_days,
batch.rows.map(row => row.pid)
]);
lastPid = batch.rows[batch.rows.length - 1].pid;
processedCount += batch.rows.length;
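For readability, the statistical formulas the interpolated SQL above implements, as a hedged JavaScript sketch (all names and numbers are illustrative): safety stock combines demand and lead-time variance, the reorder point adds lead-time demand to that buffer, and the reorder quantity is the classic EOQ bounded below by `min_reorder_qty`.

```js
// Sketch of the safety stock / reorder point / EOQ math used in the UPDATE above.
// z: service-level z-score, d: avg daily demand, sdD: std dev of daily demand,
// lt: avg lead time in days, sdLt: std dev of lead time in days.
function reorderNumbers({ z, d, sdD, lt, sdLt, unitCost, orderCost, holdingRate, minReorderQty }) {
  const safetyStock = Math.ceil(z * Math.sqrt(lt * sdD ** 2 + d ** 2 * sdLt ** 2));
  const reorderPoint = Math.ceil(d * lt) + safetyStock;
  const annualDemand = d * 365;
  const eoq = Math.ceil(Math.sqrt((2 * annualDemand * orderCost) / (unitCost * holdingRate)));
  return { safetyStock, reorderPoint, reorderQty: Math.max(eoq, minReorderQty) };
}

console.log(reorderNumbers({
  z: 1.96, d: 4, sdD: 2, lt: 10, sdLt: 3,
  unitCost: 20, orderCost: 25, holdingRate: 0.25, minReorderQty: 1,
}));
// -> safety stock 27, reorder point 67, reorder qty 121 for these sample inputs
```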
@@ -311,25 +375,22 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
}
// Calculate forecast accuracy and bias in batches
-lastPid = 0;
+let forecastPid = 0;
while (true) {
if (isCancelled) break;
-const batch = await connection.query(
+const forecastBatch = await connection.query(
'SELECT pid FROM products WHERE pid > $1 ORDER BY pid LIMIT $2',
-[lastPid, BATCH_SIZE]
+[forecastPid, BATCH_SIZE]
);
-if (batch.rows.length === 0) break;
+if (forecastBatch.rows.length === 0) break;
const forecastPidArray = forecastBatch.rows.map(row => row.pid);
// Use array_to_string to convert the array to a string of comma-separated values
await connection.query(`
-UPDATE product_metrics pm
+WITH forecast_metrics AS (
SET
forecast_accuracy = GREATEST(0, 100 - LEAST(fa.avg_forecast_error, 100)),
forecast_bias = GREATEST(-100, LEAST(fa.avg_forecast_bias, 100)),
last_forecast_date = fa.last_forecast_date,
last_calculated_at = NOW()
FROM (
SELECT
sf.pid,
AVG(CASE
@@ -348,13 +409,20 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
AND DATE(o.date) = sf.forecast_date
WHERE o.canceled = false
AND sf.forecast_date >= CURRENT_DATE - INTERVAL '90 days'
-AND sf.pid = ANY($1::bigint[])
+AND sf.pid = ANY('{${forecastPidArray.join(',')}}'::BIGINT[])
GROUP BY sf.pid
-) fa
+)
-WHERE pm.pid = fa.pid
+UPDATE product_metrics pm
-`, [batch.rows.map(row => row.pid)]);
+SET
forecast_accuracy = GREATEST(0, 100 - LEAST(fm.avg_forecast_error, 100)),
forecast_bias = GREATEST(-100, LEAST(fm.avg_forecast_bias, 100)),
last_forecast_date = fm.last_forecast_date,
last_calculated_at = NOW()
FROM forecast_metrics fm
WHERE pm.pid = fm.pid
`);
-lastPid = batch.rows[batch.rows.length - 1].pid;
+forecastPid = forecastBatch.rows[forecastBatch.rows.length - 1].pid;
}
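The accuracy/bias clamping applied in that update, as a small hedged sketch (hypothetical helper, not the module's API): accuracy is 100 minus the mean absolute percentage error capped at 100, and bias is the mean signed percentage error clamped to [-100, 100].

```js
// Sketch: accuracy and bias from paired (forecast, actual) daily quantities.
function forecastScores(pairs) {
  const pctErrors = pairs
    .filter(([, actual]) => actual > 0)
    .map(([forecast, actual]) => ((forecast - actual) / actual) * 100);
  if (pctErrors.length === 0) return { accuracy: null, bias: null };
  const mean = (xs) => xs.reduce((a, b) => a + b, 0) / xs.length;
  const avgAbsError = mean(pctErrors.map(Math.abs));
  const avgBias = mean(pctErrors);
  return {
    accuracy: Math.max(0, 100 - Math.min(avgAbsError, 100)), // clamp like the UPDATE above
    bias: Math.max(-100, Math.min(avgBias, 100)),
  };
}

console.log(forecastScores([[12, 10], [8, 10], [10, 10]])); // ~{ accuracy: 86.7, bias: 0 }
```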
// Calculate product time aggregates
@@ -375,61 +443,12 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
}
});
-// Calculate time-based aggregates
+// Note: The time-aggregates calculation has been moved to time-aggregates.js
-await connection.query(`
+// This module will not duplicate that functionality
INSERT INTO product_time_aggregates (
pid,
year,
month,
total_quantity_sold,
total_revenue,
total_cost,
order_count,
avg_price,
profit_margin,
inventory_value,
gmroi
)
SELECT
p.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
SUM(o.quantity) as total_quantity_sold,
SUM(o.price * o.quantity) as total_revenue,
SUM(p.cost_price * o.quantity) as total_cost,
COUNT(DISTINCT o.order_number) as order_count,
AVG(o.price) as avg_price,
CASE
WHEN SUM(o.quantity * o.price) > 0
THEN ((SUM(o.quantity * o.price) - SUM(o.quantity * p.cost_price)) / SUM(o.quantity * o.price)) * 100
ELSE 0
END as profit_margin,
p.cost_price * p.stock_quantity as inventory_value,
CASE
WHEN p.cost_price * p.stock_quantity > 0
THEN (SUM(o.quantity * (o.price - p.cost_price))) / (p.cost_price * p.stock_quantity)
ELSE 0
END as gmroi
FROM products p
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHERE o.date >= CURRENT_DATE - INTERVAL '12 months'
GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone)
ON CONFLICT (pid, year, month) DO UPDATE
SET
total_quantity_sold = EXCLUDED.total_quantity_sold,
total_revenue = EXCLUDED.total_revenue,
total_cost = EXCLUDED.total_cost,
order_count = EXCLUDED.order_count,
avg_price = EXCLUDED.avg_price,
profit_margin = EXCLUDED.profit_margin,
inventory_value = EXCLUDED.inventory_value,
gmroi = EXCLUDED.gmroi
`);
processedCount = Math.floor(totalProducts * 0.6);
outputProgress({
status: 'running',
-operation: 'Product time aggregates calculated',
+operation: 'Product time aggregates calculation delegated to time-aggregates module',
current: processedCount || 0,
total: totalProducts || 0,
elapsed: formatElapsedTime(startTime),
@@ -488,6 +507,10 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
const abcConfig = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
const abcThresholds = abcConfig.rows[0] || { a_threshold: 20, b_threshold: 50 };
// Extract values and ensure they are valid numbers
const aThreshold = parseFloat(abcThresholds.a_threshold) || 20;
const bThreshold = parseFloat(abcThresholds.b_threshold) || 50;
// First, create and populate the rankings table with an index
await connection.query('DROP TABLE IF EXISTS temp_revenue_ranks');
await connection.query(`
@@ -557,13 +580,13 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
OR pm.abc_class !=
CASE
WHEN tr.pid IS NULL THEN 'C'
-WHEN tr.percentile <= $2 THEN 'A'
+WHEN tr.percentile <= ${aThreshold} THEN 'A'
-WHEN tr.percentile <= $3 THEN 'B'
+WHEN tr.percentile <= ${bThreshold} THEN 'B'
ELSE 'C'
END)
ORDER BY pm.pid
-LIMIT $4
+LIMIT $2
-`, [abcProcessedCount, abcThresholds.a_threshold, abcThresholds.b_threshold, batchSize]);
+`, [abcProcessedCount, batchSize]);
if (pids.rows.length === 0) break;
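A hedged sketch of the ABC logic these thresholds drive (illustrative function, not the module's code): rank products by revenue, convert rank to a percentile, then bucket using the `a_threshold` / `b_threshold` cut-offs.

```js
// Sketch: classify products into A/B/C by revenue percentile.
// aThreshold/bThreshold are percentiles, e.g. 20 and 50 as in the defaults above.
function classifyAbc(products, aThreshold = 20, bThreshold = 50) {
  const ranked = [...products].sort((x, y) => y.revenue - x.revenue);
  return ranked.map((p, i) => {
    const percentile = ((i + 1) / ranked.length) * 100; // top sellers get low percentiles
    const abcClass = percentile <= aThreshold ? 'A' : percentile <= bThreshold ? 'B' : 'C';
    return { ...p, percentile, abcClass };
  });
}

console.log(classifyAbc([
  { pid: 1, revenue: 9000 },
  { pid: 2, revenue: 4000 },
  { pid: 3, revenue: 1200 },
  { pid: 4, revenue: 300 },
  { pid: 5, revenue: 50 },
]));
// -> pid 1 lands in 'A' (20th percentile), pid 2 in 'B', the rest in 'C'
```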
@@ -574,15 +597,15 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
SET abc_class =
CASE
WHEN tr.pid IS NULL THEN 'C'
-WHEN tr.percentile <= $1 THEN 'A'
+WHEN tr.percentile <= ${aThreshold} THEN 'A'
-WHEN tr.percentile <= $2 THEN 'B'
+WHEN tr.percentile <= ${bThreshold} THEN 'B'
ELSE 'C'
END,
last_calculated_at = NOW()
FROM (SELECT pid, percentile FROM temp_revenue_ranks) tr
-WHERE pm.pid = tr.pid AND pm.pid = ANY($3::bigint[])
+WHERE pm.pid = tr.pid AND pm.pid = ANY($1::BIGINT[])
-OR (pm.pid = ANY($3::bigint[]) AND tr.pid IS NULL)
+OR (pm.pid = ANY($1::BIGINT[]) AND tr.pid IS NULL)
-`, [abcThresholds.a_threshold, abcThresholds.b_threshold, pidValues]);
+`, [pidValues]);
// Now update turnover rate with proper handling of zero inventory periods
await connection.query(`
@@ -610,7 +633,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
JOIN products p ON o.pid = p.pid
WHERE o.canceled = false
AND o.date >= CURRENT_DATE - INTERVAL '90 days'
-AND o.pid = ANY($1::bigint[])
+AND o.pid = ANY($1::BIGINT[])
GROUP BY o.pid
) sales
WHERE pm.pid = sales.pid
@@ -707,40 +730,7 @@ function calculateStockStatus(stock, config, daily_sales_avg, weekly_sales_avg,
return 'Healthy';
}
-function calculateReorderQuantities(stock, stock_status, daily_sales_avg, avg_lead_time, config) {
+// Note: calculateReorderQuantities function has been removed as its logic has been incorporated
-// Calculate safety stock based on service level and lead time
+// in the main SQL query with configurable parameters
const z_score = 1.96; // 95% service level
const lead_time = avg_lead_time || config.target_days;
const safety_stock = Math.ceil(daily_sales_avg * Math.sqrt(lead_time) * z_score);
// Calculate reorder point
const lead_time_demand = daily_sales_avg * lead_time;
const reorder_point = Math.ceil(lead_time_demand + safety_stock);
// Calculate reorder quantity using EOQ formula if we have the necessary data
let reorder_qty = 0;
if (daily_sales_avg > 0) {
const annual_demand = daily_sales_avg * 365;
const order_cost = 25; // Fixed cost per order
const holding_cost = config.cost_price * 0.25; // 25% of unit cost as annual holding cost
reorder_qty = Math.ceil(Math.sqrt((2 * annual_demand * order_cost) / holding_cost));
} else {
// If no sales data, use a basic calculation
reorder_qty = Math.max(safety_stock, config.low_stock_threshold);
}
// Calculate overstocked amount
const overstocked_amt = stock_status === 'Overstocked' ?
stock - Math.ceil(daily_sales_avg * config.overstock_days) :
0;
return {
safety_stock,
reorder_point,
reorder_qty,
overstocked_amt
};
}
module.exports = calculateProductMetrics;


@@ -216,13 +216,7 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
GREATEST(0,
ROUND(
ds.avg_daily_qty *
-(1 + COALESCE(sf.seasonality_factor, 0)) *
+(1 + COALESCE(sf.seasonality_factor, 0))
CASE
WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.5 THEN 0.85
WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.0 THEN 0.9
WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 0.5 THEN 0.95
ELSE 1.0
END
)
) as forecast_quantity,
CASE
@@ -336,8 +330,8 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
cs.cat_id::bigint as category_id,
fd.forecast_date,
GREATEST(0,
-AVG(cs.daily_quantity) *
+ROUND(AVG(cs.daily_quantity) *
-(1 + COALESCE(sf.seasonality_factor, 0))
+(1 + COALESCE(sf.seasonality_factor, 0)))
) as forecast_units,
GREATEST(0,
COALESCE(
@@ -345,8 +339,7 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
WHEN SUM(cs.day_count) >= 4 THEN AVG(cs.daily_revenue)
ELSE ct.overall_avg_revenue
END *
-(1 + COALESCE(sf.seasonality_factor, 0)) *
+(1 + COALESCE(sf.seasonality_factor, 0)),
(0.95 + (random() * 0.1)),
0
)
) as forecast_revenue,
@@ -427,6 +420,18 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
throw error;
} finally {
if (connection) {
try {
// Ensure temporary tables are cleaned up
await connection.query(`
DROP TABLE IF EXISTS temp_forecast_dates;
DROP TABLE IF EXISTS temp_daily_sales;
DROP TABLE IF EXISTS temp_product_stats;
DROP TABLE IF EXISTS temp_category_sales;
DROP TABLE IF EXISTS temp_category_stats;
`);
} catch (err) {
console.error('Error cleaning up temporary tables:', err);
}
connection.release();
}
}
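The simplified forecast these hunks leave in place, as a hedged sketch with illustrative names: a seasonality-adjusted daily quantity, with the variability dampener and the `random()` jitter dropped as in the removals above.

```js
// Sketch: forecast = average daily quantity scaled by a monthly seasonality factor.
// seasonalityFactor is e.g. 0.15 for a month that runs 15% above baseline.
function forecastDailyUnits(avgDailyQty, seasonalityFactor = 0) {
  return Math.max(0, Math.round(avgDailyQty * (1 + seasonalityFactor)));
}

console.log(forecastDailyUnits(6, 0.15));  // 7 units/day in a strong month
console.log(forecastDailyUnits(6, -0.25)); // 5 units/day in a slow month
console.log(forecastDailyUnits(0.4));      // 0 after rounding
```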


@@ -55,6 +55,93 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
}
});
// Create a temporary table for end-of-month inventory values
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_monthly_inventory AS
WITH months AS (
-- Generate all year/month combinations for the last 12 months
SELECT
EXTRACT(YEAR FROM month_date)::INTEGER as year,
EXTRACT(MONTH FROM month_date)::INTEGER as month,
month_date as start_date,
(month_date + INTERVAL '1 month'::interval - INTERVAL '1 day'::interval)::DATE as end_date
FROM (
SELECT generate_series(
DATE_TRUNC('month', CURRENT_DATE - INTERVAL '12 months'::interval)::DATE,
DATE_TRUNC('month', CURRENT_DATE)::DATE,
INTERVAL '1 month'::interval
) as month_date
) dates
),
monthly_inventory_calc AS (
SELECT
p.pid,
m.year,
m.month,
m.end_date,
p.stock_quantity as current_quantity,
-- Calculate sold during period (before end_date)
COALESCE(SUM(
CASE
WHEN o.date <= m.end_date THEN o.quantity
ELSE 0
END
), 0) as sold_after_end_date,
-- Calculate received during period (before end_date)
COALESCE(SUM(
CASE
WHEN po.received_date <= m.end_date THEN po.received
ELSE 0
END
), 0) as received_after_end_date,
p.cost_price
FROM
products p
CROSS JOIN
months m
LEFT JOIN
orders o ON p.pid = o.pid
AND o.canceled = false
AND o.date > m.end_date
AND o.date <= CURRENT_DATE
LEFT JOIN
purchase_orders po ON p.pid = po.pid
AND po.received_date IS NOT NULL
AND po.received_date > m.end_date
AND po.received_date <= CURRENT_DATE
GROUP BY
p.pid, m.year, m.month, m.end_date, p.stock_quantity, p.cost_price
)
SELECT
pid,
year,
month,
-- End of month quantity = current quantity - sold after + received after
GREATEST(0, current_quantity - sold_after_end_date + received_after_end_date) as end_of_month_quantity,
-- End of month inventory value
GREATEST(0, current_quantity - sold_after_end_date + received_after_end_date) * cost_price as end_of_month_value,
cost_price
FROM
monthly_inventory_calc
`);
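The underlying idea, as a hedged JavaScript sketch with made-up data: a past inventory position can be reconstructed from the current one by adding back units sold since that date and netting out units received since then, the same approach as the beginning-inventory table in financial-metrics.js.

```js
// Sketch: reconstruct the on-hand quantity at a past cut-off date from today's
// quantity plus the movements recorded after that date.
function quantityAt(cutoffIso, currentQty, orders, receipts) {
  const cutoff = new Date(cutoffIso);
  const soldAfter = orders
    .filter((o) => !o.canceled && new Date(o.date) > cutoff)
    .reduce((sum, o) => sum + o.quantity, 0);
  const receivedAfter = receipts
    .filter((r) => r.receivedDate && new Date(r.receivedDate) > cutoff)
    .reduce((sum, r) => sum + r.received, 0);
  // Units sold after the cut-off were still on hand then; units received after were not.
  return Math.max(0, currentQty + soldAfter - receivedAfter);
}

console.log(quantityAt('2024-01-31', 40,
  [{ date: '2024-02-10', quantity: 12, canceled: false }],
  [{ receivedDate: '2024-02-20', received: 20 }],
)); // -> 32 on hand at the end of January
```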
processedCount = Math.floor(totalProducts * 0.40);
outputProgress({
status: 'running',
operation: 'Monthly inventory values calculated, processing time aggregates',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// Initial insert of time-based aggregates
await connection.query(`
INSERT INTO product_time_aggregates (
@@ -75,76 +162,67 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
WITH monthly_sales AS (
SELECT
o.pid,
-EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
+EXTRACT(YEAR FROM o.date::timestamp with time zone)::INTEGER as year,
-EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
+EXTRACT(MONTH FROM o.date::timestamp with time zone)::INTEGER as month,
SUM(o.quantity) as total_quantity_sold,
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
-SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
+SUM(COALESCE(o.costeach, 0) * o.quantity) as total_cost,
COUNT(DISTINCT o.order_number) as order_count,
AVG(o.price - COALESCE(o.discount, 0)) as avg_price,
CASE
WHEN SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) > 0
-THEN ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) - SUM(COALESCE(p.cost_price, 0) * o.quantity))
+THEN ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) - SUM(COALESCE(o.costeach, 0) * o.quantity))
/ SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100
ELSE 0
END as profit_margin,
p.cost_price * p.stock_quantity as inventory_value,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM orders o
JOIN products p ON o.pid = p.pid
WHERE o.canceled = false
-GROUP BY o.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
+GROUP BY o.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone)
),
monthly_stock AS (
SELECT
pid,
-EXTRACT(YEAR FROM date::timestamp with time zone) as year,
+EXTRACT(YEAR FROM date::timestamp with time zone)::INTEGER as year,
-EXTRACT(MONTH FROM date::timestamp with time zone) as month,
+EXTRACT(MONTH FROM date::timestamp with time zone)::INTEGER as month,
SUM(received) as stock_received,
SUM(ordered) as stock_ordered
FROM purchase_orders
GROUP BY pid, EXTRACT(YEAR FROM date::timestamp with time zone), EXTRACT(MONTH FROM date::timestamp with time zone)
-),
+)
base_products AS (
SELECT
p.pid,
p.cost_price * p.stock_quantity as inventory_value
FROM products p
) )
SELECT
-COALESCE(s.pid, ms.pid) as pid,
+COALESCE(s.pid, ms.pid, mi.pid) as pid,
-COALESCE(s.year, ms.year) as year,
+COALESCE(s.year, ms.year, mi.year) as year,
-COALESCE(s.month, ms.month) as month,
+COALESCE(s.month, ms.month, mi.month) as month,
-COALESCE(s.total_quantity_sold, 0) as total_quantity_sold,
+COALESCE(s.total_quantity_sold, 0)::INTEGER as total_quantity_sold,
-COALESCE(s.total_revenue, 0) as total_revenue,
+COALESCE(s.total_revenue, 0)::DECIMAL(10,3) as total_revenue,
-COALESCE(s.total_cost, 0) as total_cost,
+COALESCE(s.total_cost, 0)::DECIMAL(10,3) as total_cost,
-COALESCE(s.order_count, 0) as order_count,
+COALESCE(s.order_count, 0)::INTEGER as order_count,
-COALESCE(ms.stock_received, 0) as stock_received,
+COALESCE(ms.stock_received, 0)::INTEGER as stock_received,
-COALESCE(ms.stock_ordered, 0) as stock_ordered,
+COALESCE(ms.stock_ordered, 0)::INTEGER as stock_ordered,
-COALESCE(s.avg_price, 0) as avg_price,
+COALESCE(s.avg_price, 0)::DECIMAL(10,3) as avg_price,
-COALESCE(s.profit_margin, 0) as profit_margin,
+COALESCE(s.profit_margin, 0)::DECIMAL(10,3) as profit_margin,
-COALESCE(s.inventory_value, bp.inventory_value, 0) as inventory_value,
+COALESCE(mi.end_of_month_value, 0)::DECIMAL(10,3) as inventory_value,
CASE
-WHEN COALESCE(s.inventory_value, bp.inventory_value, 0) > 0
+WHEN COALESCE(mi.end_of_month_value, 0) > 0
-AND COALESCE(s.active_days, 0) > 0
+THEN (COALESCE(s.total_revenue, 0) - COALESCE(s.total_cost, 0))
-THEN (COALESCE(s.total_revenue - s.total_cost, 0) * (365.0 / s.active_days))
+/ NULLIF(COALESCE(mi.end_of_month_value, 0), 0)
/ COALESCE(s.inventory_value, bp.inventory_value)
ELSE 0
-END as gmroi
+END::DECIMAL(10,3) as gmroi
FROM (
SELECT * FROM monthly_sales s
UNION ALL
SELECT
-ms.pid,
+pid,
-ms.year,
+year,
-ms.month,
+month,
0 as total_quantity_sold,
0 as total_revenue,
0 as total_cost,
0 as order_count,
NULL as avg_price,
0 as profit_margin,
NULL as inventory_value,
0 as active_days
FROM monthly_stock ms
WHERE NOT EXISTS (
@@ -153,50 +231,40 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
AND s2.year = ms.year
AND s2.month = ms.month
)
UNION ALL
SELECT
pid,
year,
month,
0 as total_quantity_sold,
0 as total_revenue,
0 as total_cost,
0 as order_count,
NULL as avg_price,
0 as profit_margin,
0 as active_days
FROM temp_monthly_inventory mi
WHERE NOT EXISTS (
SELECT 1 FROM monthly_sales s3
WHERE s3.pid = mi.pid
AND s3.year = mi.year
AND s3.month = mi.month
)
AND NOT EXISTS (
SELECT 1 FROM monthly_stock ms3
WHERE ms3.pid = mi.pid
AND ms3.year = mi.year
AND ms3.month = mi.month
)
) s
LEFT JOIN monthly_stock ms
ON s.pid = ms.pid
AND s.year = ms.year
AND s.month = ms.month
-JOIN base_products bp ON COALESCE(s.pid, ms.pid) = bp.pid
+LEFT JOIN temp_monthly_inventory mi
-UNION
+ON s.pid = mi.pid
-SELECT
+AND s.year = mi.year
-ms.pid,
+AND s.month = mi.month
ms.year,
ms.month,
0 as total_quantity_sold,
0 as total_revenue,
0 as total_cost,
0 as order_count,
ms.stock_received,
ms.stock_ordered,
0 as avg_price,
0 as profit_margin,
bp.inventory_value,
0 as gmroi
FROM monthly_stock ms
JOIN base_products bp ON ms.pid = bp.pid
WHERE NOT EXISTS (
SELECT 1 FROM (
SELECT * FROM monthly_sales
UNION ALL
SELECT
ms2.pid,
ms2.year,
ms2.month,
0, 0, 0, 0, NULL, 0, NULL, 0
FROM monthly_stock ms2
WHERE NOT EXISTS (
SELECT 1 FROM monthly_sales s2
WHERE s2.pid = ms2.pid
AND s2.year = ms2.year
AND s2.month = ms2.month
)
) s
WHERE s.pid = ms.pid
AND s.year = ms.year
AND s.month = ms.month
)
ON CONFLICT (pid, year, month) DO UPDATE
SET
total_quantity_sold = EXCLUDED.total_quantity_sold,
@@ -214,7 +282,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
processedCount = Math.floor(totalProducts * 0.60);
outputProgress({
status: 'running',
-operation: 'Base time aggregates calculated, updating financial metrics',
+operation: 'Base time aggregates calculated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
@@ -235,44 +303,8 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
success
};
-// Update with financial metrics
+// Clean up temporary tables
-await connection.query(`
+await connection.query('DROP TABLE IF EXISTS temp_monthly_inventory');
UPDATE product_time_aggregates pta
SET inventory_value = COALESCE(fin.inventory_value, 0)
FROM (
SELECT
p.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
) fin
WHERE pta.pid = fin.pid
AND pta.year = fin.year
AND pta.month = fin.month
`);
processedCount = Math.floor(totalProducts * 0.65);
outputProgress({
status: 'running',
operation: 'Financial metrics updated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// If we get here, everything completed successfully
success = true;
@@ -298,6 +330,12 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
throw error;
} finally {
if (connection) {
try {
// Ensure temporary tables are cleaned up
await connection.query('DROP TABLE IF EXISTS temp_monthly_inventory');
} catch (err) {
console.error('Error cleaning up temporary tables:', err);
}
connection.release();
}
}


@@ -779,10 +779,16 @@ router.get('/history/calculate', async (req, res) => {
id,
start_time,
end_time,
duration_minutes,
status,
error_message,
-modules_processed::integer,
+total_products,
-total_modules::integer
+total_orders,
total_purchase_orders,
processed_products,
processed_orders,
processed_purchase_orders,
additional_info
FROM calculate_history
ORDER BY start_time DESC
LIMIT 20


@@ -65,6 +65,19 @@ router.get('/', async (req, res) => {
paramCounter++;
}
// Handle text filters for specific fields
if (req.query.barcode) {
conditions.push(`p.barcode ILIKE $${paramCounter}`);
params.push(`%${req.query.barcode}%`);
paramCounter++;
}
if (req.query.vendor_reference) {
conditions.push(`p.vendor_reference ILIKE $${paramCounter}`);
params.push(`%${req.query.vendor_reference}%`);
paramCounter++;
}
// Handle numeric filters with operators
const numericFields = {
stock: 'p.stock_quantity',
@@ -74,11 +87,22 @@ router.get('/', async (req, res) => {
dailySalesAvg: 'pm.daily_sales_avg',
weeklySalesAvg: 'pm.weekly_sales_avg',
monthlySalesAvg: 'pm.monthly_sales_avg',
avgQuantityPerOrder: 'pm.avg_quantity_per_order',
numberOfOrders: 'pm.number_of_orders',
margin: 'pm.avg_margin_percent',
gmroi: 'pm.gmroi',
inventoryValue: 'pm.inventory_value',
costOfGoodsSold: 'pm.cost_of_goods_sold',
grossProfit: 'pm.gross_profit',
turnoverRate: 'pm.turnover_rate',
leadTime: 'pm.current_lead_time',
currentLeadTime: 'pm.current_lead_time',
targetLeadTime: 'pm.target_lead_time',
stockCoverage: 'pm.days_of_inventory',
-daysOfStock: 'pm.days_of_inventory'
+daysOfStock: 'pm.days_of_inventory',
weeksOfStock: 'pm.weeks_of_inventory',
reorderPoint: 'pm.reorder_point',
safetyStock: 'pm.safety_stock'
};
Object.entries(req.query).forEach(([key, value]) => {
@@ -102,6 +126,24 @@ router.get('/', async (req, res) => {
}
});
// Handle date filters
const dateFields = {
firstSaleDate: 'pm.first_sale_date',
lastSaleDate: 'pm.last_sale_date',
lastPurchaseDate: 'pm.last_purchase_date',
firstReceivedDate: 'pm.first_received_date',
lastReceivedDate: 'pm.last_received_date'
};
Object.entries(req.query).forEach(([key, value]) => {
const field = dateFields[key];
if (field) {
conditions.push(`${field}::TEXT LIKE $${paramCounter}`);
params.push(`${value}%`); // Format like '2023-01%' to match by month or '2023-01-01' for exact date
paramCounter++;
}
});
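A hedged sketch of the parameterized filter pattern these additions follow (hypothetical field map and inputs): each recognized query key appends a `$n` placeholder condition and pushes its value, so user input never lands in the SQL string itself, and date fields are matched by prefix, e.g. `2023-01` for a whole month.

```js
// Sketch only: build a WHERE clause the way the route above does.
function buildFilters(query) {
  const conditions = [];
  const params = [];
  let paramCounter = 1;

  const textFields = { barcode: 'p.barcode', vendor_reference: 'p.vendor_reference' };
  const dateFields = { lastSaleDate: 'pm.last_sale_date' };

  for (const [key, column] of Object.entries(textFields)) {
    if (query[key]) {
      conditions.push(`${column} ILIKE $${paramCounter}`);
      params.push(`%${query[key]}%`);
      paramCounter++;
    }
  }
  for (const [key, column] of Object.entries(dateFields)) {
    if (query[key]) {
      conditions.push(`${column}::TEXT LIKE $${paramCounter}`);
      params.push(`${query[key]}%`); // '2023-01' matches the whole month
      paramCounter++;
    }
  }
  return { where: conditions.length ? `WHERE ${conditions.join(' AND ')}` : '', params };
}

console.log(buildFilters({ barcode: '8412', lastSaleDate: '2023-01' }));
// -> WHERE p.barcode ILIKE $1 AND pm.last_sale_date::TEXT LIKE $2, params ['%8412%', '2023-01%']
```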
// Handle select filters // Handle select filters
if (req.query.vendor) { if (req.query.vendor) {
conditions.push(`p.vendor = $${paramCounter}`); conditions.push(`p.vendor = $${paramCounter}`);
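For illustration only (this snippet is not part of the diff): the filters above turn query-string parameters into parameterized WHERE conditions, with barcode and vendor_reference matched as case-insensitive substrings and the new date filters matched by text prefix, so '2024-01' matches any day in that month. A hypothetical client call, assuming the route is mounted under /products:

// Hypothetical client-side usage of the filters added above.
const params = new URLSearchParams({
  barcode: '12345',            // becomes p.barcode ILIKE '%12345%'
  vendor_reference: 'ACME-77', // becomes p.vendor_reference ILIKE '%ACME-77%'
  lastSaleDate: '2024-01',     // matches any last_sale_date in January 2024
});
const res = await fetch(`${config.apiUrl}/products?${params.toString()}`, {
  credentials: 'include',
});
const products = await res.json();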


@@ -1,4 +1,3 @@
- import { useQuery } from '@tanstack/react-query';
  import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
  import { ResponsiveContainer, BarChart, Bar, XAxis, YAxis, Tooltip, ScatterChart, Scatter, ZAxis } from 'recharts';
  import config from '../../config';


@@ -95,12 +95,8 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
    isChangeReverted,
    getFieldDisplayValueWithHighlight,
    fields,
-   debugData,
  }) => {
    const [costPerMillionTokens, setCostPerMillionTokens] = useState(2.5); // Default cost
-   const hasCompanyPrompts =
-     currentPrompt.debugData?.promptSources?.companyPrompts &&
-     currentPrompt.debugData.promptSources.companyPrompts.length > 0;
    // Create our own state to track changes
    const [localReversionState, setLocalReversionState] = useState<
@@ -157,17 +153,6 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
      return !!localReversionState[key];
    };
-   // Use "full" as the default tab
-   const defaultTab = "full";
-   const [activeTab, setActiveTab] = useState(defaultTab);
-   // Update activeTab when the dialog is opened with new data
-   React.useEffect(() => {
-     if (currentPrompt.isOpen) {
-       setActiveTab("full");
-     }
-   }, [currentPrompt.isOpen]);
    // Format time helper
    const formatTime = (seconds: number): string => {
      if (seconds < 60) {
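The formatTime helper is truncated here; only its signature and first branch appear in the diff. A plausible completion, offered purely as an assumption about its behavior (whole seconds under a minute, minutes plus seconds otherwise):

// Hypothetical completion -- not the repository's actual implementation.
const formatTime = (seconds: number): string => {
  if (seconds < 60) {
    return `${Math.round(seconds)}s`;
  }
  const minutes = Math.floor(seconds / 60);
  const remainder = Math.round(seconds % 60);
  return `${minutes}m ${remainder}s`;
};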


@@ -51,7 +51,9 @@ const FILTER_OPTIONS: FilterOption[] = [
    // Basic Info Group
    { id: "search", label: "Search", type: "text", group: "Basic Info" },
    { id: "sku", label: "SKU", type: "text", group: "Basic Info" },
+   { id: "barcode", label: "UPC/Barcode", type: "text", group: "Basic Info" },
    { id: "vendor", label: "Vendor", type: "select", group: "Basic Info" },
+   { id: "vendor_reference", label: "Supplier #", type: "text", group: "Basic Info" },
    { id: "brand", label: "Brand", type: "select", group: "Basic Info" },
    { id: "category", label: "Category", type: "select", group: "Basic Info" },
@@ -84,6 +86,27 @@ const FILTER_OPTIONS: FilterOption[] = [
      group: "Inventory",
      operators: ["=", ">", ">=", "<", "<=", "between"],
    },
+   {
+     id: "weeksOfStock",
+     label: "Weeks of Stock",
+     type: "number",
+     group: "Inventory",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "reorderPoint",
+     label: "Reorder Point",
+     type: "number",
+     group: "Inventory",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "safetyStock",
+     label: "Safety Stock",
+     type: "number",
+     group: "Inventory",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
    {
      id: "replenishable",
      label: "Replenishable",
@@ -94,6 +117,17 @@ const FILTER_OPTIONS: FilterOption[] = [
      ],
      group: "Inventory",
    },
+   {
+     id: "abcClass",
+     label: "ABC Class",
+     type: "select",
+     options: [
+       { label: "A", value: "A" },
+       { label: "B", value: "B" },
+       { label: "C", value: "C" },
+     ],
+     group: "Inventory",
+   },
    // Pricing Group
    {
@@ -140,6 +174,32 @@ const FILTER_OPTIONS: FilterOption[] = [
      group: "Sales Metrics",
      operators: ["=", ">", ">=", "<", "<=", "between"],
    },
+   {
+     id: "avgQuantityPerOrder",
+     label: "Avg Qty/Order",
+     type: "number",
+     group: "Sales Metrics",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "numberOfOrders",
+     label: "Order Count",
+     type: "number",
+     group: "Sales Metrics",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "firstSaleDate",
+     label: "First Sale Date",
+     type: "text",
+     group: "Sales Metrics",
+   },
+   {
+     id: "lastSaleDate",
+     label: "Last Sale Date",
+     type: "text",
+     group: "Sales Metrics",
+   },
    // Financial Metrics Group
    {
@@ -156,6 +216,34 @@ const FILTER_OPTIONS: FilterOption[] = [
      group: "Financial Metrics",
      operators: ["=", ">", ">=", "<", "<=", "between"],
    },
+   {
+     id: "inventoryValue",
+     label: "Inventory Value",
+     type: "number",
+     group: "Financial Metrics",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "costOfGoodsSold",
+     label: "COGS",
+     type: "number",
+     group: "Financial Metrics",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "grossProfit",
+     label: "Gross Profit",
+     type: "number",
+     group: "Financial Metrics",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "turnoverRate",
+     label: "Turnover Rate",
+     type: "number",
+     group: "Financial Metrics",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
    // Lead Time & Stock Coverage Group
    {
@@ -165,6 +253,20 @@ const FILTER_OPTIONS: FilterOption[] = [
      group: "Lead Time & Coverage",
      operators: ["=", ">", ">=", "<", "<=", "between"],
    },
+   {
+     id: "currentLeadTime",
+     label: "Current Lead Time",
+     type: "number",
+     group: "Lead Time & Coverage",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
+   {
+     id: "targetLeadTime",
+     label: "Target Lead Time",
+     type: "number",
+     group: "Lead Time & Coverage",
+     operators: ["=", ">", ">=", "<", "<=", "between"],
+   },
    {
      id: "leadTimeStatus",
      label: "Lead Time Status",
@@ -183,19 +285,26 @@ const FILTER_OPTIONS: FilterOption[] = [
      group: "Lead Time & Coverage",
      operators: ["=", ">", ">=", "<", "<=", "between"],
    },
+   {
+     id: "lastPurchaseDate",
+     label: "Last Purchase Date",
+     type: "text",
+     group: "Lead Time & Coverage",
+   },
+   {
+     id: "firstReceivedDate",
+     label: "First Received Date",
+     type: "text",
+     group: "Lead Time & Coverage",
+   },
+   {
+     id: "lastReceivedDate",
+     label: "Last Received Date",
+     type: "text",
+     group: "Lead Time & Coverage",
+   },
    // Classification Group
-   {
-     id: "abcClass",
-     label: "ABC Class",
-     type: "select",
-     options: [
-       { label: "A", value: "A" },
-       { label: "B", value: "B" },
-       { label: "C", value: "C" },
-     ],
-     group: "Classification",
-   },
    {
      id: "managingStock",
      label: "Managing Stock",
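The FilterOption type itself is outside this diff; a shape consistent with the entries above (an inference, not the project's actual definition) would be:

// Inferred shape -- the real FilterOption definition is not shown in this diff.
type FilterOperator = "=" | ">" | ">=" | "<" | "<=" | "between";

interface FilterOption {
  id: string;                                   // query parameter name sent to the products API
  label: string;                                // text shown in the filter picker
  type: "text" | "number" | "select";           // which input control to render
  group: string;                                // section heading, e.g. "Inventory"
  options?: { label: string; value: string }[]; // choices for "select" filters
  operators?: FilterOperator[];                 // comparison operators for "number" filters
}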


@@ -47,17 +47,16 @@ interface HistoryRecord {
    id: number;
    start_time: string;
    end_time: string | null;
-   duration_minutes: number;
+   duration_minutes?: number;
    status: "running" | "completed" | "failed" | "cancelled";
    error_message: string | null;
    additional_info?: Record<string, any>;
  }
  interface ImportHistoryRecord extends HistoryRecord {
-   table_name: string;
    records_added: number;
    records_updated: number;
-   is_incremental: boolean;
+   is_incremental?: boolean;
  }
  interface CalculateHistoryRecord extends HistoryRecord {
@@ -67,6 +66,7 @@ interface CalculateHistoryRecord extends HistoryRecord {
    processed_products: number;
    processed_orders: number;
    processed_purchase_orders: number;
+   duration_minutes?: number;
  }
  interface ModuleStatus {
@@ -82,13 +82,14 @@ interface TableStatus {
  export function DataManagement() {
    const [isUpdating, setIsUpdating] = useState(false);
    const [isResetting, setIsResetting] = useState(false);
-   const [] = useState<ImportProgress | null>(null);
-   const [eventSource, setEventSource] = useState<EventSource | null>(null);
+   const [isLoading, setIsLoading] = useState(true);
+   const [hasError, setHasError] = useState(false);
    const [importHistory, setImportHistory] = useState<ImportHistoryRecord[]>([]);
    const [calculateHistory, setCalculateHistory] = useState<CalculateHistoryRecord[]>([]);
    const [moduleStatus, setModuleStatus] = useState<ModuleStatus[]>([]);
    const [tableStatus, setTableStatus] = useState<TableStatus[]>([]);
    const [scriptOutput, setScriptOutput] = useState<string[]>([]);
+   const [eventSource, setEventSource] = useState<EventSource | null>(null);
    // Add useRef for scroll handling
    const terminalRef = useRef<HTMLDivElement>(null);
@@ -359,11 +360,14 @@ export function DataManagement() {
    const fetchHistory = async () => {
      try {
+       setIsLoading(true);
+       setHasError(false);
        const [importRes, calcRes, moduleRes, tableRes] = await Promise.all([
-         fetch(`${config.apiUrl}/csv/history/import`),
-         fetch(`${config.apiUrl}/csv/history/calculate`),
-         fetch(`${config.apiUrl}/csv/status/modules`),
-         fetch(`${config.apiUrl}/csv/status/tables`),
+         fetch(`${config.apiUrl}/csv/history/import`, { credentials: 'include' }),
+         fetch(`${config.apiUrl}/csv/history/calculate`, { credentials: 'include' }),
+         fetch(`${config.apiUrl}/csv/status/modules`, { credentials: 'include' }),
+         fetch(`${config.apiUrl}/csv/status/tables`, { credentials: 'include' }),
        ]);
        if (!importRes.ok || !calcRes.ok || !moduleRes.ok || !tableRes.ok) {
@@ -377,18 +381,41 @@ export function DataManagement() {
          tableRes.json(),
        ]);
-       // Ensure we're setting arrays even if the response is empty or invalid
-       setImportHistory(Array.isArray(importData) ? importData : []);
-       setCalculateHistory(Array.isArray(calcData) ? calcData : []);
-       setModuleStatus(Array.isArray(moduleData) ? moduleData : []);
-       setTableStatus(Array.isArray(tableData) ? tableData : []);
+       // Process import history to add duration_minutes if it doesn't exist
+       const processedImportData = (importData || []).map((record: ImportHistoryRecord) => {
+         if (!record.duration_minutes && record.start_time && record.end_time) {
+           const start = new Date(record.start_time).getTime();
+           const end = new Date(record.end_time).getTime();
+           record.duration_minutes = (end - start) / (1000 * 60);
+         }
+         return record;
+       });
+       // Process calculate history to add duration_minutes if it doesn't exist
+       const processedCalcData = (calcData || []).map((record: CalculateHistoryRecord) => {
+         if (!record.duration_minutes && record.start_time && record.end_time) {
+           const start = new Date(record.start_time).getTime();
+           const end = new Date(record.end_time).getTime();
+           record.duration_minutes = (end - start) / (1000 * 60);
+         }
+         return record;
+       });
+       setImportHistory(processedImportData);
+       setCalculateHistory(processedCalcData);
+       setModuleStatus(moduleData || []);
+       setTableStatus(tableData || []);
+       setHasError(false);
      } catch (error) {
        console.error("Error fetching history:", error);
-       // Set empty arrays as fallback
+       setHasError(true);
+       toast.error("Failed to load data. Please try again.");
        setImportHistory([]);
        setCalculateHistory([]);
        setModuleStatus([]);
        setTableStatus([]);
+     } finally {
+       setIsLoading(false);
      }
    };
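The duration fallback in fetchHistory above is written out twice, once per history list; extracted into a helper it would look roughly like this (a refactoring sketch, not code from the diff):

// Sketch of the shared fallback: derive duration_minutes from the timestamps
// whenever the API did not supply it.
function withDurationMinutes<T extends HistoryRecord>(records: T[] | null | undefined): T[] {
  return (records || []).map((record) => {
    if (!record.duration_minutes && record.start_time && record.end_time) {
      const elapsedMs = new Date(record.end_time).getTime() - new Date(record.start_time).getTime();
      record.duration_minutes = elapsedMs / (1000 * 60);
    }
    return record;
  });
}

// Usage inside fetchHistory:
//   setImportHistory(withDurationMinutes(importData));
//   setCalculateHistory(withDurationMinutes(calcData));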
@@ -398,6 +425,7 @@ export function DataManagement() {
      if (!response.ok) throw new Error('Failed to fetch table status');
      const data = await response.json();
      setTableStatus(Array.isArray(data) ? data : []);
+     toast.success("Table status refreshed");
    } catch (error) {
      toast.error("Failed to refresh table status");
      setTableStatus([]);
@@ -441,21 +469,26 @@ export function DataManagement() {
    };
    const refreshAllData = async () => {
+     setIsLoading(true);
      try {
-       await Promise.all([
-         refreshTableStatus(),
-         refreshModuleStatus(),
-         refreshImportHistory(),
-         refreshCalculateHistory()
-       ]);
+       await fetchHistory();
        toast.success("All data refreshed");
      } catch (error) {
-       toast.error("Failed to refresh some data");
+       toast.error("Failed to refresh data");
+     } finally {
+       setIsLoading(false);
      }
    };
    useEffect(() => {
+     // Fetch data immediately on component mount
      fetchHistory();
+     // Set up periodic refresh every minute
+     const refreshInterval = setInterval(fetchHistory, 60000);
+     // Clean up interval on component unmount
+     return () => clearInterval(refreshInterval);
    }, []);
    // Add useEffect to handle auto-scrolling
@@ -607,8 +640,13 @@ export function DataManagement() {
    size="icon"
    onClick={refreshAllData}
    className="h-8 w-8"
+   disabled={isLoading}
  >
-   <RefreshCcw className="h-4 w-4" />
+   {isLoading ? (
+     <Loader2 className="h-4 w-4 animate-spin" />
+   ) : (
+     <RefreshCcw className="h-4 w-4" />
+   )}
  </Button>
  </div>
@@ -620,7 +658,11 @@ export function DataManagement() {
  </CardHeader>
  <CardContent>
    <div className="">
-     {tableStatus.length > 0 ? (
+     {isLoading ? (
+       <div className="flex justify-center py-4">
+         <Loader2 className="h-6 w-6 animate-spin text-gray-400" />
+       </div>
+     ) : tableStatus.length > 0 ? (
        tableStatus.map((table) => (
          <div
            key={table.table_name}
@@ -634,12 +676,17 @@ export function DataManagement() {
        ))
      ) : (
        <div className="text-sm text-muted-foreground py-4 text-center">
-         No imports have been performed yet.<br/>Run a full update or reset to import data.
+         {hasError ? (
+           "Failed to load data. Please try refreshing."
+         ) : (
+           <>No imports have been performed yet.<br/>Run a full update or reset to import data.</>
+         )}
        </div>
      )}
    </div>
  </CardContent>
  </Card>
  {/* Module Status */}
  <Card>
    <CardHeader className="pb-3">
@@ -647,7 +694,11 @@ export function DataManagement() {
  </CardHeader>
  <CardContent>
    <div className="">
-     {moduleStatus.length > 0 ? (
+     {isLoading ? (
+       <div className="flex justify-center py-4">
+         <Loader2 className="h-6 w-6 animate-spin text-gray-400" />
+       </div>
+     ) : moduleStatus.length > 0 ? (
        moduleStatus.map((module) => (
          <div
            key={module.module_name}
@@ -661,13 +712,18 @@ export function DataManagement() {
        ))
      ) : (
        <div className="text-sm text-muted-foreground py-4 text-center">
-         No metrics have been calculated yet.<br/>Run a full update or reset to calculate metrics.
+         {hasError ? (
+           "Failed to load data. Please try refreshing."
+         ) : (
+           <>No metrics have been calculated yet.<br/>Run a full update or reset to calculate metrics.</>
+         )}
        </div>
      )}
    </div>
  </CardContent>
  </Card>
  </div>
  {/* Recent Import History */}
  <Card>
    <CardHeader className="pb-3">
@@ -676,7 +732,16 @@ export function DataManagement() {
  <CardContent className="px-4 mb-4 max-h-[300px] overflow-y-auto">
    <Table>
      <TableBody>
-       {importHistory.length > 0 ? (
+       {isLoading ? (
+         <TableRow>
+           <TableCell className="text-center py-8">
+             <div className="flex flex-col items-center justify-center">
+               <Loader2 className="h-6 w-6 animate-spin text-gray-400 mb-2" />
+               <span className="text-sm text-muted-foreground">Loading import history...</span>
+             </div>
+           </TableCell>
+         </TableRow>
+       ) : importHistory.length > 0 ? (
          importHistory.slice(0, 20).map((record) => (
            <TableRow key={record.id} className="hover:bg-transparent">
              <TableCell className="w-full p-0">
@@ -686,33 +751,41 @@ export function DataManagement() {
                className="border-0"
              >
                <AccordionTrigger className="px-4 py-2">
-                 <div className="flex justify-between items-start w-full pr-4">
-                   <span className="font-medium min-w-[60px]">
-                     #{record.id}
-                   </span>
-                   <span className="text-sm text-gray-600 min-w-[120px]">
-                     {formatDate(record.start_time)}
-                   </span>
-                   <span className="text-sm min-w-[100px]">
-                     {formatDurationWithSeconds(
-                       record.duration_minutes,
-                       record.status === "running",
-                       record.start_time
-                     )}
-                   </span>
-                   <span
-                     className={`min-w-[80px] ${
-                       record.status === "completed"
-                         ? "text-green-600"
-                         : record.status === "failed"
-                         ? "text-red-600"
-                         : record.status === "cancelled"
-                         ? "text-yellow-600"
-                         : "text-blue-600"
-                     }`}
-                   >
-                     {record.status}
-                   </span>
+                 <div className="flex justify-between items-center w-full pr-4">
+                   <div className="w-[50px]">
+                     <span className="font-medium">
+                       #{record.id}
+                     </span>
+                   </div>
+                   <div className="w-[170px]">
+                     <span className="text-sm text-gray-600">
+                       {formatDate(record.start_time)}
+                     </span>
+                   </div>
+                   <div className="w-[140px]">
+                     <span className="text-sm">
+                       {formatDurationWithSeconds(
+                         record.duration_minutes || 0,
+                         record.status === "running",
+                         record.start_time
+                       )}
+                     </span>
+                   </div>
+                   <div className="w-[80px]">
+                     <span
+                       className={`${
+                         record.status === "completed"
+                           ? "text-green-600"
+                           : record.status === "failed"
+                           ? "text-red-600"
+                           : record.status === "cancelled"
+                           ? "text-yellow-600"
+                           : "text-blue-600"
+                       }`}
+                     >
+                       {record.status}
+                     </span>
+                   </div>
                  </div>
                </AccordionTrigger>
                <AccordionContent className="px-4 pb-2">
@@ -749,7 +822,11 @@ export function DataManagement() {
        ) : (
          <TableRow>
            <TableCell className="text-center text-sm text-muted-foreground py-4">
-             No import history available
+             {hasError ? (
+               "Failed to load import history. Please try refreshing."
+             ) : (
+               "No import history available"
+             )}
            </TableCell>
          </TableRow>
        )}
@@ -766,7 +843,16 @@ export function DataManagement() {
  <CardContent className="px-4 mb-4 max-h-[300px] overflow-y-auto">
    <Table>
      <TableBody>
-       {calculateHistory.length > 0 ? (
+       {isLoading ? (
+         <TableRow>
+           <TableCell className="text-center py-8">
+             <div className="flex flex-col items-center justify-center">
+               <Loader2 className="h-6 w-6 animate-spin text-gray-400 mb-2" />
+               <span className="text-sm text-muted-foreground">Loading calculation history...</span>
+             </div>
+           </TableCell>
+         </TableRow>
+       ) : calculateHistory.length > 0 ? (
          calculateHistory.slice(0, 20).map((record) => (
            <TableRow key={record.id} className="hover:bg-transparent">
              <TableCell className="w-full p-0">
@@ -776,34 +862,41 @@ export function DataManagement() {
                className="border-0"
              >
                <AccordionTrigger className="px-4 py-2">
-                 <div className="flex justify-between items-start w-full pr-4">
-                   <span className="font-medium min-w-[60px]">
-                     #{record.id}
-                   </span>
-                   <span className="text-sm text-gray-600 min-w-[120px]">
-                     {formatDate(record.start_time)}
-                   </span>
-                   <span className="text-sm min-w-[100px]">
-                     {formatDurationWithSeconds(
-                       record.duration_minutes,
-                       record.status === "running",
-                       record.start_time
-                     )}
-                   </span>
-                   <span
-                     className={`min-w-[80px] ${
-                       record.status === "completed"
-                         ? "text-green-600"
-                         : record.status === "failed"
-                         ? "text-red-600"
-                         : record.status === "cancelled"
-                         ? "text-yellow-600"
-                         : "text-blue-600"
-                     }`}
-                   >
-                     {record.status}
-                   </span>
+                 <div className="flex justify-between items-center w-full pr-4">
+                   <div className="w-[50px]">
+                     <span className="font-medium">
+                       #{record.id}
+                     </span>
+                   </div>
+                   <div className="w-[170px]">
+                     <span className="text-sm text-gray-600">
+                       {formatDate(record.start_time)}
+                     </span>
+                   </div>
+                   <div className="w-[140px]">
+                     <span className="text-sm">
+                       {formatDurationWithSeconds(
+                         record.duration_minutes || 0,
+                         record.status === "running",
+                         record.start_time
+                       )}
+                     </span>
+                   </div>
+                   <div className="w-[80px]">
+                     <span
+                       className={`${
+                         record.status === "completed"
+                           ? "text-green-600"
+                           : record.status === "failed"
+                           ? "text-red-600"
+                           : record.status === "cancelled"
+                           ? "text-yellow-600"
+                           : "text-blue-600"
+                       }`}
+                     >
+                       {record.status}
+                     </span>
+                   </div>
                  </div>
                </AccordionTrigger>
                <AccordionContent className="px-4 pb-2">
@@ -817,28 +910,22 @@ export function DataManagement() {
            </span>
          </div>
          <div className="flex justify-between text-sm">
-           <span className="text-gray-600">
-             Processed Products:
-           </span>
-           <span>{record.processed_products}</span>
+           <span className="text-gray-600">Products:</span>
+           <span>{record.processed_products} of {record.total_products}</span>
          </div>
          <div className="flex justify-between text-sm">
-           <span className="text-gray-600">
-             Processed Orders:
-           </span>
-           <span>{record.processed_orders}</span>
+           <span className="text-gray-600">Orders:</span>
+           <span>{record.processed_orders} of {record.total_orders}</span>
          </div>
          <div className="flex justify-between text-sm">
-           <span className="text-gray-600">
-             Processed Purchase Orders:
-           </span>
-           <span>{record.processed_purchase_orders}</span>
+           <span className="text-gray-600">Purchase Orders:</span>
+           <span>{record.processed_purchase_orders} of {record.total_purchase_orders}</span>
          </div>
          {record.error_message && (
            <div className="text-sm text-red-600 mt-2">
              {record.error_message}
            </div>
          )}
          {record.additional_info &&
            formatJsonData(record.additional_info)}
        </div>
@@ -851,14 +938,18 @@ export function DataManagement() {
      ) : (
        <TableRow>
          <TableCell className="text-center text-sm text-muted-foreground py-4">
-           No calculation history available
+           {hasError ? (
+             "Failed to load calculation history. Please try refreshing."
+           ) : (
+             "No calculation history available"
+           )}
          </TableCell>
        </TableRow>
      )}
    </TableBody>
  </Table>
  </CardContent>
  </Card>
  </div>
  </div>
  );
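formatDurationWithSeconds is called above with a minutes value, a "still running" flag, and the start timestamp, but its body is not part of this diff; a plausible implementation under those assumptions:

// Hypothetical helper -- signature inferred from the call sites above.
const formatDurationWithSeconds = (
  durationMinutes: number,
  isRunning: boolean,
  startTime: string
): string => {
  // While a run is still in progress, show elapsed time from its start instead.
  const totalSeconds = isRunning
    ? Math.max(0, (Date.now() - new Date(startTime).getTime()) / 1000)
    : durationMinutes * 60;
  const minutes = Math.floor(totalSeconds / 60);
  const seconds = Math.round(totalSeconds % 60);
  return minutes > 0 ? `${minutes}m ${seconds}s` : `${seconds}s`;
};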


@@ -1,4 +1,4 @@
- import { useState, useMemo, useCallback, useRef, useEffect } from "react";
+ import { useState, useMemo, useCallback, useEffect } from "react";
  import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
  import { Button } from "@/components/ui/button";
  import {
@@ -90,7 +90,7 @@
  }: {
    editingImage: ReusableImage | null;
    formData: ImageFormData;
-   setFormData: (data: ImageFormData) => void;
+   setFormData: (data: ImageFormData | ((prev: ImageFormData) => ImageFormData)) => void;
    onSubmit: (e: React.FormEvent) => void;
    onCancel: () => void;
    fieldOptions: FieldOptions | undefined;
@@ -99,11 +99,11 @@
    isDragActive: boolean;
  }) => {
    const handleNameChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
-     setFormData(prev => ({ ...prev, name: e.target.value }));
+     setFormData((prev: ImageFormData) => ({ ...prev, name: e.target.value }));
    }, [setFormData]);
    const handleGlobalChange = useCallback((checked: boolean) => {
-     setFormData(prev => ({
+     setFormData((prev: ImageFormData) => ({
        ...prev,
        is_global: checked,
        company: checked ? null : prev.company
@@ -111,7 +111,7 @@
    }, [setFormData]);
    const handleCompanyChange = useCallback((value: string) => {
-     setFormData(prev => ({ ...prev, company: value }));
+     setFormData((prev: ImageFormData) => ({ ...prev, company: value }));
    }, [setFormData]);
    return (
@@ -738,12 +738,18 @@ export function ReusableImageManagement() {
        </DialogContent>
      </Dialog>
-     <style jsx global>{`
+     {/* Add global styles for this component using regular style tag */}
+     <style>{`
+       .reusable-image-table thead tr th,
+       .reusable-image-table tbody tr td {
+         padding-left: 1rem;
+         padding-right: 1rem;
+       }
        .bg-checkerboard {
          background-image: linear-gradient(45deg, #f0f0f0 25%, transparent 25%),
            linear-gradient(-45deg, #f0f0f0 25%, transparent 25%),
            linear-gradient(45deg, transparent 75%, #f0f0f0 75%),
            linear-gradient(-45deg, transparent 75%, #f0f0f0 75%);
          background-size: 20px 20px;
          background-position: 0 0, 0 10px, 10px -10px, -10px 0px;
        }
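Widening the setFormData prop type earlier in this file to accept an updater function is what lets the handlers use the prev => ... form safely. A self-contained sketch of that pattern, with an assumed minimal ImageFormData shape (the real type has more fields):

// Assumed minimal shape for illustration only.
interface ImageFormData {
  name: string;
  is_global: boolean;
  company: string | null;
}

// Accepting either a value or an updater mirrors React's own setState contract,
// so handlers merge into the latest state rather than a stale closure copy.
type SetFormData = (data: ImageFormData | ((prev: ImageFormData) => ImageFormData)) => void;

const handleGlobalChange = (setFormData: SetFormData, checked: boolean) =>
  setFormData((prev) => ({
    ...prev,
    is_global: checked,
    company: checked ? null : prev.company,
  }));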


@@ -55,10 +55,13 @@ const AVAILABLE_COLUMNS: ColumnDef[] = [
    { key: 'stock_quantity', label: 'Shelf Count', group: 'Stock', format: (v) => v?.toString() ?? '-' },
    { key: 'stock_status', label: 'Stock Status', group: 'Stock' },
    { key: 'days_of_inventory', label: 'Days of Stock', group: 'Stock', format: (v) => v?.toFixed(1) ?? '-' },
+   { key: 'weeks_of_inventory', label: 'Weeks of Stock', group: 'Stock', format: (v) => v?.toFixed(1) ?? '-' },
    { key: 'abc_class', label: 'ABC Class', group: 'Stock' },
    { key: 'replenishable', label: 'Replenishable', group: 'Stock' },
    { key: 'moq', label: 'MOQ', group: 'Stock', format: (v) => v?.toString() ?? '-' },
    { key: 'reorder_qty', label: 'Reorder Qty', group: 'Stock', format: (v) => v?.toString() ?? '-' },
+   { key: 'reorder_point', label: 'Reorder Point', group: 'Stock', format: (v) => v?.toString() ?? '-' },
+   { key: 'safety_stock', label: 'Safety Stock', group: 'Stock', format: (v) => v?.toString() ?? '-' },
    { key: 'overstocked_amt', label: 'Overstock Amt', group: 'Stock', format: (v) => v?.toString() ?? '-' },
    { key: 'price', label: 'Price', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
    { key: 'regular_price', label: 'Default Price', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
@@ -67,15 +70,22 @@ const AVAILABLE_COLUMNS: ColumnDef[] = [
    { key: 'daily_sales_avg', label: 'Daily Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
    { key: 'weekly_sales_avg', label: 'Weekly Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
    { key: 'monthly_sales_avg', label: 'Monthly Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
+   { key: 'avg_quantity_per_order', label: 'Avg Qty/Order', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
+   { key: 'number_of_orders', label: 'Order Count', group: 'Sales', format: (v) => v?.toString() ?? '-' },
    { key: 'first_sale_date', label: 'First Sale', group: 'Sales' },
    { key: 'last_sale_date', label: 'Last Sale', group: 'Sales' },
    { key: 'gmroi', label: 'GMROI', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
    { key: 'turnover_rate', label: 'Turnover Rate', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
    { key: 'avg_margin_percent', label: 'Margin %', group: 'Financial', format: (v) => v ? `${v.toFixed(1)}%` : '-' },
+   { key: 'inventory_value', label: 'Inventory Value', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
+   { key: 'cost_of_goods_sold', label: 'COGS', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
+   { key: 'gross_profit', label: 'Gross Profit', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
    { key: 'current_lead_time', label: 'Current Lead Time', group: 'Lead Time', format: (v) => v?.toFixed(1) ?? '-' },
    { key: 'target_lead_time', label: 'Target Lead Time', group: 'Lead Time', format: (v) => v?.toFixed(1) ?? '-' },
    { key: 'lead_time_status', label: 'Lead Time Status', group: 'Lead Time' },
    { key: 'last_purchase_date', label: 'Last Purchase', group: 'Lead Time' },
+   { key: 'first_received_date', label: 'First Received', group: 'Lead Time' },
+   { key: 'last_received_date', label: 'Last Received', group: 'Lead Time' },
  ];
  // Define default columns for each view
@@ -93,14 +103,17 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'daily_sales_avg',
    'weekly_sales_avg',
    'monthly_sales_avg',
+   'inventory_value',
  ],
  critical: [
    'image',
    'title',
    'stock_quantity',
+   'safety_stock',
    'daily_sales_avg',
    'weekly_sales_avg',
    'reorder_qty',
+   'reorder_point',
    'vendor',
    'last_purchase_date',
    'current_lead_time',
@@ -109,11 +122,13 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'image',
    'title',
    'stock_quantity',
+   'reorder_point',
    'daily_sales_avg',
    'weekly_sales_avg',
    'reorder_qty',
    'vendor',
    'last_purchase_date',
+   'avg_lead_time_days',
  ],
  overstocked: [
    'image',
@@ -123,15 +138,19 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'weekly_sales_avg',
    'overstocked_amt',
    'days_of_inventory',
+   'inventory_value',
+   'turnover_rate',
  ],
  'at-risk': [
    'image',
    'title',
    'stock_quantity',
+   'safety_stock',
    'daily_sales_avg',
    'weekly_sales_avg',
    'days_of_inventory',
    'last_sale_date',
+   'current_lead_time',
  ],
  new: [
    'image',
@@ -141,6 +160,7 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'brand',
    'price',
    'regular_price',
+   'first_received_date',
  ],
  healthy: [
    'image',
@@ -150,6 +170,8 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'weekly_sales_avg',
    'monthly_sales_avg',
    'days_of_inventory',
+   'gross_profit',
+   'gmroi',
  ],
  };
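The ColumnDef type and the way its optional format callback is applied are outside this diff; a sketch consistent with the column definitions above (the type and render helper are assumptions, not the project's code):

// Inferred column shape; the project's real ColumnDef may differ.
type ColumnKey = string;

interface ColumnDef {
  key: ColumnKey;                  // field name on the product row
  label: string;                   // table header text
  group: string;                   // e.g. 'Stock', 'Sales', 'Financial'
  format?: (value: any) => string; // optional cell formatter, '-' for missing values
}

// Hypothetical cell renderer: fall back to the raw value when no formatter is given.
function renderCell(column: ColumnDef, row: Record<string, any>): string {
  const value = row[column.key];
  return column.format ? column.format(value) : value?.toString() ?? '-';
}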

File diff suppressed because one or more lines are too long