Improve and debug calculate metrics script
@@ -30,6 +30,10 @@ CREATE TABLE IF NOT EXISTS product_metrics (
daily_sales_avg DECIMAL(10,3),
weekly_sales_avg DECIMAL(10,3),
monthly_sales_avg DECIMAL(10,3),
avg_quantity_per_order DECIMAL(10,3),
number_of_orders INT,
first_sale_date DATE,
last_sale_date DATE,
-- Stock metrics
days_of_inventory INT,
weeks_of_inventory INT,
@@ -38,6 +42,7 @@ CREATE TABLE IF NOT EXISTS product_metrics (
-- Financial metrics
avg_margin_percent DECIMAL(10,3),
total_revenue DECIMAL(10,3),
inventory_value DECIMAL(10,3),
-- Purchase metrics
avg_lead_time_days INT,
last_purchase_date DATE,
@@ -46,7 +51,8 @@ CREATE TABLE IF NOT EXISTS product_metrics (
abc_class CHAR(1),
stock_status VARCHAR(20),
PRIMARY KEY (product_id),
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
INDEX idx_metrics_revenue (total_revenue)
);

-- New table for time-based aggregates
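The sales and financial columns added to product_metrics above lend themselves to simple ranking queries; a rough sketch, assuming only the column names defined in this migration:

-- Top products by lifetime revenue; the new idx_metrics_revenue index can serve the ORDER BY here
SELECT product_id, total_revenue, number_of_orders, avg_margin_percent
FROM product_metrics
ORDER BY total_revenue DESC
LIMIT 20;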
@@ -82,11 +88,103 @@ CREATE TABLE IF NOT EXISTS vendor_metrics (
PRIMARY KEY (vendor)
);

-- New table for stock threshold configurations
CREATE TABLE IF NOT EXISTS stock_thresholds (
id INT NOT NULL, -- Changed from AUTO_INCREMENT to explicitly set ID
category_id BIGINT, -- NULL means default/global threshold
vendor VARCHAR(100), -- NULL means applies to all vendors
critical_days INT NOT NULL DEFAULT 7,
reorder_days INT NOT NULL DEFAULT 14,
overstock_days INT NOT NULL DEFAULT 90,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor),
INDEX idx_thresholds_category_vendor (category_id, vendor)
);

-- Insert default thresholds with ID=1
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (1, NULL, NULL, 7, 14, 90)
ON DUPLICATE KEY UPDATE
critical_days = VALUES(critical_days),
reorder_days = VALUES(reorder_days),
overstock_days = VALUES(overstock_days);
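More specific thresholds can be seeded the same way, since category_id and vendor are nullable and guarded by unique_category_vendor; a minimal sketch with hypothetical ID, category, and vendor values:

-- Hypothetical override: tighter thresholds for category 42 when supplied by 'Acme'
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (2, 42, 'Acme', 3, 7, 60)
ON DUPLICATE KEY UPDATE
critical_days = VALUES(critical_days),
reorder_days = VALUES(reorder_days),
overstock_days = VALUES(overstock_days);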
-- Re-enable foreign key checks
SET FOREIGN_KEY_CHECKS = 1;

-- Create view for inventory health (after all tables are created)
CREATE OR REPLACE VIEW inventory_health AS
WITH product_thresholds AS (
SELECT
p.product_id,
COALESCE(
-- Try category+vendor specific
(SELECT critical_days FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor = p.vendor LIMIT 1),
-- Try category specific
(SELECT critical_days FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor IS NULL LIMIT 1),
-- Try vendor specific
(SELECT critical_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor = p.vendor LIMIT 1),
-- Fall back to default
(SELECT critical_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor IS NULL LIMIT 1),
7
) as critical_days,
COALESCE(
-- Try category+vendor specific
(SELECT reorder_days FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor = p.vendor LIMIT 1),
-- Try category specific
(SELECT reorder_days FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor IS NULL LIMIT 1),
-- Try vendor specific
(SELECT reorder_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor = p.vendor LIMIT 1),
-- Fall back to default
(SELECT reorder_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor IS NULL LIMIT 1),
14
) as reorder_days,
COALESCE(
-- Try category+vendor specific
(SELECT overstock_days FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor = p.vendor LIMIT 1),
-- Try category specific
(SELECT overstock_days FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor IS NULL LIMIT 1),
-- Try vendor specific
(SELECT overstock_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor = p.vendor LIMIT 1),
-- Fall back to default
(SELECT overstock_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor IS NULL LIMIT 1),
90
) as overstock_days
FROM products p
)
SELECT
p.product_id,
p.SKU,
@@ -97,15 +195,18 @@ SELECT
COALESCE(pm.reorder_point, 0) as reorder_point,
COALESCE(pm.safety_stock, 0) as safety_stock,
CASE
WHEN p.stock_quantity <= COALESCE(pm.safety_stock, 0) THEN 'Critical'
WHEN p.stock_quantity <= COALESCE(pm.reorder_point, 0) THEN 'Reorder'
WHEN p.stock_quantity > (COALESCE(pm.daily_sales_avg, 0) * 90) THEN 'Overstocked'
WHEN pm.daily_sales_avg = 0 THEN 'New'
WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * pt.critical_days) THEN 'Critical'
WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * pt.reorder_days) THEN 'Reorder'
WHEN p.stock_quantity > (pm.daily_sales_avg * pt.overstock_days) THEN 'Overstocked'
ELSE 'Healthy'
END as stock_status
FROM
products p
LEFT JOIN
product_metrics pm ON p.product_id = pm.product_id
LEFT JOIN
product_thresholds pt ON p.product_id = pt.product_id
WHERE
p.managing_stock = true;
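With the view in place, the threshold-driven statuses can be spot-checked directly; a quick sketch against the columns selected above:

-- Products that the configurable thresholds flag for attention
SELECT product_id, SKU, stock_status
FROM inventory_health
WHERE stock_status IN ('Critical', 'Reorder')
ORDER BY stock_status, product_id;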
inventory-server/db/migrations/add_metrics_indexes.sql (new file, 17 lines)
@@ -0,0 +1,17 @@
-- Indexes for orders table
CREATE INDEX IF NOT EXISTS idx_orders_product_date ON orders(product_id, date);
CREATE INDEX IF NOT EXISTS idx_orders_date ON orders(date);

-- Indexes for purchase_orders table
CREATE INDEX IF NOT EXISTS idx_po_product_date ON purchase_orders(product_id, date);
CREATE INDEX IF NOT EXISTS idx_po_product_status ON purchase_orders(product_id, status);
CREATE INDEX IF NOT EXISTS idx_po_vendor ON purchase_orders(vendor);

-- Indexes for product_metrics table
CREATE INDEX IF NOT EXISTS idx_metrics_revenue ON product_metrics(total_revenue);

-- Indexes for stock_thresholds table
CREATE INDEX IF NOT EXISTS idx_thresholds_category_vendor ON stock_thresholds(category_id, vendor);

-- Indexes for product_categories table
CREATE INDEX IF NOT EXISTS idx_product_categories_both ON product_categories(product_id, category_id);
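Whether a given query actually picks up one of these indexes can be checked with EXPLAIN; a sketch using a hypothetical product ID:

-- The key column should show idx_orders_product_date or another index starting with (product_id, date)
EXPLAIN
SELECT product_id, SUM(quantity) AS qty_30d
FROM orders
WHERE product_id = 12345
AND date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY product_id;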
@@ -50,7 +50,8 @@ CREATE TABLE product_categories (
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
INDEX idx_category (category_id),
INDEX idx_product (product_id)
INDEX idx_product (product_id),
INDEX idx_product_categories_both (product_id, category_id)
) ENGINE=InnoDB;

-- Create orders table with its indexes
@@ -80,6 +81,7 @@ CREATE TABLE orders (
INDEX idx_date (date),
INDEX idx_status (status),
INDEX idx_orders_metrics (product_id, date, canceled, quantity, price),
INDEX idx_orders_product_date (product_id, date),
UNIQUE KEY unique_order_product (order_number, product_id)
) ENGINE=InnoDB;

@@ -104,6 +106,8 @@ CREATE TABLE purchase_orders (
INDEX idx_vendor (vendor),
INDEX idx_status (status),
INDEX idx_purchase_orders_metrics (product_id, date, status, ordered, received),
INDEX idx_po_product_date (product_id, date),
INDEX idx_po_product_status (product_id, status),
UNIQUE KEY unique_po_product (po_id, product_id)
) ENGINE=InnoDB;

@@ -106,7 +106,11 @@ async function calculateMetrics() {

try {
// Get total number of products
const [countResult] = await connection.query('SELECT COUNT(*) as total FROM products');
const [countResult] = await connection.query('SELECT COUNT(*) as total FROM products')
.catch(err => {
logError(err, 'Failed to count products');
throw err;
});
totalProducts = countResult[0].total;

// Initial progress with percentage
@@ -128,7 +132,11 @@ async function calculateMetrics() {
throw new Error('Operation cancelled');
}

const [products] = await connection.query('SELECT product_id FROM products LIMIT ? OFFSET ?', [batchSize, offset]);
const [products] = await connection.query('SELECT product_id, vendor FROM products LIMIT ? OFFSET ?', [batchSize, offset])
.catch(err => {
logError(err, `Failed to fetch products batch at offset ${offset}`);
throw err;
});
processedCount += products.length;

// Update progress after each batch
@@ -144,128 +152,242 @@ async function calculateMetrics() {
});

// Process the batch
const metricsUpdates = [];
for (const product of products) {
// Calculate sales metrics
const [salesMetrics] = await connection.query(`
SELECT
SUM(o.quantity) as total_quantity_sold,
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
MAX(o.date) as last_sale_date
FROM orders o
JOIN products p ON o.product_id = p.product_id
WHERE o.canceled = 0 AND o.product_id = ?
GROUP BY o.product_id
`, [product.product_id]);
try {
// Calculate sales metrics with trends
const [salesMetrics] = await connection.query(`
WITH sales_summary AS (
SELECT
SUM(o.quantity) as total_quantity_sold,
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
MAX(o.date) as last_sale_date,
MIN(o.date) as first_sale_date,
COUNT(DISTINCT o.order_number) as number_of_orders,
AVG(o.quantity) as avg_quantity_per_order,
-- Calculate rolling averages
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY) THEN o.quantity ELSE 0 END) as last_7_days_qty
FROM orders o
JOIN products p ON o.product_id = p.product_id
WHERE o.canceled = 0 AND o.product_id = ?
GROUP BY o.product_id
)
SELECT
total_quantity_sold,
total_revenue,
total_cost,
last_sale_date,
first_sale_date,
number_of_orders,
avg_quantity_per_order,
last_30_days_qty / 30 as rolling_daily_avg,
last_7_days_qty / 7 as rolling_weekly_avg,
total_quantity_sold as total_sales_to_date
FROM sales_summary
`, [product.product_id]).catch(err => {
logError(err, `Failed to calculate sales metrics for product ${product.product_id}`);
throw err;
});

// Calculate purchase metrics
const [purchaseMetrics] = await connection.query(`
SELECT
SUM(received) as total_quantity_purchased,
SUM(cost_price * received) as total_cost,
MAX(date) as last_purchase_date,
MAX(received_date) as last_received_date,
AVG(DATEDIFF(received_date, date)) as avg_lead_time_days
FROM purchase_orders
WHERE status = 'closed' AND received > 0 AND product_id = ?
GROUP BY product_id
`, [product.product_id]);
// Calculate purchase metrics with proper handling of negative quantities
const [purchaseMetrics] = await connection.query(`
WITH recent_orders AS (
SELECT
date,
received_date,
received,
cost_price,
DATEDIFF(received_date, date) as lead_time_days,
ROW_NUMBER() OVER (ORDER BY date DESC) as order_rank
FROM purchase_orders
WHERE status = 'closed'
AND product_id = ?
AND received > 0
AND received_date IS NOT NULL
),
lead_time_orders AS (
SELECT *
FROM recent_orders
WHERE order_rank <= 5 -- Last 5 orders
OR date >= DATE_SUB(CURDATE(), INTERVAL 90 DAY) -- Or orders from last 90 days
)
SELECT
SUM(CASE WHEN received >= 0 THEN received ELSE 0 END) as total_quantity_purchased,
SUM(CASE WHEN received >= 0 THEN cost_price * received ELSE 0 END) as total_cost,
MAX(date) as last_purchase_date,
MAX(received_date) as last_received_date,
AVG(lead_time_days) as avg_lead_time_days,
COUNT(*) as orders_analyzed
FROM lead_time_orders
`, [product.product_id]).catch(err => {
logError(err, `Failed to calculate purchase metrics for product ${product.product_id}`);
throw err;
});

// Get current stock
const [stockInfo] = await connection.query(`
SELECT stock_quantity, cost_price
FROM products
WHERE product_id = ?
`, [product.product_id]);
// Get current stock
const [stockInfo] = await connection.query(`
SELECT stock_quantity, cost_price
FROM products
WHERE product_id = ?
`, [product.product_id]).catch(err => {
logError(err, `Failed to get stock info for product ${product.product_id}`);
throw err;
});

// Calculate metrics
const metrics = salesMetrics[0] || {};
const purchases = purchaseMetrics[0] || {};
const stock = stockInfo[0] || {};
// Get stock thresholds for this product's category/vendor
const [thresholds] = await connection.query(`
WITH product_info AS (
SELECT
p.product_id,
p.vendor,
pc.category_id
FROM products p
LEFT JOIN product_categories pc ON p.product_id = pc.product_id
WHERE p.product_id = ?
),
threshold_options AS (
SELECT
st.*,
CASE
WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1 -- Category + vendor match
WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2 -- Category match
WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3 -- Vendor match
WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4 -- Default
ELSE 5
END as priority
FROM product_info pi
CROSS JOIN stock_thresholds st
WHERE (st.category_id = pi.category_id OR st.category_id IS NULL)
AND (st.vendor = pi.vendor OR st.vendor IS NULL)
)
SELECT
COALESCE(
(SELECT critical_days
FROM threshold_options
ORDER BY priority LIMIT 1),
7
) as critical_days,
COALESCE(
(SELECT reorder_days
FROM threshold_options
ORDER BY priority LIMIT 1),
14
) as reorder_days,
COALESCE(
(SELECT overstock_days
FROM threshold_options
ORDER BY priority LIMIT 1),
90
) as overstock_days
`, [product.product_id]).catch(err => {
logError(err, `Failed to get thresholds for product ${product.product_id}`);
throw err;
});

const daily_sales_avg = metrics.total_quantity_sold ? metrics.total_quantity_sold / 30 : 0;
const weekly_sales_avg = metrics.total_quantity_sold ? metrics.total_quantity_sold / 4 : 0;
const monthly_sales_avg = metrics.total_quantity_sold || 0;
const threshold = thresholds[0] || { critical_days: 7, reorder_days: 14, overstock_days: 90 };

// Calculate margin percent with proper handling of edge cases
let margin_percent = 0;
if (metrics.total_revenue && metrics.total_revenue > 0) {
margin_percent = ((metrics.total_revenue - metrics.total_cost) / metrics.total_revenue) * 100;
// Handle -Infinity or Infinity cases
margin_percent = isFinite(margin_percent) ? margin_percent : 0;
// Calculate metrics
const metrics = salesMetrics[0] || {};
const purchases = purchaseMetrics[0] || {};
const stock = stockInfo[0] || {};

const daily_sales_avg = metrics.rolling_daily_avg || 0;
const weekly_sales_avg = metrics.rolling_weekly_avg || 0;
const monthly_sales_avg = metrics.total_quantity_sold ? metrics.total_quantity_sold / 30 : 0;

// Calculate margin percent with proper handling of zero revenue
const margin_percent = metrics.total_revenue > 0 ?
((metrics.total_revenue - metrics.total_cost) / metrics.total_revenue) * 100 :
null;

// Calculate current inventory value
const inventory_value = (stock.stock_quantity || 0) * (stock.cost_price || 0);

// Calculate stock status using configurable thresholds with proper handling of zero sales
const stock_status = daily_sales_avg === 0 ? 'New' :
stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) ? 'Critical' :
stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) ? 'Reorder' :
stock.stock_quantity > Math.max(1, daily_sales_avg * threshold.overstock_days) ? 'Overstocked' : 'Healthy';

// Add to batch update
metricsUpdates.push([
product.product_id,
daily_sales_avg || null,
weekly_sales_avg || null,
monthly_sales_avg || null,
metrics.avg_quantity_per_order || null,
metrics.number_of_orders || 0,
metrics.first_sale_date || null,
metrics.last_sale_date || null,
daily_sales_avg > 0 ? stock.stock_quantity / daily_sales_avg : null,
weekly_sales_avg > 0 ? stock.stock_quantity / weekly_sales_avg : null,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) : null,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) : null,
margin_percent,
metrics.total_revenue || 0,
inventory_value || 0,
purchases.avg_lead_time_days || null,
purchases.last_purchase_date || null,
purchases.last_received_date || null,
stock_status
]);
} catch (err) {
logError(err, `Failed processing product ${product.product_id}`);
// Continue with next product instead of failing entire batch
continue;
}
}

// Update product metrics
// Batch update metrics
if (metricsUpdates.length > 0) {
await connection.query(`
INSERT INTO product_metrics (
product_id,
last_calculated_at,
daily_sales_avg,
weekly_sales_avg,
monthly_sales_avg,
avg_quantity_per_order,
number_of_orders,
first_sale_date,
last_sale_date,
days_of_inventory,
weeks_of_inventory,
reorder_point,
safety_stock,
avg_margin_percent,
total_revenue,
inventory_value,
avg_lead_time_days,
last_purchase_date,
last_received_date,
abc_class,
stock_status
) VALUES (
?,
NOW(),
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
NULL,
?
)
) VALUES ?
ON DUPLICATE KEY UPDATE
last_calculated_at = VALUES(last_calculated_at),
last_calculated_at = NOW(),
daily_sales_avg = VALUES(daily_sales_avg),
weekly_sales_avg = VALUES(weekly_sales_avg),
monthly_sales_avg = VALUES(monthly_sales_avg),
avg_quantity_per_order = VALUES(avg_quantity_per_order),
number_of_orders = VALUES(number_of_orders),
first_sale_date = VALUES(first_sale_date),
last_sale_date = VALUES(last_sale_date),
days_of_inventory = VALUES(days_of_inventory),
weeks_of_inventory = VALUES(weeks_of_inventory),
reorder_point = VALUES(reorder_point),
safety_stock = VALUES(safety_stock),
avg_margin_percent = VALUES(avg_margin_percent),
total_revenue = VALUES(total_revenue),
inventory_value = VALUES(inventory_value),
avg_lead_time_days = VALUES(avg_lead_time_days),
last_purchase_date = VALUES(last_purchase_date),
last_received_date = VALUES(last_received_date),
stock_status = VALUES(stock_status)
`, [
product.product_id,
daily_sales_avg,
weekly_sales_avg,
monthly_sales_avg,
daily_sales_avg ? stock.stock_quantity / daily_sales_avg : null,
weekly_sales_avg ? stock.stock_quantity / weekly_sales_avg : null,
Math.ceil(daily_sales_avg * 14), // 14 days reorder point
Math.ceil(daily_sales_avg * 7), // 7 days safety stock
margin_percent, // Use the properly handled margin percent
metrics.total_revenue || 0,
purchases.avg_lead_time_days || 0,
purchases.last_purchase_date,
purchases.last_received_date,
daily_sales_avg === 0 ? 'New' :
stock.stock_quantity <= Math.ceil(daily_sales_avg * 7) ? 'Critical' :
stock.stock_quantity <= Math.ceil(daily_sales_avg * 14) ? 'Reorder' :
stock.stock_quantity > (daily_sales_avg * 90) ? 'Overstocked' : 'Healthy'
]);
`, [metricsUpdates]).catch(err => {
logError(err, `Failed to batch update metrics for ${metricsUpdates.length} products`);
throw err;
});
}
}

@@ -283,22 +405,27 @@ async function calculateMetrics() {

// Calculate ABC classification
await connection.query(`
WITH revenue_percentiles AS (
WITH revenue_rankings AS (
SELECT
product_id,
total_revenue,
PERCENT_RANK() OVER (ORDER BY total_revenue DESC) as revenue_percentile
PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) as revenue_rank
FROM product_metrics
WHERE total_revenue > 0
),
classification_update AS (
SELECT
product_id,
CASE
WHEN revenue_rank <= 0.2 THEN 'A'
WHEN revenue_rank <= 0.5 THEN 'B'
ELSE 'C'
END as abc_class
FROM revenue_rankings
)
UPDATE product_metrics pm
JOIN revenue_percentiles rp ON pm.product_id = rp.product_id
SET pm.abc_class =
CASE
WHEN rp.revenue_percentile < 0.2 THEN 'A'
WHEN rp.revenue_percentile < 0.5 THEN 'B'
ELSE 'C'
END;
JOIN classification_update cu ON pm.product_id = cu.product_id
SET pm.abc_class = cu.abc_class,
pm.last_calculated_at = NOW()
`);

// Update progress for time-based aggregates
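Once the classification pass has run, the resulting split can be sanity-checked with a quick aggregate; a sketch against product_metrics:

-- Roughly the top 20% of revenue-generating products should land in class A, the next 30% in B, the rest in C
SELECT abc_class, COUNT(*) AS products, ROUND(SUM(COALESCE(total_revenue, 0)), 2) AS revenue
FROM product_metrics
GROUP BY abc_class
ORDER BY abc_class;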