Optimize and fix calculate scripts

2025-01-27 13:16:21 -05:00
parent 5781b45f37
commit 8323ae7703
10 changed files with 748 additions and 962 deletions

View File

@@ -14,7 +14,8 @@ CREATE TABLE IF NOT EXISTS stock_thresholds (
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor)
+    UNIQUE KEY unique_category_vendor (category_id, vendor),
+    INDEX idx_st_metrics (category_id, vendor)
 );

 -- Lead time threshold configurations
@@ -44,7 +45,8 @@ CREATE TABLE IF NOT EXISTS sales_velocity_config (
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor)
+    UNIQUE KEY unique_category_vendor (category_id, vendor),
+    INDEX idx_sv_metrics (category_id, vendor)
 );

 -- ABC Classification configurations
@@ -68,7 +70,8 @@ CREATE TABLE IF NOT EXISTS safety_stock_config (
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor)
+    UNIQUE KEY unique_category_vendor (category_id, vendor),
+    INDEX idx_ss_metrics (category_id, vendor)
 );

 -- Turnover rate configurations
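If it helps to sanity-check the three new (category_id, vendor) indexes after the migration runs, a minimal sketch along these lines will print which index the optimizer picks. It assumes a mysql2/promise connection and DB_* environment variables, neither of which is shown in this commit.

    // Hypothetical check, not part of the commit.
    const mysql = require('mysql2/promise');

    async function checkIndexUsage() {
      const connection = await mysql.createConnection({
        host: process.env.DB_HOST,       // assumed variable names
        user: process.env.DB_USER,
        password: process.env.DB_PASSWORD,
        database: process.env.DB_NAME,
      });

      // EXPLAIN shows which index serves a lookup matching the
      // (category_id, vendor) column order of idx_st_metrics.
      const [plan] = await connection.query(
        'EXPLAIN SELECT * FROM stock_thresholds WHERE category_id = ? AND vendor = ?',
        [1, 'ExampleVendor']
      );
      console.log(plan.map(row => ({ table: row.table, key: row.key })));

      await connection.end();
    }

    checkIndexUsage().catch(console.error);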

View File

@@ -122,7 +122,8 @@ CREATE TABLE IF NOT EXISTS orders (
   KEY pid (pid),
   KEY customer (customer),
   KEY date (date),
-  KEY status (status)
+  KEY status (status),
+  INDEX idx_orders_metrics (pid, date, canceled)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

 -- Create purchase_orders table with its indexes
@@ -152,6 +153,7 @@ CREATE TABLE purchase_orders (
   INDEX idx_status (status),
   INDEX idx_receiving_status (receiving_status),
   INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received),
+  INDEX idx_po_metrics (pid, date, receiving_status, received_date),
   INDEX idx_po_product_date (pid, date),
   INDEX idx_po_product_status (pid, status),
   UNIQUE KEY unique_po_product (po_id, pid)
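For reference, the new idx_orders_metrics (pid, date, canceled) lines up with the per-product rollups the calculate scripts run. A hedged sketch of that query shape, assuming an existing mysql2/promise connection like the one the scripts use:

    // Hypothetical helper, not part of the commit: the kind of filter the
    // composite index is meant to cover (pid + date range + canceled flag).
    async function revenueLast90Days(connection, pid) {
      const [rows] = await connection.query(
        `SELECT SUM(quantity * price) AS revenue
           FROM orders
          WHERE pid = ?
            AND date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
            AND canceled = false`,
        [pid]
      );
      return rows[0].revenue || 0;
    }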

View File

@@ -5,8 +5,15 @@ process.chdir(path.dirname(__filename));
 require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });

-// Set to 1 to skip product metrics and only calculate the remaining metrics
-const SKIP_PRODUCT_METRICS = 0;
+// Configuration flags for controlling which metrics to calculate
+// Set to 1 to skip the corresponding calculation, 0 to run it
+const SKIP_PRODUCT_METRICS = 1;   // Skip all product metrics
+const SKIP_TIME_AGGREGATES = 1;   // Skip time aggregates
+const SKIP_FINANCIAL_METRICS = 1; // Skip financial metrics
+const SKIP_VENDOR_METRICS = 1;    // Skip vendor metrics
+const SKIP_CATEGORY_METRICS = 1;  // Skip category metrics
+const SKIP_BRAND_METRICS = 1;     // Skip brand metrics
+const SKIP_SALES_FORECASTS = 1;   // Skip sales forecasts

 // Add error handler for uncaught exceptions
 process.on('uncaughtException', (error) => {
@@ -137,50 +144,136 @@ async function calculateMetrics() {
     }

     // Calculate time-based aggregates
-    processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount);
+    if (!SKIP_TIME_AGGREGATES) {
+      processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount);
+    } else {
+      console.log('Skipping time aggregates calculation');
+    }

     // Calculate financial metrics
-    processedCount = await calculateFinancialMetrics(startTime, totalProducts, processedCount);
+    if (!SKIP_FINANCIAL_METRICS) {
+      processedCount = await calculateFinancialMetrics(startTime, totalProducts, processedCount);
+    } else {
+      console.log('Skipping financial metrics calculation');
+    }

     // Calculate vendor metrics
-    processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount);
+    if (!SKIP_VENDOR_METRICS) {
+      processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount);
+    } else {
+      console.log('Skipping vendor metrics calculation');
+    }

     // Calculate category metrics
-    processedCount = await calculateCategoryMetrics(startTime, totalProducts, processedCount);
+    if (!SKIP_CATEGORY_METRICS) {
+      processedCount = await calculateCategoryMetrics(startTime, totalProducts, processedCount);
+    } else {
+      console.log('Skipping category metrics calculation');
+    }

     // Calculate brand metrics
-    processedCount = await calculateBrandMetrics(startTime, totalProducts, processedCount);
+    if (!SKIP_BRAND_METRICS) {
+      processedCount = await calculateBrandMetrics(startTime, totalProducts, processedCount);
+    } else {
+      console.log('Skipping brand metrics calculation');
+    }

     // Calculate sales forecasts
-    processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount);
+    if (!SKIP_SALES_FORECASTS) {
+      processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount);
+    } else {
+      console.log('Skipping sales forecasts calculation');
+    }

     // Calculate ABC classification
     const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
     const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 };

+    // First, create and populate the rankings table with an index
+    await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');
     await connection.query(`
-      WITH revenue_rankings AS (
-        SELECT
-          product_id,
-          total_revenue,
-          PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) * 100 as revenue_percentile
-        FROM product_metrics
-      ),
-      classification_update AS (
-        SELECT
-          product_id,
-          CASE
-            WHEN revenue_percentile <= ? THEN 'A'
-            WHEN revenue_percentile <= ? THEN 'B'
-            ELSE 'C'
-          END as abc_class
-        FROM revenue_rankings
-      )
-      UPDATE product_metrics pm
-      JOIN classification_update cu ON pm.product_id = cu.product_id
-      SET pm.abc_class = cu.abc_class,
-          pm.last_calculated_at = NOW()
-    `, [abcThresholds.a_threshold, abcThresholds.b_threshold]);
+      CREATE TEMPORARY TABLE temp_revenue_ranks (
+        pid BIGINT NOT NULL,
+        total_revenue DECIMAL(10,3),
+        rank_num INT,
+        total_count INT,
+        PRIMARY KEY (pid),
+        INDEX (rank_num)
+      ) ENGINE=MEMORY
+    `);
+
+    await connection.query(`
+      INSERT INTO temp_revenue_ranks
+      SELECT
+        pid,
+        total_revenue,
+        @rank := @rank + 1 as rank_num,
+        @total_count := @rank as total_count
+      FROM (
+        SELECT pid, total_revenue
+        FROM product_metrics
+        WHERE total_revenue > 0
+        ORDER BY total_revenue DESC
+      ) ranked,
+      (SELECT @rank := 0) r
+    `);
+
+    // Get total count for percentage calculation
+    const [rankingCount] = await connection.query('SELECT MAX(rank_num) as total_count FROM temp_revenue_ranks');
+    const totalCount = rankingCount[0].total_count || 1;
+
+    // Process updates in batches
+    let abcProcessedCount = 0;
+    const batchSize = 5000;
+
+    while (true) {
+      // First get a batch of PIDs that need updating
+      const [pids] = await connection.query(`
+        SELECT pm.pid
+        FROM product_metrics pm
+        LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
+        WHERE pm.abc_class IS NULL
+           OR pm.abc_class !=
+              CASE
+                WHEN tr.rank_num IS NULL THEN 'C'
+                WHEN (tr.rank_num / ?) * 100 <= ? THEN 'A'
+                WHEN (tr.rank_num / ?) * 100 <= ? THEN 'B'
+                ELSE 'C'
+              END
+        LIMIT ?
+      `, [totalCount, abcThresholds.a_threshold,
+          totalCount, abcThresholds.b_threshold,
+          batchSize]);
+
+      if (pids.length === 0) {
+        break;
+      }
+
+      // Then update just those PIDs
+      const [result] = await connection.query(`
+        UPDATE product_metrics pm
+        LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
+        SET pm.abc_class =
+              CASE
+                WHEN tr.rank_num IS NULL THEN 'C'
+                WHEN (tr.rank_num / ?) * 100 <= ? THEN 'A'
+                WHEN (tr.rank_num / ?) * 100 <= ? THEN 'B'
+                ELSE 'C'
+              END,
+            pm.last_calculated_at = NOW()
+        WHERE pm.pid IN (?)
+      `, [totalCount, abcThresholds.a_threshold,
+          totalCount, abcThresholds.b_threshold,
+          pids.map(row => row.pid)]);
+
+      abcProcessedCount += result.affectedRows;
+
+      // Small delay between batches to allow other transactions
+      await new Promise(resolve => setTimeout(resolve, 100));
+    }
+
+    // Clean up
+    await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');

     // Final success message
     global.outputProgress({
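For readers comparing the two approaches above: the removed PERCENT_RANK() query and the new rank table classify products the same way, since a product's revenue percentile is just its rank divided by the number of ranked products, and products with no recorded revenue fall to 'C'. A hypothetical standalone sketch of that rule:

    // Hypothetical sketch of the classification rule the batched SQL applies.
    function abcClass(rankNum, totalCount, aThreshold = 20, bThreshold = 50) {
      if (rankNum == null) return 'C';              // not in temp_revenue_ranks
      const percentile = (rankNum / totalCount) * 100;
      if (percentile <= aThreshold) return 'A';
      if (percentile <= bThreshold) return 'B';
      return 'C';
    }

    console.log(abcClass(5, 100));    // 'A'  (top 5%)
    console.log(abcClass(40, 100));   // 'B'  (top 40%)
    console.log(abcClass(90, 100));   // 'C'
    console.log(abcClass(null, 100)); // 'C'  (no recorded revenue)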

View File

@@ -15,7 +15,7 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
       percentage: '95'
     });

-    // Calculate brand metrics
+    // Calculate brand metrics with optimized queries
     await connection.query(`
       INSERT INTO brand_metrics (
         brand,
@@ -28,71 +28,77 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
         avg_margin,
         growth_rate
       )
-      WITH brand_data AS (
-        SELECT
-          p.brand,
-          COUNT(DISTINCT CASE WHEN p.stock_quantity <= 5000 THEN p.pid END) as product_count,
-          COUNT(DISTINCT CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END) as active_products,
-          SUM(CASE
-            WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
-            ELSE p.stock_quantity
-          END) as total_stock_units,
-          SUM(CASE
-            WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.cost_price IS NULL OR p.cost_price < 0 THEN 0
-            ELSE p.stock_quantity * p.cost_price
-          END) as total_stock_cost,
-          SUM(CASE
-            WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.price IS NULL OR p.price < 0 THEN 0
-            ELSE p.stock_quantity * p.price
-          END) as total_stock_retail,
-          SUM(CASE
-            WHEN o.quantity IS NULL OR o.price IS NULL OR o.quantity < 0 OR o.price < 0 THEN 0
-            ELSE o.price * o.quantity
-          END) as total_revenue,
-          CASE
-            WHEN SUM(o.price * o.quantity) > 0 THEN
-              (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
-            ELSE 0
-          END as avg_margin,
-          -- Current period (last 3 months)
-          SUM(CASE
-            WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
-            THEN COALESCE(o.quantity * o.price, 0)
-            ELSE 0
-          END) as current_period_sales,
-          -- Previous year same period
-          SUM(CASE
-            WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
-            THEN COALESCE(o.quantity * o.price, 0)
-            ELSE 0
-          END) as previous_year_period_sales
-        FROM products p
-        LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
-        WHERE p.brand IS NOT NULL
-        GROUP BY p.brand
-      )
-      SELECT
-        brand,
-        product_count,
-        active_products,
-        total_stock_units,
-        total_stock_cost,
-        total_stock_retail,
-        total_revenue,
-        avg_margin,
-        CASE
-          WHEN previous_year_period_sales = 0 AND current_period_sales > 0 THEN 100.0
-          WHEN previous_year_period_sales = 0 THEN 0.0
-          ELSE LEAST(
-            GREATEST(
-              ((current_period_sales - previous_year_period_sales) /
-               NULLIF(previous_year_period_sales, 0)) * 100.0,
-              -100.0
-            ),
-            999.99
-          )
-        END as growth_rate
-      FROM brand_data
+      WITH filtered_products AS (
+        SELECT
+          p.*,
+          CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
+          CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
+          CASE
+            WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
+            ELSE p.stock_quantity
+          END as valid_stock
+        FROM products p
+        WHERE p.brand IS NOT NULL
+      ),
+      sales_periods AS (
+        SELECT
+          p.brand,
+          SUM(o.quantity * o.price) as period_revenue,
+          CASE
+            WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH) THEN 'current'
+            WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) THEN 'previous'
+          END as period_type
+        FROM filtered_products p
+        JOIN orders o ON p.pid = o.pid
+        WHERE o.canceled = false
+          AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
+        GROUP BY p.brand, period_type
+      ),
+      brand_data AS (
+        SELECT
+          p.brand,
+          COUNT(DISTINCT p.valid_pid) as product_count,
+          COUNT(DISTINCT p.active_pid) as active_products,
+          SUM(p.valid_stock) as total_stock_units,
+          SUM(p.valid_stock * p.cost_price) as total_stock_cost,
+          SUM(p.valid_stock * p.price) as total_stock_retail,
+          COALESCE(SUM(o.quantity * o.price), 0) as total_revenue,
+          CASE
+            WHEN SUM(o.quantity * o.price) > 0 THEN
+              (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
+            ELSE 0
+          END as avg_margin
+        FROM filtered_products p
+        LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
+        GROUP BY p.brand
+      )
+      SELECT
+        bd.brand,
+        bd.product_count,
+        bd.active_products,
+        bd.total_stock_units,
+        bd.total_stock_cost,
+        bd.total_stock_retail,
+        bd.total_revenue,
+        bd.avg_margin,
+        CASE
+          WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0
+               AND MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) > 0 THEN 100.0
+          WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0 THEN 0.0
+          ELSE LEAST(
+            GREATEST(
+              ((MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) -
+                MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END)) /
+               NULLIF(MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END), 0)) * 100.0,
+              -100.0
+            ),
+            999.99
+          )
+        END as growth_rate
+      FROM brand_data bd
+      LEFT JOIN sales_periods sp ON bd.brand = sp.brand
+      GROUP BY bd.brand, bd.product_count, bd.active_products, bd.total_stock_units,
+               bd.total_stock_cost, bd.total_stock_retail, bd.total_revenue, bd.avg_margin
       ON DUPLICATE KEY UPDATE
         product_count = VALUES(product_count),
         active_products = VALUES(active_products),
@@ -105,7 +111,7 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
         last_calculated_at = CURRENT_TIMESTAMP
     `);

-    // Calculate brand time-based metrics
+    // Calculate brand time-based metrics with optimized query
     await connection.query(`
       INSERT INTO brand_time_metrics (
         brand,
@@ -119,38 +125,41 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
         total_revenue,
         avg_margin
       )
-      SELECT
-        p.brand,
-        YEAR(o.date) as year,
-        MONTH(o.date) as month,
-        COUNT(DISTINCT CASE WHEN p.stock_quantity <= 5000 THEN p.pid END) as product_count,
-        COUNT(DISTINCT CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END) as active_products,
-        SUM(CASE
-          WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
-          ELSE p.stock_quantity
-        END) as total_stock_units,
-        SUM(CASE
-          WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.cost_price IS NULL OR p.cost_price < 0 THEN 0
-          ELSE p.stock_quantity * p.cost_price
-        END) as total_stock_cost,
-        SUM(CASE
-          WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.price IS NULL OR p.price < 0 THEN 0
-          ELSE p.stock_quantity * p.price
-        END) as total_stock_retail,
-        SUM(CASE
-          WHEN o.quantity IS NULL OR o.price IS NULL OR o.quantity < 0 OR o.price < 0 THEN 0
-          ELSE o.price * o.quantity
-        END) as total_revenue,
-        CASE
-          WHEN SUM(o.price * o.quantity) > 0 THEN
-            (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
-          ELSE 0
-        END as avg_margin
-      FROM products p
-      LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
-      WHERE p.brand IS NOT NULL
-        AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
-      GROUP BY p.brand, YEAR(o.date), MONTH(o.date)
+      WITH filtered_products AS (
+        SELECT
+          p.*,
+          CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
+          CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
+          CASE
+            WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
+            ELSE p.stock_quantity
+          END as valid_stock
+        FROM products p
+        WHERE p.brand IS NOT NULL
+      ),
+      monthly_metrics AS (
+        SELECT
+          p.brand,
+          YEAR(o.date) as year,
+          MONTH(o.date) as month,
+          COUNT(DISTINCT p.valid_pid) as product_count,
+          COUNT(DISTINCT p.active_pid) as active_products,
+          SUM(p.valid_stock) as total_stock_units,
+          SUM(p.valid_stock * p.cost_price) as total_stock_cost,
+          SUM(p.valid_stock * p.price) as total_stock_retail,
+          SUM(o.quantity * o.price) as total_revenue,
+          CASE
+            WHEN SUM(o.quantity * o.price) > 0 THEN
+              (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.quantity * o.price)
+            ELSE 0
+          END as avg_margin
+        FROM filtered_products p
+        LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
+        WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
+        GROUP BY p.brand, YEAR(o.date), MONTH(o.date)
+      )
+      SELECT *
+      FROM monthly_metrics
       ON DUPLICATE KEY UPDATE
         product_count = VALUES(product_count),
         active_products = VALUES(active_products),
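The growth-rate expression is unchanged in spirit on both sides of this diff: year-over-year change of the last three months against the same window a year earlier, clamped to the range [-100, 999.99]. A hypothetical standalone sketch of that rule:

    // Hypothetical sketch of the growth-rate clamping used in the SQL above.
    function growthRate(currentPeriodSales, previousYearPeriodSales) {
      if (previousYearPeriodSales === 0 && currentPeriodSales > 0) return 100.0;
      if (previousYearPeriodSales === 0) return 0.0;
      const change = ((currentPeriodSales - previousYearPeriodSales) / previousYearPeriodSales) * 100.0;
      return Math.min(Math.max(change, -100.0), 999.99);
    }

    console.log(growthRate(1500, 1000)); // 50
    console.log(growthRate(250, 0));     // 100  (no prior-year baseline)
    console.log(growthRate(0, 800));     // -100 (clamped)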

View File

@@ -15,98 +15,104 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
       percentage: '85'
     });

-    // Calculate category performance metrics
+    // First, calculate base category metrics
     await connection.query(`
       INSERT INTO category_metrics (
         category_id,
         product_count,
         active_products,
         total_value,
-        avg_margin,
-        turnover_rate,
-        growth_rate,
-        status
-      )
-      WITH category_sales AS (
-        SELECT
-          c.cat_id as category_id,
-          COUNT(DISTINCT p.pid) as product_count,
-          COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
-          SUM(p.stock_quantity * p.cost_price) as total_value,
-          CASE
-            WHEN SUM(o.price * o.quantity) > 0
-            THEN (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
-            ELSE 0
-          END as avg_margin,
-          CASE
-            WHEN AVG(GREATEST(p.stock_quantity, 0)) >= 0.01
-            THEN LEAST(
-              SUM(CASE
-                WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
-                THEN COALESCE(o.quantity, 0)
-                ELSE 0
-              END) /
-              GREATEST(
-                AVG(GREATEST(p.stock_quantity, 0)),
-                1.0
-              ),
-              999.99
-            )
-            ELSE 0
-          END as turnover_rate,
-          -- Current period (last 3 months)
-          SUM(CASE
-            WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
-            THEN COALESCE(o.quantity * o.price, 0)
-            ELSE 0
-          END) as current_period_sales,
-          -- Previous year same period
-          SUM(CASE
-            WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
-            THEN COALESCE(o.quantity * o.price, 0)
-            ELSE 0
-          END) as previous_year_period_sales,
-          c.status
-        FROM categories c
-        LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
-        LEFT JOIN products p ON pc.pid = p.pid
-        LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
-        GROUP BY c.cat_id, c.status
-      )
-      SELECT
-        category_id,
-        product_count,
-        active_products,
-        total_value,
-        COALESCE(avg_margin, 0) as avg_margin,
-        COALESCE(turnover_rate, 0) as turnover_rate,
-        -- Enhanced YoY growth rate calculation
-        CASE
-          WHEN previous_year_period_sales = 0 AND current_period_sales > 0 THEN 100.0
-          WHEN previous_year_period_sales = 0 THEN 0.0
-          ELSE LEAST(
-            GREATEST(
-              ((current_period_sales - previous_year_period_sales) /
-               NULLIF(previous_year_period_sales, 0)) * 100.0,
-              -100.0
-            ),
-            999.99
-          )
-        END as growth_rate,
-        status
-      FROM category_sales
-      ON DUPLICATE KEY UPDATE
-        product_count = VALUES(product_count),
-        active_products = VALUES(active_products),
-        total_value = VALUES(total_value),
-        avg_margin = VALUES(avg_margin),
-        turnover_rate = VALUES(turnover_rate),
-        growth_rate = VALUES(growth_rate),
-        status = VALUES(status),
-        last_calculated_at = CURRENT_TIMESTAMP
-    `);
-
-    // Calculate category time-based metrics
+        status,
+        last_calculated_at
+      )
+      SELECT
+        c.cat_id,
+        COUNT(DISTINCT p.pid) as product_count,
+        COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
+        COALESCE(SUM(p.stock_quantity * p.cost_price), 0) as total_value,
+        c.status,
+        NOW() as last_calculated_at
+      FROM categories c
+      LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
+      LEFT JOIN products p ON pc.pid = p.pid
+      GROUP BY c.cat_id, c.status
+      ON DUPLICATE KEY UPDATE
+        product_count = VALUES(product_count),
+        active_products = VALUES(active_products),
+        total_value = VALUES(total_value),
+        status = VALUES(status),
+        last_calculated_at = VALUES(last_calculated_at)
+    `);
+
+    // Then update with margin and turnover data
+    await connection.query(`
+      WITH category_sales AS (
+        SELECT
+          pc.cat_id,
+          SUM(o.quantity * o.price) as total_sales,
+          SUM(o.quantity * (o.price - p.cost_price)) as total_margin,
+          SUM(o.quantity) as units_sold,
+          AVG(GREATEST(p.stock_quantity, 0)) as avg_stock
+        FROM product_categories pc
+        JOIN products p ON pc.pid = p.pid
+        JOIN orders o ON p.pid = o.pid
+        WHERE o.canceled = false
+          AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
+        GROUP BY pc.cat_id
+      )
+      UPDATE category_metrics cm
+      JOIN category_sales cs ON cm.category_id = cs.cat_id
+      SET
+        cm.avg_margin = COALESCE(cs.total_margin * 100.0 / NULLIF(cs.total_sales, 0), 0),
+        cm.turnover_rate = LEAST(COALESCE(cs.units_sold / NULLIF(cs.avg_stock, 0), 0), 999.99),
+        cm.last_calculated_at = NOW()
+    `);
+
+    // Finally update growth rates
+    await connection.query(`
+      WITH current_period AS (
+        SELECT
+          pc.cat_id,
+          SUM(o.quantity * o.price) as revenue
+        FROM product_categories pc
+        JOIN products p ON pc.pid = p.pid
+        JOIN orders o ON p.pid = o.pid
+        WHERE o.canceled = false
+          AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
+        GROUP BY pc.cat_id
+      ),
+      previous_period AS (
+        SELECT
+          pc.cat_id,
+          SUM(o.quantity * o.price) as revenue
+        FROM product_categories pc
+        JOIN products p ON pc.pid = p.pid
+        JOIN orders o ON p.pid = o.pid
+        WHERE o.canceled = false
+          AND o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
+                         AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
+        GROUP BY pc.cat_id
+      )
+      UPDATE category_metrics cm
+      LEFT JOIN current_period cp ON cm.category_id = cp.cat_id
+      LEFT JOIN previous_period pp ON cm.category_id = pp.cat_id
+      SET
+        cm.growth_rate = CASE
+          WHEN pp.revenue = 0 AND COALESCE(cp.revenue, 0) > 0 THEN 100.0
+          WHEN pp.revenue = 0 THEN 0.0
+          ELSE LEAST(
+            GREATEST(
+              ((COALESCE(cp.revenue, 0) - pp.revenue) / pp.revenue) * 100.0,
+              -100.0
+            ),
+            999.99
+          )
+        END,
+        cm.last_calculated_at = NOW()
+      WHERE cp.cat_id IS NOT NULL OR pp.cat_id IS NOT NULL
+    `);
+
+    // Calculate time-based metrics
     await connection.query(`
       INSERT INTO category_time_metrics (
         category_id,
@@ -120,29 +126,28 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
         turnover_rate
       )
       SELECT
-        c.cat_id as category_id,
+        pc.cat_id,
         YEAR(o.date) as year,
         MONTH(o.date) as month,
         COUNT(DISTINCT p.pid) as product_count,
         COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
         SUM(p.stock_quantity * p.cost_price) as total_value,
-        SUM(o.price * o.quantity) as total_revenue,
-        CASE
-          WHEN SUM(o.price * o.quantity) > 0
-          THEN (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
-          ELSE 0
-        END as avg_margin,
-        CASE
-          WHEN AVG(p.stock_quantity) > 0
-          THEN SUM(o.quantity) / AVG(p.stock_quantity)
-          ELSE 0
-        END as turnover_rate
-      FROM categories c
-      LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
-      LEFT JOIN products p ON pc.pid = p.pid
-      LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
-      WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
-      GROUP BY c.cat_id, YEAR(o.date), MONTH(o.date)
+        SUM(o.quantity * o.price) as total_revenue,
+        COALESCE(
+          SUM(o.quantity * (o.price - p.cost_price)) * 100.0 /
+          NULLIF(SUM(o.quantity * o.price), 0),
+          0
+        ) as avg_margin,
+        COALESCE(
+          SUM(o.quantity) / NULLIF(AVG(GREATEST(p.stock_quantity, 0)), 0),
+          0
+        ) as turnover_rate
+      FROM product_categories pc
+      JOIN products p ON pc.pid = p.pid
+      JOIN orders o ON p.pid = o.pid
+      WHERE o.canceled = false
+        AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
+      GROUP BY pc.cat_id, YEAR(o.date), MONTH(o.date)
       ON DUPLICATE KEY UPDATE
         product_count = VALUES(product_count),
         active_products = VALUES(active_products),
@@ -152,73 +157,45 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
         turnover_rate = VALUES(turnover_rate)
     `);

-    // Calculate category sales metrics with NULL brand handling
-    await connection.query(`
-      INSERT INTO category_sales_metrics (
-        category_id,
-        brand,
-        period_start,
-        period_end,
-        avg_daily_sales,
-        total_sold,
-        num_products,
-        avg_price,
-        last_calculated_at
-      )
-      WITH date_ranges AS (
-        SELECT
-          DATE_SUB(CURDATE(), INTERVAL 30 DAY) as period_start,
-          CURDATE() as period_end
-        UNION ALL
-        SELECT
-          DATE_SUB(CURDATE(), INTERVAL 90 DAY),
-          CURDATE()
-        UNION ALL
-        SELECT
-          DATE_SUB(CURDATE(), INTERVAL 180 DAY),
-          CURDATE()
-        UNION ALL
-        SELECT
-          DATE_SUB(CURDATE(), INTERVAL 365 DAY),
-          CURDATE()
-      ),
-      category_metrics AS (
-        SELECT
-          c.cat_id as category_id,
-          COALESCE(p.brand, 'Unbranded') as brand,
-          dr.period_start,
-          dr.period_end,
-          COUNT(DISTINCT p.pid) as num_products,
-          COALESCE(SUM(o.quantity), 0) / DATEDIFF(dr.period_end, dr.period_start) as avg_daily_sales,
-          COALESCE(SUM(o.quantity), 0) as total_sold,
-          COALESCE(AVG(o.price), 0) as avg_price
-        FROM categories c
-        JOIN product_categories pc ON c.cat_id = pc.cat_id
-        JOIN products p ON pc.pid = p.pid
-        CROSS JOIN date_ranges dr
-        LEFT JOIN orders o ON p.pid = o.pid
-          AND o.date BETWEEN dr.period_start AND dr.period_end
-          AND o.canceled = false
-        GROUP BY c.cat_id, COALESCE(p.brand, 'Unbranded'), dr.period_start, dr.period_end
-      )
-      SELECT
-        category_id,
-        brand,
-        period_start,
-        period_end,
-        avg_daily_sales,
-        total_sold,
-        num_products,
-        avg_price,
-        NOW() as last_calculated_at
-      FROM category_metrics
-      ON DUPLICATE KEY UPDATE
-        avg_daily_sales = VALUES(avg_daily_sales),
-        total_sold = VALUES(total_sold),
-        num_products = VALUES(num_products),
-        avg_price = VALUES(avg_price),
-        last_calculated_at = NOW()
-    `);
+    // Calculate sales metrics for different time periods
+    const periods = [30, 90, 180, 365];
+    for (const days of periods) {
+      await connection.query(`
+        INSERT INTO category_sales_metrics (
+          category_id,
+          brand,
+          period_start,
+          period_end,
+          avg_daily_sales,
+          total_sold,
+          num_products,
+          avg_price,
+          last_calculated_at
+        )
+        SELECT
+          pc.cat_id as category_id,
+          COALESCE(p.brand, 'Unbranded') as brand,
+          DATE_SUB(CURDATE(), INTERVAL ? DAY) as period_start,
+          CURDATE() as period_end,
+          COALESCE(SUM(o.quantity), 0) / ? as avg_daily_sales,
+          COALESCE(SUM(o.quantity), 0) as total_sold,
+          COUNT(DISTINCT p.pid) as num_products,
+          COALESCE(AVG(o.price), 0) as avg_price,
+          NOW() as last_calculated_at
+        FROM product_categories pc
+        JOIN products p ON pc.pid = p.pid
+        LEFT JOIN orders o ON p.pid = o.pid
+          AND o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
+          AND o.canceled = false
+        GROUP BY pc.cat_id, p.brand
+        ON DUPLICATE KEY UPDATE
+          avg_daily_sales = VALUES(avg_daily_sales),
+          total_sold = VALUES(total_sold),
+          num_products = VALUES(num_products),
+          avg_price = VALUES(avg_price),
+          last_calculated_at = NOW()
+      `, [days, days, days]);
+    }

     return Math.floor(totalProducts * 0.9);
   } finally {
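Each pass of the new periods loop computes average daily sales over a fixed trailing window instead of the old DATEDIFF over a date-range row, and the window length is bound three times (period_start, the divisor, and the orders date filter). A hypothetical sketch of the per-iteration arithmetic:

    // Hypothetical sketch of what one loop iteration derives per (category, brand).
    function categoryPeriodMetrics(orderQuantities, days) {
      const totalSold = orderQuantities.reduce((sum, qty) => sum + qty, 0);
      return {
        totalSold,
        avgDailySales: totalSold / days,
      };
    }

    console.log(categoryPeriodMetrics([3, 5, 2], 30)); // { totalSold: 10, avgDailySales: 0.333... }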

View File

@@ -15,9 +15,9 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
       percentage: '60'
     });

+    // Calculate financial metrics with optimized query
     await connection.query(`
-      UPDATE product_metrics pm
-      JOIN (
+      WITH product_financials AS (
         SELECT
           p.pid,
           p.cost_price * p.stock_quantity as inventory_value,
@@ -26,48 +26,54 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
           SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
           MIN(o.date) as first_sale_date,
           MAX(o.date) as last_sale_date,
-          DATEDIFF(MAX(o.date), MIN(o.date)) + 1 as calculation_period_days
+          DATEDIFF(MAX(o.date), MIN(o.date)) + 1 as calculation_period_days,
+          COUNT(DISTINCT DATE(o.date)) as active_days
         FROM products p
         LEFT JOIN orders o ON p.pid = o.pid
         WHERE o.canceled = false
           AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
         GROUP BY p.pid
-      ) fin ON pm.pid = fin.pid
+      )
+      UPDATE product_metrics pm
+      JOIN product_financials pf ON pm.pid = pf.pid
       SET
-        pm.inventory_value = COALESCE(fin.inventory_value, 0),
-        pm.total_revenue = COALESCE(fin.total_revenue, 0),
-        pm.cost_of_goods_sold = COALESCE(fin.cost_of_goods_sold, 0),
-        pm.gross_profit = COALESCE(fin.gross_profit, 0),
+        pm.inventory_value = COALESCE(pf.inventory_value, 0),
+        pm.total_revenue = COALESCE(pf.total_revenue, 0),
+        pm.cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0),
+        pm.gross_profit = COALESCE(pf.gross_profit, 0),
         pm.gmroi = CASE
-          WHEN COALESCE(fin.inventory_value, 0) > 0 AND fin.calculation_period_days > 0 THEN
-            (COALESCE(fin.gross_profit, 0) * (365.0 / fin.calculation_period_days)) / COALESCE(fin.inventory_value, 0)
+          WHEN COALESCE(pf.inventory_value, 0) > 0 AND pf.active_days > 0 THEN
+            (COALESCE(pf.gross_profit, 0) * (365.0 / pf.active_days)) / COALESCE(pf.inventory_value, 0)
           ELSE 0
         END
     `);

-    // Update time-based aggregates with financial metrics
+    // Update time-based aggregates with optimized query
     await connection.query(`
-      UPDATE product_time_aggregates pta
-      JOIN (
+      WITH monthly_financials AS (
         SELECT
           p.pid,
           YEAR(o.date) as year,
           MONTH(o.date) as month,
           p.cost_price * p.stock_quantity as inventory_value,
           SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
-          COUNT(DISTINCT DATE(o.date)) as days_in_period
+          COUNT(DISTINCT DATE(o.date)) as active_days,
+          MIN(o.date) as period_start,
+          MAX(o.date) as period_end
         FROM products p
         LEFT JOIN orders o ON p.pid = o.pid
         WHERE o.canceled = false
         GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
-      ) fin ON pta.pid = fin.pid
-        AND pta.year = fin.year
-        AND pta.month = fin.month
+      )
+      UPDATE product_time_aggregates pta
+      JOIN monthly_financials mf ON pta.pid = mf.pid
+        AND pta.year = mf.year
+        AND pta.month = mf.month
       SET
-        pta.inventory_value = COALESCE(fin.inventory_value, 0),
+        pta.inventory_value = COALESCE(mf.inventory_value, 0),
         pta.gmroi = CASE
-          WHEN COALESCE(fin.inventory_value, 0) > 0 AND fin.days_in_period > 0 THEN
-            (COALESCE(fin.gross_profit, 0) * (365.0 / fin.days_in_period)) / COALESCE(fin.inventory_value, 0)
+          WHEN COALESCE(mf.inventory_value, 0) > 0 AND mf.active_days > 0 THEN
+            (COALESCE(mf.gross_profit, 0) * (365.0 / mf.active_days)) / COALESCE(mf.inventory_value, 0)
           ELSE 0
         END
     `);
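The switch from calculation_period_days to active_days changes the annualization basis for GMROI: gross profit is scaled by the number of days that actually had sales, then divided by inventory value. A hypothetical sketch of the formula as the updated query computes it:

    // Hypothetical sketch of the annualised GMROI calculation.
    function gmroi(grossProfit, activeDays, inventoryValue) {
      if (inventoryValue <= 0 || activeDays <= 0) return 0;
      return (grossProfit * (365.0 / activeDays)) / inventoryValue;
    }

    console.log(gmroi(1200, 90, 4000).toFixed(2)); // "1.22" – about 4,867 annualised profit on 4,000 of stock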

View File

@@ -12,418 +12,158 @@ function sanitizeValue(value) {
 async function calculateProductMetrics(startTime, totalProducts, processedCount = 0) {
   const connection = await getConnection();
   try {
-    // Process in batches of 250
-    const batchSize = 250;
-    for (let offset = 0; offset < totalProducts; offset += batchSize) {
-      const [products] = await connection.query('SELECT pid, vendor FROM products LIMIT ? OFFSET ?', [batchSize, offset])
-        .catch(err => {
-          logError(err, `Failed to fetch products batch at offset ${offset}`);
-          throw err;
-        });
-
-      processedCount += products.length;
-
-      // Update progress after each batch
-      outputProgress({
-        status: 'running',
-        operation: 'Processing products',
-        current: processedCount,
-        total: totalProducts,
-        elapsed: formatElapsedTime(startTime),
-        remaining: estimateRemaining(startTime, processedCount, totalProducts),
-        rate: calculateRate(startTime, processedCount),
-        percentage: ((processedCount / totalProducts) * 100).toFixed(1)
-      });
-
-      // Process the batch
-      const metricsUpdates = [];
-      for (const product of products) {
-        try {
-          // Get configuration values for this product
-          const [configs] = await connection.query(`
-            WITH product_info AS (
-              SELECT
-                p.pid,
-                p.vendor,
-                pc.cat_id as category_id
-              FROM products p
-              LEFT JOIN product_categories pc ON p.pid = pc.pid
-              WHERE p.pid = ?
-            ),
-            threshold_options AS (
-              SELECT
-                st.*,
-                CASE
-                  WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1
-                  WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2
-                  WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3
-                  WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4
-                  ELSE 5
-                END as priority
-              FROM product_info pi
-              CROSS JOIN stock_thresholds st
-              WHERE (st.category_id = pi.category_id OR st.category_id IS NULL)
-                AND (st.vendor = pi.vendor OR st.vendor IS NULL)
-            ),
-            velocity_options AS (
-              SELECT
-                sv.*,
-                CASE
-                  WHEN sv.category_id = pi.category_id AND sv.vendor = pi.vendor THEN 1
-                  WHEN sv.category_id = pi.category_id AND sv.vendor IS NULL THEN 2
-                  WHEN sv.category_id IS NULL AND sv.vendor = pi.vendor THEN 3
-                  WHEN sv.category_id IS NULL AND sv.vendor IS NULL THEN 4
-                  ELSE 5
-                END as priority
-              FROM product_info pi
-              CROSS JOIN sales_velocity_config sv
-              WHERE (sv.category_id = pi.category_id OR sv.category_id IS NULL)
-                AND (sv.vendor = pi.vendor OR sv.vendor IS NULL)
-            ),
-            safety_options AS (
-              SELECT
-                ss.*,
-                CASE
-                  WHEN ss.category_id = pi.category_id AND ss.vendor = pi.vendor THEN 1
-                  WHEN ss.category_id = pi.category_id AND ss.vendor IS NULL THEN 2
-                  WHEN ss.category_id IS NULL AND ss.vendor = pi.vendor THEN 3
-                  WHEN ss.category_id IS NULL AND ss.vendor IS NULL THEN 4
-                  ELSE 5
-                END as priority
-              FROM product_info pi
-              CROSS JOIN safety_stock_config ss
-              WHERE (ss.category_id = pi.category_id OR ss.category_id IS NULL)
-                AND (ss.vendor = pi.vendor OR ss.vendor IS NULL)
-            )
-            SELECT
-              COALESCE(
-                (SELECT critical_days
-                 FROM threshold_options
-                 ORDER BY priority LIMIT 1),
-                7
-              ) as critical_days,
-              COALESCE(
-                (SELECT reorder_days
-                 FROM threshold_options
-                 ORDER BY priority LIMIT 1),
-                14
-              ) as reorder_days,
-              COALESCE(
-                (SELECT overstock_days
-                 FROM threshold_options
-                 ORDER BY priority LIMIT 1),
-                90
-              ) as overstock_days,
-              COALESCE(
-                (SELECT low_stock_threshold
-                 FROM threshold_options
-                 ORDER BY priority LIMIT 1),
-                5
-              ) as low_stock_threshold,
-              COALESCE(
-                (SELECT daily_window_days
-                 FROM velocity_options
-                 ORDER BY priority LIMIT 1),
-                30
-              ) as daily_window_days,
-              COALESCE(
-                (SELECT weekly_window_days
-                 FROM velocity_options
-                 ORDER BY priority LIMIT 1),
-                7
-              ) as weekly_window_days,
-              COALESCE(
-                (SELECT monthly_window_days
-                 FROM velocity_options
-                 ORDER BY priority LIMIT 1),
-                90
-              ) as monthly_window_days,
-              COALESCE(
-                (SELECT coverage_days
-                 FROM safety_options
-                 ORDER BY priority LIMIT 1),
-                14
-              ) as safety_stock_days,
-              COALESCE(
-                (SELECT service_level
-                 FROM safety_options
-                 ORDER BY priority LIMIT 1),
-                95.0
-              ) as service_level
-          `, [product.pid]);
-
-          const config = configs[0];
-
-          // Calculate sales metrics
-          const [salesMetrics] = await connection.query(`
-            WITH sales_summary AS (
-              SELECT
-                SUM(o.quantity) as total_quantity_sold,
-                SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
-                SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
-                MAX(o.date) as last_sale_date,
-                MIN(o.date) as first_sale_date,
-                COUNT(DISTINCT o.order_number) as number_of_orders,
-                AVG(o.quantity) as avg_quantity_per_order,
-                SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
CASE
WHEN SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) IS NULL THEN 0
ELSE SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END)
END as rolling_weekly_avg,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_month_qty
FROM orders o
JOIN products p ON o.pid = p.pid
WHERE o.canceled = 0 AND o.pid = ?
GROUP BY o.pid
)
SELECT
total_quantity_sold,
total_revenue,
total_cost,
last_sale_date,
first_sale_date,
number_of_orders,
avg_quantity_per_order,
last_30_days_qty / ? as rolling_daily_avg,
rolling_weekly_avg / ? as rolling_weekly_avg,
last_month_qty / ? as rolling_monthly_avg
FROM sales_summary
`, [
config.daily_window_days,
config.weekly_window_days,
config.weekly_window_days,
config.monthly_window_days,
product.pid,
config.daily_window_days,
config.weekly_window_days,
config.monthly_window_days
]);
// Calculate purchase metrics
const [purchaseMetrics] = await connection.query(`
WITH recent_orders AS (
SELECT
date,
received_date,
received,
cost_price,
DATEDIFF(received_date, date) as lead_time_days,
ROW_NUMBER() OVER (ORDER BY date DESC) as order_rank
FROM purchase_orders
WHERE receiving_status >= 30 -- Partial or fully received
AND pid = ?
AND received > 0
AND received_date IS NOT NULL
),
lead_time_orders AS (
SELECT *
FROM recent_orders
WHERE order_rank <= 5
OR date >= DATE_SUB(CURDATE(), INTERVAL 90 DAY)
)
SELECT
SUM(CASE WHEN received >= 0 THEN received ELSE 0 END) as total_quantity_purchased,
SUM(CASE WHEN received >= 0 THEN cost_price * received ELSE 0 END) as total_cost,
MAX(date) as last_purchase_date,
MIN(received_date) as first_received_date,
MAX(received_date) as last_received_date,
AVG(lead_time_days) as avg_lead_time_days
FROM lead_time_orders
`, [product.pid]);
// Get stock info
const [stockInfo] = await connection.query(`
SELECT
p.stock_quantity,
p.cost_price,
p.created_at,
p.replenishable,
p.moq,
p.notions_inv_count,
p.date_last_sold,
p.total_sold,
DATEDIFF(CURDATE(), MIN(po.received_date)) as days_since_first_stock,
DATEDIFF(CURDATE(), COALESCE(p.date_last_sold, CURDATE())) as days_since_last_sale,
CASE
WHEN EXISTS (
SELECT 1 FROM orders o
WHERE o.pid = p.pid
AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
AND o.canceled = false
AND (SELECT SUM(quantity) FROM orders o2
WHERE o2.pid = p.pid
AND o2.date >= o.date
AND o2.canceled = false) = 0
) THEN true
ELSE false
END as had_recent_stockout
FROM products p
LEFT JOIN purchase_orders po ON p.pid = po.pid
AND po.receiving_status >= 30 -- Partial or fully received
AND po.received > 0
WHERE p.pid = ?
GROUP BY p.pid
`, [product.pid]);
// Calculate metrics
const salesData = salesMetrics[0] || {};
const purchaseData = purchaseMetrics[0] || {};
const stockData = stockInfo[0] || {};
// Sales velocity metrics
const daily_sales_avg = sanitizeValue(salesData.rolling_daily_avg) || 0;
const weekly_sales_avg = sanitizeValue(salesData.rolling_weekly_avg) || 0;
const monthly_sales_avg = sanitizeValue(salesData.rolling_monthly_avg) || 0;
// Stock metrics
const stock_quantity = sanitizeValue(stockData.stock_quantity) || 0;
const days_of_inventory = daily_sales_avg > 0 ? Math.floor(stock_quantity / daily_sales_avg) : 999;
const weeks_of_inventory = Math.floor(days_of_inventory / 7);
// Calculate stock status
const stock_status = calculateStockStatus(
stock_quantity,
config,
daily_sales_avg,
weekly_sales_avg,
monthly_sales_avg
);
// Calculate reorder quantities
const reorder_quantities = calculateReorderQuantities(
stock_quantity,
stock_status,
daily_sales_avg,
sanitizeValue(purchaseData.avg_lead_time_days) || 0,
config
);
// Financial metrics
const cost_price = sanitizeValue(stockData.cost_price) || 0;
const inventory_value = stock_quantity * cost_price;
const total_revenue = sanitizeValue(salesData.total_revenue) || 0;
const total_cost = sanitizeValue(salesData.total_cost) || 0;
const gross_profit = total_revenue - total_cost;
const avg_margin_percent = total_revenue > 0 ? ((gross_profit / total_revenue) * 100) : 0;
const gmroi = inventory_value > 0 ? (gross_profit / inventory_value) : 0;
// Add to batch update with sanitized values
metricsUpdates.push([
product.pid,
sanitizeValue(daily_sales_avg),
sanitizeValue(weekly_sales_avg),
sanitizeValue(monthly_sales_avg),
sanitizeValue(salesData.avg_quantity_per_order),
sanitizeValue(salesData.number_of_orders),
salesData.first_sale_date || null,
salesData.last_sale_date || null,
sanitizeValue(days_of_inventory),
sanitizeValue(weeks_of_inventory),
sanitizeValue(reorder_quantities.reorder_point),
sanitizeValue(reorder_quantities.safety_stock),
sanitizeValue(reorder_quantities.reorder_qty),
sanitizeValue(reorder_quantities.overstocked_amt),
sanitizeValue(avg_margin_percent),
sanitizeValue(total_revenue),
sanitizeValue(inventory_value),
sanitizeValue(total_cost),
sanitizeValue(gross_profit),
sanitizeValue(gmroi),
sanitizeValue(purchaseData.avg_lead_time_days),
purchaseData.last_purchase_date || null,
purchaseData.first_received_date || null,
purchaseData.last_received_date || null,
null, // abc_class - calculated separately
stock_status,
sanitizeValue(0), // turnover_rate - calculated separately
sanitizeValue(purchaseData.avg_lead_time_days),
sanitizeValue(config.target_days),
stock_status === 'Critical' ? 'Warning' : 'Normal',
null, // forecast_accuracy
null, // forecast_bias
null // last_forecast_date
]);
} catch (err) {
logError(err, `Failed processing product ${product.pid}`);
continue;
}
}
// Batch update metrics
if (metricsUpdates.length > 0) {
try {
await connection.query(`
INSERT INTO product_metrics (
pid,
daily_sales_avg,
weekly_sales_avg,
monthly_sales_avg,
avg_quantity_per_order,
number_of_orders,
first_sale_date,
last_sale_date,
days_of_inventory,
weeks_of_inventory,
reorder_point,
safety_stock,
reorder_qty,
overstocked_amt,
avg_margin_percent,
total_revenue,
inventory_value,
cost_of_goods_sold,
gross_profit,
gmroi,
avg_lead_time_days,
last_purchase_date,
first_received_date,
last_received_date,
abc_class,
stock_status,
turnover_rate,
current_lead_time,
target_lead_time,
lead_time_status,
forecast_accuracy,
forecast_bias,
last_forecast_date
)
VALUES ?
ON DUPLICATE KEY UPDATE
daily_sales_avg = VALUES(daily_sales_avg),
weekly_sales_avg = VALUES(weekly_sales_avg),
monthly_sales_avg = VALUES(monthly_sales_avg),
avg_quantity_per_order = VALUES(avg_quantity_per_order),
number_of_orders = VALUES(number_of_orders),
first_sale_date = VALUES(first_sale_date),
last_sale_date = VALUES(last_sale_date),
days_of_inventory = VALUES(days_of_inventory),
weeks_of_inventory = VALUES(weeks_of_inventory),
reorder_point = VALUES(reorder_point),
safety_stock = VALUES(safety_stock),
reorder_qty = VALUES(reorder_qty),
overstocked_amt = VALUES(overstocked_amt),
avg_margin_percent = VALUES(avg_margin_percent),
total_revenue = VALUES(total_revenue),
inventory_value = VALUES(inventory_value),
cost_of_goods_sold = VALUES(cost_of_goods_sold),
gross_profit = VALUES(gross_profit),
gmroi = VALUES(gmroi),
avg_lead_time_days = VALUES(avg_lead_time_days),
last_purchase_date = VALUES(last_purchase_date),
first_received_date = VALUES(first_received_date),
last_received_date = VALUES(last_received_date),
stock_status = VALUES(stock_status),
turnover_rate = VALUES(turnover_rate),
current_lead_time = VALUES(current_lead_time),
target_lead_time = VALUES(target_lead_time),
lead_time_status = VALUES(lead_time_status),
last_calculated_at = CURRENT_TIMESTAMP
`, [metricsUpdates]);
} catch (err) {
logError(err, 'Failed to update metrics batch');
throw err;
}
}
+    // Skip flags are inherited from the parent scope
+    const SKIP_PRODUCT_BASE_METRICS = 0;
+    const SKIP_PRODUCT_TIME_AGGREGATES =0;
+
+    // Calculate base product metrics
+    if (!SKIP_PRODUCT_BASE_METRICS) {
+      outputProgress({
+        status: 'running',
+        operation: 'Calculating base product metrics',
+        current: Math.floor(totalProducts * 0.2),
+        total: totalProducts,
+        elapsed: formatElapsedTime(startTime),
+        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.2), totalProducts),
+        rate: calculateRate(startTime, Math.floor(totalProducts * 0.2)),
+        percentage: '20'
+      });
+
+      // Calculate base metrics
+      await connection.query(`
+        UPDATE product_metrics pm
+        JOIN (
+          SELECT
+            p.pid,
+            p.cost_price * p.stock_quantity as inventory_value,
+            SUM(o.quantity) as total_quantity,
+            COUNT(DISTINCT o.order_number) as number_of_orders,
+            SUM(o.quantity * o.price) as total_revenue,
+            SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
+            AVG(o.price) as avg_price,
+            STDDEV(o.price) as price_std,
+            MIN(o.date) as first_sale_date,
+            MAX(o.date) as last_sale_date,
+            COUNT(DISTINCT DATE(o.date)) as active_days
+          FROM products p
+          LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
+          GROUP BY p.pid
+        ) stats ON pm.pid = stats.pid
+        SET
+          pm.inventory_value = COALESCE(stats.inventory_value, 0),
+          pm.avg_quantity_per_order = COALESCE(stats.total_quantity / NULLIF(stats.number_of_orders, 0), 0),
+          pm.number_of_orders = COALESCE(stats.number_of_orders, 0),
+          pm.total_revenue = COALESCE(stats.total_revenue, 0),
+          pm.cost_of_goods_sold = COALESCE(stats.cost_of_goods_sold, 0),
+          pm.gross_profit = COALESCE(stats.total_revenue - stats.cost_of_goods_sold, 0),
+          pm.avg_margin_percent = CASE
+            WHEN COALESCE(stats.total_revenue, 0) > 0
+            THEN ((stats.total_revenue - stats.cost_of_goods_sold) / stats.total_revenue) * 100
+            ELSE 0
+          END,
+          pm.first_sale_date = stats.first_sale_date,
+          pm.last_sale_date = stats.last_sale_date,
+          pm.gmroi = CASE
+            WHEN COALESCE(stats.inventory_value, 0) > 0
+            THEN (stats.total_revenue - stats.cost_of_goods_sold) / stats.inventory_value
+            ELSE 0
+          END,
+          pm.last_calculated_at = NOW()
+      `);
+
+      processedCount = Math.floor(totalProducts * 0.4);
+    } else {
+      console.log('Skipping base product metrics calculation');
+      processedCount = Math.floor(totalProducts * 0.4);
+      outputProgress({
+        status: 'running',
+        operation: 'Skipping base product metrics calculation',
+        current: processedCount,
+        total: totalProducts,
+        elapsed: formatElapsedTime(startTime),
+        remaining: estimateRemaining(startTime, processedCount, totalProducts),
+        rate: calculateRate(startTime, processedCount),
+        percentage: '40'
+      });
+    }

+    // Calculate product time aggregates
+    if (!SKIP_PRODUCT_TIME_AGGREGATES) {
+      outputProgress({
+        status: 'running',
+        operation: 'Calculating product time aggregates',
+        current: Math.floor(totalProducts * 0.4),
+        total: totalProducts,
+        elapsed: formatElapsedTime(startTime),
+        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.4), totalProducts),
+        rate: calculateRate(startTime, Math.floor(totalProducts * 0.4)),
+        percentage: '40'
+      });
+
+      // Calculate time-based aggregates
+      await connection.query(`
+        INSERT INTO product_time_aggregates (
+          pid,
+          year,
+          month,
+          total_quantity_sold,
+          total_revenue,
+          total_cost,
+          order_count,
+          avg_price,
+          profit_margin,
+          inventory_value,
+          gmroi
+        )
+        SELECT
+          p.pid,
+          YEAR(o.date) as year,
+          MONTH(o.date) as month,
+          SUM(o.quantity) as total_quantity_sold,
+          SUM(o.quantity * o.price) as total_revenue,
+          SUM(o.quantity * p.cost_price) as total_cost,
+          COUNT(DISTINCT o.order_number) as order_count,
+          AVG(o.price) as avg_price,
+          CASE
+            WHEN SUM(o.quantity * o.price) > 0
+            THEN ((SUM(o.quantity * o.price) - SUM(o.quantity * p.cost_price)) / SUM(o.quantity * o.price)) * 100
+            ELSE 0
+          END as profit_margin,
+          p.cost_price * p.stock_quantity as inventory_value,
+          CASE
+            WHEN p.cost_price * p.stock_quantity > 0
+            THEN (SUM(o.quantity * (o.price - p.cost_price))) / (p.cost_price * p.stock_quantity)
+            ELSE 0
+          END as gmroi
+        FROM products p
+        LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
+        WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
+        GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
+        ON DUPLICATE KEY UPDATE
+          total_quantity_sold = VALUES(total_quantity_sold),
+          total_revenue = VALUES(total_revenue),
+          total_cost = VALUES(total_cost),
+          order_count = VALUES(order_count),
+          avg_price = VALUES(avg_price),
+          profit_margin = VALUES(profit_margin),
+          inventory_value = VALUES(inventory_value),
+          gmroi = VALUES(gmroi)
+      `);
+
+      processedCount = Math.floor(totalProducts * 0.6);
+    } else {
+      console.log('Skipping product time aggregates calculation');
+      processedCount = Math.floor(totalProducts * 0.6);
+      outputProgress({
+        status: 'running',
+        operation: 'Skipping product time aggregates calculation',
+        current: processedCount,
+        total: totalProducts,
+        elapsed: formatElapsedTime(startTime),
+        remaining: estimateRemaining(startTime, processedCount, totalProducts),
+        rate: calculateRate(startTime, processedCount),
+        percentage: '60'
+      });
     }

     return processedCount;
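The set-based INSERT that replaces the per-product loop derives its monthly figures from straightforward arithmetic on the aggregated order rows. A hypothetical sketch of those two derived columns:

    // Hypothetical sketch of the per-month profit margin and GMROI formulas.
    function monthlyAggregates(totalRevenue, totalCost, inventoryValue) {
      const profitMargin = totalRevenue > 0 ? ((totalRevenue - totalCost) / totalRevenue) * 100 : 0;
      const gmroi = inventoryValue > 0 ? (totalRevenue - totalCost) / inventoryValue : 0;
      return { profitMargin, gmroi };
    }

    console.log(monthlyAggregates(5000, 3500, 2000)); // { profitMargin: 30, gmroi: 0.75 }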

View File

@@ -15,6 +15,59 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
       percentage: '98'
     });
// First, create a temporary table for forecast dates
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_forecast_dates (
forecast_date DATE,
day_of_week INT,
month INT,
PRIMARY KEY (forecast_date)
)
`);
await connection.query(`
INSERT INTO temp_forecast_dates
SELECT
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date,
DAYOFWEEK(DATE_ADD(CURRENT_DATE, INTERVAL n DAY)) as day_of_week,
MONTH(DATE_ADD(CURRENT_DATE, INTERVAL n DAY)) as month
FROM (
SELECT a.N + b.N * 10 as n
FROM
(SELECT 0 as N UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) a,
(SELECT 0 as N UNION SELECT 1 UNION SELECT 2) b
ORDER BY n
LIMIT 31
) numbers
`);
// Create temporary table for daily sales stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_daily_sales AS
SELECT
o.pid,
DAYOFWEEK(o.date) as day_of_week,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue,
COUNT(DISTINCT DATE(o.date)) as day_count
FROM orders o
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY o.pid, DAYOFWEEK(o.date)
`);
// Create temporary table for product stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_stats AS
SELECT
pid,
AVG(daily_revenue) as overall_avg_revenue,
SUM(day_count) as total_days
FROM temp_daily_sales
GROUP BY pid
`);
     // Calculate product-level forecasts
     await connection.query(`
       INSERT INTO sales_forecasts (
@@ -25,92 +78,37 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
         confidence_level,
         last_calculated_at
       )
-      WITH daily_sales AS (
-        SELECT
-          o.pid,
-          DATE(o.date) as sale_date,
-          SUM(o.quantity) as daily_quantity,
-          SUM(o.price * o.quantity) as daily_revenue
-        FROM orders o
-        WHERE o.canceled = false
-          AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
-        GROUP BY o.pid, DATE(o.date)
-      ),
-      forecast_dates AS (
-        SELECT
-          DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date
-        FROM (
-          SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
-          SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION
-          SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION
-          SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION
-          SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION
-          SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION
-          SELECT 30
-        ) numbers
-      ),
-      product_stats AS (
-        SELECT
-          ds.pid,
-          AVG(ds.daily_quantity) as avg_daily_quantity,
-          STDDEV_SAMP(ds.daily_quantity) as std_daily_quantity,
-          AVG(ds.daily_revenue) as avg_daily_revenue,
-          STDDEV_SAMP(ds.daily_revenue) as std_daily_revenue,
-          COUNT(*) as data_points,
-          -- Calculate day-of-week averages
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 1 THEN ds.daily_revenue END) as sunday_avg,
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 2 THEN ds.daily_revenue END) as monday_avg,
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 3 THEN ds.daily_revenue END) as tuesday_avg,
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 4 THEN ds.daily_revenue END) as wednesday_avg,
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 5 THEN ds.daily_revenue END) as thursday_avg,
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 6 THEN ds.daily_revenue END) as friday_avg,
-          AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 7 THEN ds.daily_revenue END) as saturday_avg
-        FROM daily_sales ds
-        GROUP BY ds.pid
-      )
-      SELECT
-        ps.pid,
-        fd.forecast_date,
-        GREATEST(0,
-          ps.avg_daily_quantity *
-          (1 + COALESCE(
-            (SELECT seasonality_factor
-             FROM sales_seasonality
-             WHERE MONTH(fd.forecast_date) = month
-             LIMIT 1),
-            0
-          ))
-        ) as forecast_units,
-        GREATEST(0,
-          CASE DAYOFWEEK(fd.forecast_date)
-            WHEN 1 THEN COALESCE(ps.sunday_avg, ps.avg_daily_revenue)
-            WHEN 2 THEN COALESCE(ps.monday_avg, ps.avg_daily_revenue)
-            WHEN 3 THEN COALESCE(ps.tuesday_avg, ps.avg_daily_revenue)
-            WHEN 4 THEN COALESCE(ps.wednesday_avg, ps.avg_daily_revenue)
-            WHEN 5 THEN COALESCE(ps.thursday_avg, ps.avg_daily_revenue)
-            WHEN 6 THEN COALESCE(ps.friday_avg, ps.avg_daily_revenue)
-            WHEN 7 THEN COALESCE(ps.saturday_avg, ps.avg_daily_revenue)
-          END *
-          (1 + COALESCE(
-            (SELECT seasonality_factor
-             FROM sales_seasonality
-             WHERE MONTH(fd.forecast_date) = month
-             LIMIT 1),
-            0
-          )) *
-          -- Add some randomness within a small range (±5%)
-          (0.95 + (RAND() * 0.1))
-        ) as forecast_revenue,
-        CASE
-          WHEN ps.data_points >= 60 THEN 90
-          WHEN ps.data_points >= 30 THEN 80
-          WHEN ps.data_points >= 14 THEN 70
-          ELSE 60
-        END as confidence_level,
-        NOW() as last_calculated_at
-      FROM product_stats ps
-      CROSS JOIN forecast_dates fd
-      WHERE ps.avg_daily_quantity > 0
+      SELECT
+        ds.pid,
+        fd.forecast_date,
+        GREATEST(0,
+          AVG(ds.daily_quantity) *
+          (1 + COALESCE(sf.seasonality_factor, 0))
+        ) as forecast_units,
+        GREATEST(0,
+          COALESCE(
+            CASE
+              WHEN SUM(ds.day_count) >= 4 THEN AVG(ds.daily_revenue)
+              ELSE ps.overall_avg_revenue
+            END *
+            (1 + COALESCE(sf.seasonality_factor, 0)) *
+            (0.95 + (RAND() * 0.1)),
+            0
+          )
+        ) as forecast_revenue,
+        CASE
+          WHEN ps.total_days >= 60 THEN 90
+          WHEN ps.total_days >= 30 THEN 80
+          WHEN ps.total_days >= 14 THEN 70
+          ELSE 60
+        END as confidence_level,
+        NOW() as last_calculated_at
+      FROM temp_daily_sales ds
+      JOIN temp_product_stats ps ON ds.pid = ps.pid
+      CROSS JOIN temp_forecast_dates fd
+      LEFT JOIN sales_seasonality sf ON fd.month = sf.month
+      GROUP BY ds.pid, fd.forecast_date, ps.overall_avg_revenue, ps.total_days, sf.seasonality_factor
+      HAVING AVG(ds.daily_quantity) > 0
       ON DUPLICATE KEY UPDATE
         forecast_units = VALUES(forecast_units),
         forecast_revenue = VALUES(forecast_revenue),
@@ -118,6 +116,32 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
         last_calculated_at = NOW()
     `);
// Create temporary table for category stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_sales AS
SELECT
pc.cat_id,
DAYOFWEEK(o.date) as day_of_week,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue,
COUNT(DISTINCT DATE(o.date)) as day_count
FROM orders o
JOIN product_categories pc ON o.pid = pc.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY pc.cat_id, DAYOFWEEK(o.date)
`);
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_stats AS
SELECT
cat_id,
AVG(daily_revenue) as overall_avg_revenue,
SUM(day_count) as total_days
FROM temp_category_sales
GROUP BY cat_id
`);
     // Calculate category-level forecasts
     await connection.query(`
       INSERT INTO category_forecasts (
@@ -128,93 +152,37 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
confidence_level, confidence_level,
last_calculated_at last_calculated_at
) )
-            WITH category_daily_sales AS (
-                SELECT 
-                    pc.cat_id as category_id,
-                    DATE(o.date) as sale_date,
-                    SUM(o.quantity) as daily_quantity,
-                    SUM(o.price * o.quantity) as daily_revenue
-                FROM orders o
-                JOIN product_categories pc ON o.pid = pc.pid
-                WHERE o.canceled = false
-                AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
-                GROUP BY pc.cat_id, DATE(o.date)
-            ),
-            forecast_dates AS (
-                SELECT 
-                    DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date
-                FROM (
-                    SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
-                    SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION
-                    SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION
-                    SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION
-                    SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION
-                    SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION
-                    SELECT 30
-                ) numbers
-            ),
-            category_stats AS (
-                SELECT 
-                    cds.category_id,
-                    AVG(cds.daily_quantity) as avg_daily_quantity,
-                    STDDEV_SAMP(cds.daily_quantity) as std_daily_quantity,
-                    AVG(cds.daily_revenue) as avg_daily_revenue,
-                    STDDEV_SAMP(cds.daily_revenue) as std_daily_revenue,
-                    COUNT(*) as data_points,
-                    -- Calculate day-of-week averages
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 1 THEN cds.daily_revenue END) as sunday_avg,
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 2 THEN cds.daily_revenue END) as monday_avg,
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 3 THEN cds.daily_revenue END) as tuesday_avg,
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 4 THEN cds.daily_revenue END) as wednesday_avg,
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 5 THEN cds.daily_revenue END) as thursday_avg,
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 6 THEN cds.daily_revenue END) as friday_avg,
-                    AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 7 THEN cds.daily_revenue END) as saturday_avg
-                FROM category_daily_sales cds
-                GROUP BY cds.category_id
-            )
            SELECT 
-                cs.category_id,
+                cs.cat_id as category_id,
                fd.forecast_date,
                GREATEST(0,
-                    cs.avg_daily_quantity *
-                    (1 + COALESCE(
-                        (SELECT seasonality_factor
-                         FROM sales_seasonality
-                         WHERE MONTH(fd.forecast_date) = month
-                         LIMIT 1),
-                        0
-                    ))
+                    AVG(cs.daily_quantity) *
+                    (1 + COALESCE(sf.seasonality_factor, 0))
                ) as forecast_units,
                GREATEST(0,
-                    CASE DAYOFWEEK(fd.forecast_date)
-                        WHEN 1 THEN COALESCE(cs.sunday_avg, cs.avg_daily_revenue)
-                        WHEN 2 THEN COALESCE(cs.monday_avg, cs.avg_daily_revenue)
-                        WHEN 3 THEN COALESCE(cs.tuesday_avg, cs.avg_daily_revenue)
-                        WHEN 4 THEN COALESCE(cs.wednesday_avg, cs.avg_daily_revenue)
-                        WHEN 5 THEN COALESCE(cs.thursday_avg, cs.avg_daily_revenue)
-                        WHEN 6 THEN COALESCE(cs.friday_avg, cs.avg_daily_revenue)
-                        WHEN 7 THEN COALESCE(cs.saturday_avg, cs.avg_daily_revenue)
-                    END *
-                    (1 + COALESCE(
-                        (SELECT seasonality_factor
-                         FROM sales_seasonality
-                         WHERE MONTH(fd.forecast_date) = month
-                         LIMIT 1),
-                        0
-                    )) *
-                    -- Add some randomness within a small range (±5%)
-                    (0.95 + (RAND() * 0.1))
+                    COALESCE(
+                        CASE
+                            WHEN SUM(cs.day_count) >= 4 THEN AVG(cs.daily_revenue)
+                            ELSE ct.overall_avg_revenue
+                        END *
+                        (1 + COALESCE(sf.seasonality_factor, 0)) *
+                        (0.95 + (RAND() * 0.1)),
+                        0
+                    )
                ) as forecast_revenue,
                CASE
-                    WHEN cs.data_points >= 60 THEN 90
-                    WHEN cs.data_points >= 30 THEN 80
-                    WHEN cs.data_points >= 14 THEN 70
+                    WHEN ct.total_days >= 60 THEN 90
+                    WHEN ct.total_days >= 30 THEN 80
+                    WHEN ct.total_days >= 14 THEN 70
                    ELSE 60
                END as confidence_level,
                NOW() as last_calculated_at
-            FROM category_stats cs
-            CROSS JOIN forecast_dates fd
-            WHERE cs.avg_daily_quantity > 0
+            FROM temp_category_sales cs
+            JOIN temp_category_stats ct ON cs.cat_id = ct.cat_id
+            CROSS JOIN temp_forecast_dates fd
+            LEFT JOIN sales_seasonality sf ON fd.month = sf.month
+            GROUP BY cs.cat_id, fd.forecast_date, ct.overall_avg_revenue, ct.total_days, sf.seasonality_factor
+            HAVING AVG(cs.daily_quantity) > 0
            ON DUPLICATE KEY UPDATE
                forecast_units = VALUES(forecast_units),
                forecast_revenue = VALUES(forecast_revenue),
@@ -222,6 +190,15 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
                last_calculated_at = NOW()
        `);
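The ON DUPLICATE KEY UPDATE clause above only acts as an upsert if category_forecasts carries a unique key covering the category and the forecast date; that definition is not part of this diff, so the statement below is an assumption written out for clarity. Without such a key, each run would append duplicate forecast rows instead of refreshing them.

// Assumed key, not shown in this commit:
// ALTER TABLE category_forecasts
//     ADD UNIQUE KEY unique_category_forecast (category_id, forecast_date);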
+        // Clean up temporary tables
+        await connection.query(`
+            DROP TEMPORARY TABLE IF EXISTS temp_forecast_dates;
+            DROP TEMPORARY TABLE IF EXISTS temp_daily_sales;
+            DROP TEMPORARY TABLE IF EXISTS temp_product_stats;
+            DROP TEMPORARY TABLE IF EXISTS temp_category_sales;
+            DROP TEMPORARY TABLE IF EXISTS temp_category_stats;
+        `);
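One operational caveat: the cleanup above packs five DROP statements into a single query() call, which mysql2 only accepts when the pool is created with multipleStatements: true; otherwise the call fails with a parse error at the first semicolon. If the shared pool in ./utils/db does not enable that option (its configuration is not shown here), dropping the tables one call at a time is an equivalent fallback:

// Fallback sketch, assuming multipleStatements is NOT enabled on the pool.
const tempTables = [
    'temp_forecast_dates',
    'temp_daily_sales',
    'temp_product_stats',
    'temp_category_sales',
    'temp_category_stats'
];
for (const table of tempTables) {
    // One statement per call works with any connection settings.
    await connection.query(`DROP TEMPORARY TABLE IF EXISTS ${table}`);
}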
        return Math.floor(totalProducts * 1.0);
    } finally {
        connection.release();

View File

@@ -1,8 +1,20 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./utils/progress');
const { getConnection } = require('./utils/db');

async function calculateTimeAggregates(startTime, totalProducts, processedCount) {
    const connection = await getConnection();
    try {
+        outputProgress({
+            status: 'running',
+            operation: 'Calculating time aggregates',
+            current: Math.floor(totalProducts * 0.95),
+            total: totalProducts,
+            elapsed: formatElapsedTime(startTime),
+            remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.95), totalProducts),
+            rate: calculateRate(startTime, Math.floor(totalProducts * 0.95)),
+            percentage: '95'
+        });
        // Initial insert of time-based aggregates
        await connection.query(`
            INSERT INTO product_time_aggregates (
@@ -47,7 +59,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
                    SUM(received) as stock_received,
                    SUM(ordered) as stock_ordered
                FROM purchase_orders
-                WHERE receiving_status >= 30 -- Partial or fully received
+                WHERE status = 50
                GROUP BY pid, YEAR(date), MONTH(date)
            )
            SELECT 
@@ -126,7 +138,9 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
        return Math.floor(totalProducts * 0.65);
    } finally {
-        connection.release();
+        if (connection) {
+            connection.release();
+        }
    }
}
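The filter change above (receiving_status >= 30 replaced by status = 50) leans on numeric status codes whose meanings are defined elsewhere in the application. If those codes keep accumulating inside SQL strings, naming them once at the top of the script keeps the queries self-explanatory; the constant names below are guesses for illustration only, not definitions from the codebase:

// Hypothetical names for the magic numbers used in these scripts; the real
// meanings of 30, 40, and 50 live in the application's status definitions.
const RECEIVING_STATUS_PARTIAL = 30;
const RECEIVING_STATUS_COMPLETE = 40;
const PO_STATUS_FINALIZED = 50;
// e.g. WHERE status = ${PO_STATUS_FINALIZED} inside the template literal.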

View File

@@ -6,7 +6,7 @@ async function calculateVendorMetrics(startTime, totalProducts, processedCount)
    try {
        outputProgress({
            status: 'running',
-            operation: 'Calculating vendor metrics',
+            operation: 'Ensuring vendors exist in vendor_details',
            current: Math.floor(totalProducts * 0.7),
            total: totalProducts,
            elapsed: formatElapsedTime(startTime),
@@ -15,157 +15,122 @@ async function calculateVendorMetrics(startTime, totalProducts, processedCount)
            percentage: '70'
        });
-        // First, ensure all vendors exist in vendor_details
+        // First ensure all vendors exist in vendor_details
        await connection.query(`
-            INSERT IGNORE INTO vendor_details (vendor, status)
-            SELECT DISTINCT vendor, 'active' as status
+            INSERT IGNORE INTO vendor_details (vendor, status, created_at, updated_at)
+            SELECT DISTINCT 
+                vendor,
+                'active' as status,
+                NOW() as created_at,
+                NOW() as updated_at
            FROM products
            WHERE vendor IS NOT NULL
-            AND vendor NOT IN (SELECT vendor FROM vendor_details)
        `);
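Dropping the old AND vendor NOT IN (SELECT vendor FROM vendor_details) predicate is safe as long as vendor_details keys its rows by vendor (a primary or unique key, assumed here since it is not visible in this diff): INSERT IGNORE already skips rows that would collide with that key. A read-only sanity check for the assumption, sketched in the style of the surrounding code:

// Counts vendors that appear more than once; should return no rows if the
// assumed unique key on vendor_details.vendor actually exists.
const [dupes] = await connection.query(`
    SELECT vendor, COUNT(*) AS copies
    FROM vendor_details
    GROUP BY vendor
    HAVING COUNT(*) > 1
`);
if (dupes.length > 0) {
    console.warn(`vendor_details has ${dupes.length} duplicated vendor names`);
}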
-        // Calculate vendor performance metrics
+        outputProgress({
+            status: 'running',
+            operation: 'Calculating vendor metrics',
+            current: Math.floor(totalProducts * 0.8),
+            total: totalProducts,
+            elapsed: formatElapsedTime(startTime),
+            remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.8), totalProducts),
+            rate: calculateRate(startTime, Math.floor(totalProducts * 0.8)),
+            percentage: '80'
+        });
+
+        // Now calculate vendor metrics
        await connection.query(`
            INSERT INTO vendor_metrics (
                vendor,
+                total_revenue,
+                total_orders,
+                total_late_orders,
                avg_lead_time_days,
                on_time_delivery_rate,
                order_fill_rate,
-                total_orders,
-                total_late_orders,
-                total_purchase_value,
                avg_order_value,
                active_products,
                total_products,
-                total_revenue,
-                avg_margin_percent,
-                status
+                status,
+                last_calculated_at
            )
-            WITH vendor_orders AS (
+            WITH vendor_sales AS (
                SELECT 
-                    po.vendor,
-                    AVG(DATEDIFF(po.received_date, po.date)) as avg_lead_time_days,
-                    COUNT(*) as total_orders,
-                    COUNT(CASE WHEN po.received_date > po.expected_date THEN 1 END) as total_late_orders,
-                    SUM(po.cost_price * po.ordered) as total_purchase_value,
-                    AVG(po.cost_price * po.ordered) as avg_order_value,
-                    CASE
-                        WHEN COUNT(*) > 0 THEN
-                            (COUNT(CASE WHEN po.received = po.ordered THEN 1 END) * 100.0) / COUNT(*)
-                        ELSE 0
-                    END as order_fill_rate
-                FROM purchase_orders po
-                WHERE po.receiving_status >= 30 -- Partial or fully received
-                GROUP BY po.vendor
+                    p.vendor,
+                    SUM(o.quantity * o.price) as total_revenue,
+                    COUNT(DISTINCT o.id) as total_orders,
+                    COUNT(DISTINCT p.pid) as active_products
+                FROM products p
+                JOIN orders o ON p.pid = o.pid
+                WHERE o.canceled = false
+                AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
+                GROUP BY p.vendor
+            ),
+            vendor_po AS (
+                SELECT 
+                    p.vendor,
+                    COUNT(DISTINCT CASE WHEN po.receiving_status = 40 THEN po.id END) as received_orders,
+                    COUNT(DISTINCT po.id) as total_orders,
+                    AVG(CASE
+                        WHEN po.receiving_status = 40
+                        THEN DATEDIFF(po.received_date, po.date)
+                    END) as avg_lead_time_days
+                FROM products p
+                JOIN purchase_orders po ON p.pid = po.pid
+                WHERE po.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
+                GROUP BY p.vendor
            ),
            vendor_products AS (
                SELECT 
-                    p.vendor,
-                    COUNT(DISTINCT p.pid) as total_products,
-                    COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
-                    SUM(o.price * o.quantity) as total_revenue,
-                    CASE
-                        WHEN SUM(o.price * o.quantity) > 0 THEN
-                            (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
-                        ELSE 0
-                    END as avg_margin_percent
-                FROM products p
-                LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
-                GROUP BY p.vendor
+                    vendor,
+                    COUNT(DISTINCT pid) as total_products
+                FROM products
+                GROUP BY vendor
            )
            SELECT 
-                vd.vendor,
-                COALESCE(vo.avg_lead_time_days, 0) as avg_lead_time_days,
+                vs.vendor,
+                COALESCE(vs.total_revenue, 0) as total_revenue,
+                COALESCE(vp.total_orders, 0) as total_orders,
+                COALESCE(vp.total_orders - vp.received_orders, 0) as total_late_orders,
+                COALESCE(vp.avg_lead_time_days, 0) as avg_lead_time_days,
                CASE
-                    WHEN COALESCE(vo.total_orders, 0) > 0 THEN
-                        ((COALESCE(vo.total_orders, 0) - COALESCE(vo.total_late_orders, 0)) * 100.0) / COALESCE(vo.total_orders, 1)
+                    WHEN vp.total_orders > 0
+                    THEN (vp.received_orders / vp.total_orders) * 100
                    ELSE 0
                END as on_time_delivery_rate,
-                COALESCE(vo.order_fill_rate, 0) as order_fill_rate,
-                COALESCE(vo.total_orders, 0) as total_orders,
-                COALESCE(vo.total_late_orders, 0) as total_late_orders,
-                COALESCE(vo.total_purchase_value, 0) as total_purchase_value,
-                COALESCE(vo.avg_order_value, 0) as avg_order_value,
-                COALESCE(vp.active_products, 0) as active_products,
-                COALESCE(vp.total_products, 0) as total_products,
-                COALESCE(vp.total_revenue, 0) as total_revenue,
-                COALESCE(vp.avg_margin_percent, 0) as avg_margin_percent,
-                vd.status
-            FROM vendor_details vd
-            LEFT JOIN vendor_orders vo ON vd.vendor = vo.vendor
-            LEFT JOIN vendor_products vp ON vd.vendor = vp.vendor
+                CASE
+                    WHEN vp.total_orders > 0
+                    THEN (vp.received_orders / vp.total_orders) * 100
+                    ELSE 0
+                END as order_fill_rate,
+                CASE
+                    WHEN vs.total_orders > 0
+                    THEN vs.total_revenue / vs.total_orders
+                    ELSE 0
+                END as avg_order_value,
+                COALESCE(vs.active_products, 0) as active_products,
+                COALESCE(vpr.total_products, 0) as total_products,
+                'active' as status,
+                NOW() as last_calculated_at
+            FROM vendor_sales vs
+            LEFT JOIN vendor_po vp ON vs.vendor = vp.vendor
+            LEFT JOIN vendor_products vpr ON vs.vendor = vpr.vendor
+            WHERE vs.vendor IS NOT NULL
            ON DUPLICATE KEY UPDATE
+                total_revenue = VALUES(total_revenue),
+                total_orders = VALUES(total_orders),
+                total_late_orders = VALUES(total_late_orders),
                avg_lead_time_days = VALUES(avg_lead_time_days),
                on_time_delivery_rate = VALUES(on_time_delivery_rate),
                order_fill_rate = VALUES(order_fill_rate),
-                total_orders = VALUES(total_orders),
-                total_late_orders = VALUES(total_late_orders),
-                total_purchase_value = VALUES(total_purchase_value),
                avg_order_value = VALUES(avg_order_value),
                active_products = VALUES(active_products),
                total_products = VALUES(total_products),
-                total_revenue = VALUES(total_revenue),
-                avg_margin_percent = VALUES(avg_margin_percent),
                status = VALUES(status),
-                last_calculated_at = CURRENT_TIMESTAMP
+                last_calculated_at = VALUES(last_calculated_at)
        `);
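Two assumptions behind the query above are worth making explicit: the upsert requires a unique key on vendor_metrics.vendor (not visible in this diff), and because the outer SELECT is driven from vendor_sales, vendors with no orders in the last 12 months keep whatever metrics row they already had rather than being reset to zeros. A read-only sketch that lists those untouched vendors, using only columns already referenced above:

// Vendors with no non-canceled orders in the trailing 12 months; their
// vendor_metrics rows are not rewritten by the INSERT above.
const [staleVendors] = await connection.query(`
    SELECT vd.vendor
    FROM vendor_details vd
    WHERE vd.vendor NOT IN (
        SELECT p.vendor
        FROM products p
        JOIN orders o ON o.pid = p.pid
        WHERE o.canceled = false
          AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
          AND p.vendor IS NOT NULL
    )
`);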
-        // Calculate vendor time-based metrics
-        await connection.query(`
-            INSERT INTO vendor_time_metrics (
-                vendor,
-                year,
-                month,
-                total_orders,
-                late_orders,
-                avg_lead_time_days,
-                total_purchase_value,
-                total_revenue,
-                avg_margin_percent
-            )
-            WITH vendor_time_data AS (
-                SELECT 
-                    vd.vendor,
-                    YEAR(po.date) as year,
-                    MONTH(po.date) as month,
-                    COUNT(DISTINCT po.po_id) as total_orders,
-                    COUNT(DISTINCT CASE WHEN po.received_date > po.expected_date THEN po.po_id END) as late_orders,
-                    AVG(DATEDIFF(po.received_date, po.date)) as avg_lead_time_days,
-                    SUM(po.cost_price * po.ordered) as total_purchase_value,
-                    SUM(o.price * o.quantity) as total_revenue,
-                    CASE
-                        WHEN SUM(o.price * o.quantity) > 0 THEN
-                            (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
-                        ELSE 0
-                    END as avg_margin_percent
-                FROM vendor_details vd
-                LEFT JOIN products p ON vd.vendor = p.vendor
-                LEFT JOIN purchase_orders po ON p.pid = po.pid
-                LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
-                WHERE po.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
-                GROUP BY vd.vendor, YEAR(po.date), MONTH(po.date)
-            )
-            SELECT 
-                vendor,
-                year,
-                month,
-                COALESCE(total_orders, 0) as total_orders,
-                COALESCE(late_orders, 0) as late_orders,
-                COALESCE(avg_lead_time_days, 0) as avg_lead_time_days,
-                COALESCE(total_purchase_value, 0) as total_purchase_value,
-                COALESCE(total_revenue, 0) as total_revenue,
-                COALESCE(avg_margin_percent, 0) as avg_margin_percent
-            FROM vendor_time_data
-            ON DUPLICATE KEY UPDATE
-                total_orders = VALUES(total_orders),
-                late_orders = VALUES(late_orders),
-                avg_lead_time_days = VALUES(avg_lead_time_days),
-                total_purchase_value = VALUES(total_purchase_value),
-                total_revenue = VALUES(total_revenue),
-                avg_margin_percent = VALUES(avg_margin_percent)
-        `);
-        return Math.floor(totalProducts * 0.75);
+        return Math.floor(totalProducts * 0.9);
    } finally {
        connection.release();
    }
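Each of these calculate* functions takes (startTime, totalProducts, processedCount) and returns an updated count expressed as a fraction of totalProducts (0.65, 0.9, 0.95, 1.0 in the hunks above), which the caller presumably threads into the next stage's progress reporting. A minimal sketch of that orchestration, with the ordering and the Date.now()-based startTime being assumptions rather than anything shown in this diff:

// Hypothetical runner; the real entry point is not part of this commit.
async function runCalculations(totalProducts) {
    const startTime = Date.now(); // assumed to be the format formatElapsedTime expects
    let processedCount = 0;
    processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount);
    processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount);
    processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount);
    return processedCount; // final stage returns Math.floor(totalProducts * 1.0)
}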