Optimize and fix calculate scripts
@@ -5,8 +5,15 @@ process.chdir(path.dirname(__filename));

require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });

// Set to 1 to skip product metrics and only calculate the remaining metrics
const SKIP_PRODUCT_METRICS = 0;
// Configuration flags for controlling which metrics to calculate
// Set to 1 to skip the corresponding calculation, 0 to run it
const SKIP_PRODUCT_METRICS = 1; // Skip all product metrics
const SKIP_TIME_AGGREGATES = 1; // Skip time aggregates
const SKIP_FINANCIAL_METRICS = 1; // Skip financial metrics
const SKIP_VENDOR_METRICS = 1; // Skip vendor metrics
const SKIP_CATEGORY_METRICS = 1; // Skip category metrics
const SKIP_BRAND_METRICS = 1; // Skip brand metrics
const SKIP_SALES_FORECASTS = 1; // Skip sales forecasts

// Add error handler for uncaught exceptions
process.on('uncaughtException', (error) => {
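// Not part of this commit: the body of the uncaughtException handler is cut off by the
// diff context above. A minimal sketch of what such a handler typically contains
// (logError is used elsewhere in these scripts; the exit code is an assumption):
//   logError(error, 'Uncaught exception in calculate script');
//   process.exit(1); // fail fast so a scheduled re-run starts from a clean state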
@@ -137,50 +144,136 @@ async function calculateMetrics() {
}

// Calculate time-based aggregates
processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount);
if (!SKIP_TIME_AGGREGATES) {
    processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount);
} else {
    console.log('Skipping time aggregates calculation');
}

// Calculate financial metrics
processedCount = await calculateFinancialMetrics(startTime, totalProducts, processedCount);
if (!SKIP_FINANCIAL_METRICS) {
    processedCount = await calculateFinancialMetrics(startTime, totalProducts, processedCount);
} else {
    console.log('Skipping financial metrics calculation');
}

// Calculate vendor metrics
processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount);
if (!SKIP_VENDOR_METRICS) {
    processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount);
} else {
    console.log('Skipping vendor metrics calculation');
}

// Calculate category metrics
processedCount = await calculateCategoryMetrics(startTime, totalProducts, processedCount);
if (!SKIP_CATEGORY_METRICS) {
    processedCount = await calculateCategoryMetrics(startTime, totalProducts, processedCount);
} else {
    console.log('Skipping category metrics calculation');
}

// Calculate brand metrics
processedCount = await calculateBrandMetrics(startTime, totalProducts, processedCount);
if (!SKIP_BRAND_METRICS) {
    processedCount = await calculateBrandMetrics(startTime, totalProducts, processedCount);
} else {
    console.log('Skipping brand metrics calculation');
}

// Calculate sales forecasts
processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount);
if (!SKIP_SALES_FORECASTS) {
    processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount);
} else {
    console.log('Skipping sales forecasts calculation');
}
// Calculate ABC classification
const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 };

// First, create and populate the rankings table with an index
await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');
await connection.query(`
WITH revenue_rankings AS (
SELECT
product_id,
total_revenue,
PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) * 100 as revenue_percentile
FROM product_metrics
),
classification_update AS (
SELECT
product_id,
CREATE TEMPORARY TABLE temp_revenue_ranks (
pid BIGINT NOT NULL,
total_revenue DECIMAL(10,3),
rank_num INT,
total_count INT,
PRIMARY KEY (pid),
INDEX (rank_num)
) ENGINE=MEMORY
`);

await connection.query(`
INSERT INTO temp_revenue_ranks
SELECT
pid,
total_revenue,
@rank := @rank + 1 as rank_num,
@total_count := @rank as total_count
FROM (
SELECT pid, total_revenue
FROM product_metrics
WHERE total_revenue > 0
ORDER BY total_revenue DESC
) ranked,
(SELECT @rank := 0) r
`);

// Get total count for percentage calculation
const [rankingCount] = await connection.query('SELECT MAX(rank_num) as total_count FROM temp_revenue_ranks');
const totalCount = rankingCount[0].total_count || 1;

// Process updates in batches
let abcProcessedCount = 0;
const batchSize = 5000;

while (true) {
// First get a batch of PIDs that need updating
const [pids] = await connection.query(`
SELECT pm.pid
FROM product_metrics pm
LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
WHERE pm.abc_class IS NULL
OR pm.abc_class !=
CASE
WHEN revenue_percentile <= ? THEN 'A'
WHEN revenue_percentile <= ? THEN 'B'
WHEN tr.rank_num IS NULL THEN 'C'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'A'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'B'
ELSE 'C'
END as abc_class
FROM revenue_rankings
)
UPDATE product_metrics pm
JOIN classification_update cu ON pm.product_id = cu.product_id
SET pm.abc_class = cu.abc_class,
pm.last_calculated_at = NOW()
`, [abcThresholds.a_threshold, abcThresholds.b_threshold]);
END
LIMIT ?
`, [totalCount, abcThresholds.a_threshold,
totalCount, abcThresholds.b_threshold,
batchSize]);

if (pids.length === 0) {
break;
}

// Then update just those PIDs
const [result] = await connection.query(`
UPDATE product_metrics pm
LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
SET pm.abc_class =
CASE
WHEN tr.rank_num IS NULL THEN 'C'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'A'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'B'
ELSE 'C'
END,
pm.last_calculated_at = NOW()
WHERE pm.pid IN (?)
`, [totalCount, abcThresholds.a_threshold,
totalCount, abcThresholds.b_threshold,
pids.map(row => row.pid)]);

abcProcessedCount += result.affectedRows;

// Small delay between batches to allow other transactions
await new Promise(resolve => setTimeout(resolve, 100));
}

// Clean up
await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');

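For reference, the thresholds pulled from abc_classification_config are percentile cut-offs: with the defaults above (a_threshold = 20, b_threshold = 50), products in the top 20% of revenue rank become class A, the next 30% class B, and everything else C. A small self-contained sketch of the same rule in plain JavaScript (names are illustrative, not from this commit):

// Illustrative only: mirrors the SQL CASE used in the batched UPDATE above.
function abcClass(rankNum, totalCount, aThreshold = 20, bThreshold = 50) {
  if (rankNum == null) return 'C';                 // never ranked (no revenue) -> C
  const percentile = (rankNum / totalCount) * 100;
  if (percentile <= aThreshold) return 'A';
  if (percentile <= bThreshold) return 'B';
  return 'C';
}
// With 200 ranked products: abcClass(30, 200) === 'A' (15th percentile),
// abcClass(90, 200) === 'B' (45th), abcClass(150, 200) === 'C' (75th).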
// Final success message
global.outputProgress({
@@ -15,7 +15,7 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
percentage: '95'
});

// Calculate brand metrics
// Calculate brand metrics with optimized queries
await connection.query(`
INSERT INTO brand_metrics (
brand,
@@ -28,71 +28,77 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
avg_margin,
growth_rate
)
WITH brand_data AS (
WITH filtered_products AS (
SELECT
p.brand,
COUNT(DISTINCT CASE WHEN p.stock_quantity <= 5000 THEN p.pid END) as product_count,
COUNT(DISTINCT CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END) as active_products,
SUM(CASE
p.*,
CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
CASE
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
ELSE p.stock_quantity
END) as total_stock_units,
SUM(CASE
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.cost_price IS NULL OR p.cost_price < 0 THEN 0
ELSE p.stock_quantity * p.cost_price
END) as total_stock_cost,
SUM(CASE
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.price IS NULL OR p.price < 0 THEN 0
ELSE p.stock_quantity * p.price
END) as total_stock_retail,
SUM(CASE
WHEN o.quantity IS NULL OR o.price IS NULL OR o.quantity < 0 OR o.price < 0 THEN 0
ELSE o.price * o.quantity
END) as total_revenue,
END as valid_stock
FROM products p
WHERE p.brand IS NOT NULL
),
sales_periods AS (
SELECT
p.brand,
SUM(o.quantity * o.price) as period_revenue,
CASE
WHEN SUM(o.price * o.quantity) > 0 THEN
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH) THEN 'current'
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) THEN 'previous'
END as period_type
FROM filtered_products p
JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
GROUP BY p.brand, period_type
),
brand_data AS (
SELECT
p.brand,
COUNT(DISTINCT p.valid_pid) as product_count,
COUNT(DISTINCT p.active_pid) as active_products,
SUM(p.valid_stock) as total_stock_units,
SUM(p.valid_stock * p.cost_price) as total_stock_cost,
SUM(p.valid_stock * p.price) as total_stock_retail,
COALESCE(SUM(o.quantity * o.price), 0) as total_revenue,
CASE
WHEN SUM(o.quantity * o.price) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin,
-- Current period (last 3 months)
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
THEN COALESCE(o.quantity * o.price, 0)
ELSE 0
END) as current_period_sales,
-- Previous year same period
SUM(CASE
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
THEN COALESCE(o.quantity * o.price, 0)
ELSE 0
END) as previous_year_period_sales
FROM products p
END as avg_margin
FROM filtered_products p
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHERE p.brand IS NOT NULL
GROUP BY p.brand
)
SELECT
brand,
product_count,
active_products,
total_stock_units,
total_stock_cost,
total_stock_retail,
total_revenue,
avg_margin,
bd.brand,
bd.product_count,
bd.active_products,
bd.total_stock_units,
bd.total_stock_cost,
bd.total_stock_retail,
bd.total_revenue,
bd.avg_margin,
CASE
WHEN previous_year_period_sales = 0 AND current_period_sales > 0 THEN 100.0
WHEN previous_year_period_sales = 0 THEN 0.0
WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0
AND MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) > 0 THEN 100.0
WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0 THEN 0.0
ELSE LEAST(
GREATEST(
((current_period_sales - previous_year_period_sales) /
NULLIF(previous_year_period_sales, 0)) * 100.0,
((MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) -
MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END)) /
NULLIF(MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END), 0)) * 100.0,
-100.0
),
999.99
)
END as growth_rate
FROM brand_data
FROM brand_data bd
LEFT JOIN sales_periods sp ON bd.brand = sp.brand
GROUP BY bd.brand, bd.product_count, bd.active_products, bd.total_stock_units,
bd.total_stock_cost, bd.total_stock_retail, bd.total_revenue, bd.avg_margin
ON DUPLICATE KEY UPDATE
product_count = VALUES(product_count),
active_products = VALUES(active_products),
@@ -105,7 +111,7 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
last_calculated_at = CURRENT_TIMESTAMP
`);
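The growth_rate written above compares the last 3 months of revenue against the same window a year earlier (12-15 months back) and clamps the result to the range [-100, 999.99] so one outlier brand cannot overflow the column. A hedged sketch of that clamp in plain JavaScript (illustrative, not part of the commit):

// Sketch of the YoY growth clamp used by the SQL above.
function growthRate(currentPeriodSales, previousYearPeriodSales) {
  if (previousYearPeriodSales === 0 && currentPeriodSales > 0) return 100.0;
  if (previousYearPeriodSales === 0) return 0.0;
  const pct = ((currentPeriodSales - previousYearPeriodSales) / previousYearPeriodSales) * 100.0;
  return Math.min(Math.max(pct, -100.0), 999.99); // LEAST(GREATEST(..., -100.0), 999.99)
}
// growthRate(150, 100) === 50, growthRate(10, 100) === -90, growthRate(5000, 1) === 999.99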
|
||||
|
||||
// Calculate brand time-based metrics
|
||||
// Calculate brand time-based metrics with optimized query
|
||||
await connection.query(`
|
||||
INSERT INTO brand_time_metrics (
|
||||
brand,
|
||||
@@ -119,38 +125,41 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
|
||||
total_revenue,
|
||||
avg_margin
|
||||
)
|
||||
SELECT
|
||||
p.brand,
|
||||
YEAR(o.date) as year,
|
||||
MONTH(o.date) as month,
|
||||
COUNT(DISTINCT CASE WHEN p.stock_quantity <= 5000 THEN p.pid END) as product_count,
|
||||
COUNT(DISTINCT CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END) as active_products,
|
||||
SUM(CASE
|
||||
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
|
||||
ELSE p.stock_quantity
|
||||
END) as total_stock_units,
|
||||
SUM(CASE
|
||||
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.cost_price IS NULL OR p.cost_price < 0 THEN 0
|
||||
ELSE p.stock_quantity * p.cost_price
|
||||
END) as total_stock_cost,
|
||||
SUM(CASE
|
||||
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 OR p.price IS NULL OR p.price < 0 THEN 0
|
||||
ELSE p.stock_quantity * p.price
|
||||
END) as total_stock_retail,
|
||||
SUM(CASE
|
||||
WHEN o.quantity IS NULL OR o.price IS NULL OR o.quantity < 0 OR o.price < 0 THEN 0
|
||||
ELSE o.price * o.quantity
|
||||
END) as total_revenue,
|
||||
CASE
|
||||
WHEN SUM(o.price * o.quantity) > 0 THEN
|
||||
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
|
||||
ELSE 0
|
||||
END as avg_margin
|
||||
FROM products p
|
||||
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
|
||||
WHERE p.brand IS NOT NULL
|
||||
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
GROUP BY p.brand, YEAR(o.date), MONTH(o.date)
|
||||
WITH filtered_products AS (
|
||||
SELECT
|
||||
p.*,
|
||||
CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
|
||||
CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
|
||||
CASE
|
||||
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
|
||||
ELSE p.stock_quantity
|
||||
END as valid_stock
|
||||
FROM products p
|
||||
WHERE p.brand IS NOT NULL
|
||||
),
|
||||
monthly_metrics AS (
|
||||
SELECT
|
||||
p.brand,
|
||||
YEAR(o.date) as year,
|
||||
MONTH(o.date) as month,
|
||||
COUNT(DISTINCT p.valid_pid) as product_count,
|
||||
COUNT(DISTINCT p.active_pid) as active_products,
|
||||
SUM(p.valid_stock) as total_stock_units,
|
||||
SUM(p.valid_stock * p.cost_price) as total_stock_cost,
|
||||
SUM(p.valid_stock * p.price) as total_stock_retail,
|
||||
SUM(o.quantity * o.price) as total_revenue,
|
||||
CASE
|
||||
WHEN SUM(o.quantity * o.price) > 0 THEN
|
||||
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
|
||||
ELSE 0
|
||||
END as avg_margin
|
||||
FROM filtered_products p
|
||||
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
|
||||
WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
GROUP BY p.brand, YEAR(o.date), MONTH(o.date)
|
||||
)
|
||||
SELECT *
|
||||
FROM monthly_metrics
|
||||
ON DUPLICATE KEY UPDATE
|
||||
product_count = VALUES(product_count),
|
||||
active_products = VALUES(active_products),
|
||||
|
||||
@@ -15,98 +15,104 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
|
||||
percentage: '85'
|
||||
});
|
||||
|
||||
// Calculate category performance metrics
|
||||
// First, calculate base category metrics
|
||||
await connection.query(`
|
||||
INSERT INTO category_metrics (
|
||||
category_id,
|
||||
product_count,
|
||||
active_products,
|
||||
total_value,
|
||||
avg_margin,
|
||||
turnover_rate,
|
||||
growth_rate,
|
||||
status
|
||||
)
|
||||
WITH category_sales AS (
|
||||
SELECT
|
||||
c.cat_id as category_id,
|
||||
COUNT(DISTINCT p.pid) as product_count,
|
||||
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
|
||||
SUM(p.stock_quantity * p.cost_price) as total_value,
|
||||
CASE
|
||||
WHEN SUM(o.price * o.quantity) > 0
|
||||
THEN (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
|
||||
ELSE 0
|
||||
END as avg_margin,
|
||||
CASE
|
||||
WHEN AVG(GREATEST(p.stock_quantity, 0)) >= 0.01
|
||||
THEN LEAST(
|
||||
SUM(CASE
|
||||
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
|
||||
THEN COALESCE(o.quantity, 0)
|
||||
ELSE 0
|
||||
END) /
|
||||
GREATEST(
|
||||
AVG(GREATEST(p.stock_quantity, 0)),
|
||||
1.0
|
||||
),
|
||||
999.99
|
||||
)
|
||||
ELSE 0
|
||||
END as turnover_rate,
|
||||
-- Current period (last 3 months)
|
||||
SUM(CASE
|
||||
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
|
||||
THEN COALESCE(o.quantity * o.price, 0)
|
||||
ELSE 0
|
||||
END) as current_period_sales,
|
||||
-- Previous year same period
|
||||
SUM(CASE
|
||||
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
THEN COALESCE(o.quantity * o.price, 0)
|
||||
ELSE 0
|
||||
END) as previous_year_period_sales,
|
||||
c.status
|
||||
FROM categories c
|
||||
LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
|
||||
LEFT JOIN products p ON pc.pid = p.pid
|
||||
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
|
||||
GROUP BY c.cat_id, c.status
|
||||
status,
|
||||
last_calculated_at
|
||||
)
|
||||
SELECT
|
||||
category_id,
|
||||
product_count,
|
||||
active_products,
|
||||
total_value,
|
||||
COALESCE(avg_margin, 0) as avg_margin,
|
||||
COALESCE(turnover_rate, 0) as turnover_rate,
|
||||
-- Enhanced YoY growth rate calculation
|
||||
CASE
|
||||
WHEN previous_year_period_sales = 0 AND current_period_sales > 0 THEN 100.0
|
||||
WHEN previous_year_period_sales = 0 THEN 0.0
|
||||
ELSE LEAST(
|
||||
GREATEST(
|
||||
((current_period_sales - previous_year_period_sales) /
|
||||
NULLIF(previous_year_period_sales, 0)) * 100.0,
|
||||
-100.0
|
||||
),
|
||||
999.99
|
||||
)
|
||||
END as growth_rate,
|
||||
status
|
||||
FROM category_sales
|
||||
c.cat_id,
|
||||
COUNT(DISTINCT p.pid) as product_count,
|
||||
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
|
||||
COALESCE(SUM(p.stock_quantity * p.cost_price), 0) as total_value,
|
||||
c.status,
|
||||
NOW() as last_calculated_at
|
||||
FROM categories c
|
||||
LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
|
||||
LEFT JOIN products p ON pc.pid = p.pid
|
||||
GROUP BY c.cat_id, c.status
|
||||
ON DUPLICATE KEY UPDATE
|
||||
product_count = VALUES(product_count),
|
||||
active_products = VALUES(active_products),
|
||||
total_value = VALUES(total_value),
|
||||
avg_margin = VALUES(avg_margin),
|
||||
turnover_rate = VALUES(turnover_rate),
|
||||
growth_rate = VALUES(growth_rate),
|
||||
status = VALUES(status),
|
||||
last_calculated_at = CURRENT_TIMESTAMP
|
||||
last_calculated_at = VALUES(last_calculated_at)
|
||||
`);
|
||||
|
||||
// Calculate category time-based metrics
|
||||
// Then update with margin and turnover data
|
||||
await connection.query(`
|
||||
WITH category_sales AS (
|
||||
SELECT
|
||||
pc.cat_id,
|
||||
SUM(o.quantity * o.price) as total_sales,
|
||||
SUM(o.quantity * (o.price - p.cost_price)) as total_margin,
|
||||
SUM(o.quantity) as units_sold,
|
||||
AVG(GREATEST(p.stock_quantity, 0)) as avg_stock
|
||||
FROM product_categories pc
|
||||
JOIN products p ON pc.pid = p.pid
|
||||
JOIN orders o ON p.pid = o.pid
|
||||
WHERE o.canceled = false
|
||||
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
|
||||
GROUP BY pc.cat_id
|
||||
)
|
||||
UPDATE category_metrics cm
|
||||
JOIN category_sales cs ON cm.category_id = cs.cat_id
|
||||
SET
|
||||
cm.avg_margin = COALESCE(cs.total_margin * 100.0 / NULLIF(cs.total_sales, 0), 0),
|
||||
cm.turnover_rate = LEAST(COALESCE(cs.units_sold / NULLIF(cs.avg_stock, 0), 0), 999.99),
|
||||
cm.last_calculated_at = NOW()
|
||||
`);
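In the update above, turnover_rate is units sold over the trailing year divided by the average stock on hand, with NULLIF guarding a zero denominator and LEAST capping the stored value at 999.99. The same arithmetic as a quick illustrative helper (not from the commit):

// Sketch: category turnover = units sold in the window / average stock on hand.
function turnoverRate(unitsSold, avgStock) {
  if (!avgStock || avgStock <= 0) return 0;        // NULLIF(avg_stock, 0) guard
  return Math.min(unitsSold / avgStock, 999.99);   // LEAST(..., 999.99) cap
}
// turnoverRate(480, 40) === 12 -> the category turns its average stock about 12 times a year.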
|
||||
|
||||
// Finally update growth rates
|
||||
await connection.query(`
|
||||
WITH current_period AS (
|
||||
SELECT
|
||||
pc.cat_id,
|
||||
SUM(o.quantity * o.price) as revenue
|
||||
FROM product_categories pc
|
||||
JOIN products p ON pc.pid = p.pid
|
||||
JOIN orders o ON p.pid = o.pid
|
||||
WHERE o.canceled = false
|
||||
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
|
||||
GROUP BY pc.cat_id
|
||||
),
|
||||
previous_period AS (
|
||||
SELECT
|
||||
pc.cat_id,
|
||||
SUM(o.quantity * o.price) as revenue
|
||||
FROM product_categories pc
|
||||
JOIN products p ON pc.pid = p.pid
|
||||
JOIN orders o ON p.pid = o.pid
|
||||
WHERE o.canceled = false
|
||||
AND o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
|
||||
AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
GROUP BY pc.cat_id
|
||||
)
|
||||
UPDATE category_metrics cm
|
||||
LEFT JOIN current_period cp ON cm.category_id = cp.cat_id
|
||||
LEFT JOIN previous_period pp ON cm.category_id = pp.cat_id
|
||||
SET
|
||||
cm.growth_rate = CASE
|
||||
WHEN pp.revenue = 0 AND COALESCE(cp.revenue, 0) > 0 THEN 100.0
|
||||
WHEN pp.revenue = 0 THEN 0.0
|
||||
ELSE LEAST(
|
||||
GREATEST(
|
||||
((COALESCE(cp.revenue, 0) - pp.revenue) / pp.revenue) * 100.0,
|
||||
-100.0
|
||||
),
|
||||
999.99
|
||||
)
|
||||
END,
|
||||
cm.last_calculated_at = NOW()
|
||||
WHERE cp.cat_id IS NOT NULL OR pp.cat_id IS NOT NULL
|
||||
`);
|
||||
|
||||
// Calculate time-based metrics
|
||||
await connection.query(`
|
||||
INSERT INTO category_time_metrics (
|
||||
category_id,
|
||||
@@ -120,29 +126,28 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
|
||||
turnover_rate
|
||||
)
|
||||
SELECT
|
||||
c.cat_id as category_id,
|
||||
pc.cat_id,
|
||||
YEAR(o.date) as year,
|
||||
MONTH(o.date) as month,
|
||||
COUNT(DISTINCT p.pid) as product_count,
|
||||
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
|
||||
SUM(p.stock_quantity * p.cost_price) as total_value,
|
||||
SUM(o.price * o.quantity) as total_revenue,
|
||||
CASE
|
||||
WHEN SUM(o.price * o.quantity) > 0
|
||||
THEN (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
|
||||
ELSE 0
|
||||
END as avg_margin,
|
||||
CASE
|
||||
WHEN AVG(p.stock_quantity) > 0
|
||||
THEN SUM(o.quantity) / AVG(p.stock_quantity)
|
||||
ELSE 0
|
||||
END as turnover_rate
|
||||
FROM categories c
|
||||
LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
|
||||
LEFT JOIN products p ON pc.pid = p.pid
|
||||
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
|
||||
WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
GROUP BY c.cat_id, YEAR(o.date), MONTH(o.date)
|
||||
SUM(o.quantity * o.price) as total_revenue,
|
||||
COALESCE(
|
||||
SUM(o.quantity * (o.price - p.cost_price)) * 100.0 /
|
||||
NULLIF(SUM(o.quantity * o.price), 0),
|
||||
0
|
||||
) as avg_margin,
|
||||
COALESCE(
|
||||
SUM(o.quantity) / NULLIF(AVG(GREATEST(p.stock_quantity, 0)), 0),
|
||||
0
|
||||
) as turnover_rate
|
||||
FROM product_categories pc
|
||||
JOIN products p ON pc.pid = p.pid
|
||||
JOIN orders o ON p.pid = o.pid
|
||||
WHERE o.canceled = false
|
||||
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
GROUP BY pc.cat_id, YEAR(o.date), MONTH(o.date)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
product_count = VALUES(product_count),
|
||||
active_products = VALUES(active_products),
|
||||
@@ -152,73 +157,45 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
|
||||
turnover_rate = VALUES(turnover_rate)
|
||||
`);
|
||||
|
||||
// Calculate category sales metrics with NULL brand handling
|
||||
await connection.query(`
|
||||
INSERT INTO category_sales_metrics (
|
||||
category_id,
|
||||
brand,
|
||||
period_start,
|
||||
period_end,
|
||||
avg_daily_sales,
|
||||
total_sold,
|
||||
num_products,
|
||||
avg_price,
|
||||
last_calculated_at
|
||||
)
|
||||
WITH date_ranges AS (
|
||||
// Calculate sales metrics for different time periods
|
||||
const periods = [30, 90, 180, 365];
|
||||
for (const days of periods) {
|
||||
await connection.query(`
|
||||
INSERT INTO category_sales_metrics (
|
||||
category_id,
|
||||
brand,
|
||||
period_start,
|
||||
period_end,
|
||||
avg_daily_sales,
|
||||
total_sold,
|
||||
num_products,
|
||||
avg_price,
|
||||
last_calculated_at
|
||||
)
|
||||
SELECT
|
||||
DATE_SUB(CURDATE(), INTERVAL 30 DAY) as period_start,
|
||||
CURDATE() as period_end
|
||||
UNION ALL
|
||||
SELECT
|
||||
DATE_SUB(CURDATE(), INTERVAL 90 DAY),
|
||||
CURDATE()
|
||||
UNION ALL
|
||||
SELECT
|
||||
DATE_SUB(CURDATE(), INTERVAL 180 DAY),
|
||||
CURDATE()
|
||||
UNION ALL
|
||||
SELECT
|
||||
DATE_SUB(CURDATE(), INTERVAL 365 DAY),
|
||||
CURDATE()
|
||||
),
|
||||
category_metrics AS (
|
||||
SELECT
|
||||
c.cat_id as category_id,
|
||||
pc.cat_id as category_id,
|
||||
COALESCE(p.brand, 'Unbranded') as brand,
|
||||
dr.period_start,
|
||||
dr.period_end,
|
||||
COUNT(DISTINCT p.pid) as num_products,
|
||||
COALESCE(SUM(o.quantity), 0) / DATEDIFF(dr.period_end, dr.period_start) as avg_daily_sales,
|
||||
DATE_SUB(CURDATE(), INTERVAL ? DAY) as period_start,
|
||||
CURDATE() as period_end,
|
||||
COALESCE(SUM(o.quantity), 0) / ? as avg_daily_sales,
|
||||
COALESCE(SUM(o.quantity), 0) as total_sold,
|
||||
COALESCE(AVG(o.price), 0) as avg_price
|
||||
FROM categories c
|
||||
JOIN product_categories pc ON c.cat_id = pc.cat_id
|
||||
COUNT(DISTINCT p.pid) as num_products,
|
||||
COALESCE(AVG(o.price), 0) as avg_price,
|
||||
NOW() as last_calculated_at
|
||||
FROM product_categories pc
|
||||
JOIN products p ON pc.pid = p.pid
|
||||
CROSS JOIN date_ranges dr
|
||||
LEFT JOIN orders o ON p.pid = o.pid
|
||||
AND o.date BETWEEN dr.period_start AND dr.period_end
|
||||
AND o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
|
||||
AND o.canceled = false
|
||||
GROUP BY c.cat_id, COALESCE(p.brand, 'Unbranded'), dr.period_start, dr.period_end
|
||||
)
|
||||
SELECT
|
||||
category_id,
|
||||
brand,
|
||||
period_start,
|
||||
period_end,
|
||||
avg_daily_sales,
|
||||
total_sold,
|
||||
num_products,
|
||||
avg_price,
|
||||
NOW() as last_calculated_at
|
||||
FROM category_metrics
|
||||
ON DUPLICATE KEY UPDATE
|
||||
avg_daily_sales = VALUES(avg_daily_sales),
|
||||
total_sold = VALUES(total_sold),
|
||||
num_products = VALUES(num_products),
|
||||
avg_price = VALUES(avg_price),
|
||||
last_calculated_at = NOW()
|
||||
`);
|
||||
GROUP BY pc.cat_id, p.brand
|
||||
ON DUPLICATE KEY UPDATE
|
||||
avg_daily_sales = VALUES(avg_daily_sales),
|
||||
total_sold = VALUES(total_sold),
|
||||
num_products = VALUES(num_products),
|
||||
avg_price = VALUES(avg_price),
|
||||
last_calculated_at = NOW()
|
||||
`, [days, days, days]);
|
||||
}
|
||||
|
||||
return Math.floor(totalProducts * 0.9);
|
||||
} finally {
|
||||
|
||||
@@ -15,9 +15,9 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
percentage: '60'
});

// Calculate financial metrics with optimized query
await connection.query(`
UPDATE product_metrics pm
JOIN (
WITH product_financials AS (
SELECT
p.pid,
p.cost_price * p.stock_quantity as inventory_value,
@@ -26,48 +26,54 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
MIN(o.date) as first_sale_date,
MAX(o.date) as last_sale_date,
DATEDIFF(MAX(o.date), MIN(o.date)) + 1 as calculation_period_days
DATEDIFF(MAX(o.date), MIN(o.date)) + 1 as calculation_period_days,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
GROUP BY p.pid
) fin ON pm.pid = fin.pid
)
UPDATE product_metrics pm
JOIN product_financials pf ON pm.pid = pf.pid
SET
pm.inventory_value = COALESCE(fin.inventory_value, 0),
pm.total_revenue = COALESCE(fin.total_revenue, 0),
pm.cost_of_goods_sold = COALESCE(fin.cost_of_goods_sold, 0),
pm.gross_profit = COALESCE(fin.gross_profit, 0),
pm.inventory_value = COALESCE(pf.inventory_value, 0),
pm.total_revenue = COALESCE(pf.total_revenue, 0),
pm.cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0),
pm.gross_profit = COALESCE(pf.gross_profit, 0),
pm.gmroi = CASE
WHEN COALESCE(fin.inventory_value, 0) > 0 AND fin.calculation_period_days > 0 THEN
(COALESCE(fin.gross_profit, 0) * (365.0 / fin.calculation_period_days)) / COALESCE(fin.inventory_value, 0)
WHEN COALESCE(pf.inventory_value, 0) > 0 AND pf.active_days > 0 THEN
(COALESCE(pf.gross_profit, 0) * (365.0 / pf.active_days)) / COALESCE(pf.inventory_value, 0)
ELSE 0
END
`);

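The GMROI expression above annualizes gross profit by the number of days the product actually sold (active_days) before dividing by inventory value. The same arithmetic as a small sketch (illustrative JavaScript, not part of the commit):

// Sketch: annualized GMROI, mirroring the SQL CASE above.
// With $1,200 gross profit earned over 90 active days and $2,000 of inventory:
// (1200 * (365 / 90)) / 2000 ≈ 2.43 turns of gross margin per year.
function gmroi(grossProfit, activeDays, inventoryValue) {
  if (inventoryValue <= 0 || activeDays <= 0) return 0;
  return (grossProfit * (365.0 / activeDays)) / inventoryValue;
}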
// Update time-based aggregates with financial metrics
// Update time-based aggregates with optimized query
await connection.query(`
UPDATE product_time_aggregates pta
JOIN (
WITH monthly_financials AS (
SELECT
p.pid,
YEAR(o.date) as year,
MONTH(o.date) as month,
p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as days_in_period
COUNT(DISTINCT DATE(o.date)) as active_days,
MIN(o.date) as period_start,
MAX(o.date) as period_end
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
) fin ON pta.pid = fin.pid
AND pta.year = fin.year
AND pta.month = fin.month
)
UPDATE product_time_aggregates pta
JOIN monthly_financials mf ON pta.pid = mf.pid
AND pta.year = mf.year
AND pta.month = mf.month
SET
pta.inventory_value = COALESCE(fin.inventory_value, 0),
pta.inventory_value = COALESCE(mf.inventory_value, 0),
pta.gmroi = CASE
WHEN COALESCE(fin.inventory_value, 0) > 0 AND fin.days_in_period > 0 THEN
(COALESCE(fin.gross_profit, 0) * (365.0 / fin.days_in_period)) / COALESCE(fin.inventory_value, 0)
WHEN COALESCE(mf.inventory_value, 0) > 0 AND mf.active_days > 0 THEN
(COALESCE(mf.gross_profit, 0) * (365.0 / mf.active_days)) / COALESCE(mf.inventory_value, 0)
ELSE 0
END
`);
@@ -12,418 +12,158 @@ function sanitizeValue(value) {
|
||||
async function calculateProductMetrics(startTime, totalProducts, processedCount = 0) {
|
||||
const connection = await getConnection();
|
||||
try {
|
||||
// Process in batches of 250
|
||||
const batchSize = 250;
|
||||
for (let offset = 0; offset < totalProducts; offset += batchSize) {
|
||||
const [products] = await connection.query('SELECT pid, vendor FROM products LIMIT ? OFFSET ?', [batchSize, offset])
|
||||
.catch(err => {
|
||||
logError(err, `Failed to fetch products batch at offset ${offset}`);
|
||||
throw err;
|
||||
});
|
||||
processedCount += products.length;
|
||||
// Skip flags are inherited from the parent scope
const SKIP_PRODUCT_BASE_METRICS = 0;
const SKIP_PRODUCT_TIME_AGGREGATES = 0;
|
||||
|
||||
// Update progress after each batch
|
||||
// Calculate base product metrics
|
||||
if (!SKIP_PRODUCT_BASE_METRICS) {
|
||||
outputProgress({
|
||||
status: 'running',
|
||||
operation: 'Processing products',
|
||||
operation: 'Calculating base product metrics',
|
||||
current: Math.floor(totalProducts * 0.2),
|
||||
total: totalProducts,
|
||||
elapsed: formatElapsedTime(startTime),
|
||||
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.2), totalProducts),
|
||||
rate: calculateRate(startTime, Math.floor(totalProducts * 0.2)),
|
||||
percentage: '20'
|
||||
});
|
||||
|
||||
// Calculate base metrics
|
||||
await connection.query(`
|
||||
UPDATE product_metrics pm
|
||||
JOIN (
|
||||
SELECT
|
||||
p.pid,
|
||||
p.cost_price * p.stock_quantity as inventory_value,
|
||||
SUM(o.quantity) as total_quantity,
|
||||
COUNT(DISTINCT o.order_number) as number_of_orders,
|
||||
SUM(o.quantity * o.price) as total_revenue,
|
||||
SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
|
||||
AVG(o.price) as avg_price,
|
||||
STDDEV(o.price) as price_std,
|
||||
MIN(o.date) as first_sale_date,
|
||||
MAX(o.date) as last_sale_date,
|
||||
COUNT(DISTINCT DATE(o.date)) as active_days
|
||||
FROM products p
|
||||
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
|
||||
GROUP BY p.pid
|
||||
) stats ON pm.pid = stats.pid
|
||||
SET
|
||||
pm.inventory_value = COALESCE(stats.inventory_value, 0),
|
||||
pm.avg_quantity_per_order = COALESCE(stats.total_quantity / NULLIF(stats.number_of_orders, 0), 0),
|
||||
pm.number_of_orders = COALESCE(stats.number_of_orders, 0),
|
||||
pm.total_revenue = COALESCE(stats.total_revenue, 0),
|
||||
pm.cost_of_goods_sold = COALESCE(stats.cost_of_goods_sold, 0),
|
||||
pm.gross_profit = COALESCE(stats.total_revenue - stats.cost_of_goods_sold, 0),
|
||||
pm.avg_margin_percent = CASE
|
||||
WHEN COALESCE(stats.total_revenue, 0) > 0
|
||||
THEN ((stats.total_revenue - stats.cost_of_goods_sold) / stats.total_revenue) * 100
|
||||
ELSE 0
|
||||
END,
|
||||
pm.first_sale_date = stats.first_sale_date,
|
||||
pm.last_sale_date = stats.last_sale_date,
|
||||
pm.gmroi = CASE
|
||||
WHEN COALESCE(stats.inventory_value, 0) > 0
|
||||
THEN (stats.total_revenue - stats.cost_of_goods_sold) / stats.inventory_value
|
||||
ELSE 0
|
||||
END,
|
||||
pm.last_calculated_at = NOW()
|
||||
`);
|
||||
|
||||
processedCount = Math.floor(totalProducts * 0.4);
|
||||
} else {
|
||||
console.log('Skipping base product metrics calculation');
|
||||
processedCount = Math.floor(totalProducts * 0.4);
|
||||
outputProgress({
|
||||
status: 'running',
|
||||
operation: 'Skipping base product metrics calculation',
|
||||
current: processedCount,
|
||||
total: totalProducts,
|
||||
elapsed: formatElapsedTime(startTime),
|
||||
remaining: estimateRemaining(startTime, processedCount, totalProducts),
|
||||
rate: calculateRate(startTime, processedCount),
|
||||
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
|
||||
percentage: '40'
|
||||
});
|
||||
}
|
||||
|
||||
// Calculate product time aggregates
|
||||
if (!SKIP_PRODUCT_TIME_AGGREGATES) {
|
||||
outputProgress({
|
||||
status: 'running',
|
||||
operation: 'Calculating product time aggregates',
|
||||
current: Math.floor(totalProducts * 0.4),
|
||||
total: totalProducts,
|
||||
elapsed: formatElapsedTime(startTime),
|
||||
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.4), totalProducts),
|
||||
rate: calculateRate(startTime, Math.floor(totalProducts * 0.4)),
|
||||
percentage: '40'
|
||||
});
|
||||
|
||||
// Process the batch
|
||||
const metricsUpdates = [];
|
||||
for (const product of products) {
|
||||
try {
|
||||
// Get configuration values for this product
|
||||
const [configs] = await connection.query(`
|
||||
WITH product_info AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
p.vendor,
|
||||
pc.cat_id as category_id
|
||||
FROM products p
|
||||
LEFT JOIN product_categories pc ON p.pid = pc.pid
|
||||
WHERE p.pid = ?
|
||||
),
|
||||
threshold_options AS (
|
||||
SELECT
|
||||
st.*,
|
||||
CASE
|
||||
WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1
|
||||
WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2
|
||||
WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3
|
||||
WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4
|
||||
ELSE 5
|
||||
END as priority
|
||||
FROM product_info pi
|
||||
CROSS JOIN stock_thresholds st
|
||||
WHERE (st.category_id = pi.category_id OR st.category_id IS NULL)
|
||||
AND (st.vendor = pi.vendor OR st.vendor IS NULL)
|
||||
),
|
||||
velocity_options AS (
|
||||
SELECT
|
||||
sv.*,
|
||||
CASE
|
||||
WHEN sv.category_id = pi.category_id AND sv.vendor = pi.vendor THEN 1
|
||||
WHEN sv.category_id = pi.category_id AND sv.vendor IS NULL THEN 2
|
||||
WHEN sv.category_id IS NULL AND sv.vendor = pi.vendor THEN 3
|
||||
WHEN sv.category_id IS NULL AND sv.vendor IS NULL THEN 4
|
||||
ELSE 5
|
||||
END as priority
|
||||
FROM product_info pi
|
||||
CROSS JOIN sales_velocity_config sv
|
||||
WHERE (sv.category_id = pi.category_id OR sv.category_id IS NULL)
|
||||
AND (sv.vendor = pi.vendor OR sv.vendor IS NULL)
|
||||
),
|
||||
safety_options AS (
|
||||
SELECT
|
||||
ss.*,
|
||||
CASE
|
||||
WHEN ss.category_id = pi.category_id AND ss.vendor = pi.vendor THEN 1
|
||||
WHEN ss.category_id = pi.category_id AND ss.vendor IS NULL THEN 2
|
||||
WHEN ss.category_id IS NULL AND ss.vendor = pi.vendor THEN 3
|
||||
WHEN ss.category_id IS NULL AND ss.vendor IS NULL THEN 4
|
||||
ELSE 5
|
||||
END as priority
|
||||
FROM product_info pi
|
||||
CROSS JOIN safety_stock_config ss
|
||||
WHERE (ss.category_id = pi.category_id OR ss.category_id IS NULL)
|
||||
AND (ss.vendor = pi.vendor OR ss.vendor IS NULL)
|
||||
)
|
||||
SELECT
|
||||
COALESCE(
|
||||
(SELECT critical_days
|
||||
FROM threshold_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
7
|
||||
) as critical_days,
|
||||
COALESCE(
|
||||
(SELECT reorder_days
|
||||
FROM threshold_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
14
|
||||
) as reorder_days,
|
||||
COALESCE(
|
||||
(SELECT overstock_days
|
||||
FROM threshold_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
90
|
||||
) as overstock_days,
|
||||
COALESCE(
|
||||
(SELECT low_stock_threshold
|
||||
FROM threshold_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
5
|
||||
) as low_stock_threshold,
|
||||
COALESCE(
|
||||
(SELECT daily_window_days
|
||||
FROM velocity_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
30
|
||||
) as daily_window_days,
|
||||
COALESCE(
|
||||
(SELECT weekly_window_days
|
||||
FROM velocity_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
7
|
||||
) as weekly_window_days,
|
||||
COALESCE(
|
||||
(SELECT monthly_window_days
|
||||
FROM velocity_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
90
|
||||
) as monthly_window_days,
|
||||
COALESCE(
|
||||
(SELECT coverage_days
|
||||
FROM safety_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
14
|
||||
) as safety_stock_days,
|
||||
COALESCE(
|
||||
(SELECT service_level
|
||||
FROM safety_options
|
||||
ORDER BY priority LIMIT 1),
|
||||
95.0
|
||||
) as service_level
|
||||
`, [product.pid]);
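The threshold_options / velocity_options / safety_options CTEs above all rank candidate config rows by specificity: a category+vendor match wins, then category-only, then vendor-only, then the global row, and each field finally falls back to a hard-coded default (7/14/90 days, etc.) via COALESCE. The same resolution order, sketched as a standalone helper (illustrative only; the SQL falls back per field, simplified here to a whole-row fallback):

// Sketch of the specificity-first config lookup mirrored by the SQL priorities above.
function pickConfig(rows, categoryId, vendor, fallback) {
  const priority = (r) =>
    r.category_id === categoryId && r.vendor === vendor ? 1 :
    r.category_id === categoryId && r.vendor == null    ? 2 :
    r.category_id == null && r.vendor === vendor        ? 3 :
    r.category_id == null && r.vendor == null            ? 4 : 5;
  const best = rows.slice().sort((a, b) => priority(a) - priority(b))[0];
  return best ?? fallback; // e.g. { critical_days: 7, reorder_days: 14, overstock_days: 90 }
}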
|
||||
// Calculate time-based aggregates
|
||||
await connection.query(`
|
||||
INSERT INTO product_time_aggregates (
|
||||
pid,
|
||||
year,
|
||||
month,
|
||||
total_quantity_sold,
|
||||
total_revenue,
|
||||
total_cost,
|
||||
order_count,
|
||||
avg_price,
|
||||
profit_margin,
|
||||
inventory_value,
|
||||
gmroi
|
||||
)
|
||||
SELECT
|
||||
p.pid,
|
||||
YEAR(o.date) as year,
|
||||
MONTH(o.date) as month,
|
||||
SUM(o.quantity) as total_quantity_sold,
|
||||
SUM(o.quantity * o.price) as total_revenue,
|
||||
SUM(o.quantity * p.cost_price) as total_cost,
|
||||
COUNT(DISTINCT o.order_number) as order_count,
|
||||
AVG(o.price) as avg_price,
|
||||
CASE
|
||||
WHEN SUM(o.quantity * o.price) > 0
|
||||
THEN ((SUM(o.quantity * o.price) - SUM(o.quantity * p.cost_price)) / SUM(o.quantity * o.price)) * 100
|
||||
ELSE 0
|
||||
END as profit_margin,
|
||||
p.cost_price * p.stock_quantity as inventory_value,
|
||||
CASE
|
||||
WHEN p.cost_price * p.stock_quantity > 0
|
||||
THEN (SUM(o.quantity * (o.price - p.cost_price))) / (p.cost_price * p.stock_quantity)
|
||||
ELSE 0
|
||||
END as gmroi
|
||||
FROM products p
|
||||
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
|
||||
WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
|
||||
GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
total_quantity_sold = VALUES(total_quantity_sold),
|
||||
total_revenue = VALUES(total_revenue),
|
||||
total_cost = VALUES(total_cost),
|
||||
order_count = VALUES(order_count),
|
||||
avg_price = VALUES(avg_price),
|
||||
profit_margin = VALUES(profit_margin),
|
||||
inventory_value = VALUES(inventory_value),
|
||||
gmroi = VALUES(gmroi)
|
||||
`);
|
||||
|
||||
const config = configs[0];
|
||||
|
||||
// Calculate sales metrics
|
||||
const [salesMetrics] = await connection.query(`
|
||||
WITH sales_summary AS (
|
||||
SELECT
|
||||
SUM(o.quantity) as total_quantity_sold,
|
||||
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
|
||||
SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
|
||||
MAX(o.date) as last_sale_date,
|
||||
MIN(o.date) as first_sale_date,
|
||||
COUNT(DISTINCT o.order_number) as number_of_orders,
|
||||
AVG(o.quantity) as avg_quantity_per_order,
|
||||
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
|
||||
CASE
|
||||
WHEN SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) IS NULL THEN 0
|
||||
ELSE SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END)
|
||||
END as rolling_weekly_avg,
|
||||
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_month_qty
|
||||
FROM orders o
|
||||
JOIN products p ON o.pid = p.pid
|
||||
WHERE o.canceled = 0 AND o.pid = ?
|
||||
GROUP BY o.pid
|
||||
)
|
||||
SELECT
|
||||
total_quantity_sold,
|
||||
total_revenue,
|
||||
total_cost,
|
||||
last_sale_date,
|
||||
first_sale_date,
|
||||
number_of_orders,
|
||||
avg_quantity_per_order,
|
||||
last_30_days_qty / ? as rolling_daily_avg,
|
||||
rolling_weekly_avg / ? as rolling_weekly_avg,
|
||||
last_month_qty / ? as rolling_monthly_avg
|
||||
FROM sales_summary
|
||||
`, [
|
||||
config.daily_window_days,
|
||||
config.weekly_window_days,
|
||||
config.weekly_window_days,
|
||||
config.monthly_window_days,
|
||||
product.pid,
|
||||
config.daily_window_days,
|
||||
config.weekly_window_days,
|
||||
config.monthly_window_days
|
||||
]);
|
||||
|
||||
// Calculate purchase metrics
|
||||
const [purchaseMetrics] = await connection.query(`
|
||||
WITH recent_orders AS (
|
||||
SELECT
|
||||
date,
|
||||
received_date,
|
||||
received,
|
||||
cost_price,
|
||||
DATEDIFF(received_date, date) as lead_time_days,
|
||||
ROW_NUMBER() OVER (ORDER BY date DESC) as order_rank
|
||||
FROM purchase_orders
|
||||
WHERE receiving_status >= 30 -- Partial or fully received
|
||||
AND pid = ?
|
||||
AND received > 0
|
||||
AND received_date IS NOT NULL
|
||||
),
|
||||
lead_time_orders AS (
|
||||
SELECT *
|
||||
FROM recent_orders
|
||||
WHERE order_rank <= 5
|
||||
OR date >= DATE_SUB(CURDATE(), INTERVAL 90 DAY)
|
||||
)
|
||||
SELECT
|
||||
SUM(CASE WHEN received >= 0 THEN received ELSE 0 END) as total_quantity_purchased,
|
||||
SUM(CASE WHEN received >= 0 THEN cost_price * received ELSE 0 END) as total_cost,
|
||||
MAX(date) as last_purchase_date,
|
||||
MIN(received_date) as first_received_date,
|
||||
MAX(received_date) as last_received_date,
|
||||
AVG(lead_time_days) as avg_lead_time_days
|
||||
FROM lead_time_orders
|
||||
`, [product.pid]);
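avg_lead_time_days in the query above is averaged over the most recently received purchase orders (the last 5, or anything within the last 90 days), using DATEDIFF between the order date and the received date. Roughly the same logic in JavaScript terms (illustrative only, not the commit's code):

// Sketch: average lead time over recently received purchase orders.
const MS_PER_DAY = 86400000;
function avgLeadTimeDays(purchaseOrders, now = new Date()) {
  const received = purchaseOrders
    .filter(po => po.received > 0 && po.receivedDate)                // only received POs
    .sort((a, b) => b.orderDate - a.orderDate);                      // newest first
  const recent = received.filter((po, i) =>
    i < 5 || (now - po.orderDate) / MS_PER_DAY <= 90);               // last 5 POs or last 90 days
  if (recent.length === 0) return null;
  const total = recent.reduce(
    (sum, po) => sum + (po.receivedDate - po.orderDate) / MS_PER_DAY, 0);
  return total / recent.length;
}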
|
||||
|
||||
// Get stock info
|
||||
const [stockInfo] = await connection.query(`
|
||||
SELECT
|
||||
p.stock_quantity,
|
||||
p.cost_price,
|
||||
p.created_at,
|
||||
p.replenishable,
|
||||
p.moq,
|
||||
p.notions_inv_count,
|
||||
p.date_last_sold,
|
||||
p.total_sold,
|
||||
DATEDIFF(CURDATE(), MIN(po.received_date)) as days_since_first_stock,
|
||||
DATEDIFF(CURDATE(), COALESCE(p.date_last_sold, CURDATE())) as days_since_last_sale,
|
||||
CASE
|
||||
WHEN EXISTS (
|
||||
SELECT 1 FROM orders o
|
||||
WHERE o.pid = p.pid
|
||||
AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
|
||||
AND o.canceled = false
|
||||
AND (SELECT SUM(quantity) FROM orders o2
|
||||
WHERE o2.pid = p.pid
|
||||
AND o2.date >= o.date
|
||||
AND o2.canceled = false) = 0
|
||||
) THEN true
|
||||
ELSE false
|
||||
END as had_recent_stockout
|
||||
FROM products p
|
||||
LEFT JOIN purchase_orders po ON p.pid = po.pid
|
||||
AND po.receiving_status >= 30 -- Partial or fully received
|
||||
AND po.received > 0
|
||||
WHERE p.pid = ?
|
||||
GROUP BY p.pid
|
||||
`, [product.pid]);
|
||||
|
||||
// Calculate metrics
|
||||
const salesData = salesMetrics[0] || {};
|
||||
const purchaseData = purchaseMetrics[0] || {};
|
||||
const stockData = stockInfo[0] || {};
|
||||
|
||||
// Sales velocity metrics
|
||||
const daily_sales_avg = sanitizeValue(salesData.rolling_daily_avg) || 0;
|
||||
const weekly_sales_avg = sanitizeValue(salesData.rolling_weekly_avg) || 0;
|
||||
const monthly_sales_avg = sanitizeValue(salesData.rolling_monthly_avg) || 0;
|
||||
|
||||
// Stock metrics
|
||||
const stock_quantity = sanitizeValue(stockData.stock_quantity) || 0;
|
||||
const days_of_inventory = daily_sales_avg > 0 ? Math.floor(stock_quantity / daily_sales_avg) : 999;
|
||||
const weeks_of_inventory = Math.floor(days_of_inventory / 7);
|
||||
|
||||
// Calculate stock status
|
||||
const stock_status = calculateStockStatus(
|
||||
stock_quantity,
|
||||
config,
|
||||
daily_sales_avg,
|
||||
weekly_sales_avg,
|
||||
monthly_sales_avg
|
||||
);
|
||||
|
||||
// Calculate reorder quantities
|
||||
const reorder_quantities = calculateReorderQuantities(
|
||||
stock_quantity,
|
||||
stock_status,
|
||||
daily_sales_avg,
|
||||
sanitizeValue(purchaseData.avg_lead_time_days) || 0,
|
||||
config
|
||||
);
|
||||
|
||||
// Financial metrics
|
||||
const cost_price = sanitizeValue(stockData.cost_price) || 0;
|
||||
const inventory_value = stock_quantity * cost_price;
|
||||
const total_revenue = sanitizeValue(salesData.total_revenue) || 0;
|
||||
const total_cost = sanitizeValue(salesData.total_cost) || 0;
|
||||
const gross_profit = total_revenue - total_cost;
|
||||
const avg_margin_percent = total_revenue > 0 ? ((gross_profit / total_revenue) * 100) : 0;
|
||||
const gmroi = inventory_value > 0 ? (gross_profit / inventory_value) : 0;
|
||||
|
||||
// Add to batch update with sanitized values
|
||||
metricsUpdates.push([
|
||||
product.pid,
|
||||
sanitizeValue(daily_sales_avg),
|
||||
sanitizeValue(weekly_sales_avg),
|
||||
sanitizeValue(monthly_sales_avg),
|
||||
sanitizeValue(salesData.avg_quantity_per_order),
|
||||
sanitizeValue(salesData.number_of_orders),
|
||||
salesData.first_sale_date || null,
|
||||
salesData.last_sale_date || null,
|
||||
sanitizeValue(days_of_inventory),
|
||||
sanitizeValue(weeks_of_inventory),
|
||||
sanitizeValue(reorder_quantities.reorder_point),
|
||||
sanitizeValue(reorder_quantities.safety_stock),
|
||||
sanitizeValue(reorder_quantities.reorder_qty),
|
||||
sanitizeValue(reorder_quantities.overstocked_amt),
|
||||
sanitizeValue(avg_margin_percent),
|
||||
sanitizeValue(total_revenue),
|
||||
sanitizeValue(inventory_value),
|
||||
sanitizeValue(total_cost),
|
||||
sanitizeValue(gross_profit),
|
||||
sanitizeValue(gmroi),
|
||||
sanitizeValue(purchaseData.avg_lead_time_days),
|
||||
purchaseData.last_purchase_date || null,
|
||||
purchaseData.first_received_date || null,
|
||||
purchaseData.last_received_date || null,
|
||||
null, // abc_class - calculated separately
|
||||
stock_status,
|
||||
sanitizeValue(0), // turnover_rate - calculated separately
|
||||
sanitizeValue(purchaseData.avg_lead_time_days),
|
||||
sanitizeValue(config.target_days),
|
||||
stock_status === 'Critical' ? 'Warning' : 'Normal',
|
||||
null, // forecast_accuracy
|
||||
null, // forecast_bias
|
||||
null // last_forecast_date
|
||||
]);
|
||||
} catch (err) {
|
||||
logError(err, `Failed processing product ${product.pid}`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Batch update metrics
|
||||
if (metricsUpdates.length > 0) {
|
||||
try {
|
||||
await connection.query(`
|
||||
INSERT INTO product_metrics (
|
||||
pid,
|
||||
daily_sales_avg,
|
||||
weekly_sales_avg,
|
||||
monthly_sales_avg,
|
||||
avg_quantity_per_order,
|
||||
number_of_orders,
|
||||
first_sale_date,
|
||||
last_sale_date,
|
||||
days_of_inventory,
|
||||
weeks_of_inventory,
|
||||
reorder_point,
|
||||
safety_stock,
|
||||
reorder_qty,
|
||||
overstocked_amt,
|
||||
avg_margin_percent,
|
||||
total_revenue,
|
||||
inventory_value,
|
||||
cost_of_goods_sold,
|
||||
gross_profit,
|
||||
gmroi,
|
||||
avg_lead_time_days,
|
||||
last_purchase_date,
|
||||
first_received_date,
|
||||
last_received_date,
|
||||
abc_class,
|
||||
stock_status,
|
||||
turnover_rate,
|
||||
current_lead_time,
|
||||
target_lead_time,
|
||||
lead_time_status,
|
||||
forecast_accuracy,
|
||||
forecast_bias,
|
||||
last_forecast_date
|
||||
)
|
||||
VALUES ?
|
||||
ON DUPLICATE KEY UPDATE
|
||||
daily_sales_avg = VALUES(daily_sales_avg),
|
||||
weekly_sales_avg = VALUES(weekly_sales_avg),
|
||||
monthly_sales_avg = VALUES(monthly_sales_avg),
|
||||
avg_quantity_per_order = VALUES(avg_quantity_per_order),
|
||||
number_of_orders = VALUES(number_of_orders),
|
||||
first_sale_date = VALUES(first_sale_date),
|
||||
last_sale_date = VALUES(last_sale_date),
|
||||
days_of_inventory = VALUES(days_of_inventory),
|
||||
weeks_of_inventory = VALUES(weeks_of_inventory),
|
||||
reorder_point = VALUES(reorder_point),
|
||||
safety_stock = VALUES(safety_stock),
|
||||
reorder_qty = VALUES(reorder_qty),
|
||||
overstocked_amt = VALUES(overstocked_amt),
|
||||
avg_margin_percent = VALUES(avg_margin_percent),
|
||||
total_revenue = VALUES(total_revenue),
|
||||
inventory_value = VALUES(inventory_value),
|
||||
cost_of_goods_sold = VALUES(cost_of_goods_sold),
|
||||
gross_profit = VALUES(gross_profit),
|
||||
gmroi = VALUES(gmroi),
|
||||
avg_lead_time_days = VALUES(avg_lead_time_days),
|
||||
last_purchase_date = VALUES(last_purchase_date),
|
||||
first_received_date = VALUES(first_received_date),
|
||||
last_received_date = VALUES(last_received_date),
|
||||
stock_status = VALUES(stock_status),
|
||||
turnover_rate = VALUES(turnover_rate),
|
||||
current_lead_time = VALUES(current_lead_time),
|
||||
target_lead_time = VALUES(target_lead_time),
|
||||
lead_time_status = VALUES(lead_time_status),
|
||||
last_calculated_at = CURRENT_TIMESTAMP
|
||||
`, [metricsUpdates]);
|
||||
} catch (err) {
|
||||
logError(err, 'Failed to update metrics batch');
|
||||
throw err;
|
||||
}
|
||||
}
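The batch write above relies on the mysql/mysql2 convention that a nested array bound to `VALUES ?` expands into a multi-row insert, so one round trip covers the whole 250-product batch. A minimal sketch of that pattern (columns trimmed purely for illustration; runs inside an async function with an open `connection`):

// Sketch: a nested array bound to "VALUES ?" becomes multi-row VALUES in mysql2's query().
const rows = [
  [101, 2.5, 17.5, 75.0],   // pid, daily_sales_avg, weekly_sales_avg, monthly_sales_avg
  [102, 0.4, 2.8, 12.0],
];
await connection.query(
  'INSERT INTO product_metrics (pid, daily_sales_avg, weekly_sales_avg, monthly_sales_avg) VALUES ? ' +
  'ON DUPLICATE KEY UPDATE daily_sales_avg = VALUES(daily_sales_avg)',
  [rows]
);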
|
||||
processedCount = Math.floor(totalProducts * 0.6);
|
||||
} else {
|
||||
console.log('Skipping product time aggregates calculation');
|
||||
processedCount = Math.floor(totalProducts * 0.6);
|
||||
outputProgress({
|
||||
status: 'running',
|
||||
operation: 'Skipping product time aggregates calculation',
|
||||
current: processedCount,
|
||||
total: totalProducts,
|
||||
elapsed: formatElapsedTime(startTime),
|
||||
remaining: estimateRemaining(startTime, processedCount, totalProducts),
|
||||
rate: calculateRate(startTime, processedCount),
|
||||
percentage: '60'
|
||||
});
|
||||
}
|
||||
|
||||
return processedCount;
|
||||
|
||||
@@ -15,6 +15,59 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
|
||||
percentage: '98'
|
||||
});
|
||||
|
||||
// First, create a temporary table for forecast dates
|
||||
await connection.query(`
|
||||
CREATE TEMPORARY TABLE IF NOT EXISTS temp_forecast_dates (
|
||||
forecast_date DATE,
|
||||
day_of_week INT,
|
||||
month INT,
|
||||
PRIMARY KEY (forecast_date)
|
||||
)
|
||||
`);
|
||||
|
||||
await connection.query(`
|
||||
INSERT INTO temp_forecast_dates
|
||||
SELECT
|
||||
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date,
|
||||
DAYOFWEEK(DATE_ADD(CURRENT_DATE, INTERVAL n DAY)) as day_of_week,
|
||||
MONTH(DATE_ADD(CURRENT_DATE, INTERVAL n DAY)) as month
|
||||
FROM (
|
||||
SELECT a.N + b.N * 10 as n
|
||||
FROM
|
||||
(SELECT 0 as N UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
|
||||
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) a,
|
||||
(SELECT 0 as N UNION SELECT 1 UNION SELECT 2) b
|
||||
ORDER BY n
|
||||
LIMIT 31
|
||||
) numbers
|
||||
`);
|
||||
|
||||
// Create temporary table for daily sales stats
|
||||
await connection.query(`
|
||||
CREATE TEMPORARY TABLE IF NOT EXISTS temp_daily_sales AS
|
||||
SELECT
|
||||
o.pid,
|
||||
DAYOFWEEK(o.date) as day_of_week,
|
||||
SUM(o.quantity) as daily_quantity,
|
||||
SUM(o.price * o.quantity) as daily_revenue,
|
||||
COUNT(DISTINCT DATE(o.date)) as day_count
|
||||
FROM orders o
|
||||
WHERE o.canceled = false
|
||||
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
|
||||
GROUP BY o.pid, DAYOFWEEK(o.date)
|
||||
`);
|
||||
|
||||
// Create temporary table for product stats
|
||||
await connection.query(`
|
||||
CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_stats AS
|
||||
SELECT
|
||||
pid,
|
||||
AVG(daily_revenue) as overall_avg_revenue,
|
||||
SUM(day_count) as total_days
|
||||
FROM temp_daily_sales
|
||||
GROUP BY pid
|
||||
`);

// Calculate product-level forecasts
await connection.query(`
INSERT INTO sales_forecasts (
@@ -25,92 +78,37 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
confidence_level,
last_calculated_at
)
WITH daily_sales AS (
SELECT
o.pid,
DATE(o.date) as sale_date,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue
FROM orders o
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY o.pid, DATE(o.date)
),
forecast_dates AS (
SELECT
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date
FROM (
SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION
SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION
SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION
SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION
SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION
SELECT 30
) numbers
),
product_stats AS (
SELECT
ds.pid,
AVG(ds.daily_quantity) as avg_daily_quantity,
STDDEV_SAMP(ds.daily_quantity) as std_daily_quantity,
AVG(ds.daily_revenue) as avg_daily_revenue,
STDDEV_SAMP(ds.daily_revenue) as std_daily_revenue,
COUNT(*) as data_points,
-- Calculate day-of-week averages
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 1 THEN ds.daily_revenue END) as sunday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 2 THEN ds.daily_revenue END) as monday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 3 THEN ds.daily_revenue END) as tuesday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 4 THEN ds.daily_revenue END) as wednesday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 5 THEN ds.daily_revenue END) as thursday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 6 THEN ds.daily_revenue END) as friday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 7 THEN ds.daily_revenue END) as saturday_avg
FROM daily_sales ds
GROUP BY ds.pid
)
SELECT
ps.pid,
ds.pid,
fd.forecast_date,
GREATEST(0,
ps.avg_daily_quantity *
(1 + COALESCE(
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
0
))
AVG(ds.daily_quantity) *
(1 + COALESCE(sf.seasonality_factor, 0))
) as forecast_units,
GREATEST(0,
CASE DAYOFWEEK(fd.forecast_date)
WHEN 1 THEN COALESCE(ps.sunday_avg, ps.avg_daily_revenue)
WHEN 2 THEN COALESCE(ps.monday_avg, ps.avg_daily_revenue)
WHEN 3 THEN COALESCE(ps.tuesday_avg, ps.avg_daily_revenue)
WHEN 4 THEN COALESCE(ps.wednesday_avg, ps.avg_daily_revenue)
WHEN 5 THEN COALESCE(ps.thursday_avg, ps.avg_daily_revenue)
WHEN 6 THEN COALESCE(ps.friday_avg, ps.avg_daily_revenue)
WHEN 7 THEN COALESCE(ps.saturday_avg, ps.avg_daily_revenue)
END *
(1 + COALESCE(
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
COALESCE(
CASE
WHEN SUM(ds.day_count) >= 4 THEN AVG(ds.daily_revenue)
ELSE ps.overall_avg_revenue
END *
(1 + COALESCE(sf.seasonality_factor, 0)) *
(0.95 + (RAND() * 0.1)),
0
)) *
-- Add some randomness within a small range (±5%)
(0.95 + (RAND() * 0.1))
)
) as forecast_revenue,
CASE
WHEN ps.data_points >= 60 THEN 90
WHEN ps.data_points >= 30 THEN 80
WHEN ps.data_points >= 14 THEN 70
WHEN ps.total_days >= 60 THEN 90
WHEN ps.total_days >= 30 THEN 80
WHEN ps.total_days >= 14 THEN 70
ELSE 60
END as confidence_level,
NOW() as last_calculated_at
FROM product_stats ps
CROSS JOIN forecast_dates fd
WHERE ps.avg_daily_quantity > 0
FROM temp_daily_sales ds
JOIN temp_product_stats ps ON ds.pid = ps.pid
CROSS JOIN temp_forecast_dates fd
LEFT JOIN sales_seasonality sf ON fd.month = sf.month
GROUP BY ds.pid, fd.forecast_date, ps.overall_avg_revenue, ps.total_days, sf.seasonality_factor
HAVING AVG(ds.daily_quantity) > 0
ON DUPLICATE KEY UPDATE
forecast_units = VALUES(forecast_units),
forecast_revenue = VALUES(forecast_revenue),
@@ -118,6 +116,32 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
last_calculated_at = NOW()
`);
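The revenue forecast above is built from an average daily revenue (falling back to the product's overall 90-day average when fewer than 4 days of data are available), a monthly seasonality factor, and a ±5% random jitter. A rough JavaScript rendering of that arithmetic, for illustration only; the names are invented and the SQL above remains the source of truth:

// dayOfWeekAvg / overallAvg come from the 90-day window; seasonalityFactor
// defaults to 0 when sales_seasonality has no row for the forecast month.
function forecastRevenueForDay(dayOfWeekAvg, overallAvg, dayCount, seasonalityFactor = 0) {
  const base = dayCount >= 4 ? dayOfWeekAvg : overallAvg; // thin data falls back to the broader mean
  const jitter = 0.95 + Math.random() * 0.1;              // the (0.95 + RAND() * 0.1) term
  return Math.max(0, base * (1 + seasonalityFactor) * jitter); // GREATEST(0, ...)
}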

// Create temporary table for category stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_sales AS
SELECT
pc.cat_id,
DAYOFWEEK(o.date) as day_of_week,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue,
COUNT(DISTINCT DATE(o.date)) as day_count
FROM orders o
JOIN product_categories pc ON o.pid = pc.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY pc.cat_id, DAYOFWEEK(o.date)
`);

await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_stats AS
SELECT
cat_id,
AVG(daily_revenue) as overall_avg_revenue,
SUM(day_count) as total_days
FROM temp_category_sales
GROUP BY cat_id
`);

// Calculate category-level forecasts
await connection.query(`
INSERT INTO category_forecasts (
@@ -128,93 +152,37 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
confidence_level,
last_calculated_at
)
WITH category_daily_sales AS (
SELECT
pc.cat_id as category_id,
DATE(o.date) as sale_date,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue
FROM orders o
JOIN product_categories pc ON o.pid = pc.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY pc.cat_id, DATE(o.date)
),
forecast_dates AS (
SELECT
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date
FROM (
SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION
SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION
SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION
SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION
SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION
SELECT 30
) numbers
),
category_stats AS (
SELECT
cds.category_id,
AVG(cds.daily_quantity) as avg_daily_quantity,
STDDEV_SAMP(cds.daily_quantity) as std_daily_quantity,
AVG(cds.daily_revenue) as avg_daily_revenue,
STDDEV_SAMP(cds.daily_revenue) as std_daily_revenue,
COUNT(*) as data_points,
-- Calculate day-of-week averages
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 1 THEN cds.daily_revenue END) as sunday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 2 THEN cds.daily_revenue END) as monday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 3 THEN cds.daily_revenue END) as tuesday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 4 THEN cds.daily_revenue END) as wednesday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 5 THEN cds.daily_revenue END) as thursday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 6 THEN cds.daily_revenue END) as friday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 7 THEN cds.daily_revenue END) as saturday_avg
FROM category_daily_sales cds
GROUP BY cds.category_id
)
SELECT
cs.category_id,
cs.cat_id as category_id,
fd.forecast_date,
GREATEST(0,
cs.avg_daily_quantity *
(1 + COALESCE(
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
0
))
AVG(cs.daily_quantity) *
(1 + COALESCE(sf.seasonality_factor, 0))
) as forecast_units,
GREATEST(0,
CASE DAYOFWEEK(fd.forecast_date)
WHEN 1 THEN COALESCE(cs.sunday_avg, cs.avg_daily_revenue)
WHEN 2 THEN COALESCE(cs.monday_avg, cs.avg_daily_revenue)
WHEN 3 THEN COALESCE(cs.tuesday_avg, cs.avg_daily_revenue)
WHEN 4 THEN COALESCE(cs.wednesday_avg, cs.avg_daily_revenue)
WHEN 5 THEN COALESCE(cs.thursday_avg, cs.avg_daily_revenue)
WHEN 6 THEN COALESCE(cs.friday_avg, cs.avg_daily_revenue)
WHEN 7 THEN COALESCE(cs.saturday_avg, cs.avg_daily_revenue)
END *
(1 + COALESCE(
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
COALESCE(
CASE
WHEN SUM(cs.day_count) >= 4 THEN AVG(cs.daily_revenue)
ELSE ct.overall_avg_revenue
END *
(1 + COALESCE(sf.seasonality_factor, 0)) *
(0.95 + (RAND() * 0.1)),
0
)) *
-- Add some randomness within a small range (±5%)
(0.95 + (RAND() * 0.1))
)
) as forecast_revenue,
CASE
WHEN cs.data_points >= 60 THEN 90
WHEN cs.data_points >= 30 THEN 80
WHEN cs.data_points >= 14 THEN 70
WHEN ct.total_days >= 60 THEN 90
WHEN ct.total_days >= 30 THEN 80
WHEN ct.total_days >= 14 THEN 70
ELSE 60
END as confidence_level,
NOW() as last_calculated_at
FROM category_stats cs
CROSS JOIN forecast_dates fd
WHERE cs.avg_daily_quantity > 0
FROM temp_category_sales cs
JOIN temp_category_stats ct ON cs.cat_id = ct.cat_id
CROSS JOIN temp_forecast_dates fd
LEFT JOIN sales_seasonality sf ON fd.month = sf.month
GROUP BY cs.cat_id, fd.forecast_date, ct.overall_avg_revenue, ct.total_days, sf.seasonality_factor
HAVING AVG(cs.daily_quantity) > 0
ON DUPLICATE KEY UPDATE
forecast_units = VALUES(forecast_units),
forecast_revenue = VALUES(forecast_revenue),
@@ -222,6 +190,15 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
last_calculated_at = NOW()
`);
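Both the product and the category forecast share the confidence tiers above: more observed sales days translate into a higher confidence_level. Expressed as a tiny helper, purely to document the mapping:

// Mirrors the CASE ... END as confidence_level expression above.
function confidenceLevel(totalDays) {
  if (totalDays >= 60) return 90;
  if (totalDays >= 30) return 80;
  if (totalDays >= 14) return 70;
  return 60; // default tier when fewer than 14 days of sales are available
}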

// Clean up temporary tables
await connection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_forecast_dates;
DROP TEMPORARY TABLE IF EXISTS temp_daily_sales;
DROP TEMPORARY TABLE IF EXISTS temp_product_stats;
DROP TEMPORARY TABLE IF EXISTS temp_category_sales;
DROP TEMPORARY TABLE IF EXISTS temp_category_stats;
`);

return Math.floor(totalProducts * 1.0);
} finally {
connection.release();

@@ -1,8 +1,20 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./utils/progress');
const { getConnection } = require('./utils/db');

async function calculateTimeAggregates(startTime, totalProducts, processedCount) {
const connection = await getConnection();
try {
outputProgress({
status: 'running',
operation: 'Calculating time aggregates',
current: Math.floor(totalProducts * 0.95),
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.95), totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.95)),
percentage: '95'
});
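The progress payload pins this step at 95% because time aggregates run near the end of the pipeline; the remaining-time and rate fields are derived from how many items have been processed so far. The real helpers live in ./utils/progress; the sketch below only illustrates the kind of calculation estimateRemaining is assumed to perform (names and units are assumptions, not the actual implementation):

// Assumes startTime is a millisecond timestamp (e.g. Date.now() captured at pipeline start).
function estimateRemainingSeconds(startTime, current, total) {
  const elapsedSeconds = (Date.now() - startTime) / 1000;
  if (current <= 0 || elapsedSeconds <= 0) return null; // no rate information yet
  const itemsPerSecond = current / elapsedSeconds;
  return Math.round((total - current) / itemsPerSecond);
}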

// Initial insert of time-based aggregates
await connection.query(`
INSERT INTO product_time_aggregates (
@@ -47,7 +59,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
SUM(received) as stock_received,
SUM(ordered) as stock_ordered
FROM purchase_orders
WHERE receiving_status >= 30 -- Partial or fully received
WHERE status = 50
GROUP BY pid, YEAR(date), MONTH(date)
)
SELECT
@@ -126,7 +138,9 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)

return Math.floor(totalProducts * 0.65);
} finally {
connection.release();
if (connection) {
connection.release();
}
}
}

@@ -6,7 +6,7 @@ async function calculateVendorMetrics(startTime, totalProducts, processedCount)
try {
outputProgress({
status: 'running',
operation: 'Calculating vendor metrics',
operation: 'Ensuring vendors exist in vendor_details',
current: Math.floor(totalProducts * 0.7),
total: totalProducts,
elapsed: formatElapsedTime(startTime),
@@ -15,160 +15,125 @@ async function calculateVendorMetrics(startTime, totalProducts, processedCount)
percentage: '70'
});

// First, ensure all vendors exist in vendor_details
// First ensure all vendors exist in vendor_details
await connection.query(`
INSERT IGNORE INTO vendor_details (vendor, status)
SELECT DISTINCT vendor, 'active' as status
INSERT IGNORE INTO vendor_details (vendor, status, created_at, updated_at)
SELECT DISTINCT
vendor,
'active' as status,
NOW() as created_at,
NOW() as updated_at
FROM products
WHERE vendor IS NOT NULL
AND vendor NOT IN (SELECT vendor FROM vendor_details)
`);
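The INSERT IGNORE and the NOT IN subquery are doing overlapping work: IGNORE already skips rows that would violate a unique key on vendor_details.vendor, assuming such a key exists. A quick, illustrative check of that assumption, using the same connection API as above:

// Hypothetical sanity check: is vendor covered by a unique or primary key?
const [vendorKeys] = await connection.query(
  "SHOW INDEX FROM vendor_details WHERE Column_name = 'vendor' AND Non_unique = 0"
);
if (vendorKeys.length === 0) {
  console.warn('vendor_details.vendor has no unique index; INSERT IGNORE will not deduplicate');
}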

// Calculate vendor performance metrics
outputProgress({
status: 'running',
operation: 'Calculating vendor metrics',
current: Math.floor(totalProducts * 0.8),
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.8), totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.8)),
percentage: '80'
});

// Now calculate vendor metrics
await connection.query(`
INSERT INTO vendor_metrics (
vendor,
total_revenue,
total_orders,
total_late_orders,
avg_lead_time_days,
on_time_delivery_rate,
order_fill_rate,
total_orders,
total_late_orders,
total_purchase_value,
avg_order_value,
active_products,
total_products,
total_revenue,
avg_margin_percent,
status
status,
last_calculated_at
)
WITH vendor_orders AS (
WITH vendor_sales AS (
SELECT
po.vendor,
AVG(DATEDIFF(po.received_date, po.date)) as avg_lead_time_days,
COUNT(*) as total_orders,
COUNT(CASE WHEN po.received_date > po.expected_date THEN 1 END) as total_late_orders,
SUM(po.cost_price * po.ordered) as total_purchase_value,
AVG(po.cost_price * po.ordered) as avg_order_value,
CASE
WHEN COUNT(*) > 0 THEN
(COUNT(CASE WHEN po.received = po.ordered THEN 1 END) * 100.0) / COUNT(*)
ELSE 0
END as order_fill_rate
FROM purchase_orders po
WHERE po.receiving_status >= 30 -- Partial or fully received
GROUP BY po.vendor
p.vendor,
SUM(o.quantity * o.price) as total_revenue,
COUNT(DISTINCT o.id) as total_orders,
COUNT(DISTINCT p.pid) as active_products
FROM products p
JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY p.vendor
),
vendor_po AS (
SELECT
p.vendor,
COUNT(DISTINCT CASE WHEN po.receiving_status = 40 THEN po.id END) as received_orders,
COUNT(DISTINCT po.id) as total_orders,
AVG(CASE
WHEN po.receiving_status = 40
THEN DATEDIFF(po.received_date, po.date)
END) as avg_lead_time_days
FROM products p
JOIN purchase_orders po ON p.pid = po.pid
WHERE po.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY p.vendor
),
vendor_products AS (
SELECT
p.vendor,
COUNT(DISTINCT p.pid) as total_products,
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
SUM(o.price * o.quantity) as total_revenue,
CASE
WHEN SUM(o.price * o.quantity) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin_percent
FROM products p
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
GROUP BY p.vendor
vendor,
COUNT(DISTINCT pid) as total_products
FROM products
GROUP BY vendor
)
SELECT
vd.vendor,
COALESCE(vo.avg_lead_time_days, 0) as avg_lead_time_days,
vs.vendor,
COALESCE(vs.total_revenue, 0) as total_revenue,
COALESCE(vp.total_orders, 0) as total_orders,
COALESCE(vp.total_orders - vp.received_orders, 0) as total_late_orders,
COALESCE(vp.avg_lead_time_days, 0) as avg_lead_time_days,
CASE
WHEN COALESCE(vo.total_orders, 0) > 0 THEN
((COALESCE(vo.total_orders, 0) - COALESCE(vo.total_late_orders, 0)) * 100.0) / COALESCE(vo.total_orders, 1)
WHEN vp.total_orders > 0
THEN (vp.received_orders / vp.total_orders) * 100
ELSE 0
END as on_time_delivery_rate,
COALESCE(vo.order_fill_rate, 0) as order_fill_rate,
COALESCE(vo.total_orders, 0) as total_orders,
COALESCE(vo.total_late_orders, 0) as total_late_orders,
COALESCE(vo.total_purchase_value, 0) as total_purchase_value,
COALESCE(vo.avg_order_value, 0) as avg_order_value,
COALESCE(vp.active_products, 0) as active_products,
COALESCE(vp.total_products, 0) as total_products,
COALESCE(vp.total_revenue, 0) as total_revenue,
COALESCE(vp.avg_margin_percent, 0) as avg_margin_percent,
vd.status
FROM vendor_details vd
LEFT JOIN vendor_orders vo ON vd.vendor = vo.vendor
LEFT JOIN vendor_products vp ON vd.vendor = vp.vendor
CASE
WHEN vp.total_orders > 0
THEN (vp.received_orders / vp.total_orders) * 100
ELSE 0
END as order_fill_rate,
CASE
WHEN vs.total_orders > 0
THEN vs.total_revenue / vs.total_orders
ELSE 0
END as avg_order_value,
COALESCE(vs.active_products, 0) as active_products,
COALESCE(vpr.total_products, 0) as total_products,
'active' as status,
NOW() as last_calculated_at
FROM vendor_sales vs
LEFT JOIN vendor_po vp ON vs.vendor = vp.vendor
LEFT JOIN vendor_products vpr ON vs.vendor = vpr.vendor
WHERE vs.vendor IS NOT NULL
ON DUPLICATE KEY UPDATE
total_revenue = VALUES(total_revenue),
total_orders = VALUES(total_orders),
total_late_orders = VALUES(total_late_orders),
avg_lead_time_days = VALUES(avg_lead_time_days),
on_time_delivery_rate = VALUES(on_time_delivery_rate),
order_fill_rate = VALUES(order_fill_rate),
total_orders = VALUES(total_orders),
total_late_orders = VALUES(total_late_orders),
total_purchase_value = VALUES(total_purchase_value),
avg_order_value = VALUES(avg_order_value),
active_products = VALUES(active_products),
total_products = VALUES(total_products),
total_revenue = VALUES(total_revenue),
avg_margin_percent = VALUES(avg_margin_percent),
status = VALUES(status),
last_calculated_at = CURRENT_TIMESTAMP
last_calculated_at = VALUES(last_calculated_at)
`);
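In the rewritten query the delivery ratios are simple proportions over the last 12 months of purchase orders: received orders over total orders (used for both on_time_delivery_rate and order_fill_rate), and revenue over sales-order count for avg_order_value. The same arithmetic in plain JavaScript, names invented, for illustration:

// Per-vendor aggregates in, percentages out; mirrors the CASE expressions above.
function vendorRatios({ receivedOrders, totalPurchaseOrders, totalRevenue, totalSalesOrders }) {
  const deliveryRate = totalPurchaseOrders > 0
    ? (receivedOrders / totalPurchaseOrders) * 100
    : 0;
  const avgOrderValue = totalSalesOrders > 0
    ? totalRevenue / totalSalesOrders
    : 0;
  return { onTimeDeliveryRate: deliveryRate, orderFillRate: deliveryRate, avgOrderValue };
}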

// Calculate vendor time-based metrics
await connection.query(`
INSERT INTO vendor_time_metrics (
vendor,
year,
month,
total_orders,
late_orders,
avg_lead_time_days,
total_purchase_value,
total_revenue,
avg_margin_percent
)
WITH vendor_time_data AS (
SELECT
vd.vendor,
YEAR(po.date) as year,
MONTH(po.date) as month,
COUNT(DISTINCT po.po_id) as total_orders,
COUNT(DISTINCT CASE WHEN po.received_date > po.expected_date THEN po.po_id END) as late_orders,
AVG(DATEDIFF(po.received_date, po.date)) as avg_lead_time_days,
SUM(po.cost_price * po.ordered) as total_purchase_value,
SUM(o.price * o.quantity) as total_revenue,
CASE
WHEN SUM(o.price * o.quantity) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin_percent
FROM vendor_details vd
LEFT JOIN products p ON vd.vendor = p.vendor
LEFT JOIN purchase_orders po ON p.pid = po.pid
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHERE po.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY vd.vendor, YEAR(po.date), MONTH(po.date)
)
SELECT
vendor,
year,
month,
COALESCE(total_orders, 0) as total_orders,
COALESCE(late_orders, 0) as late_orders,
COALESCE(avg_lead_time_days, 0) as avg_lead_time_days,
COALESCE(total_purchase_value, 0) as total_purchase_value,
COALESCE(total_revenue, 0) as total_revenue,
COALESCE(avg_margin_percent, 0) as avg_margin_percent
FROM vendor_time_data
ON DUPLICATE KEY UPDATE
total_orders = VALUES(total_orders),
late_orders = VALUES(late_orders),
avg_lead_time_days = VALUES(avg_lead_time_days),
total_purchase_value = VALUES(total_purchase_value),
total_revenue = VALUES(total_revenue),
avg_margin_percent = VALUES(avg_margin_percent)
`);
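avg_margin_percent here (and in the vendor_products CTE above) is a revenue-weighted margin: total profit divided by total revenue, not an average of per-line margins. A short worked example of the difference, with invented numbers:

// Revenue-weighted margin, as in SUM((price - cost) * qty) / SUM(price * qty) * 100.
function avgMarginPercent(lines) {
  const revenue = lines.reduce((sum, l) => sum + l.price * l.quantity, 0);
  if (revenue <= 0) return 0; // matches the ELSE 0 branch in the SQL
  const profit = lines.reduce((sum, l) => sum + (l.price - l.costPrice) * l.quantity, 0);
  return (profit * 100) / revenue;
}

// 10 units at $5 (cost $3) and 1 unit at $100 (cost $90):
// revenue 150, profit 30 -> 20%, even though the per-line margins are 40% and 10%.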

return Math.floor(totalProducts * 0.75);
return Math.floor(totalProducts * 0.9);
} finally {
connection.release();
}
}

module.exports = calculateVendorMetrics;
module.exports = calculateVendorMetrics;