Fix (probably) discrepancies and errors in import/calculate scripts

2025-02-02 00:01:46 -05:00
parent bd5bcdd548
commit 8a43da502a
10 changed files with 423 additions and 218 deletions

View File

@@ -7,8 +7,8 @@ require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });
 // Configuration flags for controlling which metrics to calculate
 // Set to 1 to skip the corresponding calculation, 0 to run it
-const SKIP_PRODUCT_METRICS = 1;
-const SKIP_TIME_AGGREGATES = 1;
+const SKIP_PRODUCT_METRICS = 0;
+const SKIP_TIME_AGGREGATES = 0;
 const SKIP_FINANCIAL_METRICS = 0;
 const SKIP_VENDOR_METRICS = 0;
 const SKIP_CATEGORY_METRICS = 0;
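
Note: a minimal sketch of how these flags are presumably consumed by the script's driver; the runCalculations wrapper and the calculateFinancialMetrics / calculateVendorMetrics names are assumptions (only the other calculate* functions appear in this commit's hunks).

async function runCalculations() {
  // Each SKIP_* flag short-circuits its corresponding pass.
  if (!SKIP_PRODUCT_METRICS) await calculateProductMetrics();
  if (!SKIP_TIME_AGGREGATES) await calculateTimeAggregates();
  if (!SKIP_FINANCIAL_METRICS) await calculateFinancialMetrics(); // name assumed
  if (!SKIP_VENDOR_METRICS) await calculateVendorMetrics();       // name assumed
  if (!SKIP_CATEGORY_METRICS) await calculateCategoryMetrics();
}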

View File

@@ -317,14 +317,25 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
     for (let i = 0; i < orderIds.length; i += 5000) {
       const batchIds = orderIds.slice(i, i + 5000);
       const [costs] = await prodConnection.query(`
-        SELECT orderid as order_id, pid, costeach
-        FROM order_costs
-        WHERE orderid IN (?)
+        SELECT
+          oc.orderid as order_id,
+          oc.pid,
+          COALESCE(
+            oc.costeach,
+            (SELECT pi.costeach
+             FROM product_inventory pi
+             WHERE pi.pid = oc.pid
+               AND pi.daterec <= o.date_placed
+             ORDER BY pi.daterec DESC LIMIT 1)
+          ) as costeach
+        FROM order_costs oc
+        JOIN _order o ON oc.orderid = o.order_id
+        WHERE oc.orderid IN (?)
       `, [batchIds]);
       if (costs.length > 0) {
         const placeholders = costs.map(() => '(?, ?, ?)').join(",");
-        const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach]);
+        const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach || 0]);
         await localConnection.query(`
           INSERT INTO temp_order_costs (order_id, pid, costeach)
           VALUES ${placeholders}
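
Note: the fallback this query encodes, restated in plain JS: when a line's costeach is NULL, use the most recent product_inventory cost dated at or before the order was placed. Table and column names mirror the query; the row shapes are assumptions for illustration.

function resolveCost(line, inventoryRows, datePlaced) {
  if (line.costeach != null) return line.costeach;
  const candidates = inventoryRows
    .filter(pi => pi.pid === line.pid && pi.daterec <= datePlaced)
    .sort((a, b) => b.daterec - a.daterec); // newest first
  return candidates.length ? candidates[0].costeach : 0; // 0 mirrors `c.costeach || 0`
}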
@@ -355,7 +366,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         om.date,
         oi.price,
         oi.quantity,
-        oi.base_discount + COALESCE(od.discount, 0) as discount,
+        oi.base_discount + COALESCE(od.discount, 0) +
+        CASE
+          WHEN o.summary_discount > 0 THEN
+            ROUND((o.summary_discount * (oi.price * oi.quantity)) /
+                  NULLIF(o.summary_subtotal, 0), 2)
+          ELSE 0
+        END as discount,
         COALESCE(ot.tax, 0) as tax,
         0 as tax_included,
         0 as shipping,
@@ -366,6 +383,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         COALESCE(tc.costeach, 0) as costeach
       FROM temp_order_items oi
       JOIN temp_order_meta om ON oi.order_id = om.order_id
+      LEFT JOIN _order o ON oi.order_id = o.order_id
       LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
       LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
       LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
@@ -455,7 +473,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         om.date,
         oi.price,
         oi.quantity,
-        oi.base_discount + COALESCE(od.discount, 0) as discount,
+        oi.base_discount + COALESCE(od.discount, 0) +
+        CASE
+          WHEN o.summary_discount > 0 THEN
+            ROUND((o.summary_discount * (oi.price * oi.quantity)) /
+                  NULLIF(o.summary_subtotal, 0), 2)
+          ELSE 0
+        END as discount,
         COALESCE(ot.tax, 0) as tax,
         0 as tax_included,
         0 as shipping,
@@ -466,6 +490,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         COALESCE(tc.costeach, 0) as costeach
       FROM temp_order_items oi
       JOIN temp_order_meta om ON oi.order_id = om.order_id
+      LEFT JOIN _order o ON oi.order_id = o.order_id
       LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
       LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
       LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
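
Note: both hunks above allocate an order-level summary_discount across lines in proportion to each line's share of the order subtotal. The same arithmetic in plain JS (names mirror the SQL; rounding matches ROUND(..., 2)):

// Allocate an order-level discount to one line, pro rata by line subtotal.
function lineShareOfSummaryDiscount(summaryDiscount, summarySubtotal, price, quantity) {
  if (!(summaryDiscount > 0) || !summarySubtotal) return 0; // mirrors the CASE/NULLIF guards
  const lineSubtotal = price * quantity;
  return Math.round((summaryDiscount * lineSubtotal / summarySubtotal) * 100) / 100;
}
// e.g. a $10 summary discount on a $200 order gives a $50 line 10 * 50/200 = $2.50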

View File

@@ -470,43 +470,100 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
     // Process category relationships
     if (batch.some(p => p.category_ids)) {
-      const categoryRelationships = batch
-        .filter(p => p.category_ids)
-        .flatMap(product =>
-          product.category_ids
-            .split(',')
-            .map(id => id.trim())
-            .filter(id => id)
-            .map(Number)
-            .filter(id => !isNaN(id))
-            .map(catId => [catId, product.pid])
-        );
-      if (categoryRelationships.length > 0) {
-        // Verify categories exist before inserting relationships
-        const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))];
-        const [existingCats] = await localConnection.query(
-          "SELECT cat_id FROM categories WHERE cat_id IN (?)",
-          [uniqueCatIds]
-        );
-        const existingCatIds = new Set(existingCats.map(c => c.cat_id));
-        // Filter relationships to only include existing categories
-        const validRelationships = categoryRelationships.filter(([catId]) =>
-          existingCatIds.has(catId)
-        );
-        if (validRelationships.length > 0) {
-          const catPlaceholders = validRelationships
-            .map(() => "(?, ?)")
-            .join(",");
-          await localConnection.query(
-            `INSERT IGNORE INTO product_categories (cat_id, pid)
-             VALUES ${catPlaceholders}`,
-            validRelationships.flat()
-          );
-        }
-      }
+      // First get all valid categories
+      const allCategoryIds = [...new Set(
+        batch
+          .filter(p => p.category_ids)
+          .flatMap(product =>
+            product.category_ids
+              .split(',')
+              .map(id => id.trim())
+              .filter(id => id)
+              .map(Number)
+              .filter(id => !isNaN(id))
+          )
+      )];
+      // Verify categories exist and get their hierarchy
+      const [categories] = await localConnection.query(`
+        WITH RECURSIVE category_hierarchy AS (
+          SELECT
+            cat_id,
+            parent_id,
+            type,
+            1 as level,
+            CAST(cat_id AS CHAR(200)) as path
+          FROM categories
+          WHERE cat_id IN (?)
+          UNION ALL
+          SELECT
+            c.cat_id,
+            c.parent_id,
+            c.type,
+            ch.level + 1,
+            CONCAT(ch.path, ',', c.cat_id)
+          FROM categories c
+          JOIN category_hierarchy ch ON c.parent_id = ch.cat_id
+          WHERE ch.level < 10 -- Prevent infinite recursion
+        )
+        SELECT DISTINCT
+          cat_id,
+          parent_id,
+          type,
+          path
+        FROM category_hierarchy
+        WHERE cat_id IN (?)
+        ORDER BY level DESC
+      `, [allCategoryIds, allCategoryIds]);
+      const validCategories = new Map(categories.map(c => [c.cat_id, c]));
+      const validCategoryIds = new Set(categories.map(c => c.cat_id));
+      // Build category relationships ensuring proper hierarchy
+      const categoryRelationships = [];
+      batch
+        .filter(p => p.category_ids)
+        .forEach(product => {
+          const productCategories = product.category_ids
+            .split(',')
+            .map(id => id.trim())
+            .filter(id => id)
+            .map(Number)
+            .filter(id => !isNaN(id))
+            .filter(id => validCategoryIds.has(id))
+            .map(id => validCategories.get(id))
+            .sort((a, b) => a.type - b.type); // Sort by type to ensure proper hierarchy
+          // Only add relationships that maintain proper hierarchy
+          productCategories.forEach(category => {
+            if (category.path.split(',').every(parentId =>
+              validCategoryIds.has(Number(parentId))
+            )) {
+              categoryRelationships.push([category.cat_id, product.pid]);
+            }
+          });
+        });
+      if (categoryRelationships.length > 0) {
+        // First remove any existing relationships that will be replaced
+        await localConnection.query(`
+          DELETE FROM product_categories
+          WHERE pid IN (?) AND cat_id IN (?)
+        `, [
+          [...new Set(categoryRelationships.map(([_, pid]) => pid))],
+          [...new Set(categoryRelationships.map(([catId, _]) => catId))]
+        ]);
+        // Then insert the new relationships
+        const placeholders = categoryRelationships
+          .map(() => "(?, ?)")
+          .join(",");
+        await localConnection.query(`
+          INSERT INTO product_categories (cat_id, pid)
+          VALUES ${placeholders}
+        `, categoryRelationships.flat());
+      }
     }
   }
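
Note: a worked example of the path check above, with hypothetical ids. A product's category link is kept only if every id on that category's hierarchy path came back from the recursive CTE:

const validCategoryIds = new Set([10, 42]);        // ids returned by the CTE
const category = { cat_id: 42, path: '10,42' };    // 42 is a child of 10
const keep = category.path.split(',').every(id => validCategoryIds.has(Number(id)));
console.log(keep); // true; false if 10 were missing from categories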

View File

@@ -321,41 +321,47 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
   let lastFulfillmentReceiving = null;
   for (const receiving of allReceivings) {
-    const qtyToApply = Math.min(remainingToFulfill, receiving.qty_each);
+    // Convert quantities to base units using supplier data
+    const baseQtyReceived = receiving.qty_each * (
+      receiving.type === 'original' ? 1 :
+      Math.max(1, product.supplier_qty_per_unit || 1)
+    );
+    const qtyToApply = Math.min(remainingToFulfill, baseQtyReceived);
     if (qtyToApply > 0) {
       // If this is the first receiving being applied, use its cost
-      if (actualCost === null) {
+      if (actualCost === null && receiving.cost_each > 0) {
         actualCost = receiving.cost_each;
         firstFulfillmentReceiving = receiving;
       }
       lastFulfillmentReceiving = receiving;
       fulfillmentTracking.push({
         receiving_id: receiving.receiving_id,
         qty_applied: qtyToApply,
-        qty_total: receiving.qty_each,
-        cost: receiving.cost_each,
+        qty_total: baseQtyReceived,
+        cost: receiving.cost_each || actualCost || product.cost_each,
         date: receiving.received_date,
         received_by: receiving.received_by,
         received_by_name: receiving.received_by_name || 'Unknown',
         type: receiving.type,
-        remaining_qty: receiving.qty_each - qtyToApply
+        remaining_qty: baseQtyReceived - qtyToApply
       });
       remainingToFulfill -= qtyToApply;
     } else {
       // Track excess receivings
       fulfillmentTracking.push({
         receiving_id: receiving.receiving_id,
         qty_applied: 0,
-        qty_total: receiving.qty_each,
-        cost: receiving.cost_each,
+        qty_total: baseQtyReceived,
+        cost: receiving.cost_each || actualCost || product.cost_each,
         date: receiving.received_date,
         received_by: receiving.received_by,
         received_by_name: receiving.received_by_name || 'Unknown',
         type: receiving.type,
         is_excess: true
       });
     }
-    totalReceived += receiving.qty_each;
+    totalReceived += baseQtyReceived;
   }
   const receiving_status = !totalReceived ? 1 : // created
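
Note: a worked example of the base-unit conversion, with hypothetical numbers. A non-original receiving of 3 cases at 12 sellable units per case counts as 36 base units toward fulfillment; an 'original' row is already in base units.

const product = { supplier_qty_per_unit: 12 };
const receiving = { qty_each: 3, type: 'case' }; // any type other than 'original'
const baseQtyReceived = receiving.qty_each * (
  receiving.type === 'original' ? 1 : Math.max(1, product.supplier_qty_per_unit || 1)
);
console.log(baseQtyReceived); // 36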

View File

@@ -1,82 +0,0 @@
-// Split into inserts and updates
-const insertsAndUpdates = batch.reduce((acc, po) => {
-  const key = `${po.po_id}-${po.pid}`;
-  if (existingPOMap.has(key)) {
-    const existing = existingPOMap.get(key);
-    // Check if any values are different
-    const hasChanges = columnNames.some(col => {
-      const newVal = po[col] ?? null;
-      const oldVal = existing[col] ?? null;
-      // Special handling for numbers to avoid type coercion issues
-      if (typeof newVal === 'number' && typeof oldVal === 'number') {
-        return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
-      }
-      // Special handling for receiving_history JSON
-      if (col === 'receiving_history') {
-        return JSON.stringify(newVal) !== JSON.stringify(oldVal);
-      }
-      return newVal !== oldVal;
-    });
-    if (hasChanges) {
-      console.log(`PO line changed: ${key}`, {
-        po_id: po.po_id,
-        pid: po.pid,
-        changes: columnNames.filter(col => {
-          const newVal = po[col] ?? null;
-          const oldVal = existing[col] ?? null;
-          if (typeof newVal === 'number' && typeof oldVal === 'number') {
-            return Math.abs(newVal - oldVal) > 0.00001;
-          }
-          if (col === 'receiving_history') {
-            return JSON.stringify(newVal) !== JSON.stringify(oldVal);
-          }
-          return newVal !== oldVal;
-        })
-      });
-      acc.updates.push({
-        po_id: po.po_id,
-        pid: po.pid,
-        values: columnNames.map(col => po[col] ?? null)
-      });
-    }
-  } else {
-    console.log(`New PO line: ${key}`);
-    acc.inserts.push({
-      po_id: po.po_id,
-      pid: po.pid,
-      values: columnNames.map(col => po[col] ?? null)
-    });
-  }
-  return acc;
-}, { inserts: [], updates: [] });
-// Handle inserts
-if (insertsAndUpdates.inserts.length > 0) {
-  const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
-  const insertResult = await localConnection.query(`
-    INSERT INTO purchase_orders (${columnNames.join(",")})
-    VALUES ${insertPlaceholders}
-  `, insertsAndUpdates.inserts.map(i => i.values).flat());
-  recordsAdded += insertResult[0].affectedRows;
-}
-// Handle updates
-if (insertsAndUpdates.updates.length > 0) {
-  const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
-  const updateResult = await localConnection.query(`
-    INSERT INTO purchase_orders (${columnNames.join(",")})
-    VALUES ${updatePlaceholders}
-    ON DUPLICATE KEY UPDATE
-    ${columnNames
-      .filter(col => col !== "po_id" && col !== "pid")
-      .map(col => `${col} = VALUES(${col})`)
-      .join(",")};
-  `, insertsAndUpdates.updates.map(u => u.values).flat());
-  // Each update affects 2 rows in affectedRows, so we divide by 2 to get actual count
-  recordsUpdated += insertsAndUpdates.updates.length;
-}

View File

@@ -55,10 +55,21 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount, i
       WITH filtered_products AS (
         SELECT
           p.*,
-          CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
-          CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
-          CASE
-            WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
+          CASE
+            WHEN p.stock_quantity <= 5000 AND p.stock_quantity >= 0
+            THEN p.pid
+          END as valid_pid,
+          CASE
+            WHEN p.visible = true
+              AND p.stock_quantity <= 5000
+              AND p.stock_quantity >= 0
+            THEN p.pid
+          END as active_pid,
+          CASE
+            WHEN p.stock_quantity IS NULL
+              OR p.stock_quantity < 0
+              OR p.stock_quantity > 5000
+            THEN 0
             ELSE p.stock_quantity
           END as valid_stock
         FROM products p
@@ -67,10 +78,13 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount, i
       sales_periods AS (
         SELECT
           p.brand,
-          SUM(o.quantity * o.price) as period_revenue,
+          SUM(o.quantity * (o.price - COALESCE(o.discount, 0))) as period_revenue,
+          SUM(o.quantity * (o.price - COALESCE(o.discount, 0) - p.cost_price)) as period_margin,
+          COUNT(DISTINCT DATE(o.date)) as period_days,
           CASE
             WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH) THEN 'current'
-            WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) THEN 'previous'
+            WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
+              AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) THEN 'previous'
           END as period_type
         FROM filtered_products p
         JOIN orders o ON p.pid = o.pid
@@ -86,10 +100,11 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount, i
           SUM(p.valid_stock) as total_stock_units,
           SUM(p.valid_stock * p.cost_price) as total_stock_cost,
           SUM(p.valid_stock * p.price) as total_stock_retail,
-          COALESCE(SUM(o.quantity * o.price), 0) as total_revenue,
+          COALESCE(SUM(o.quantity * (o.price - COALESCE(o.discount, 0))), 0) as total_revenue,
           CASE
-            WHEN SUM(o.quantity * o.price) > 0 THEN
-              (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
+            WHEN SUM(o.quantity * (o.price - COALESCE(o.discount, 0))) > 0
+            THEN (SUM(o.quantity * (o.price - COALESCE(o.discount, 0) - p.cost_price)) * 100.0) /
+                 SUM(o.quantity * (o.price - COALESCE(o.discount, 0)))
             ELSE 0
           END as avg_margin
         FROM filtered_products p
@@ -107,16 +122,18 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount, i
           bd.avg_margin,
           CASE
             WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0
-              AND MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) > 0 THEN 100.0
-            WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0 THEN 0.0
-            ELSE LEAST(
-              GREATEST(
+              AND MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) > 0
+            THEN 100.0
+            WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0
+            THEN 0.0
+            ELSE GREATEST(
+              -100.0,
+              LEAST(
                 ((MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) -
                   MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END)) /
-                  NULLIF(MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END), 0)) * 100.0,
-                -100.0
-              ),
-              999.99
+                  NULLIF(ABS(MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END)), 0)) * 100.0,
+                999.99
+              )
             )
           END as growth_rate
         FROM brand_data bd
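
Note: the clamp above in plain JS, with hypothetical period revenues. Growth is bounded to [-100, 999.99] so one tiny previous period cannot produce an absurd percentage, and ABS() guards the sign of the denominator:

function growthRate(current, previous) {
  if (previous === 0 && current > 0) return 100.0;
  if (previous === 0) return 0.0;
  return Math.max(-100.0, Math.min(((current - previous) / Math.abs(previous)) * 100.0, 999.99));
}
console.log(growthRate(150, 10)); // 999.99 (clamped from 1400)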

View File

@@ -151,7 +151,9 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
       WITH current_period AS (
         SELECT
           pc.cat_id,
-          SUM(o.quantity * o.price / (1 + COALESCE(ss.seasonality_factor, 0))) as revenue
+          SUM(o.quantity * (o.price - COALESCE(o.discount, 0)) /
+              (1 + COALESCE(ss.seasonality_factor, 0))) as revenue,
+          COUNT(DISTINCT DATE(o.date)) as days
         FROM product_categories pc
         JOIN products p ON pc.pid = p.pid
         JOIN orders o ON p.pid = o.pid
@@ -163,7 +165,9 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
       previous_period AS (
         SELECT
           pc.cat_id,
-          SUM(o.quantity * o.price / (1 + COALESCE(ss.seasonality_factor, 0))) as revenue
+          SUM(o.quantity * (o.price - COALESCE(o.discount, 0)) /
+              (1 + COALESCE(ss.seasonality_factor, 0))) as revenue,
+          COUNT(DISTINCT DATE(o.date)) as days
         FROM product_categories pc
         JOIN products p ON pc.pid = p.pid
         JOIN orders o ON p.pid = o.pid
@@ -177,7 +181,8 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
         SELECT
           pc.cat_id,
           MONTH(o.date) as month,
-          SUM(o.quantity * o.price / (1 + COALESCE(ss.seasonality_factor, 0))) as revenue,
+          SUM(o.quantity * (o.price - COALESCE(o.discount, 0)) /
+              (1 + COALESCE(ss.seasonality_factor, 0))) as revenue,
           COUNT(DISTINCT DATE(o.date)) as days_in_month
         FROM product_categories pc
         JOIN products p ON pc.pid = p.pid
@@ -192,8 +197,8 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
           cat_id,
           COUNT(*) as n,
           AVG(month) as avg_x,
-          AVG(revenue / days_in_month) as avg_y,
-          SUM(month * (revenue / days_in_month)) as sum_xy,
+          AVG(revenue / NULLIF(days_in_month, 0)) as avg_y,
+          SUM(month * (revenue / NULLIF(days_in_month, 0))) as sum_xy,
           SUM(month * month) as sum_xx
         FROM trend_data
         GROUP BY cat_id
@@ -202,7 +207,8 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
       trend_analysis AS (
         SELECT
           cat_id,
-          ((n * sum_xy) - (avg_x * n * avg_y)) / ((n * sum_xx) - (n * avg_x * avg_x)) as trend_slope,
+          ((n * sum_xy) - (avg_x * n * avg_y)) /
+            NULLIF((n * sum_xx) - (n * avg_x * avg_x), 0) as trend_slope,
           avg_y as avg_daily_revenue
         FROM trend_stats
       )
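
Note: trend_slope is intended as a least-squares slope of average daily revenue against month number; the textbook form is slope = (Σxy − n·x̄·ȳ) / (Σx² − n·x̄²). A reference sketch with hypothetical monthly figures (the query groups its n factors slightly differently, so treat this as the reference formula rather than a line-for-line transcription):

function olsSlope(points) { // points: [{ x: month, y: avgDailyRevenue }]
  const n = points.length;
  const avgX = points.reduce((s, p) => s + p.x, 0) / n;
  const avgY = points.reduce((s, p) => s + p.y, 0) / n;
  const sumXY = points.reduce((s, p) => s + p.x * p.y, 0);
  const sumXX = points.reduce((s, p) => s + p.x * p.x, 0);
  const denom = sumXX - n * avgX * avgX; // the SQL guards its denominator with NULLIF(..., 0)
  return denom === 0 ? null : (sumXY - n * avgX * avgY) / denom;
}
console.log(olsSlope([{ x: 1, y: 10 }, { x: 2, y: 12 }, { x: 3, y: 14 }])); // 2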
@@ -213,24 +219,31 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
         SET
           cm.growth_rate = CASE
             WHEN pp.revenue = 0 AND COALESCE(cp.revenue, 0) > 0 THEN 100.0
-            WHEN pp.revenue = 0 THEN 0.0
+            WHEN pp.revenue = 0 OR cp.revenue IS NULL THEN 0.0
             WHEN ta.trend_slope IS NOT NULL THEN
-              LEAST(
-                GREATEST(
+              GREATEST(
+                -100.0,
+                LEAST(
                   (ta.trend_slope / NULLIF(ta.avg_daily_revenue, 0)) * 365 * 100,
-                  -100.0
-                ),
-                999.99
+                  999.99
+                )
               )
             ELSE
-              LEAST(
-                GREATEST(
-                  ((COALESCE(cp.revenue, 0) - pp.revenue) / pp.revenue) * 100.0,
-                  -100.0
-                ),
-                999.99
+              GREATEST(
+                -100.0,
+                LEAST(
+                  ((COALESCE(cp.revenue, 0) - pp.revenue) /
+                    NULLIF(ABS(pp.revenue), 0)) * 100.0,
+                  999.99
+                )
               )
           END,
+          cm.avg_margin = CASE
+            WHEN cp.revenue > 0 THEN
+              (SUM(o.quantity * (o.price - COALESCE(o.discount, 0) - p.cost_price)) /
+               NULLIF(SUM(o.quantity * (o.price - COALESCE(o.discount, 0))), 0)) * 100
+            ELSE cm.avg_margin
+          END,
           cm.last_calculated_at = NOW()
         WHERE cp.cat_id IS NOT NULL OR pp.cat_id IS NOT NULL
       `);

View File

@@ -286,6 +286,146 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
       });
     }
+    // Calculate ABC classification
+    outputProgress({
+      status: 'running',
+      operation: 'Starting ABC classification',
+      current: processedCount,
+      total: totalProducts,
+      elapsed: formatElapsedTime(startTime),
+      remaining: estimateRemaining(startTime, processedCount, totalProducts),
+      rate: calculateRate(startTime, processedCount),
+      percentage: ((processedCount / totalProducts) * 100).toFixed(1),
+      timing: {
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date().toISOString(),
+        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
+      }
+    });
+    if (isCancelled) return processedCount;
+    const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
+    const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 };
+    // First, create and populate the rankings table with an index
+    await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');
+    await connection.query(`
+      CREATE TEMPORARY TABLE temp_revenue_ranks (
+        pid BIGINT NOT NULL,
+        total_revenue DECIMAL(10,3),
+        rank_num INT,
+        dense_rank INT,
+        percentile DECIMAL(5,2),
+        total_count INT,
+        PRIMARY KEY (pid),
+        INDEX (rank_num),
+        INDEX (dense_rank),
+        INDEX (percentile)
+      ) ENGINE=MEMORY
+    `);
+    // Calculate rankings with proper tie handling
+    await connection.query(`
+      INSERT INTO temp_revenue_ranks
+      WITH revenue_data AS (
+        SELECT
+          pid,
+          total_revenue,
+          COUNT(*) OVER () as total_count,
+          PERCENT_RANK() OVER (ORDER BY total_revenue DESC) * 100 as percentile,
+          RANK() OVER (ORDER BY total_revenue DESC) as rank_num,
+          DENSE_RANK() OVER (ORDER BY total_revenue DESC) as dense_rank
+        FROM product_metrics
+        WHERE total_revenue > 0
+      )
+      SELECT
+        pid,
+        total_revenue,
+        rank_num,
+        dense_rank,
+        percentile,
+        total_count
+      FROM revenue_data
+    `);
+    // Get total count for percentage calculation
+    const [rankingCount] = await connection.query('SELECT MAX(rank_num) as total_count FROM temp_revenue_ranks');
+    const totalCount = rankingCount[0].total_count || 1;
+    const max_rank = totalCount;
+    // Process updates in batches
+    let abcProcessedCount = 0;
+    const batchSize = 5000;
+    while (true) {
+      if (isCancelled) return processedCount;
+      // Get a batch of PIDs that need updating
+      const [pids] = await connection.query(`
+        SELECT pm.pid
+        FROM product_metrics pm
+        LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
+        WHERE pm.abc_class IS NULL
+          OR pm.abc_class !=
+            CASE
+              WHEN tr.pid IS NULL THEN 'C'
+              WHEN tr.percentile <= ? THEN 'A'
+              WHEN tr.percentile <= ? THEN 'B'
+              ELSE 'C'
+            END
+        LIMIT ?
+      `, [abcThresholds.a_threshold, abcThresholds.b_threshold, batchSize]);
+      if (pids.length === 0) break;
+      await connection.query(`
+        UPDATE product_metrics pm
+        LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
+        SET pm.abc_class =
+          CASE
+            WHEN tr.pid IS NULL THEN 'C'
+            WHEN tr.percentile <= ? THEN 'A'
+            WHEN tr.percentile <= ? THEN 'B'
+            ELSE 'C'
+          END,
+          pm.last_calculated_at = NOW()
+        WHERE pm.pid IN (?)
+      `, [abcThresholds.a_threshold, abcThresholds.b_threshold, pids.map(row => row.pid)]);
+      // Now update turnover rate with proper handling of zero inventory periods
+      await connection.query(`
+        UPDATE product_metrics pm
+        JOIN (
+          SELECT
+            o.pid,
+            SUM(o.quantity) as total_sold,
+            COUNT(DISTINCT DATE(o.date)) as active_days,
+            AVG(CASE
+              WHEN p.stock_quantity > 0 THEN p.stock_quantity
+              ELSE NULL
+            END) as avg_nonzero_stock
+          FROM orders o
+          JOIN products p ON o.pid = p.pid
+          WHERE o.canceled = false
+            AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
+            AND o.pid IN (?)
+          GROUP BY o.pid
+        ) sales ON pm.pid = sales.pid
+        SET
+          pm.turnover_rate = CASE
+            WHEN sales.avg_nonzero_stock > 0 AND sales.active_days > 0
+            THEN LEAST(
+              (sales.total_sold / sales.avg_nonzero_stock) * (365.0 / sales.active_days),
+              999.99
+            )
+            ELSE 0
+          END,
+          pm.last_calculated_at = NOW()
+        WHERE pm.pid IN (?)
+      `, [pids.map(row => row.pid), pids.map(row => row.pid)]);
+    }
     return processedCount;
   } catch (error) {
     logError(error, 'Error calculating product metrics');
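
Note: the classification rule reduces to percentile thresholds over revenue rank. A plain-JS restatement using the defaults (A: revenue percentile up to 20, B: up to 50, C: everything else, including products with no ranked revenue):

// Percentile-threshold ABC classification, mirroring the CASE expression above.
function abcClass(percentile, { a_threshold = 20, b_threshold = 50 } = {}) {
  if (percentile == null) return 'C'; // unranked products (no revenue) fall through to 'C'
  if (percentile <= a_threshold) return 'A';
  if (percentile <= b_threshold) return 'B';
  return 'C';
}
console.log(abcClass(12.5)); // 'A'
console.log(abcClass(35));   // 'B'
console.log(abcClass(null)); // 'C'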

View File

@@ -159,37 +159,72 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount,
         confidence_level,
         last_calculated_at
       )
+      WITH daily_stats AS (
+        SELECT
+          ds.pid,
+          AVG(ds.daily_quantity) as avg_daily_qty,
+          STDDEV(ds.daily_quantity) as std_daily_qty,
+          COUNT(DISTINCT ds.day_count) as data_points,
+          SUM(ds.day_count) as total_days,
+          AVG(ds.daily_revenue) as avg_daily_revenue,
+          STDDEV(ds.daily_revenue) as std_daily_revenue,
+          MIN(ds.daily_quantity) as min_daily_qty,
+          MAX(ds.daily_quantity) as max_daily_qty,
+          AVG(ABS(ds.daily_quantity - LAG(ds.daily_quantity) OVER (PARTITION BY ds.pid ORDER BY ds.day_of_week))) as avg_daily_variance
+        FROM temp_daily_sales ds
+        GROUP BY ds.pid
+        HAVING AVG(ds.daily_quantity) > 0
+      )
       SELECT
         ds.pid,
         fd.forecast_date,
         GREATEST(0,
-          AVG(ds.daily_quantity) *
-          (1 + COALESCE(sf.seasonality_factor, 0))
+          ROUND(
+            ds.avg_daily_qty *
+            (1 + COALESCE(sf.seasonality_factor, 0)) *
+            CASE
+              WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.5 THEN 0.85
+              WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.0 THEN 0.9
+              WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 0.5 THEN 0.95
+              ELSE 1.0
+            END,
+            2
+          )
         ) as forecast_units,
         GREATEST(0,
-          COALESCE(
-            CASE
-              WHEN SUM(ds.day_count) >= 4 THEN AVG(ds.daily_revenue)
-              ELSE ps.overall_avg_revenue
-            END *
-            (1 + COALESCE(sf.seasonality_factor, 0)) *
-            (0.95 + (RAND() * 0.1)),
-            0
+          ROUND(
+            COALESCE(
+              CASE
+                WHEN ds.data_points >= 4 THEN ds.avg_daily_revenue
+                ELSE ps.overall_avg_revenue
+              END *
+              (1 + COALESCE(sf.seasonality_factor, 0)) *
+              CASE
+                WHEN ds.std_daily_revenue / NULLIF(ds.avg_daily_revenue, 0) > 1.5 THEN 0.85
+                WHEN ds.std_daily_revenue / NULLIF(ds.avg_daily_revenue, 0) > 1.0 THEN 0.9
+                WHEN ds.std_daily_revenue / NULLIF(ds.avg_daily_revenue, 0) > 0.5 THEN 0.95
+                ELSE 1.0
+              END,
+              0
+            ),
+            2
           )
         ) as forecast_revenue,
         CASE
-          WHEN ps.total_days >= 60 THEN 90
-          WHEN ps.total_days >= 30 THEN 80
-          WHEN ps.total_days >= 14 THEN 70
+          WHEN ds.total_days >= 60 AND ds.avg_daily_variance / NULLIF(ds.avg_daily_qty, 0) < 0.5 THEN 90
+          WHEN ds.total_days >= 60 THEN 85
+          WHEN ds.total_days >= 30 AND ds.avg_daily_variance / NULLIF(ds.avg_daily_qty, 0) < 0.5 THEN 80
+          WHEN ds.total_days >= 30 THEN 75
+          WHEN ds.total_days >= 14 AND ds.avg_daily_variance / NULLIF(ds.avg_daily_qty, 0) < 0.5 THEN 70
+          WHEN ds.total_days >= 14 THEN 65
          ELSE 60
         END as confidence_level,
         NOW() as last_calculated_at
-      FROM temp_daily_sales ds
+      FROM daily_stats ds
       JOIN temp_product_stats ps ON ds.pid = ps.pid
       CROSS JOIN temp_forecast_dates fd
       LEFT JOIN sales_seasonality sf ON fd.month = sf.month
-      GROUP BY ds.pid, fd.forecast_date, ps.overall_avg_revenue, ps.total_days, sf.seasonality_factor
-      HAVING AVG(ds.daily_quantity) > 0
+      GROUP BY ds.pid, fd.forecast_date, ps.overall_avg_revenue, sf.seasonality_factor
       ON DUPLICATE KEY UPDATE
         forecast_units = VALUES(forecast_units),
         forecast_revenue = VALUES(forecast_revenue),
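
Note: the new forecast replaces the random jitter (0.95 + RAND() * 0.1) with a deterministic dampener keyed to the coefficient of variation (std / mean), so noisier sales histories get scaled down harder. The same ladder in plain JS, with hypothetical numbers:

// Coefficient-of-variation dampener, mirroring the CASE ladders above.
function cvDampener(std, avg) {
  const cv = avg ? std / avg : Infinity; // NULLIF(..., 0) analogue
  if (cv > 1.5) return 0.85;
  if (cv > 1.0) return 0.9;
  if (cv > 0.5) return 0.95;
  return 1.0;
}
// e.g. avg 10 units/day with std 12 gives cv 1.2, so the forecast is scaled by 0.9
console.log(cvDampener(12, 10)); // 0.9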

View File

@@ -56,7 +56,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount,
         inventory_value,
         gmroi
       )
-      WITH sales_data AS (
+      WITH monthly_sales AS (
         SELECT
           o.pid,
           YEAR(o.date) as year,
@@ -66,19 +66,13 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount,
           SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
           COUNT(DISTINCT o.order_number) as order_count,
           AVG(o.price - COALESCE(o.discount, 0)) as avg_price,
-          CASE
-            WHEN SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) = 0 THEN 0
-            ELSE ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) -
-              SUM(COALESCE(p.cost_price, 0) * o.quantity)) /
-              SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100
-          END as profit_margin,
-          p.cost_price * p.stock_quantity as inventory_value
+          COUNT(DISTINCT DATE(o.date)) as active_days
         FROM orders o
         JOIN products p ON o.pid = p.pid
-        WHERE o.canceled = 0
-        GROUP BY o.pid, YEAR(o.date), MONTH(o.date), p.cost_price, p.stock_quantity
+        WHERE o.canceled = false
+        GROUP BY o.pid, YEAR(o.date), MONTH(o.date)
       ),
-      purchase_data AS (
+      monthly_stock AS (
         SELECT
           pid,
           YEAR(date) as year,