Debug metric calculations and reset scripts (still broken)
@@ -79,8 +79,7 @@ CREATE TABLE IF NOT EXISTS vendor_metrics (
    order_fill_rate DECIMAL(5,2),
    total_orders INT,
    total_late_orders INT,
    PRIMARY KEY (vendor),
    FOREIGN KEY (vendor) REFERENCES products(vendor) ON DELETE CASCADE
    PRIMARY KEY (vendor)
);

-- Re-enable foreign key checks
@@ -89,3 +88,27 @@ SET FOREIGN_KEY_CHECKS = 1;
-- Create optimized indexes for metrics calculations
CREATE INDEX idx_orders_metrics ON orders (product_id, date, canceled, quantity, price);
CREATE INDEX idx_purchase_orders_metrics ON purchase_orders (product_id, date, status, ordered, received);

-- Create view for inventory health (after all tables are created)
CREATE OR REPLACE VIEW inventory_health AS
SELECT
    p.product_id,
    p.SKU,
    p.title,
    p.stock_quantity,
    COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
    COALESCE(pm.days_of_inventory, 0) as days_of_inventory,
    COALESCE(pm.reorder_point, 0) as reorder_point,
    COALESCE(pm.safety_stock, 0) as safety_stock,
    CASE
        WHEN p.stock_quantity <= COALESCE(pm.safety_stock, 0) THEN 'Critical'
        WHEN p.stock_quantity <= COALESCE(pm.reorder_point, 0) THEN 'Reorder'
        WHEN p.stock_quantity > (COALESCE(pm.daily_sales_avg, 0) * 90) THEN 'Overstocked'
        ELSE 'Healthy'
    END as stock_status
FROM
    products p
LEFT JOIN
    product_metrics pm ON p.product_id = pm.product_id
WHERE
    p.managing_stock = true;
@@ -118,27 +118,3 @@ WHERE
    o.canceled = false
GROUP BY
    p.product_id, p.SKU, p.title;

-- Create view for inventory health
CREATE OR REPLACE VIEW inventory_health AS
SELECT
    p.product_id,
    p.SKU,
    p.title,
    p.stock_quantity,
    pm.daily_sales_avg,
    pm.days_of_inventory,
    pm.reorder_point,
    pm.safety_stock,
    CASE
        WHEN p.stock_quantity <= pm.safety_stock THEN 'Critical'
        WHEN p.stock_quantity <= pm.reorder_point THEN 'Reorder'
        WHEN p.stock_quantity > (pm.daily_sales_avg * 90) THEN 'Overstocked'
        ELSE 'Healthy'
    END as stock_status
FROM
    products p
LEFT JOIN
    product_metrics pm ON p.product_id = pm.product_id
WHERE
    p.managing_stock = true;
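-- Illustrative only (numbers assumed, not part of this commit): for a product with
-- safety_stock = 10, reorder_point = 25 and daily_sales_avg = 2, the CASE above
-- yields 'Critical' at stock_quantity 8 (<= 10), 'Reorder' at 20 (<= 25),
-- 'Overstocked' at 200 (> 2 * 90 = 180), and 'Healthy' otherwise.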
@@ -2,9 +2,48 @@ const mysql = require('mysql2/promise');
const path = require('path');
require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });

// Helper function to format elapsed time
function formatElapsedTime(startTime) {
  const elapsed = Date.now() - startTime;
  const seconds = Math.floor(elapsed / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);

  if (hours > 0) {
    return `${hours}h ${minutes % 60}m`;
  } else if (minutes > 0) {
    return `${minutes}m ${seconds % 60}s`;
  } else {
    return `${seconds}s`;
  }
}

// Helper function to estimate remaining time
function estimateRemaining(startTime, current, total) {
  if (current === 0) return null;
  const elapsed = Date.now() - startTime;
  const rate = current / elapsed;
  const remaining = (total - current) / rate;

  const minutes = Math.floor(remaining / 60000);
  const seconds = Math.floor((remaining % 60000) / 1000);

  if (minutes > 0) {
    return `${minutes}m ${seconds}s`;
  } else {
    return `${seconds}s`;
  }
}

// Helper function to calculate rate
function calculateRate(startTime, current) {
  const elapsed = (Date.now() - startTime) / 1000; // Convert to seconds
  return elapsed > 0 ? Math.round(current / elapsed) : 0;
}
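// Illustrative only (values assumed, not part of this commit): expected results
// from the helpers above, given the implementations as written:
//   formatElapsedTime(Date.now() - 3725000)          // "1h 2m"
//   estimateRemaining(Date.now() - 60000, 200, 1000) // "4m 0s" (800 left at 200/min)
//   calculateRate(Date.now() - 60000, 200)           // 3 (items per second, rounded)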

// Helper function to output progress
function outputProgress(data) {
  console.log(JSON.stringify(data));
  process.stdout.write(JSON.stringify(data) + '\n');
}
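// Assumption (not stated in this commit): consumers treat this output as one JSON
// object per line (NDJSON), so each progress event must stay on a single line.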

// Helper function to log errors
@@ -27,205 +66,201 @@ const dbConfig = {
  queueLimit: 0
};

// Add cancel handler
let isCancelled = false;

function cancelCalculation() {
  isCancelled = true;
  process.stdout.write(JSON.stringify({
    status: 'cancelled',
    operation: 'Calculation cancelled',
    current: 0,
    total: 0,
    elapsed: null,
    remaining: null,
    rate: 0
  }) + '\n');
  process.exit(0);
}
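// Illustrative only (module path assumed, not part of this commit): a supervising
// process would require() this file and trigger the handler, e.g.
//   const calculate = require('./calculate-metrics');
//   calculate();                   // start the run
//   calculate.cancelCalculation(); // emits a 'cancelled' line and exits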

async function calculateMetrics() {
  let pool;
  const startTime = Date.now();
  let processedCount = 0;
  let totalProducts = 0; // Initialize at the top

  try {
    isCancelled = false;
    pool = mysql.createPool(dbConfig);
    const connection = await pool.getConnection();

    try {
      // Create temporary tables for metrics calculations
      // Get total number of products
      const [countResult] = await connection.query('SELECT COUNT(*) as total FROM products');
      totalProducts = countResult[0].total;

      // Initial progress
      outputProgress({
        status: 'running',
        operation: 'Creating temporary tables',
        percentage: '0'
        operation: 'Processing products',
        current: processedCount,
        total: totalProducts,
        elapsed: '0s',
        remaining: 'Calculating...',
        rate: 0
      });

      // Create and truncate tables one at a time
      await connection.query(`
        CREATE TABLE IF NOT EXISTS temp_sales_metrics (
          product_id INT PRIMARY KEY,
          total_quantity_sold INT DEFAULT 0,
          total_revenue DECIMAL(10,2) DEFAULT 0.00,
          average_price DECIMAL(10,2) DEFAULT 0.00,
          last_sale_date DATE,
          sales_rank INT
        )
      `);
      // Process in batches of 100
      const batchSize = 100;
      for (let offset = 0; offset < totalProducts; offset += batchSize) {
        if (isCancelled) {
          throw new Error('Operation cancelled');
        }

        await connection.query(`
          CREATE TABLE IF NOT EXISTS temp_purchase_metrics (
            product_id INT PRIMARY KEY,
            total_quantity_purchased INT DEFAULT 0,
            total_cost DECIMAL(10,2) DEFAULT 0.00,
            average_cost DECIMAL(10,2) DEFAULT 0.00,
            last_purchase_date DATE,
            purchase_rank INT
          )
        `);
        const [products] = await connection.query('SELECT product_id FROM products LIMIT ? OFFSET ?', [batchSize, offset]);
        processedCount += products.length;

        await connection.query('TRUNCATE TABLE temp_sales_metrics');
        await connection.query('TRUNCATE TABLE temp_purchase_metrics');
        // Update progress after each batch
        outputProgress({
          status: 'running',
          operation: 'Processing products',
          current: processedCount,
          total: totalProducts,
          elapsed: formatElapsedTime(startTime),
          remaining: estimateRemaining(startTime, processedCount, totalProducts),
          rate: calculateRate(startTime, processedCount)
        });

        // Calculate sales metrics
        outputProgress({
          status: 'running',
          operation: 'Calculating sales metrics',
          percentage: '20'
        });
        // Process the batch
        for (const product of products) {
          // Calculate sales metrics
          const [salesMetrics] = await connection.query(`
            SELECT
              SUM(o.quantity) as total_quantity_sold,
              SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
              SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
              MAX(o.date) as last_sale_date
            FROM orders o
            JOIN products p ON o.product_id = p.product_id
            WHERE o.canceled = 0 AND o.product_id = ?
            GROUP BY o.product_id
          `, [product.product_id]);

          // First insert sales metrics
          await connection.query(`
            INSERT INTO temp_sales_metrics (
              product_id,
              total_quantity_sold,
              total_revenue,
              average_price,
              last_sale_date
            )
            SELECT
              product_id,
              SUM(quantity) as total_quantity_sold,
              SUM((price - COALESCE(discount, 0)) * quantity) as total_revenue,
              AVG(price - COALESCE(discount, 0)) as average_price,
              MAX(date) as last_sale_date
            FROM orders
            WHERE canceled = 0
            GROUP BY product_id
          `);
          // Calculate purchase metrics
          const [purchaseMetrics] = await connection.query(`
            SELECT
              SUM(received) as total_quantity_purchased,
              SUM(cost_price * received) as total_cost,
              MAX(date) as last_purchase_date,
              MAX(received_date) as last_received_date,
              AVG(DATEDIFF(received_date, date)) as avg_lead_time_days
            FROM purchase_orders
            WHERE status = 'closed' AND received > 0 AND product_id = ?
            GROUP BY product_id
          `, [product.product_id]);

          // Then update sales rank using a temporary table
          await connection.query(`
            CREATE TEMPORARY TABLE sales_rankings AS
            SELECT
              product_id,
              RANK() OVER (ORDER BY total_revenue DESC) as \`rank\`
            FROM temp_sales_metrics
          `);
          // Get current stock
          const [stockInfo] = await connection.query(`
            SELECT stock_quantity, cost_price
            FROM products
            WHERE product_id = ?
          `, [product.product_id]);

          await connection.query(`
            UPDATE temp_sales_metrics t
            JOIN sales_rankings r ON t.product_id = r.product_id
            SET t.sales_rank = r.rank
          `);
          // Calculate metrics
          const metrics = salesMetrics[0] || {};
          const purchases = purchaseMetrics[0] || {};
          const stock = stockInfo[0] || {};

          await connection.query(`DROP TEMPORARY TABLE sales_rankings`);
          const daily_sales_avg = metrics.total_quantity_sold ? metrics.total_quantity_sold / 30 : 0;
          const weekly_sales_avg = metrics.total_quantity_sold ? metrics.total_quantity_sold / 4 : 0;
          const monthly_sales_avg = metrics.total_quantity_sold || 0;

          // Calculate purchase metrics
          outputProgress({
            status: 'running',
            operation: 'Calculating purchase metrics',
            percentage: '40'
          });
          // Update product metrics
          await connection.query(`
            INSERT INTO product_metrics (
              product_id,
              last_calculated_at,
              daily_sales_avg,
              weekly_sales_avg,
              monthly_sales_avg,
              days_of_inventory,
              weeks_of_inventory,
              reorder_point,
              safety_stock,
              avg_margin_percent,
              total_revenue,
              avg_lead_time_days,
              last_purchase_date,
              last_received_date,
              abc_class,
              stock_status
            ) VALUES (
              ?,
              NOW(),
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              ?,
              NULL,
              ?
            )
            ON DUPLICATE KEY UPDATE
              last_calculated_at = VALUES(last_calculated_at),
              daily_sales_avg = VALUES(daily_sales_avg),
              weekly_sales_avg = VALUES(weekly_sales_avg),
              monthly_sales_avg = VALUES(monthly_sales_avg),
              days_of_inventory = VALUES(days_of_inventory),
              weeks_of_inventory = VALUES(weeks_of_inventory),
              reorder_point = VALUES(reorder_point),
              safety_stock = VALUES(safety_stock),
              avg_margin_percent = VALUES(avg_margin_percent),
              total_revenue = VALUES(total_revenue),
              avg_lead_time_days = VALUES(avg_lead_time_days),
              last_purchase_date = VALUES(last_purchase_date),
              last_received_date = VALUES(last_received_date),
              stock_status = VALUES(stock_status)
          `, [
            product.product_id,
            daily_sales_avg,
            weekly_sales_avg,
            monthly_sales_avg,
            daily_sales_avg ? stock.stock_quantity / daily_sales_avg : null,
            weekly_sales_avg ? stock.stock_quantity / weekly_sales_avg : null,
            Math.ceil(daily_sales_avg * 14), // 14 days reorder point
            Math.ceil(daily_sales_avg * 7), // 7 days safety stock
            metrics.total_revenue ? ((metrics.total_revenue - metrics.total_cost) / metrics.total_revenue) * 100 : 0,
            metrics.total_revenue || 0,
            purchases.avg_lead_time_days || 0,
            purchases.last_purchase_date,
            purchases.last_received_date,
            daily_sales_avg === 0 ? 'New' :
            stock.stock_quantity <= Math.ceil(daily_sales_avg * 7) ? 'Critical' :
            stock.stock_quantity <= Math.ceil(daily_sales_avg * 14) ? 'Reorder' :
            stock.stock_quantity > (daily_sales_avg * 90) ? 'Overstocked' : 'Healthy'
          ]);
        }
      }

      // First insert purchase metrics
      await connection.query(`
        INSERT INTO temp_purchase_metrics (
          product_id,
          total_quantity_purchased,
          total_cost,
          average_cost,
          last_purchase_date
        )
        SELECT
          product_id,
          SUM(received) as total_quantity_purchased,
          SUM(cost_price * received) as total_cost,
          AVG(cost_price) as average_cost,
          MAX(received_date) as last_purchase_date
        FROM purchase_orders
        WHERE status = 'closed' AND received > 0
        GROUP BY product_id
      `);

      // Then update purchase rank using a temporary table
      await connection.query(`
        CREATE TEMPORARY TABLE purchase_rankings AS
        SELECT
          product_id,
          RANK() OVER (ORDER BY total_cost DESC) as \`rank\`
        FROM temp_purchase_metrics
      `);

      await connection.query(`
        UPDATE temp_purchase_metrics t
        JOIN purchase_rankings r ON t.product_id = r.product_id
        SET t.purchase_rank = r.rank
      `);

      await connection.query(`DROP TEMPORARY TABLE purchase_rankings`);

      // Update product metrics
      outputProgress({
        status: 'running',
        operation: 'Updating product metrics',
        percentage: '60'
      });

      await connection.query(`
        INSERT INTO product_metrics (
          product_id,
          total_quantity_sold,
          total_revenue,
          average_price,
          total_quantity_purchased,
          total_cost,
          average_cost,
          profit_margin,
          turnover_rate,
          last_sale_date,
          last_purchase_date,
          sales_rank,
          purchase_rank,
          last_calculated_at
        )
        SELECT
          p.product_id,
          COALESCE(s.total_quantity_sold, 0),
          COALESCE(s.total_revenue, 0.00),
          COALESCE(s.average_price, 0.00),
          COALESCE(po.total_quantity_purchased, 0),
          COALESCE(po.total_cost, 0.00),
          COALESCE(po.average_cost, 0.00),
          CASE
            WHEN COALESCE(s.total_revenue, 0) = 0 THEN 0
            ELSE ((s.total_revenue - po.total_cost) / s.total_revenue) * 100
          END as profit_margin,
          CASE
            WHEN COALESCE(po.total_quantity_purchased, 0) = 0 THEN 0
            ELSE (s.total_quantity_sold / po.total_quantity_purchased) * 100
          END as turnover_rate,
          s.last_sale_date,
          po.last_purchase_date,
          s.sales_rank,
          po.purchase_rank,
          NOW()
        FROM products p
        LEFT JOIN temp_sales_metrics s ON p.product_id = s.product_id
        LEFT JOIN temp_purchase_metrics po ON p.product_id = po.product_id
        ON DUPLICATE KEY UPDATE
          total_quantity_sold = VALUES(total_quantity_sold),
          total_revenue = VALUES(total_revenue),
          average_price = VALUES(average_price),
          total_quantity_purchased = VALUES(total_quantity_purchased),
          total_cost = VALUES(total_cost),
          average_cost = VALUES(average_cost),
          profit_margin = VALUES(profit_margin),
          turnover_rate = VALUES(turnover_rate),
          last_sale_date = VALUES(last_sale_date),
          last_purchase_date = VALUES(last_purchase_date),
          sales_rank = VALUES(sales_rank),
          purchase_rank = VALUES(purchase_rank),
          last_calculated_at = VALUES(last_calculated_at);
      `);

      // Calculate ABC classification
      // Update progress for ABC classification
      outputProgress({
        status: 'running',
        operation: 'Calculating ABC classification',
        percentage: '80'
        current: totalProducts,
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, totalProducts, totalProducts),
        rate: calculateRate(startTime, totalProducts)
      });

      // Calculate ABC classification
      await connection.query(`
        WITH revenue_percentiles AS (
          SELECT
@@ -245,114 +280,183 @@ async function calculateMetrics() {
        END;
      `);

      // Calculate time-based aggregates
      // Update progress for time-based aggregates
      outputProgress({
        status: 'running',
        operation: 'Calculating time aggregates',
        percentage: '90'
        operation: 'Calculating time-based aggregates',
        current: totalProducts,
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, totalProducts, totalProducts),
        rate: calculateRate(startTime, totalProducts)
      });

      // Calculate time-based aggregates
      await connection.query('TRUNCATE TABLE product_time_aggregates;');

      await connection.query(`
        TRUNCATE TABLE product_time_aggregates;

        -- Daily aggregates
        INSERT INTO product_time_aggregates (product_id, period_type, period_start, quantity_sold, revenue)
        SELECT
        INSERT INTO product_time_aggregates (
          product_id,
          'daily' as period_type,
          DATE(date) as period_start,
          SUM(quantity) as quantity_sold,
          SUM((price - COALESCE(discount, 0)) * quantity) as revenue
        FROM orders
        WHERE canceled = 0
        GROUP BY product_id, DATE(date);

        -- Weekly aggregates
        INSERT INTO product_time_aggregates (product_id, period_type, period_start, quantity_sold, revenue)
          year,
          month,
          total_quantity_sold,
          total_revenue,
          total_cost,
          order_count,
          stock_received,
          stock_ordered,
          avg_price,
          profit_margin
        )
        WITH sales_data AS (
          SELECT
            o.product_id,
            YEAR(o.date) as year,
            MONTH(o.date) as month,
            SUM(o.quantity) as total_quantity_sold,
            SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
            SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
            COUNT(DISTINCT o.order_number) as order_count,
            AVG(o.price - COALESCE(o.discount, 0)) as avg_price,
            CASE
              WHEN SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) = 0 THEN 0
              ELSE ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) -
                SUM(COALESCE(p.cost_price, 0) * o.quantity)) /
                SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100
            END as profit_margin
          FROM orders o
          JOIN products p ON o.product_id = p.product_id
          WHERE o.canceled = 0
          GROUP BY o.product_id, YEAR(o.date), MONTH(o.date)
        ),
        purchase_data AS (
          SELECT
            product_id,
            YEAR(date) as year,
            MONTH(date) as month,
            SUM(received) as stock_received,
            SUM(ordered) as stock_ordered
          FROM purchase_orders
          WHERE status = 'closed'
          GROUP BY product_id, YEAR(date), MONTH(date)
        )
        SELECT
          product_id,
          'weekly' as period_type,
          DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY)) as period_start,
          SUM(quantity) as quantity_sold,
          SUM((price - COALESCE(discount, 0)) * quantity) as revenue
        FROM orders
        WHERE canceled = 0
        GROUP BY product_id, DATE(DATE_SUB(date, INTERVAL WEEKDAY(date) DAY));

        -- Monthly aggregates
        INSERT INTO product_time_aggregates (product_id, period_type, period_start, quantity_sold, revenue)
          s.product_id,
          s.year,
          s.month,
          s.total_quantity_sold,
          s.total_revenue,
          s.total_cost,
          s.order_count,
          COALESCE(p.stock_received, 0) as stock_received,
          COALESCE(p.stock_ordered, 0) as stock_ordered,
          s.avg_price,
          s.profit_margin
        FROM sales_data s
        LEFT JOIN purchase_data p
          ON s.product_id = p.product_id
          AND s.year = p.year
          AND s.month = p.month
        UNION
        SELECT
          product_id,
          'monthly' as period_type,
          DATE(DATE_SUB(date, INTERVAL DAY(date)-1 DAY)) as period_start,
          SUM(quantity) as quantity_sold,
          SUM((price - COALESCE(discount, 0)) * quantity) as revenue
        FROM orders
        WHERE canceled = 0
        GROUP BY product_id, DATE(DATE_SUB(date, INTERVAL DAY(date)-1 DAY));
          p.product_id,
          p.year,
          p.month,
          0 as total_quantity_sold,
          0 as total_revenue,
          0 as total_cost,
          0 as order_count,
          p.stock_received,
          p.stock_ordered,
          0 as avg_price,
          0 as profit_margin
        FROM purchase_data p
        LEFT JOIN sales_data s
          ON p.product_id = s.product_id
          AND p.year = s.year
          AND p.month = s.month
        WHERE s.product_id IS NULL
      `);

      // Calculate vendor metrics
      // Update progress for vendor metrics
      outputProgress({
        status: 'running',
        operation: 'Calculating vendor metrics',
        percentage: '95'
        current: totalProducts,
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, totalProducts, totalProducts),
        rate: calculateRate(startTime, totalProducts)
      });

      // Calculate vendor metrics
      await connection.query(`
        INSERT INTO vendor_metrics (
          vendor,
          last_calculated_at,
          avg_lead_time_days,
          on_time_delivery_rate,
          order_fill_rate,
          total_orders,
          total_items_ordered,
          total_items_received,
          total_spend,
          average_order_value,
          fulfillment_rate,
          average_delivery_days,
          last_order_date,
          last_delivery_date
          total_late_orders
        )
        SELECT
          vendor,
          NOW() as last_calculated_at,
          COALESCE(AVG(DATEDIFF(received_date, date)), 0) as avg_lead_time_days,
          COALESCE((COUNT(CASE WHEN DATEDIFF(received_date, date) <= 14 THEN 1 END) * 100.0 / NULLIF(COUNT(*), 0)), 0) as on_time_delivery_rate,
          COALESCE((SUM(received) * 100.0 / NULLIF(SUM(ordered), 0)), 0) as order_fill_rate,
          COUNT(DISTINCT po_id) as total_orders,
          SUM(ordered) as total_items_ordered,
          SUM(received) as total_items_received,
          SUM(cost_price * received) as total_spend,
          AVG(cost_price * ordered) as average_order_value,
          (SUM(received) / NULLIF(SUM(ordered), 0)) * 100 as fulfillment_rate,
          AVG(DATEDIFF(received_date, date)) as average_delivery_days,
          MAX(date) as last_order_date,
          MAX(received_date) as last_delivery_date
          COUNT(CASE WHEN DATEDIFF(received_date, date) > 14 THEN 1 END) as total_late_orders
        FROM purchase_orders
        WHERE status = 'closed'
        GROUP BY vendor
        ON DUPLICATE KEY UPDATE
          last_calculated_at = VALUES(last_calculated_at),
          avg_lead_time_days = VALUES(avg_lead_time_days),
          on_time_delivery_rate = VALUES(on_time_delivery_rate),
          order_fill_rate = VALUES(order_fill_rate),
          total_orders = VALUES(total_orders),
          total_items_ordered = VALUES(total_items_ordered),
          total_items_received = VALUES(total_items_received),
          total_spend = VALUES(total_spend),
          average_order_value = VALUES(average_order_value),
          fulfillment_rate = VALUES(fulfillment_rate),
          average_delivery_days = VALUES(average_delivery_days),
          last_order_date = VALUES(last_order_date),
          last_delivery_date = VALUES(last_delivery_date);
          total_late_orders = VALUES(total_late_orders)
      `);
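      // Illustrative only (numbers assumed, not part of this commit): a vendor with
      // 4 closed PO rows, 3 of them received within 14 days of ordering, 100 units
      // ordered and 90 received, comes out as on_time_delivery_rate = 75.00,
      // order_fill_rate = 90.00 and total_late_orders = 1.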

      // Final success message
      outputProgress({
        status: 'complete',
        operation: 'Metrics calculation completed',
        percentage: '100'
        operation: 'Metrics calculation complete',
        current: totalProducts,
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: '0s',
        rate: calculateRate(startTime, totalProducts)
      });

    } catch (error) {
      logError(error, 'Error calculating metrics');
      if (isCancelled) {
        outputProgress({
          status: 'cancelled',
          operation: 'Calculation cancelled',
          current: processedCount,
          total: totalProducts || 0, // Use 0 if not yet defined
          elapsed: formatElapsedTime(startTime),
          remaining: null,
          rate: calculateRate(startTime, processedCount)
        });
      } else {
        outputProgress({
          status: 'error',
          operation: 'Error: ' + error.message,
          current: processedCount,
          total: totalProducts || 0, // Use 0 if not yet defined
          elapsed: formatElapsedTime(startTime),
          remaining: null,
          rate: calculateRate(startTime, processedCount)
        });
      }
      throw error;
    } finally {
      connection.release();
    }
  } catch (error) {
    logError(error, 'Fatal error during metrics calculation');
    throw error;
  } finally {
    if (pool) {
      await pool.end();
@@ -360,15 +464,16 @@ async function calculateMetrics() {
    }
  }

// Export the function if being required as a module
if (typeof module !== 'undefined' && module.exports) {
  module.exports = calculateMetrics;
}
// Export both functions
module.exports = calculateMetrics;
module.exports.cancelCalculation = cancelCalculation;

// Run directly if called from command line
if (require.main === module) {
  calculateMetrics().catch(error => {
    console.error('Error:', error);
    if (!error.message.includes('Operation cancelled')) {
      console.error('Error:', error);
    }
    process.exit(1);
  });
}
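// Illustrative only (script name assumed, not part of this commit): run directly,
// the script streams one JSON progress object per line, e.g.
//   $ node calculate-metrics.js
//   {"status":"running","operation":"Processing products","current":100,"total":2500,...}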
@@ -1,115 +0,0 @@
const fs = require('fs');
const path = require('path');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');

dotenv.config({ path: path.join(__dirname, '../.env') });

const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  multipleStatements: true
};

const SNAPSHOTS_DIR = path.join(__dirname, '../snapshots');

async function createSnapshot() {
  console.log('Creating test database snapshot...');
  const pool = mysql.createPool(dbConfig);

  try {
    const connection = await pool.getConnection();
    try {
      // Create snapshots directory if it doesn't exist
      if (!fs.existsSync(SNAPSHOTS_DIR)) {
        fs.mkdirSync(SNAPSHOTS_DIR, { recursive: true });
      }

      // Get categories
      const [categories] = await connection.query(`
        SELECT id, name, created_at
        FROM categories
        LIMIT 10
      `);

      // Get a diverse set of ~100 products
      const [products] = await connection.query(`
        SELECT p.*
        FROM products p
        WHERE EXISTS (
          SELECT 1 FROM orders o WHERE o.product_id = p.product_id
          UNION
          SELECT 1 FROM purchase_orders po WHERE po.product_id = p.product_id
        )
        ORDER BY RAND()
        LIMIT 100
      `);

      // Get product_categories for selected products
      const [product_categories] = await connection.query(`
        SELECT pc.product_id, pc.category_id
        FROM product_categories pc
        WHERE pc.product_id IN (?)
      `, [products.map(p => p.product_id)]);

      // Get orders for selected products (last 6 months)
      const [orders] = await connection.query(`
        SELECT o.*
        FROM orders o
        WHERE o.product_id IN (?)
        AND o.date >= DATE_SUB(NOW(), INTERVAL 6 MONTH)
        ORDER BY o.date DESC
      `, [products.map(p => p.product_id)]);

      // Get purchase orders for selected products (last 6 months)
      const [purchase_orders] = await connection.query(`
        SELECT po.*
        FROM purchase_orders po
        WHERE po.product_id IN (?)
        AND po.date >= DATE_SUB(NOW(), INTERVAL 6 MONTH)
        ORDER BY po.date DESC
      `, [products.map(p => p.product_id)]);

      // Create snapshot object
      const snapshot = {
        metadata: {
          created_at: new Date().toISOString(),
          description: 'Test snapshot with ~100 diverse products and their related data'
        },
        categories,
        products,
        product_categories,
        orders,
        purchase_orders
      };

      // Save snapshot
      const snapshotPath = path.join(SNAPSHOTS_DIR, 'test_snapshot.json');
      fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2));

      console.log('Snapshot created successfully:');
      console.log('Products:', products.length);
      console.log('Orders:', orders.length);
      console.log('Purchase Orders:', purchase_orders.length);
      console.log('Categories:', categories.length);
      console.log('Saved to:', snapshotPath);

    } finally {
      connection.release();
    }
  } catch (error) {
    console.error('Error creating snapshot:', error);
    throw error;
  } finally {
    await pool.end();
  }
}

// Create snapshot if run directly
if (require.main === module) {
  createSnapshot().catch(console.error);
}

module.exports = { createSnapshot };
@@ -15,47 +15,14 @@ function outputProgress(data) {
  console.log(JSON.stringify(data));
}

function getMetricsTablesFromSchema() {
  const schemaPath = path.join(__dirname, '../db/metrics-schema.sql');
  const schemaSQL = fs.readFileSync(schemaPath, 'utf8');

  // Extract table names from CREATE TABLE statements
  const createTableRegex = /CREATE TABLE.*?`(\w+)`/g;
  const tables = [];
  let match;

  while ((match = createTableRegex.exec(schemaSQL)) !== null) {
    tables.push(match[1]);
  }

  return tables;
}
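// Note (inferred, not part of this commit): the regex above only matches
// backtick-quoted names such as CREATE TABLE `product_metrics`, so a schema
// written as "CREATE TABLE IF NOT EXISTS product_metrics (" produces an empty
// list, which would explain the switch to the explicit METRICS_TABLES array below.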

async function checkIndexExists(connection, tableName, indexName) {
  const [rows] = await connection.query(`
    SELECT COUNT(*) as count
    FROM information_schema.statistics
    WHERE table_schema = DATABASE()
    AND table_name = ?
    AND index_name = ?`,
    [tableName, indexName]
  );
  return rows[0].count > 0;
}

async function createMetricsIndexes(connection) {
  // Check and create orders index
  const ordersIndexExists = await checkIndexExists(connection, 'orders', 'idx_orders_metrics');
  if (!ordersIndexExists) {
    await connection.query('CREATE INDEX idx_orders_metrics ON orders (product_id, date, canceled, quantity, price)');
  }

  // Check and create purchase_orders index
  const poIndexExists = await checkIndexExists(connection, 'purchase_orders', 'idx_purchase_orders_metrics');
  if (!poIndexExists) {
    await connection.query('CREATE INDEX idx_purchase_orders_metrics ON purchase_orders (product_id, date, status, ordered, received)');
  }
}
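// Illustrative usage (assumed, not part of this commit): callers pass an open
// mysql2 connection, e.g.
//   const connection = await mysql.createConnection(dbConfig);
//   await createMetricsIndexes(connection); // no-op when both indexes already exist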
// Explicitly define all metrics-related tables
const METRICS_TABLES = [
  'temp_sales_metrics',
  'temp_purchase_metrics',
  'product_metrics',
  'product_time_aggregates',
  'vendor_metrics'
];

async function resetMetrics() {
  outputProgress({
@@ -67,55 +34,92 @@ async function resetMetrics() {
  const connection = await mysql.createConnection(dbConfig);

  try {
    // Get list of metrics tables from schema
    const metricsTables = getMetricsTablesFromSchema();

    // Disable foreign key checks first
    await connection.query('SET FOREIGN_KEY_CHECKS = 0');

    // Get list of existing metrics tables
    if (metricsTables.length > 0) {
      const [tables] = await connection.query(`
        SELECT GROUP_CONCAT(table_name) as tables
        FROM information_schema.tables
        WHERE table_schema = DATABASE()
        AND table_name IN (${metricsTables.map(table => `'${table}'`).join(',')})`
      );
    const [tables] = await connection.query(`
      SELECT GROUP_CONCAT(table_name) as tables
      FROM information_schema.tables
      WHERE table_schema = DATABASE()
      AND table_name IN (${METRICS_TABLES.map(table => `'${table}'`).join(',')})`
    );

      if (tables[0].tables) {
        outputProgress({
          status: 'running',
          operation: 'Dropping metrics tables',
          percentage: '40'
        });
    if (tables[0].tables) {
      outputProgress({
        status: 'running',
        operation: 'Dropping existing metrics tables',
        percentage: '20'
      });

        // Drop all metrics tables in one query
        const dropQuery = `DROP TABLE IF EXISTS ${tables[0].tables.split(',').map(table => '`' + table + '`').join(', ')}`;
        await connection.query(dropQuery);
      }
      // Drop all existing metrics tables in one query
      const dropQuery = `DROP TABLE IF EXISTS ${tables[0].tables.split(',').map(table => '`' + table + '`').join(', ')}`;
      await connection.query(dropQuery);
    }

    // Read and execute metrics schema (without the index creation)
    // Read metrics schema
    outputProgress({
      status: 'running',
      operation: 'Creating metrics tables',
      percentage: '60'
      percentage: '40'
    });

    const schemaPath = path.join(__dirname, '../db/metrics-schema.sql');
    let schemaSQL = fs.readFileSync(schemaPath, 'utf8');
    const schemaSQL = fs.readFileSync(schemaPath, 'utf8');

    // Remove the index creation statements from the schema
    schemaSQL = schemaSQL.split('-- Create optimized indexes')[0];
    await connection.query(schemaSQL);
    // Split schema into parts
    const parts = schemaSQL.split('-- Create optimized indexes');
    const tableSchema = parts[0];

    // Create indexes if they don't exist
    // Execute table creation first
    await connection.query(tableSchema);

    // Verify all tables were created
    outputProgress({
      status: 'running',
      operation: 'Checking and creating indexes',
      operation: 'Verifying tables',
      percentage: '60'
    });

    const [verifyTables] = await connection.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = DATABASE()
      AND table_name IN (${METRICS_TABLES.map(table => `'${table}'`).join(',')})`
    );

    const missingTables = METRICS_TABLES.filter(table =>
      !verifyTables.find(t => t.table_name === table)
    );

    if (missingTables.length > 0) {
      throw new Error(`Failed to create tables: ${missingTables.join(', ')}`);
    }
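    // Note (assumption, not part of this commit): on MySQL 8 an unaliased
    // information_schema column is returned as TABLE_NAME (uppercase), so
    // t.table_name may be undefined here; selecting "table_name AS table_name"
    // is one way to make this verification reliable.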

    // Create indexes
    outputProgress({
      status: 'running',
      operation: 'Creating indexes',
      percentage: '80'
    });
    await createMetricsIndexes(connection);

    // Drop existing indexes if they exist
    try {
      await connection.query('DROP INDEX IF EXISTS idx_orders_metrics ON orders');
      await connection.query('DROP INDEX IF EXISTS idx_purchase_orders_metrics ON purchase_orders');
    } catch (err) {
      // Ignore any errors dropping indexes
      console.warn('Warning dropping indexes:', err.message);
    }

    // Create new indexes
    try {
      await connection.query('CREATE INDEX idx_orders_metrics ON orders (product_id, date, canceled, quantity, price)');
      await connection.query('CREATE INDEX idx_purchase_orders_metrics ON purchase_orders (product_id, date, status, ordered, received)');
    } catch (err) {
      // Log index creation errors but don't fail
      console.warn('Warning creating indexes:', err.message);
    }

    // Re-enable foreign key checks
    await connection.query('SET FOREIGN_KEY_CHECKS = 1');

@@ -1,129 +0,0 @@
const fs = require('fs');
const path = require('path');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');

dotenv.config({ path: path.join(__dirname, '../.env') });

const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  multipleStatements: true
};

const SNAPSHOTS_DIR = path.join(__dirname, '../snapshots');

async function restoreSnapshot() {
  console.log('Restoring test database from snapshot...');
  const pool = mysql.createPool(dbConfig);

  try {
    // Read snapshot
    const snapshotPath = path.join(SNAPSHOTS_DIR, 'test_snapshot.json');
    if (!fs.existsSync(snapshotPath)) {
      throw new Error('Snapshot file not found. Run create-test-snapshot.js first.');
    }

    const snapshot = JSON.parse(fs.readFileSync(snapshotPath, 'utf8'));

    // First, create schema (this will drop existing tables)
    const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
    await pool.query(schemaSQL);

    const connection = await pool.getConnection();
    try {
      await connection.beginTransaction();

      // Insert categories first (they're referenced by product_categories)
      if (snapshot.categories.length > 0) {
        const categoryValues = snapshot.categories.map(c => [c.id, c.name, c.created_at]);
        await connection.query(
          'INSERT INTO categories (id, name, created_at) VALUES ?',
          [categoryValues]
        );
      }

      // Insert products
      if (snapshot.products.length > 0) {
        const productValues = snapshot.products.map(p => [
          p.product_id, p.title, p.SKU, p.created_at, p.stock_quantity,
          p.price, p.regular_price, p.cost_price, p.landing_cost_price,
          p.barcode, p.updated_at, p.visible, p.managing_stock,
          p.replenishable, p.vendor, p.vendor_reference, p.permalink,
          p.categories, p.image, p.brand, p.options, p.tags, p.moq, p.uom
        ]);
        await connection.query(
          'INSERT INTO products VALUES ?',
          [productValues]
        );
      }

      // Insert product_categories relationships
      if (snapshot.product_categories.length > 0) {
        const pcValues = snapshot.product_categories.map(pc => [
          pc.product_id, pc.category_id
        ]);
        await connection.query(
          'INSERT INTO product_categories (product_id, category_id) VALUES ?',
          [pcValues]
        );
      }

      // Insert orders
      if (snapshot.orders.length > 0) {
        const orderValues = snapshot.orders.map(o => [
          o.id, o.order_number, o.product_id, o.SKU, o.date,
          o.price, o.quantity, o.discount, o.tax, o.tax_included,
          o.shipping, o.customer, o.status, o.payment_method,
          o.shipping_method, o.shipping_address, o.billing_address,
          o.canceled
        ]);
        await connection.query(
          'INSERT INTO orders VALUES ?',
          [orderValues]
        );
      }

      // Insert purchase orders
      if (snapshot.purchase_orders.length > 0) {
        const poValues = snapshot.purchase_orders.map(po => [
          po.id, po.po_id, po.vendor, po.date, po.expected_date,
          po.product_id, po.sku, po.cost_price, po.status, po.notes,
          po.ordered, po.received, po.received_date
        ]);
        await connection.query(
          'INSERT INTO purchase_orders VALUES ?',
          [poValues]
        );
      }

      await connection.commit();

      console.log('Snapshot restored successfully:');
      console.log('Products:', snapshot.products.length);
      console.log('Orders:', snapshot.orders.length);
      console.log('Purchase Orders:', snapshot.purchase_orders.length);
      console.log('Categories:', snapshot.categories.length);

    } catch (error) {
      await connection.rollback();
      throw error;
    } finally {
      connection.release();
    }
  } catch (error) {
    console.error('Error restoring snapshot:', error);
    throw error;
  } finally {
    await pool.end();
  }
}

// Restore snapshot if run directly
if (require.main === module) {
  restoreSnapshot().catch(console.error);
}

module.exports = { restoreSnapshot };