Add more tables to the DB and metrics processing to the import script, add error logging to the import, add test data snapshot scripts (untested)

2025-01-11 13:07:07 -05:00
parent 1d022cae01
commit 30018ad882
5 changed files with 1029 additions and 117 deletions

View File

@@ -30,6 +30,67 @@ CREATE TABLE IF NOT EXISTS products (
INDEX idx_brand (brand)
);
-- New table for product metrics
CREATE TABLE IF NOT EXISTS product_metrics (
product_id BIGINT NOT NULL,
last_calculated_at TIMESTAMP NOT NULL,
-- Sales velocity metrics
daily_sales_avg DECIMAL(10,3),
weekly_sales_avg DECIMAL(10,3),
monthly_sales_avg DECIMAL(10,3),
-- Stock metrics
days_of_inventory INT,
weeks_of_inventory INT,
reorder_point INT,
safety_stock INT,
-- Financial metrics
avg_margin_percent DECIMAL(10,3),
total_revenue DECIMAL(10,3),
-- Purchase metrics
avg_lead_time_days INT,
last_purchase_date DATE,
last_received_date DATE,
-- Classification
abc_class CHAR(1),
stock_status VARCHAR(20),
PRIMARY KEY (product_id),
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE
);
-- New table for time-based aggregates
CREATE TABLE IF NOT EXISTS product_time_aggregates (
product_id BIGINT NOT NULL,
year INT NOT NULL,
month INT NOT NULL,
-- Sales metrics
total_quantity_sold INT DEFAULT 0,
total_revenue DECIMAL(10,3) DEFAULT 0,
total_cost DECIMAL(10,3) DEFAULT 0,
order_count INT DEFAULT 0,
-- Stock changes
stock_received INT DEFAULT 0,
stock_ordered INT DEFAULT 0,
-- Calculated fields
avg_price DECIMAL(10,3),
profit_margin DECIMAL(10,3),
PRIMARY KEY (product_id, year, month),
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
INDEX idx_date (year, month)
);
-- New table for vendor performance
CREATE TABLE IF NOT EXISTS vendor_metrics (
vendor VARCHAR(100) NOT NULL,
last_calculated_at TIMESTAMP NOT NULL,
avg_lead_time_days DECIMAL(10,3),
on_time_delivery_rate DECIMAL(5,2),
order_fill_rate DECIMAL(5,2),
total_orders INT,
total_late_orders INT,
PRIMARY KEY (vendor)
-- Note: no FOREIGN KEY to products(vendor); vendor is not a unique key in
-- products and so cannot be the target of a foreign key reference
);
CREATE TABLE IF NOT EXISTS orders (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
order_number VARCHAR(50) NOT NULL,
@@ -98,3 +159,47 @@ CREATE TABLE IF NOT EXISTS product_categories (
INDEX idx_category (category_id),
INDEX idx_product (product_id)
);
-- Create views for common calculations
CREATE OR REPLACE VIEW product_sales_trends AS
SELECT
p.product_id,
p.SKU,
p.title,
COALESCE(SUM(o.quantity), 0) as total_sold,
COALESCE(AVG(o.quantity), 0) as avg_quantity_per_order,
COALESCE(COUNT(DISTINCT o.order_number), 0) as number_of_orders,
MIN(o.date) as first_sale_date,
MAX(o.date) as last_sale_date
FROM
products p
LEFT JOIN
-- Keep the canceled filter in the JOIN condition, not WHERE: filtering on
-- o.canceled in WHERE would drop products with no orders from the view
orders o ON p.product_id = o.product_id AND o.canceled = false
GROUP BY
p.product_id, p.SKU, p.title;
-- Create view for inventory health
CREATE OR REPLACE VIEW inventory_health AS
SELECT
p.product_id,
p.SKU,
p.title,
p.stock_quantity,
pm.daily_sales_avg,
pm.days_of_inventory,
pm.reorder_point,
pm.safety_stock,
CASE
WHEN pm.product_id IS NULL THEN 'No Data'
WHEN p.stock_quantity <= pm.safety_stock THEN 'Critical'
WHEN p.stock_quantity <= pm.reorder_point THEN 'Reorder'
WHEN p.stock_quantity > (pm.daily_sales_avg * 90) THEN 'Overstocked'
ELSE 'Healthy'
END as stock_status
FROM
products p
LEFT JOIN
product_metrics pm ON p.product_id = pm.product_id
WHERE
p.managing_stock = true;
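A quick way to exercise the new views once the schema is applied -- a minimal sketch reusing the same mysql2 + .env setup as the import scripts, not part of the commit:

const mysql = require('mysql2/promise');
require('dotenv').config();
(async () => {
const pool = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME
});
// Products at or below their reorder point, most urgent first
const [rows] = await pool.query(`
SELECT SKU, title, stock_quantity, reorder_point, stock_status
FROM inventory_health
WHERE stock_status IN ('Critical', 'Reorder')
ORDER BY stock_quantity ASC
LIMIT 20
`);
console.table(rows);
await pool.end();
})();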

View File

@@ -0,0 +1,115 @@
const fs = require('fs');
const path = require('path');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
dotenv.config({ path: path.join(__dirname, '../.env') });
const dbConfig = {
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
multipleStatements: true
};
const SNAPSHOTS_DIR = path.join(__dirname, '../snapshots');
async function createSnapshot() {
console.log('Creating test database snapshot...');
const pool = mysql.createPool(dbConfig);
try {
const connection = await pool.getConnection();
try {
// Create snapshots directory if it doesn't exist
if (!fs.existsSync(SNAPSHOTS_DIR)) {
fs.mkdirSync(SNAPSHOTS_DIR, { recursive: true });
}
// Get a diverse set of ~100 products that have order or purchase order activity
const [products] = await connection.query(`
SELECT p.*
FROM products p
WHERE EXISTS (
SELECT 1 FROM orders o WHERE o.product_id = p.product_id
UNION
SELECT 1 FROM purchase_orders po WHERE po.product_id = p.product_id
)
ORDER BY RAND()
LIMIT 100
`);
if (products.length === 0) {
throw new Error('No products with order or purchase order activity found; nothing to snapshot.');
}
const productIds = products.map(p => p.product_id);
// Get product_categories for selected products
const [product_categories] = await connection.query(`
SELECT pc.product_id, pc.category_id
FROM product_categories pc
WHERE pc.product_id IN (?)
`, [productIds]);
// Get the categories those products actually reference, so restoring the
// snapshot doesn't leave product_categories rows pointing at missing categories
const categoryIds = [...new Set(product_categories.map(pc => pc.category_id))];
const [categories] = categoryIds.length > 0
? await connection.query(`
SELECT id, name, created_at
FROM categories
WHERE id IN (?)
`, [categoryIds])
: [[]];
// Get orders for selected products (last 6 months)
const [orders] = await connection.query(`
SELECT o.*
FROM orders o
WHERE o.product_id IN (?)
AND o.date >= DATE_SUB(NOW(), INTERVAL 6 MONTH)
ORDER BY o.date DESC
`, [productIds]);
// Get purchase orders for selected products (last 6 months)
const [purchase_orders] = await connection.query(`
SELECT po.*
FROM purchase_orders po
WHERE po.product_id IN (?)
AND po.date >= DATE_SUB(NOW(), INTERVAL 6 MONTH)
ORDER BY po.date DESC
`, [productIds]);
// Create snapshot object
const snapshot = {
metadata: {
created_at: new Date().toISOString(),
description: 'Test snapshot with ~100 diverse products and their related data'
},
categories,
products,
product_categories,
orders,
purchase_orders
};
// Save snapshot
const snapshotPath = path.join(SNAPSHOTS_DIR, 'test_snapshot.json');
fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2));
console.log('Snapshot created successfully:');
console.log('Products:', products.length);
console.log('Orders:', orders.length);
console.log('Purchase Orders:', purchase_orders.length);
console.log('Categories:', categories.length);
console.log('Saved to:', snapshotPath);
} finally {
connection.release();
}
} catch (error) {
console.error('Error creating snapshot:', error);
throw error;
} finally {
await pool.end();
}
}
// Create snapshot if run directly
if (require.main === module) {
createSnapshot().catch(console.error);
}
module.exports = { createSnapshot };

View File

@@ -23,6 +23,37 @@ const dbConfig = {
namedPlaceholders: true
};
// Set up logging
const LOG_DIR = path.join(__dirname, '../logs');
const ERROR_LOG = path.join(LOG_DIR, 'import-errors.log');
const IMPORT_LOG = path.join(LOG_DIR, 'import.log');
// Ensure log directory exists
if (!fs.existsSync(LOG_DIR)) {
fs.mkdirSync(LOG_DIR, { recursive: true });
}
// Helper function to log errors
function logError(error, context = '') {
const timestamp = new Date().toISOString();
const errorMessage = `[${timestamp}] ${context}\nError: ${error.message}\nStack: ${error.stack}\n\n`;
// Log to error file
fs.appendFileSync(ERROR_LOG, errorMessage);
// Also log to console
console.error(`\n${context}\nError: ${error.message}`);
}
// Helper function to log import progress
function logImport(message) {
const timestamp = new Date().toISOString();
const logMessage = `[${timestamp}] ${message}\n`;
// Log to import file
fs.appendFileSync(IMPORT_LOG, logMessage);
}
// Helper function to output progress in JSON format
function outputProgress(data) {
if (!data.status) {
@@ -31,6 +62,11 @@ function outputProgress(data) {
...data
};
}
// Log progress to import log
logImport(JSON.stringify(data));
// Output to console
console.log(JSON.stringify(data));
}
@@ -137,6 +173,306 @@ async function handleCategories(connection, productId, categoriesStr) {
}
}
// Helper function to calculate sales velocity metrics
async function calculateSalesVelocity(connection, productId) {
const [rows] = await connection.query(`
SELECT
-- Use SUM(quantity) like the weekly/monthly metrics; COUNT(*) would measure orders per day, not units
COALESCE(SUM(quantity) / NULLIF(DATEDIFF(MAX(date), MIN(date)), 0), 0) as daily_sales_avg,
COALESCE(SUM(quantity) / NULLIF(DATEDIFF(MAX(date), MIN(date)), 0) * 7, 0) as weekly_sales_avg,
COALESCE(SUM(quantity) / NULLIF(DATEDIFF(MAX(date), MIN(date)), 0) * 30, 0) as monthly_sales_avg
FROM orders
WHERE product_id = ? AND canceled = false
GROUP BY product_id
`, [productId]);
return rows[0] || { daily_sales_avg: 0, weekly_sales_avg: 0, monthly_sales_avg: 0 };
}
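// Worked example (hypothetical numbers): 90 units sold across a 60-day span gives
// daily_sales_avg = 90 / 60 = 1.5, weekly_sales_avg = 1.5 * 7 = 10.5 and
// monthly_sales_avg = 1.5 * 30 = 45. The NULLIF guard matters: a product whose
// orders all land on a single day has DATEDIFF = 0 and falls back to 0.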
// Helper function to calculate stock metrics
async function calculateStockMetrics(connection, productId, dailySalesAvg) {
const [product] = await connection.query(
'SELECT stock_quantity FROM products WHERE product_id = ?',
[productId]
);
if (!product[0]) return null;
const stockQty = product[0].stock_quantity;
const daysOfInventory = dailySalesAvg > 0 ? Math.floor(stockQty / dailySalesAvg) : 999;
const weeksOfInventory = Math.floor(daysOfInventory / 7);
// Calculate safety stock (2 weeks of average sales)
const safetyStock = Math.ceil(dailySalesAvg * 14);
// Calculate reorder point (safety stock + 1 week of sales)
const reorderPoint = Math.ceil(safetyStock + (dailySalesAvg * 7));
return {
days_of_inventory: daysOfInventory,
weeks_of_inventory: weeksOfInventory,
safety_stock: safetyStock,
reorder_point: reorderPoint
};
}
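// Worked example (hypothetical numbers): stock_quantity = 100 and dailySalesAvg = 2
// gives days_of_inventory = floor(100 / 2) = 50, weeks_of_inventory = floor(50 / 7) = 7,
// safety_stock = ceil(2 * 14) = 28 (two weeks of demand) and
// reorder_point = ceil(28 + 2 * 7) = 42 (safety stock plus one week of demand).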
// Helper function to calculate financial metrics
async function calculateFinancialMetrics(connection, productId) {
const [rows] = await connection.query(`
SELECT
SUM(o.price * o.quantity) as total_revenue,
AVG((o.price - p.cost_price) / o.price * 100) as avg_margin_percent
FROM orders o
JOIN products p ON o.product_id = p.product_id
WHERE o.product_id = ? AND o.canceled = false
GROUP BY o.product_id
`, [productId]);
return rows[0] || { total_revenue: 0, avg_margin_percent: 0 };
}
// Helper function to calculate purchase metrics
async function calculatePurchaseMetrics(connection, productId) {
const [rows] = await connection.query(`
SELECT
AVG(DATEDIFF(received_date, date)) as avg_lead_time_days,
MAX(date) as last_purchase_date,
MAX(received_date) as last_received_date
FROM purchase_orders
WHERE product_id = ? AND status = 'closed'
GROUP BY product_id
`, [productId]);
return rows[0] || {
avg_lead_time_days: 0,
last_purchase_date: null,
last_received_date: null
};
}
// Helper function to calculate ABC classification
async function calculateABCClass(connection, productId) {
// Get total revenue for this product
const [productRevenue] = await connection.query(`
SELECT SUM(price * quantity) as revenue
FROM orders
WHERE product_id = ? AND canceled = false
`, [productId]);
// Get total revenue across all products
const [totalRevenue] = await connection.query(`
SELECT SUM(price * quantity) as total
FROM orders
WHERE canceled = false
`);
// DECIMAL sums come back from mysql2 as strings, so coerce before comparing
const revenue = Number(productRevenue[0]?.revenue) || 0;
const total = Number(totalRevenue[0]?.total) || 0;
if (total === 0) return 'C';
const percentage = (revenue / total) * 100;
// Simplified classification by each product's share of total revenue
// (not a cumulative Pareto split):
// A: product alone accounts for >= 20% of total revenue
// B: product accounts for >= 5%
// C: everything else
if (percentage >= 20) return 'A';
if (percentage >= 5) return 'B';
return 'C';
}
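// For comparison, a textbook ABC split is cumulative: rank products by revenue and
// classify by running share (roughly A = first 80%, B = next 15%, C = tail). A sketch
// over in-memory rows of shape { product_id, revenue } -- not wired into the import:
function classifyABCCumulative(rows) {
const sorted = [...rows].sort((a, b) => Number(b.revenue) - Number(a.revenue));
const total = sorted.reduce((sum, r) => sum + Number(r.revenue), 0);
const classes = new Map();
let running = 0;
for (const row of sorted) {
running += Number(row.revenue);
const share = total > 0 ? running / total : 1;
classes.set(row.product_id, share <= 0.8 ? 'A' : share <= 0.95 ? 'B' : 'C');
}
return classes;
}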
// Helper function to calculate time-based aggregates
async function calculateTimeAggregates(connection, productId) {
await connection.query(`
INSERT INTO product_time_aggregates (
product_id, year, month,
total_quantity_sold, total_revenue, total_cost,
order_count, stock_received, stock_ordered,
avg_price, profit_margin
)
SELECT
o.product_id,
YEAR(o.date) as year,
MONTH(o.date) as month,
SUM(o.quantity) as total_quantity_sold,
SUM(o.price * o.quantity) as total_revenue,
SUM(p.cost_price * o.quantity) as total_cost,
COUNT(DISTINCT o.order_number) as order_count,
COALESCE(MAX(po.received_total), 0) as stock_received,
COALESCE(MAX(po.ordered_total), 0) as stock_ordered,
AVG(o.price) as avg_price,
CASE
WHEN SUM(o.price * o.quantity) = 0 THEN 0
ELSE ((SUM(o.price * o.quantity) - COALESCE(SUM(p.cost_price * o.quantity), 0)) /
NULLIF(SUM(o.price * o.quantity), 0) * 100)
END as profit_margin
FROM orders o
JOIN products p ON o.product_id = p.product_id
-- Pre-aggregate purchase orders per month; joining them row-by-row would
-- fan out against multiple orders in the same month and inflate every SUM
LEFT JOIN (
SELECT product_id,
YEAR(date) AS y,
MONTH(date) AS m,
SUM(received) AS received_total,
SUM(ordered) AS ordered_total
FROM purchase_orders
WHERE product_id = ?
GROUP BY product_id, YEAR(date), MONTH(date)
) po ON o.product_id = po.product_id
AND YEAR(o.date) = po.y
AND MONTH(o.date) = po.m
WHERE o.product_id = ? AND o.canceled = false
GROUP BY o.product_id, YEAR(o.date), MONTH(o.date)
ON DUPLICATE KEY UPDATE
total_quantity_sold = VALUES(total_quantity_sold),
total_revenue = VALUES(total_revenue),
total_cost = VALUES(total_cost),
order_count = VALUES(order_count),
stock_received = VALUES(stock_received),
stock_ordered = VALUES(stock_ordered),
avg_price = VALUES(avg_price),
profit_margin = VALUES(profit_margin)
`, [productId, productId]);
}
// Helper function to calculate vendor metrics
async function calculateVendorMetrics(connection) {
try {
// Get list of vendors
const [vendors] = await connection.query('SELECT DISTINCT vendor FROM products WHERE vendor IS NOT NULL');
const startTime = Date.now();
let current = 0;
const total = vendors.length;
outputProgress({
operation: 'Calculating vendor metrics',
current: 0,
total,
percentage: '0.0'
});
for (const { vendor } of vendors) {
// Calculate average lead time
const [leadTimeResult] = await connection.query(`
SELECT
AVG(DATEDIFF(received_date, date)) as avg_lead_time,
COUNT(*) as total_orders,
SUM(CASE WHEN ordered = received THEN 1 ELSE 0 END) as fulfilled_orders
FROM purchase_orders
WHERE vendor = ? AND status = 'closed'
GROUP BY vendor
`, [vendor]);
const metrics = leadTimeResult[0] || {
avg_lead_time: 0,
total_orders: 0,
fulfilled_orders: 0
};
// Calculate fill rate
const fillRate = metrics.total_orders > 0 ?
(metrics.fulfilled_orders / metrics.total_orders * 100) : 0;
// Update vendor metrics (column names must match the vendor_metrics schema)
await connection.query(`
INSERT INTO vendor_metrics (
vendor,
last_calculated_at,
avg_lead_time_days,
order_fill_rate,
total_orders
) VALUES (?, NOW(), ?, ?, ?)
ON DUPLICATE KEY UPDATE
last_calculated_at = NOW(),
avg_lead_time_days = VALUES(avg_lead_time_days),
order_fill_rate = VALUES(order_fill_rate),
total_orders = VALUES(total_orders)
`, [
vendor,
metrics.avg_lead_time || 0,
fillRate,
metrics.total_orders
]);
current++;
updateProgress(current, total, 'Calculating vendor metrics', startTime);
}
outputProgress({
status: 'complete',
operation: 'Vendor metrics calculation completed',
current: total,
total,
percentage: '100.0'
});
} catch (error) {
logError(error, 'Error calculating vendor metrics');
throw error;
}
}
// Helper function to update product metrics
async function updateProductMetrics(connection, productId, startTime, current, total) {
try {
// Calculate sales velocity metrics
const velocityMetrics = await calculateSalesVelocity(connection, productId);
// Calculate stock metrics
const stockMetrics = await calculateStockMetrics(connection, productId, velocityMetrics.daily_sales_avg);
// Calculate financial metrics
const financialMetrics = await calculateFinancialMetrics(connection, productId);
// Calculate purchase metrics
const purchaseMetrics = await calculatePurchaseMetrics(connection, productId);
// Update metrics in database (last_calculated_at is NOT NULL in the schema, so set it here)
await connection.query(`
INSERT INTO product_metrics (
product_id,
last_calculated_at,
daily_sales_avg,
weekly_sales_avg,
monthly_sales_avg,
days_of_inventory,
weeks_of_inventory,
safety_stock,
reorder_point,
total_revenue,
avg_margin_percent,
avg_lead_time_days,
last_purchase_date,
last_received_date
) VALUES (?, NOW(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
last_calculated_at = NOW(),
daily_sales_avg = VALUES(daily_sales_avg),
weekly_sales_avg = VALUES(weekly_sales_avg),
monthly_sales_avg = VALUES(monthly_sales_avg),
days_of_inventory = VALUES(days_of_inventory),
weeks_of_inventory = VALUES(weeks_of_inventory),
safety_stock = VALUES(safety_stock),
reorder_point = VALUES(reorder_point),
total_revenue = VALUES(total_revenue),
avg_margin_percent = VALUES(avg_margin_percent),
avg_lead_time_days = VALUES(avg_lead_time_days),
last_purchase_date = VALUES(last_purchase_date),
last_received_date = VALUES(last_received_date)
`, [
productId,
velocityMetrics.daily_sales_avg,
velocityMetrics.weekly_sales_avg,
velocityMetrics.monthly_sales_avg,
stockMetrics?.days_of_inventory || 0,
stockMetrics?.weeks_of_inventory || 0,
stockMetrics?.safety_stock || 0,
stockMetrics?.reorder_point || 0,
financialMetrics.total_revenue,
financialMetrics.avg_margin_percent,
purchaseMetrics.avg_lead_time_days,
purchaseMetrics.last_purchase_date,
purchaseMetrics.last_received_date
]);
// Output progress every 5 products (don't reset startTime; it anchors the elapsed-time estimate)
if (current % 5 === 0) {
updateProgress(current, total, 'Calculating product metrics', startTime);
}
} catch (error) {
logError(error, `Error updating metrics for product ${productId}`);
throw error;
}
}
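// Note: calculateABCClass and calculateTimeAggregates are defined above but not
// invoked anywhere in this diff, so abc_class, stock_status and the
// product_time_aggregates rows are not populated by this pass yet.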
async function importProducts(pool, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
@@ -172,91 +508,101 @@ async function importProducts(pool, filePath) {
let batch = [];
let categoryUpdates = new Map(); // Store category updates for batch processing
// Get a connection from the pool that we'll reuse
const connection = await pool.getConnection();
try {
for await (const record of parser) {
if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
// Process remaining batch
if (batch.length > 0) {
await processBatch(batch, categoryUpdates);
}
outputProgress({
operation: 'Products import',
message: `Reached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
// Update progress every 100ms to avoid console flooding
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Products import', startTime);
lastUpdate = now;
}
// Add to batch
batch.push({
product_id: record.product_id,
title: record.title,
SKU: record.SKU,
created_at: convertDate(record.created_at),
stock_quantity: parseInt(record.stock_quantity) || 0,
price: parseFloat(record.price) || 0,
regular_price: parseFloat(record.regular_price) || 0,
cost_price: parseFloat(record.cost_price) || null,
landing_cost_price: parseFloat(record.landing_cost_price) || null,
barcode: record.barcode,
updated_at: convertDate(record.updated_at),
visible: record.visible === '1',
managing_stock: record.managing_stock === '1',
replenishable: record.replenishable === '1',
vendor: record.vendor,
vendor_reference: record.vendor_reference,
permalink: record.permalink,
categories: record.categories,
image: record.image,
brand: record.brand,
options: record.options,
tags: record.tags,
moq: parseInt(record.moq) || 1,
uom: parseInt(record.uom) || 1
});
// Store category updates for later
if (record.categories) {
categoryUpdates.set(record.product_id, record.categories);
}
// Process batch if it reaches BATCH_SIZE
if (batch.length >= BATCH_SIZE) {
await processBatch(batch, categoryUpdates);
batch = [];
categoryUpdates.clear();
}
}
// Process any remaining records in the final batch
if (batch.length > 0) {
await processBatch(batch, categoryUpdates);
}
outputProgress({
status: 'running',
operation: 'Products import completed',
current: rowCount,
total: totalRows,
added,
updated,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
} catch (error) {
console.error('Error during products import:', error);
throw error;
} finally {
if (connection) {
connection.release();
}
}
// Helper function to process a batch of records
async function processBatch(records, categoryUpdates) {
if (records.length === 0) return;
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
try {
@@ -301,12 +647,6 @@ async function importProducts(pool, filePath) {
// Update stats
if (result.affectedRows > 0) {
// For INSERT ... ON DUPLICATE KEY UPDATE:
// - If a row is inserted, affectedRows = 1
// - If a row is updated, affectedRows = 2
// So we can calculate:
// - Number of inserts = number of rows where affectedRows = 1
// - Number of updates = number of rows where affectedRows = 2
const insertCount = result.affectedRows - result.changedRows;
const updateCount = result.changedRows;
added += insertCount;
@@ -321,13 +661,12 @@ async function importProducts(pool, filePath) {
await connection.commit();
} catch (error) {
await connection.rollback();
logError(error, `Error processing batch of ${records.length} records`);
throw error;
}
} catch (error) {
logError(error, `Error in batch processing:\nFirst record: ${JSON.stringify(records[0])}`);
// Continue with next batch instead of failing completely
} finally {
connection.release();
}
}
}
@@ -674,9 +1013,11 @@ async function main() {
});
const startTime = Date.now();
let pool;
try {
pool = mysql.createPool(dbConfig);
// Check if tables exist, if not create them
outputProgress({
operation: 'Checking database schema',
@@ -686,19 +1027,79 @@ async function main() {
const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
await pool.query(schemaSQL);
// Step 1: Import all data first
try {
// Import products first since they're referenced by other tables
await importProducts(pool, path.join(__dirname, '../csv/39f2x83-products.csv'));
// Process orders and purchase orders in parallel
outputProgress({
operation: 'Starting parallel import',
message: 'Processing orders and purchase orders simultaneously...'
});
await Promise.all([
importOrders(pool, path.join(__dirname, '../csv/39f2x83-orders.csv')),
importPurchaseOrders(pool, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'))
]);
// Step 2: Calculate all metrics after imports are complete
outputProgress({
operation: 'Starting metrics calculation',
message: 'Calculating metrics for all products and vendors...'
});
const connection = await pool.getConnection();
try {
// Calculate product metrics
const [products] = await connection.query('SELECT DISTINCT product_id FROM products');
const totalProducts = products.length;
let processedProducts = 0;
const metricsStartTime = Date.now();
let lastUpdate = Date.now();
outputProgress({
operation: 'Starting product metrics calculation',
message: `Calculating metrics for ${totalProducts} products...`,
current: 0,
total: totalProducts,
percentage: '0'
});
for (const product of products) {
try {
// Update progress every 5 products or 1 second
if (processedProducts % 5 === 0 || (Date.now() - lastUpdate) > 1000) {
updateProgress(processedProducts, totalProducts, 'Calculating product metrics', metricsStartTime);
lastUpdate = Date.now();
}
await updateProductMetrics(connection, product.product_id, metricsStartTime, processedProducts, totalProducts);
processedProducts++;
} catch (error) {
logError(error, `Error calculating metrics for product ${product.product_id}`);
// Continue with next product instead of failing completely
}
}
outputProgress({
operation: 'Product metrics calculation completed',
current: processedProducts,
total: totalProducts,
duration: formatDuration((Date.now() - metricsStartTime) / 1000),
percentage: '100'
});
// Calculate vendor metrics
await calculateVendorMetrics(connection);
} finally {
connection.release();
}
} catch (error) {
logError(error, 'Error during import/metrics calculation');
throw error;
}
outputProgress({
status: 'complete',
@@ -706,15 +1107,21 @@ async function main() {
duration: formatDuration((Date.now() - startTime) / 1000)
});
} catch (error) {
logError(error, 'Fatal error during import process');
outputProgress({
status: 'error',
error: error.message
});
process.exit(1);
} finally {
if (pool) {
await pool.end();
}
}
}
// Run the import
main().catch(error => {
logError(error, 'Unhandled error in main process');
process.exit(1);
});

View File

@@ -0,0 +1,129 @@
const fs = require('fs');
const path = require('path');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
dotenv.config({ path: path.join(__dirname, '../.env') });
const dbConfig = {
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
multipleStatements: true
};
const SNAPSHOTS_DIR = path.join(__dirname, '../snapshots');
async function restoreSnapshot() {
console.log('Restoring test database from snapshot...');
const pool = mysql.createPool(dbConfig);
try {
// Read snapshot
const snapshotPath = path.join(SNAPSHOTS_DIR, 'test_snapshot.json');
if (!fs.existsSync(snapshotPath)) {
throw new Error('Snapshot file not found. Run create-test-snapshot.js first.');
}
const snapshot = JSON.parse(fs.readFileSync(snapshotPath, 'utf8'));
// JSON stringifies Date values as ISO strings with a trailing 'Z', which MySQL
// rejects as a datetime literal; convert them back to Date objects so mysql2
// can format them for the server
const toDate = v => (v ? new Date(v) : null);
// Ensure the schema exists first. Note: schema.sql uses CREATE TABLE IF NOT EXISTS,
// so nothing is dropped -- restore into a fresh/empty database to avoid duplicate keys
const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
await pool.query(schemaSQL);
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
// Insert categories first (they're referenced by product_categories)
if (snapshot.categories.length > 0) {
const categoryValues = snapshot.categories.map(c => [c.id, c.name, toDate(c.created_at)]);
await connection.query(
'INSERT INTO categories (id, name, created_at) VALUES ?',
[categoryValues]
);
}
// Insert products
if (snapshot.products.length > 0) {
const productValues = snapshot.products.map(p => [
p.product_id, p.title, p.SKU, toDate(p.created_at), p.stock_quantity,
p.price, p.regular_price, p.cost_price, p.landing_cost_price,
p.barcode, toDate(p.updated_at), p.visible, p.managing_stock,
p.replenishable, p.vendor, p.vendor_reference, p.permalink,
p.categories, p.image, p.brand, p.options, p.tags, p.moq, p.uom
]);
await connection.query(
'INSERT INTO products VALUES ?',
[productValues]
);
}
// Insert product_categories relationships
if (snapshot.product_categories.length > 0) {
const pcValues = snapshot.product_categories.map(pc => [
pc.product_id, pc.category_id
]);
await connection.query(
'INSERT INTO product_categories (product_id, category_id) VALUES ?',
[pcValues]
);
}
// Insert orders
if (snapshot.orders.length > 0) {
const orderValues = snapshot.orders.map(o => [
o.id, o.order_number, o.product_id, o.SKU, toDate(o.date),
o.price, o.quantity, o.discount, o.tax, o.tax_included,
o.shipping, o.customer, o.status, o.payment_method,
o.shipping_method, o.shipping_address, o.billing_address,
o.canceled
]);
await connection.query(
'INSERT INTO orders VALUES ?',
[orderValues]
);
}
// Insert purchase orders
if (snapshot.purchase_orders.length > 0) {
const poValues = snapshot.purchase_orders.map(po => [
po.id, po.po_id, po.vendor, toDate(po.date), toDate(po.expected_date),
po.product_id, po.sku, po.cost_price, po.status, po.notes,
po.ordered, po.received, toDate(po.received_date)
]);
await connection.query(
'INSERT INTO purchase_orders VALUES ?',
[poValues]
);
}
await connection.commit();
console.log('Snapshot restored successfully:');
console.log('Products:', snapshot.products.length);
console.log('Orders:', snapshot.orders.length);
console.log('Purchase Orders:', snapshot.purchase_orders.length);
console.log('Categories:', snapshot.categories.length);
} catch (error) {
await connection.rollback();
throw error;
} finally {
connection.release();
}
} catch (error) {
console.error('Error restoring snapshot:', error);
throw error;
} finally {
await pool.end();
}
}
// Restore snapshot if run directly
if (require.main === module) {
restoreSnapshot().catch(console.error);
}
module.exports = { restoreSnapshot };
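A typical round trip during development -- a sketch assuming both scripts sit in the same directory and the restore script is saved as restore-test-snapshot.js (the create script's name comes from the error message above; the restore filename is an assumption):

// round-trip.js (hypothetical)
const { createSnapshot } = require('./create-test-snapshot');
const { restoreSnapshot } = require('./restore-test-snapshot');
(async () => {
await createSnapshot(); // writes snapshots/test_snapshot.json from the live DB
await restoreSnapshot(); // replays the snapshot into the configured database
})();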

View File

@@ -58,6 +58,9 @@ export function Settings() {
orders: 0,
purchaseOrders: 0
});
const [isCreatingSnapshot, setIsCreatingSnapshot] = useState(false);
const [isRestoringSnapshot, setIsRestoringSnapshot] = useState(false);
const [snapshotProgress, setSnapshotProgress] = useState<ImportProgress | null>(null);
// Helper function to update progress state
const updateProgressState = (progressData: any) => {
@@ -87,11 +90,15 @@ export function Settings() {
setImportProgress(prev => ({ ...prev, ...progressUpdate }));
} else if (operation.includes('purchase orders import')) {
setPurchaseOrdersProgress(prev => ({ ...prev, ...progressUpdate }));
} else if (operation.includes('metrics')) { // also matches 'vendor metrics'
setImportProgress(prev => ({ ...prev, ...progressUpdate }));
} else if (operation.includes('snapshot')) {
setSnapshotProgress(prev => ({ ...prev, ...progressUpdate }));
}
};
// Helper to connect to event source
const connectToEventSource = useCallback((type: 'update' | 'import' | 'reset' | 'snapshot') => {
if (eventSource) {
eventSource.close();
}
@@ -389,6 +396,63 @@ export function Settings() {
}
};
// Add handlers for snapshot operations
const handleCreateSnapshot = async () => {
try {
setIsCreatingSnapshot(true);
setSnapshotProgress({ status: 'running', operation: 'Creating test data snapshot' });
// Connect to SSE for progress updates
connectToEventSource('snapshot');
const response = await fetch(`${config.apiUrl}/snapshot/create`, {
method: 'POST',
credentials: 'include'
});
if (!response.ok) {
throw new Error('Failed to create snapshot');
}
} catch (error) {
console.error('Error creating snapshot:', error);
if (eventSource) {
eventSource.close();
setEventSource(null);
}
setIsCreatingSnapshot(false);
setSnapshotProgress(null);
toast.error('Failed to create snapshot');
}
};
const handleRestoreSnapshot = async () => {
try {
setIsRestoringSnapshot(true);
setSnapshotProgress({ status: 'running', operation: 'Restoring test data snapshot' });
// Connect to SSE for progress updates
connectToEventSource('snapshot');
const response = await fetch(`${config.apiUrl}/snapshot/restore`, {
method: 'POST',
credentials: 'include'
});
if (!response.ok) {
throw new Error('Failed to restore snapshot');
}
} catch (error) {
console.error('Error restoring snapshot:', error);
if (eventSource) {
eventSource.close();
setEventSource(null);
}
setIsRestoringSnapshot(false);
setSnapshotProgress(null);
toast.error('Failed to restore snapshot');
}
};
// Cleanup on unmount
useEffect(() => {
return () => {
@@ -625,6 +689,30 @@ export function Settings() {
{renderProgress(purchaseOrdersProgress)}
</div>
)}
{/* Show product metrics calculation progress (vendor metrics progress is rendered below) */}
{importProgress?.operation?.toLowerCase().includes('metrics') && !importProgress.operation.toLowerCase().includes('vendor metrics') && (
<div>
<Progress value={Number(importProgress.percentage)} className="mb-2" />
<p className="text-sm text-muted-foreground">
{importProgress.message || importProgress.operation || 'Calculating metrics...'}
{importProgress.current && importProgress.total && (
<> ({importProgress.current} of {importProgress.total})</>
)}
</p>
{importProgress.elapsed && (
<p className="text-xs text-muted-foreground">
Elapsed: {importProgress.elapsed}
{importProgress.remaining && <> Remaining: {importProgress.remaining}</>}
</p>
)}
</div>
)}
{/* Show vendor metrics progress */}
{importProgress?.operation?.toLowerCase().includes('vendor metrics') && (
<div>
{renderProgress(importProgress)}
</div>
)}
</div>
)}
</CardContent>
@@ -637,36 +725,104 @@ export function Settings() {
<CardDescription>Drop all tables and recreate the database schema. This will delete ALL data.</CardDescription>
</CardHeader>
<CardContent>
<div className="flex gap-2">
<AlertDialog>
<AlertDialogTrigger asChild>
<Button variant="destructive" disabled={isResetting || isImporting || isUpdating}>
Reset Database
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Are you absolutely sure?</AlertDialogTitle>
<AlertDialogDescription>
This action cannot be undone. This will permanently delete all data from the database.
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction onClick={handleResetDB}>Continue</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
{resetProgress && (
<div className="mt-4">
<Progress value={Number(resetProgress.percentage)} className="mb-2" />
<p className="text-sm text-muted-foreground">
{resetProgress.message || 'Resetting database...'}
</p>
</div>
)}
</CardContent>
</Card>
{/* Test Data Snapshots Card */}
<Card>
<CardHeader>
<CardTitle>Test Data Snapshots</CardTitle>
<CardDescription>Create and restore test data snapshots for development and testing.</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<div className="flex space-x-4">
<Button
onClick={handleCreateSnapshot}
disabled={isCreatingSnapshot || isRestoringSnapshot || isImporting || isUpdating || isResetting}
>
{isCreatingSnapshot ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Creating Snapshot...
</>
) : (
<>Create Snapshot</>
)}
</Button>
<AlertDialog>
<AlertDialogTrigger asChild>
<Button
variant="destructive"
className="flex-1"
disabled={isUpdating || isImporting}
variant="secondary"
disabled={isCreatingSnapshot || isRestoringSnapshot || isImporting || isUpdating || isResetting}
>
Reset Database
{isRestoringSnapshot ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Restoring...
</>
) : (
<>Restore Snapshot</>
)}
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Restore test data snapshot?</AlertDialogTitle>
<AlertDialogDescription>
This will replace your current database with the test data snapshot. Any unsaved changes will be lost.
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction onClick={handleRestoreSnapshot}>Continue</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
{snapshotProgress && (
<div>
<Progress value={Number(snapshotProgress.percentage)} className="mb-2" />
<p className="text-sm text-muted-foreground">
{snapshotProgress.message || 'Processing snapshot...'}
</p>
</div>
)}
<div className="text-sm text-muted-foreground">
<p>The test data snapshot includes:</p>
<ul className="list-disc list-inside mt-2">
<li>~100 diverse products with associated data</li>
<li>Orders from the last 6 months</li>
<li>Purchase orders from the last 6 months</li>
<li>Categories and product relationships</li>
</ul>
</div>
</CardContent>
</Card>