Add more tables to db and processing to import script, add error logging to import, add test data snapshots (untested)
@@ -30,6 +30,67 @@ CREATE TABLE IF NOT EXISTS products (
     INDEX idx_brand (brand)
 );
 
+-- New table for product metrics
+CREATE TABLE IF NOT EXISTS product_metrics (
+    product_id BIGINT NOT NULL,
+    last_calculated_at TIMESTAMP NOT NULL,
+    -- Sales velocity metrics
+    daily_sales_avg DECIMAL(10,3),
+    weekly_sales_avg DECIMAL(10,3),
+    monthly_sales_avg DECIMAL(10,3),
+    -- Stock metrics
+    days_of_inventory INT,
+    weeks_of_inventory INT,
+    reorder_point INT,
+    safety_stock INT,
+    -- Financial metrics
+    avg_margin_percent DECIMAL(10,3),
+    total_revenue DECIMAL(10,3),
+    -- Purchase metrics
+    avg_lead_time_days INT,
+    last_purchase_date DATE,
+    last_received_date DATE,
+    -- Classification
+    abc_class CHAR(1),
+    stock_status VARCHAR(20),
+    PRIMARY KEY (product_id),
+    FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE
+);
+
+-- New table for time-based aggregates
+CREATE TABLE IF NOT EXISTS product_time_aggregates (
+    product_id BIGINT NOT NULL,
+    year INT NOT NULL,
+    month INT NOT NULL,
+    -- Sales metrics
+    total_quantity_sold INT DEFAULT 0,
+    total_revenue DECIMAL(10,3) DEFAULT 0,
+    total_cost DECIMAL(10,3) DEFAULT 0,
+    order_count INT DEFAULT 0,
+    -- Stock changes
+    stock_received INT DEFAULT 0,
+    stock_ordered INT DEFAULT 0,
+    -- Calculated fields
+    avg_price DECIMAL(10,3),
+    profit_margin DECIMAL(10,3),
+    PRIMARY KEY (product_id, year, month),
+    FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
+    INDEX idx_date (year, month)
+);
+
+-- New table for vendor performance
+CREATE TABLE IF NOT EXISTS vendor_metrics (
+    vendor VARCHAR(100) NOT NULL,
+    last_calculated_at TIMESTAMP NOT NULL,
+    avg_lead_time_days DECIMAL(10,3),
+    on_time_delivery_rate DECIMAL(5,2),
+    order_fill_rate DECIMAL(5,2),
+    total_orders INT,
+    total_late_orders INT,
+    PRIMARY KEY (vendor),
+    FOREIGN KEY (vendor) REFERENCES products(vendor) ON DELETE CASCADE
+);
+
 CREATE TABLE IF NOT EXISTS orders (
     id BIGINT AUTO_INCREMENT PRIMARY KEY,
     order_number VARCHAR(50) NOT NULL,
@@ -98,3 +159,47 @@ CREATE TABLE IF NOT EXISTS product_categories (
     INDEX idx_category (category_id),
     INDEX idx_product (product_id)
 );
+
+-- Create views for common calculations
+-- Note: the canceled filter lives in the JOIN condition so that products
+-- with no orders still appear (a WHERE o.canceled = false would drop them).
+CREATE OR REPLACE VIEW product_sales_trends AS
+SELECT
+    p.product_id,
+    p.SKU,
+    p.title,
+    COALESCE(SUM(o.quantity), 0) as total_sold,
+    COALESCE(AVG(o.quantity), 0) as avg_quantity_per_order,
+    COALESCE(COUNT(DISTINCT o.order_number), 0) as number_of_orders,
+    MIN(o.date) as first_sale_date,
+    MAX(o.date) as last_sale_date
+FROM
+    products p
+LEFT JOIN
+    orders o ON p.product_id = o.product_id AND o.canceled = false
+GROUP BY
+    p.product_id, p.SKU, p.title;
+
+-- Create view for inventory health
+CREATE OR REPLACE VIEW inventory_health AS
+SELECT
+    p.product_id,
+    p.SKU,
+    p.title,
+    p.stock_quantity,
+    pm.daily_sales_avg,
+    pm.days_of_inventory,
+    pm.reorder_point,
+    pm.safety_stock,
+    CASE
+        WHEN p.stock_quantity <= pm.safety_stock THEN 'Critical'
+        WHEN p.stock_quantity <= pm.reorder_point THEN 'Reorder'
+        WHEN p.stock_quantity > (pm.daily_sales_avg * 90) THEN 'Overstocked'
+        ELSE 'Healthy'
+    END as stock_status
+FROM
+    products p
+LEFT JOIN
+    product_metrics pm ON p.product_id = pm.product_id
+WHERE
+    p.managing_stock = true;
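As a quick orientation (illustrative, not part of the schema file): once the views above exist, stock alerts can be read straight from inventory_health. A minimal Node sketch, assuming the same mysql2/promise pool setup the snapshot scripts below use:

// Sketch: list products the inventory_health view flags for attention.
// listStockAlerts is a hypothetical helper, not part of this commit.
async function listStockAlerts(pool) {
    const [rows] = await pool.query(`
        SELECT product_id, SKU, title, stock_quantity, stock_status
        FROM inventory_health
        WHERE stock_status IN ('Critical', 'Reorder')
        ORDER BY FIELD(stock_status, 'Critical', 'Reorder'), SKU
    `);
    return rows;
}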
inventory-server/scripts/create-test-snapshot.js (new file, 115 lines)
@@ -0,0 +1,115 @@
+const fs = require('fs');
+const path = require('path');
+const mysql = require('mysql2/promise');
+const dotenv = require('dotenv');
+
+dotenv.config({ path: path.join(__dirname, '../.env') });
+
+const dbConfig = {
+    host: process.env.DB_HOST,
+    user: process.env.DB_USER,
+    password: process.env.DB_PASSWORD,
+    database: process.env.DB_NAME,
+    multipleStatements: true
+};
+
+const SNAPSHOTS_DIR = path.join(__dirname, '../snapshots');
+
+async function createSnapshot() {
+    console.log('Creating test database snapshot...');
+    const pool = mysql.createPool(dbConfig);
+
+    try {
+        const connection = await pool.getConnection();
+        try {
+            // Create snapshots directory if it doesn't exist
+            if (!fs.existsSync(SNAPSHOTS_DIR)) {
+                fs.mkdirSync(SNAPSHOTS_DIR, { recursive: true });
+            }
+
+            // Get categories
+            const [categories] = await connection.query(`
+                SELECT id, name, created_at
+                FROM categories
+                LIMIT 10
+            `);
+
+            // Get a diverse set of ~100 products
+            const [products] = await connection.query(`
+                SELECT p.*
+                FROM products p
+                WHERE EXISTS (
+                    SELECT 1 FROM orders o WHERE o.product_id = p.product_id
+                    UNION
+                    SELECT 1 FROM purchase_orders po WHERE po.product_id = p.product_id
+                )
+                ORDER BY RAND()
+                LIMIT 100
+            `);
+
+            // Get product_categories for selected products
+            const [product_categories] = await connection.query(`
+                SELECT pc.product_id, pc.category_id
+                FROM product_categories pc
+                WHERE pc.product_id IN (?)
+            `, [products.map(p => p.product_id)]);
+
+            // Get orders for selected products (last 6 months)
+            const [orders] = await connection.query(`
+                SELECT o.*
+                FROM orders o
+                WHERE o.product_id IN (?)
+                AND o.date >= DATE_SUB(NOW(), INTERVAL 6 MONTH)
+                ORDER BY o.date DESC
+            `, [products.map(p => p.product_id)]);
+
+            // Get purchase orders for selected products (last 6 months)
+            const [purchase_orders] = await connection.query(`
+                SELECT po.*
+                FROM purchase_orders po
+                WHERE po.product_id IN (?)
+                AND po.date >= DATE_SUB(NOW(), INTERVAL 6 MONTH)
+                ORDER BY po.date DESC
+            `, [products.map(p => p.product_id)]);
+
+            // Create snapshot object
+            const snapshot = {
+                metadata: {
+                    created_at: new Date().toISOString(),
+                    description: 'Test snapshot with ~100 diverse products and their related data'
+                },
+                categories,
+                products,
+                product_categories,
+                orders,
+                purchase_orders
+            };
+
+            // Save snapshot
+            const snapshotPath = path.join(SNAPSHOTS_DIR, 'test_snapshot.json');
+            fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2));
+
+            console.log('Snapshot created successfully:');
+            console.log('Products:', products.length);
+            console.log('Orders:', orders.length);
+            console.log('Purchase Orders:', purchase_orders.length);
+            console.log('Categories:', categories.length);
+            console.log('Saved to:', snapshotPath);
+
+        } finally {
+            connection.release();
+        }
+    } catch (error) {
+        console.error('Error creating snapshot:', error);
+        throw error;
+    } finally {
+        await pool.end();
+    }
+}
+
+// Create snapshot if run directly
+if (require.main === module) {
+    createSnapshot().catch(console.error);
+}
+
+module.exports = { createSnapshot };
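The script runs standalone and also exports createSnapshot; a minimal usage sketch, assuming the .env above is populated:

// Run directly:
//   node inventory-server/scripts/create-test-snapshot.js
// Or reuse programmatically:
const { createSnapshot } = require('./create-test-snapshot');

createSnapshot().catch(() => process.exit(1));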
@@ -23,6 +23,37 @@ const dbConfig = {
     namedPlaceholders: true
 };
+
+// Set up logging
+const LOG_DIR = path.join(__dirname, '../logs');
+const ERROR_LOG = path.join(LOG_DIR, 'import-errors.log');
+const IMPORT_LOG = path.join(LOG_DIR, 'import.log');
+
+// Ensure log directory exists
+if (!fs.existsSync(LOG_DIR)) {
+    fs.mkdirSync(LOG_DIR, { recursive: true });
+}
+
+// Helper function to log errors
+function logError(error, context = '') {
+    const timestamp = new Date().toISOString();
+    const errorMessage = `[${timestamp}] ${context}\nError: ${error.message}\nStack: ${error.stack}\n\n`;
+
+    // Log to error file
+    fs.appendFileSync(ERROR_LOG, errorMessage);
+
+    // Also log to console
+    console.error(`\n${context}\nError: ${error.message}`);
+}
+
+// Helper function to log import progress
+function logImport(message) {
+    const timestamp = new Date().toISOString();
+    const logMessage = `[${timestamp}] ${message}\n`;
+
+    // Log to import file
+    fs.appendFileSync(IMPORT_LOG, logMessage);
+}
 
 // Helper function to output progress in JSON format
 function outputProgress(data) {
     if (!data.status) {
@@ -31,6 +62,11 @@ function outputProgress(data) {
             ...data
         };
     }
+
+    // Log progress to import log
+    logImport(JSON.stringify(data));
+
+    // Output to console
     console.log(JSON.stringify(data));
 }
 
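With these helpers, every progress event is mirrored into logs/import.log as a timestamped JSON line. A sketch of reading the log back — the "[timestamp] message" format comes from logImport above; readImportLog itself is illustrative, not part of the commit:

const fs = require('fs');

// Parse lines of the form "[<ISO timestamp>] <JSON>" written by logImport.
function readImportLog(logPath) {
    return fs.readFileSync(logPath, 'utf8')
        .split('\n')
        .filter(Boolean)
        .map(line => {
            const match = line.match(/^\[(.+?)\] (.*)$/);
            if (!match) return null;
            try {
                return { timestamp: match[1], event: JSON.parse(match[2]) };
            } catch {
                return { timestamp: match[1], raw: match[2] };
            }
        })
        .filter(Boolean);
}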
@@ -137,6 +173,306 @@ async function handleCategories(connection, productId, categoriesStr) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate sales velocity metrics
|
||||||
|
async function calculateSalesVelocity(connection, productId) {
|
||||||
|
const [rows] = await connection.query(`
|
||||||
|
SELECT
|
||||||
|
COALESCE(COUNT(*) / NULLIF(DATEDIFF(MAX(date), MIN(date)), 0), 0) as daily_sales_avg,
|
||||||
|
COALESCE(SUM(quantity) / NULLIF(DATEDIFF(MAX(date), MIN(date)), 0) * 7, 0) as weekly_sales_avg,
|
||||||
|
COALESCE(SUM(quantity) / NULLIF(DATEDIFF(MAX(date), MIN(date)), 0) * 30, 0) as monthly_sales_avg
|
||||||
|
FROM orders
|
||||||
|
WHERE product_id = ? AND canceled = false
|
||||||
|
GROUP BY product_id
|
||||||
|
`, [productId]);
|
||||||
|
|
||||||
|
return rows[0] || { daily_sales_avg: 0, weekly_sales_avg: 0, monthly_sales_avg: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate stock metrics
|
||||||
|
async function calculateStockMetrics(connection, productId, dailySalesAvg) {
|
||||||
|
const [product] = await connection.query(
|
||||||
|
'SELECT stock_quantity FROM products WHERE product_id = ?',
|
||||||
|
[productId]
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!product[0]) return null;
|
||||||
|
|
||||||
|
const stockQty = product[0].stock_quantity;
|
||||||
|
const daysOfInventory = dailySalesAvg > 0 ? Math.floor(stockQty / dailySalesAvg) : 999;
|
||||||
|
const weeksOfInventory = Math.floor(daysOfInventory / 7);
|
||||||
|
|
||||||
|
// Calculate safety stock (2 weeks of average sales)
|
||||||
|
const safetyStock = Math.ceil(dailySalesAvg * 14);
|
||||||
|
|
||||||
|
// Calculate reorder point (safety stock + 1 week of sales)
|
||||||
|
const reorderPoint = Math.ceil(safetyStock + (dailySalesAvg * 7));
|
||||||
|
|
||||||
|
return {
|
||||||
|
days_of_inventory: daysOfInventory,
|
||||||
|
weeks_of_inventory: weeksOfInventory,
|
||||||
|
safety_stock: safetyStock,
|
||||||
|
reorder_point: reorderPoint
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate financial metrics
|
||||||
|
async function calculateFinancialMetrics(connection, productId) {
|
||||||
|
const [rows] = await connection.query(`
|
||||||
|
SELECT
|
||||||
|
SUM(o.price * o.quantity) as total_revenue,
|
||||||
|
AVG((o.price - p.cost_price) / o.price * 100) as avg_margin_percent
|
||||||
|
FROM orders o
|
||||||
|
JOIN products p ON o.product_id = p.product_id
|
||||||
|
WHERE o.product_id = ? AND o.canceled = false
|
||||||
|
GROUP BY o.product_id
|
||||||
|
`, [productId]);
|
||||||
|
|
||||||
|
return rows[0] || { total_revenue: 0, avg_margin_percent: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate purchase metrics
|
||||||
|
async function calculatePurchaseMetrics(connection, productId) {
|
||||||
|
const [rows] = await connection.query(`
|
||||||
|
SELECT
|
||||||
|
AVG(DATEDIFF(received_date, date)) as avg_lead_time_days,
|
||||||
|
MAX(date) as last_purchase_date,
|
||||||
|
MAX(received_date) as last_received_date
|
||||||
|
FROM purchase_orders
|
||||||
|
WHERE product_id = ? AND status = 'closed'
|
||||||
|
GROUP BY product_id
|
||||||
|
`, [productId]);
|
||||||
|
|
||||||
|
return rows[0] || {
|
||||||
|
avg_lead_time_days: 0,
|
||||||
|
last_purchase_date: null,
|
||||||
|
last_received_date: null
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate ABC classification
|
||||||
|
async function calculateABCClass(connection, productId) {
|
||||||
|
// Get total revenue for this product
|
||||||
|
const [productRevenue] = await connection.query(`
|
||||||
|
SELECT SUM(price * quantity) as revenue
|
||||||
|
FROM orders
|
||||||
|
WHERE product_id = ? AND canceled = false
|
||||||
|
`, [productId]);
|
||||||
|
|
||||||
|
// Get total revenue across all products
|
||||||
|
const [totalRevenue] = await connection.query(`
|
||||||
|
SELECT SUM(price * quantity) as total
|
||||||
|
FROM orders
|
||||||
|
WHERE canceled = false
|
||||||
|
`);
|
||||||
|
|
||||||
|
const revenue = productRevenue[0]?.revenue || 0;
|
||||||
|
const total = totalRevenue[0]?.total || 0;
|
||||||
|
|
||||||
|
if (total === 0) return 'C';
|
||||||
|
|
||||||
|
const percentage = (revenue / total) * 100;
|
||||||
|
|
||||||
|
// A: Top 20% of revenue
|
||||||
|
// B: Next 30% of revenue
|
||||||
|
// C: Remaining 50% of revenue
|
||||||
|
if (percentage >= 20) return 'A';
|
||||||
|
if (percentage >= 5) return 'B';
|
||||||
|
return 'C';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate time-based aggregates
|
||||||
|
async function calculateTimeAggregates(connection, productId) {
|
||||||
|
await connection.query(`
|
||||||
|
INSERT INTO product_time_aggregates (
|
||||||
|
product_id, year, month,
|
||||||
|
total_quantity_sold, total_revenue, total_cost,
|
||||||
|
order_count, stock_received, stock_ordered,
|
||||||
|
avg_price, profit_margin
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
o.product_id,
|
||||||
|
YEAR(o.date) as year,
|
||||||
|
MONTH(o.date) as month,
|
||||||
|
SUM(o.quantity) as total_quantity_sold,
|
||||||
|
SUM(o.price * o.quantity) as total_revenue,
|
||||||
|
SUM(p.cost_price * o.quantity) as total_cost,
|
||||||
|
COUNT(DISTINCT o.order_number) as order_count,
|
||||||
|
COALESCE(SUM(po.received), 0) as stock_received,
|
||||||
|
COALESCE(SUM(po.ordered), 0) as stock_ordered,
|
||||||
|
AVG(o.price) as avg_price,
|
||||||
|
CASE
|
||||||
|
WHEN SUM(o.price * o.quantity) = 0 THEN 0
|
||||||
|
ELSE ((SUM(o.price * o.quantity) - COALESCE(SUM(p.cost_price * o.quantity), 0)) /
|
||||||
|
NULLIF(SUM(o.price * o.quantity), 0) * 100)
|
||||||
|
END as profit_margin
|
||||||
|
FROM orders o
|
||||||
|
JOIN products p ON o.product_id = p.product_id
|
||||||
|
LEFT JOIN purchase_orders po ON o.product_id = po.product_id
|
||||||
|
AND YEAR(o.date) = YEAR(po.date)
|
||||||
|
AND MONTH(o.date) = MONTH(po.date)
|
||||||
|
WHERE o.product_id = ? AND o.canceled = false
|
||||||
|
GROUP BY o.product_id, YEAR(o.date), MONTH(o.date)
|
||||||
|
ON DUPLICATE KEY UPDATE
|
||||||
|
total_quantity_sold = VALUES(total_quantity_sold),
|
||||||
|
total_revenue = VALUES(total_revenue),
|
||||||
|
total_cost = VALUES(total_cost),
|
||||||
|
order_count = VALUES(order_count),
|
||||||
|
stock_received = VALUES(stock_received),
|
||||||
|
stock_ordered = VALUES(stock_ordered),
|
||||||
|
avg_price = VALUES(avg_price),
|
||||||
|
profit_margin = VALUES(profit_margin)
|
||||||
|
`, [productId]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to calculate vendor metrics
|
||||||
|
async function calculateVendorMetrics(connection) {
|
||||||
|
try {
|
||||||
|
// Get list of vendors
|
||||||
|
const [vendors] = await connection.query('SELECT DISTINCT vendor FROM products WHERE vendor IS NOT NULL');
|
||||||
|
const startTime = Date.now();
|
||||||
|
let current = 0;
|
||||||
|
const total = vendors.length;
|
||||||
|
|
||||||
|
outputProgress({
|
||||||
|
operation: 'Calculating vendor metrics',
|
||||||
|
current: 0,
|
||||||
|
total,
|
||||||
|
percentage: '0.0'
|
||||||
|
});
|
||||||
|
|
||||||
|
for (const { vendor } of vendors) {
|
||||||
|
// Calculate average lead time
|
||||||
|
const [leadTimeResult] = await connection.query(`
|
||||||
|
SELECT
|
||||||
|
AVG(DATEDIFF(received_date, date)) as avg_lead_time,
|
||||||
|
COUNT(*) as total_orders,
|
||||||
|
SUM(CASE WHEN ordered = received THEN 1 ELSE 0 END) as fulfilled_orders
|
||||||
|
FROM purchase_orders
|
||||||
|
WHERE vendor = ? AND status = 'closed'
|
||||||
|
GROUP BY vendor
|
||||||
|
`, [vendor]);
|
||||||
|
|
||||||
|
const metrics = leadTimeResult[0] || {
|
||||||
|
avg_lead_time: 0,
|
||||||
|
total_orders: 0,
|
||||||
|
fulfilled_orders: 0
|
||||||
|
};
|
||||||
|
|
||||||
|
// Calculate fill rate
|
||||||
|
const fillRate = metrics.total_orders > 0 ?
|
||||||
|
(metrics.fulfilled_orders / metrics.total_orders * 100) : 0;
|
||||||
|
|
||||||
|
// Update vendor metrics
|
||||||
|
await connection.query(`
|
||||||
|
INSERT INTO vendor_metrics (
|
||||||
|
vendor,
|
||||||
|
avg_lead_time_days,
|
||||||
|
total_orders,
|
||||||
|
fulfilled_orders,
|
||||||
|
fill_rate
|
||||||
|
) VALUES (?, ?, ?, ?, ?)
|
||||||
|
ON DUPLICATE KEY UPDATE
|
||||||
|
avg_lead_time_days = VALUES(avg_lead_time_days),
|
||||||
|
total_orders = VALUES(total_orders),
|
||||||
|
fulfilled_orders = VALUES(fulfilled_orders),
|
||||||
|
fill_rate = VALUES(fill_rate)
|
||||||
|
`, [
|
||||||
|
vendor,
|
||||||
|
metrics.avg_lead_time || 0,
|
||||||
|
metrics.total_orders,
|
||||||
|
metrics.fulfilled_orders,
|
||||||
|
fillRate
|
||||||
|
]);
|
||||||
|
|
||||||
|
current++;
|
||||||
|
updateProgress(current, total, 'Calculating vendor metrics', startTime);
|
||||||
|
}
|
||||||
|
|
||||||
|
outputProgress({
|
||||||
|
status: 'complete',
|
||||||
|
operation: 'Vendor metrics calculation completed',
|
||||||
|
current: total,
|
||||||
|
total,
|
||||||
|
percentage: '100.0'
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Error calculating vendor metrics');
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to update product metrics
|
||||||
|
async function updateProductMetrics(connection, productId, startTime, current, total) {
|
||||||
|
try {
|
||||||
|
// Calculate sales velocity metrics
|
||||||
|
const velocityMetrics = await calculateSalesVelocity(connection, productId);
|
||||||
|
|
||||||
|
// Calculate stock metrics
|
||||||
|
const stockMetrics = await calculateStockMetrics(connection, productId, velocityMetrics.daily_sales_avg);
|
||||||
|
|
||||||
|
// Calculate financial metrics
|
||||||
|
const financialMetrics = await calculateFinancialMetrics(connection, productId);
|
||||||
|
|
||||||
|
// Calculate purchase metrics
|
||||||
|
const purchaseMetrics = await calculatePurchaseMetrics(connection, productId);
|
||||||
|
|
||||||
|
// Update metrics in database
|
||||||
|
await connection.query(`
|
||||||
|
INSERT INTO product_metrics (
|
||||||
|
product_id,
|
||||||
|
daily_sales_avg,
|
||||||
|
weekly_sales_avg,
|
||||||
|
monthly_sales_avg,
|
||||||
|
days_of_inventory,
|
||||||
|
weeks_of_inventory,
|
||||||
|
safety_stock,
|
||||||
|
reorder_point,
|
||||||
|
total_revenue,
|
||||||
|
avg_margin_percent,
|
||||||
|
avg_lead_time_days,
|
||||||
|
last_purchase_date,
|
||||||
|
last_received_date
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
ON DUPLICATE KEY UPDATE
|
||||||
|
daily_sales_avg = VALUES(daily_sales_avg),
|
||||||
|
weekly_sales_avg = VALUES(weekly_sales_avg),
|
||||||
|
monthly_sales_avg = VALUES(monthly_sales_avg),
|
||||||
|
days_of_inventory = VALUES(days_of_inventory),
|
||||||
|
weeks_of_inventory = VALUES(weeks_of_inventory),
|
||||||
|
safety_stock = VALUES(safety_stock),
|
||||||
|
reorder_point = VALUES(reorder_point),
|
||||||
|
total_revenue = VALUES(total_revenue),
|
||||||
|
avg_margin_percent = VALUES(avg_margin_percent),
|
||||||
|
avg_lead_time_days = VALUES(avg_lead_time_days),
|
||||||
|
last_purchase_date = VALUES(last_purchase_date),
|
||||||
|
last_received_date = VALUES(last_received_date)
|
||||||
|
`, [
|
||||||
|
productId,
|
||||||
|
velocityMetrics.daily_sales_avg,
|
||||||
|
velocityMetrics.weekly_sales_avg,
|
||||||
|
velocityMetrics.monthly_sales_avg,
|
||||||
|
stockMetrics?.days_of_inventory || 0,
|
||||||
|
stockMetrics?.weeks_of_inventory || 0,
|
||||||
|
stockMetrics?.safety_stock || 0,
|
||||||
|
stockMetrics?.reorder_point || 0,
|
||||||
|
financialMetrics.total_revenue,
|
||||||
|
financialMetrics.avg_margin_percent,
|
||||||
|
purchaseMetrics.avg_lead_time_days,
|
||||||
|
purchaseMetrics.last_purchase_date,
|
||||||
|
purchaseMetrics.last_received_date
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Output progress every 5 products or every second
|
||||||
|
if (current % 5 === 0 || Date.now() - startTime > 1000) {
|
||||||
|
updateProgress(current, total, 'Calculating product metrics', startTime);
|
||||||
|
startTime = Date.now();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, `Error updating metrics for product ${productId}`);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async function importProducts(pool, filePath) {
|
async function importProducts(pool, filePath) {
|
||||||
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
|
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
|
||||||
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
|
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
|
||||||
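A worked example of the stock math above, with illustrative numbers (mirrors calculateStockMetrics):

// Illustrative numbers only.
const dailySalesAvg = 3.5;  // units sold per day
const stockQty = 120;       // units on hand

const daysOfInventory = Math.floor(stockQty / dailySalesAvg);    // 34 days of cover
const safetyStock = Math.ceil(dailySalesAvg * 14);               // 49 (two weeks of sales)
const reorderPoint = Math.ceil(safetyStock + dailySalesAvg * 7); // 74 (safety stock + one week)
// With 120 on hand (> 74 and < 3.5 * 90 = 315), inventory_health reports 'Healthy'.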
@@ -172,6 +508,10 @@ async function importProducts(pool, filePath) {
     let batch = [];
     let categoryUpdates = new Map(); // Store category updates for batch processing
+
+    // Get a connection from the pool that we'll reuse
+    const connection = await pool.getConnection();
+
+    try {
     for await (const record of parser) {
         if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
             // Process remaining batch
@@ -223,7 +563,6 @@ async function importProducts(pool, filePath) {
             uom: parseInt(record.uom) || 1
         });
 
-        // Store category updates for later
         if (record.categories) {
             categoryUpdates.set(record.product_id, record.categories);
         }
@@ -251,12 +590,19 @@ async function importProducts(pool, filePath) {
         duration: formatDuration((Date.now() - startTime) / 1000),
         percentage: '100'
     });
+    } catch (error) {
+        console.error('Error during products import:', error);
+        throw error;
+    } finally {
+        if (connection) {
+            connection.release();
+        }
+    }
 
 // Helper function to process a batch of records
 async function processBatch(records, categoryUpdates) {
     if (records.length === 0) return;
 
-    const connection = await pool.getConnection();
     try {
         await connection.beginTransaction();
         try {
@@ -301,12 +647,6 @@ async function importProducts(pool, filePath) {
 
             // Update stats
             if (result.affectedRows > 0) {
-                // For INSERT ... ON DUPLICATE KEY UPDATE:
-                // - If a row is inserted, affectedRows = 1
-                // - If a row is updated, affectedRows = 2
-                // So we can calculate:
-                // - Number of inserts = number of rows where affectedRows = 1
-                // - Number of updates = number of rows where affectedRows = 2
                 const insertCount = result.affectedRows - result.changedRows;
                 const updateCount = result.changedRows;
                 added += insertCount;
@@ -321,13 +661,12 @@ async function importProducts(pool, filePath) {
             await connection.commit();
         } catch (error) {
             await connection.rollback();
+            logError(error, `Error processing batch of ${records.length} records`);
             throw error;
         }
     } catch (error) {
-        console.error(`\nError processing batch:`, error.message);
+        logError(error, `Error in batch processing:\nFirst record: ${JSON.stringify(records[0])}`);
         // Continue with next batch instead of failing completely
-    } finally {
-        connection.release();
     }
 }
 }
@@ -674,9 +1013,11 @@ async function main() {
     });
 
     const startTime = Date.now();
-    const pool = mysql.createPool(dbConfig);
+    let pool;
+
     try {
+        pool = mysql.createPool(dbConfig);
+
         // Check if tables exist, if not create them
         outputProgress({
             operation: 'Checking database schema',
@@ -686,6 +1027,8 @@ async function main() {
         const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
         await pool.query(schemaSQL);
 
+        // Step 1: Import all data first
+        try {
         // Import products first since they're referenced by other tables
         await importProducts(pool, path.join(__dirname, '../csv/39f2x83-products.csv'));
 
@@ -700,21 +1043,85 @@ async function main() {
             importPurchaseOrders(pool, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'))
         ]);
+
+        // Step 2: Calculate all metrics after imports are complete
+        outputProgress({
+            operation: 'Starting metrics calculation',
+            message: 'Calculating metrics for all products and vendors...'
+        });
+
+        const connection = await pool.getConnection();
+        try {
+            // Calculate product metrics
+            const [products] = await connection.query('SELECT DISTINCT product_id FROM products');
+            const totalProducts = products.length;
+            let processedProducts = 0;
+            const metricsStartTime = Date.now();
+            let lastUpdate = Date.now();
+
+            outputProgress({
+                operation: 'Starting product metrics calculation',
+                message: `Calculating metrics for ${totalProducts} products...`,
+                current: 0,
+                total: totalProducts,
+                percentage: '0'
+            });
+
+            for (const product of products) {
+                try {
+                    // Update progress every 5 products or 1 second
+                    if (processedProducts % 5 === 0 || (Date.now() - lastUpdate) > 1000) {
+                        updateProgress(processedProducts, totalProducts, 'Calculating product metrics', metricsStartTime);
+                        lastUpdate = Date.now();
+                    }
+
+                    await updateProductMetrics(connection, product.product_id, metricsStartTime, processedProducts, totalProducts);
+                    processedProducts++;
+                } catch (error) {
+                    logError(error, `Error calculating metrics for product ${product.product_id}`);
+                    // Continue with next product instead of failing completely
+                }
+            }
+
+            outputProgress({
+                operation: 'Product metrics calculation completed',
+                current: processedProducts,
+                total: totalProducts,
+                duration: formatDuration((Date.now() - metricsStartTime) / 1000),
+                percentage: '100'
+            });
+
+            // Calculate vendor metrics
+            await calculateVendorMetrics(connection);
+        } finally {
+            connection.release();
+        }
+        } catch (error) {
+            logError(error, 'Error during import/metrics calculation');
+            throw error;
+        }
+
         outputProgress({
             status: 'complete',
             operation: 'Import process completed',
             duration: formatDuration((Date.now() - startTime) / 1000)
         });
     } catch (error) {
+        logError(error, 'Fatal error during import process');
         outputProgress({
             status: 'error',
             error: error.message
         });
         process.exit(1);
     } finally {
+        if (pool) {
         await pool.end();
+        }
     }
 }
 
 // Run the import
-main();
+main().catch(error => {
+    logError(error, 'Unhandled error in main process');
+    process.exit(1);
+});
inventory-server/scripts/restore-test-snapshot.js (new file, 129 lines)
@@ -0,0 +1,129 @@
+const fs = require('fs');
+const path = require('path');
+const mysql = require('mysql2/promise');
+const dotenv = require('dotenv');
+
+dotenv.config({ path: path.join(__dirname, '../.env') });
+
+const dbConfig = {
+    host: process.env.DB_HOST,
+    user: process.env.DB_USER,
+    password: process.env.DB_PASSWORD,
+    database: process.env.DB_NAME,
+    multipleStatements: true
+};
+
+const SNAPSHOTS_DIR = path.join(__dirname, '../snapshots');
+
+async function restoreSnapshot() {
+    console.log('Restoring test database from snapshot...');
+    const pool = mysql.createPool(dbConfig);
+
+    try {
+        // Read snapshot
+        const snapshotPath = path.join(SNAPSHOTS_DIR, 'test_snapshot.json');
+        if (!fs.existsSync(snapshotPath)) {
+            throw new Error('Snapshot file not found. Run create-test-snapshot.js first.');
+        }
+
+        const snapshot = JSON.parse(fs.readFileSync(snapshotPath, 'utf8'));
+
+        // First, create schema (this will drop existing tables)
+        const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
+        await pool.query(schemaSQL);
+
+        const connection = await pool.getConnection();
+        try {
+            await connection.beginTransaction();
+
+            // Insert categories first (they're referenced by product_categories)
+            if (snapshot.categories.length > 0) {
+                const categoryValues = snapshot.categories.map(c => [c.id, c.name, c.created_at]);
+                await connection.query(
+                    'INSERT INTO categories (id, name, created_at) VALUES ?',
+                    [categoryValues]
+                );
+            }
+
+            // Insert products
+            if (snapshot.products.length > 0) {
+                const productValues = snapshot.products.map(p => [
+                    p.product_id, p.title, p.SKU, p.created_at, p.stock_quantity,
+                    p.price, p.regular_price, p.cost_price, p.landing_cost_price,
+                    p.barcode, p.updated_at, p.visible, p.managing_stock,
+                    p.replenishable, p.vendor, p.vendor_reference, p.permalink,
+                    p.categories, p.image, p.brand, p.options, p.tags, p.moq, p.uom
+                ]);
+                await connection.query(
+                    'INSERT INTO products VALUES ?',
+                    [productValues]
+                );
+            }
+
+            // Insert product_categories relationships
+            if (snapshot.product_categories.length > 0) {
+                const pcValues = snapshot.product_categories.map(pc => [
+                    pc.product_id, pc.category_id
+                ]);
+                await connection.query(
+                    'INSERT INTO product_categories (product_id, category_id) VALUES ?',
+                    [pcValues]
+                );
+            }
+
+            // Insert orders
+            if (snapshot.orders.length > 0) {
+                const orderValues = snapshot.orders.map(o => [
+                    o.id, o.order_number, o.product_id, o.SKU, o.date,
+                    o.price, o.quantity, o.discount, o.tax, o.tax_included,
+                    o.shipping, o.customer, o.status, o.payment_method,
+                    o.shipping_method, o.shipping_address, o.billing_address,
+                    o.canceled
+                ]);
+                await connection.query(
+                    'INSERT INTO orders VALUES ?',
+                    [orderValues]
+                );
+            }
+
+            // Insert purchase orders
+            if (snapshot.purchase_orders.length > 0) {
+                const poValues = snapshot.purchase_orders.map(po => [
+                    po.id, po.po_id, po.vendor, po.date, po.expected_date,
+                    po.product_id, po.sku, po.cost_price, po.status, po.notes,
+                    po.ordered, po.received, po.received_date
+                ]);
+                await connection.query(
+                    'INSERT INTO purchase_orders VALUES ?',
+                    [poValues]
+                );
+            }
+
+            await connection.commit();
+
+            console.log('Snapshot restored successfully:');
+            console.log('Products:', snapshot.products.length);
+            console.log('Orders:', snapshot.orders.length);
+            console.log('Purchase Orders:', snapshot.purchase_orders.length);
+            console.log('Categories:', snapshot.categories.length);
+
+        } catch (error) {
+            await connection.rollback();
+            throw error;
+        } finally {
+            connection.release();
+        }
+    } catch (error) {
+        console.error('Error restoring snapshot:', error);
+        throw error;
+    } finally {
+        await pool.end();
+    }
+}
+
+// Restore snapshot if run directly
+if (require.main === module) {
+    restoreSnapshot().catch(console.error);
+}
+
+module.exports = { restoreSnapshot };
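Since restoreSnapshot is exported, a test suite can reset the database before running. A sketch assuming Jest-style hooks — the hook wiring is an assumption, not part of this commit:

const { restoreSnapshot } = require('./restore-test-snapshot');

// Rebuild the schema and reload the snapshot before the suite runs.
beforeAll(async () => {
    await restoreSnapshot();
}, 120000); // generous timeout: schema rebuild + bulk inserts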
@@ -58,6 +58,9 @@ export function Settings() {
         orders: 0,
         purchaseOrders: 0
     });
+    const [isCreatingSnapshot, setIsCreatingSnapshot] = useState(false);
+    const [isRestoringSnapshot, setIsRestoringSnapshot] = useState(false);
+    const [snapshotProgress, setSnapshotProgress] = useState<ImportProgress | null>(null);
 
     // Helper function to update progress state
     const updateProgressState = (progressData: any) => {
@@ -87,11 +90,15 @@ export function Settings() {
             setImportProgress(prev => ({ ...prev, ...progressUpdate }));
         } else if (operation.includes('purchase orders import')) {
             setPurchaseOrdersProgress(prev => ({ ...prev, ...progressUpdate }));
+        } else if (operation.includes('metrics') || operation.includes('vendor metrics')) {
+            setImportProgress(prev => ({ ...prev, ...progressUpdate }));
+        } else if (operation.includes('snapshot')) {
+            setSnapshotProgress(prev => ({ ...prev, ...progressUpdate }));
         }
     };
 
     // Helper to connect to event source
-    const connectToEventSource = useCallback((type: 'update' | 'import' | 'reset') => {
+    const connectToEventSource = useCallback((type: 'update' | 'import' | 'reset' | 'snapshot') => {
         if (eventSource) {
             eventSource.close();
         }
@@ -389,6 +396,63 @@ export function Settings() {
     }
 };
+
+// Add handlers for snapshot operations
+const handleCreateSnapshot = async () => {
+    try {
+        setIsCreatingSnapshot(true);
+        setSnapshotProgress({ status: 'running', operation: 'Creating test data snapshot' });
+
+        // Connect to SSE for progress updates
+        connectToEventSource('snapshot');
+
+        const response = await fetch(`${config.apiUrl}/snapshot/create`, {
+            method: 'POST',
+            credentials: 'include'
+        });
+
+        if (!response.ok) {
+            throw new Error('Failed to create snapshot');
+        }
+    } catch (error) {
+        console.error('Error creating snapshot:', error);
+        if (eventSource) {
+            eventSource.close();
+            setEventSource(null);
+        }
+        setIsCreatingSnapshot(false);
+        setSnapshotProgress(null);
+        toast.error('Failed to create snapshot');
+    }
+};
+
+const handleRestoreSnapshot = async () => {
+    try {
+        setIsRestoringSnapshot(true);
+        setSnapshotProgress({ status: 'running', operation: 'Restoring test data snapshot' });
+
+        // Connect to SSE for progress updates
+        connectToEventSource('snapshot');
+
+        const response = await fetch(`${config.apiUrl}/snapshot/restore`, {
+            method: 'POST',
+            credentials: 'include'
+        });
+
+        if (!response.ok) {
+            throw new Error('Failed to restore snapshot');
+        }
+    } catch (error) {
+        console.error('Error restoring snapshot:', error);
+        if (eventSource) {
+            eventSource.close();
+            setEventSource(null);
+        }
+        setIsRestoringSnapshot(false);
+        setSnapshotProgress(null);
+        toast.error('Failed to restore snapshot');
+    }
+};
+
 // Cleanup on unmount
 useEffect(() => {
     return () => {
@@ -625,6 +689,30 @@ export function Settings() {
             {renderProgress(purchaseOrdersProgress)}
         </div>
     )}
+    {/* Show metrics calculation progress */}
+    {importProgress?.operation?.toLowerCase().includes('metrics') && (
+        <div>
+            <Progress value={Number(importProgress.percentage)} className="mb-2" />
+            <p className="text-sm text-muted-foreground">
+                {importProgress.message || importProgress.operation || 'Calculating metrics...'}
+                {importProgress.current != null && importProgress.total != null && (
+                    <> ({importProgress.current} of {importProgress.total})</>
+                )}
+            </p>
+            {importProgress.elapsed && (
+                <p className="text-xs text-muted-foreground">
+                    Elapsed: {importProgress.elapsed}
+                    {importProgress.remaining && <> • Remaining: {importProgress.remaining}</>}
+                </p>
+            )}
+        </div>
+    )}
+    {/* Show vendor metrics progress */}
+    {importProgress?.operation?.toLowerCase().includes('vendor metrics') && (
+        <div>
+            {renderProgress(importProgress)}
+        </div>
+    )}
 </div>
 )}
 </CardContent>
@@ -637,14 +725,9 @@ export function Settings() {
     <CardDescription>Drop all tables and recreate the database schema. This will delete ALL data.</CardDescription>
 </CardHeader>
 <CardContent>
-    <div className="flex gap-2">
     <AlertDialog>
         <AlertDialogTrigger asChild>
-            <Button
-                variant="destructive"
-                className="flex-1"
-                disabled={isUpdating || isImporting}
-            >
+            <Button variant="destructive" disabled={isResetting || isImporting || isUpdating}>
                 Reset Database
             </Button>
         </AlertDialogTrigger>
@@ -652,21 +735,94 @@ export function Settings() {
         <AlertDialogHeader>
             <AlertDialogTitle>Are you absolutely sure?</AlertDialogTitle>
             <AlertDialogDescription>
-                This action cannot be undone. This will permanently delete all data
-                from the database and reset it to its initial state.
+                This action cannot be undone. This will permanently delete all data from the database.
             </AlertDialogDescription>
         </AlertDialogHeader>
         <AlertDialogFooter>
             <AlertDialogCancel>Cancel</AlertDialogCancel>
-            <AlertDialogAction onClick={handleResetDB}>
-                Reset Database
-            </AlertDialogAction>
+            <AlertDialogAction onClick={handleResetDB}>Continue</AlertDialogAction>
+        </AlertDialogFooter>
+    </AlertDialogContent>
+</AlertDialog>
+{resetProgress && (
+    <div className="mt-4">
+        <Progress value={Number(resetProgress.percentage)} className="mb-2" />
+        <p className="text-sm text-muted-foreground">
+            {resetProgress.message || 'Resetting database...'}
+        </p>
+    </div>
+)}
+</CardContent>
+</Card>
+
+{/* Test Data Snapshots Card */}
+<Card>
+    <CardHeader>
+        <CardTitle>Test Data Snapshots</CardTitle>
+        <CardDescription>Create and restore test data snapshots for development and testing.</CardDescription>
+    </CardHeader>
+    <CardContent className="space-y-4">
+        <div className="flex space-x-4">
+            <Button
+                onClick={handleCreateSnapshot}
+                disabled={isCreatingSnapshot || isRestoringSnapshot || isImporting || isUpdating || isResetting}
+            >
+                {isCreatingSnapshot ? (
+                    <>
+                        <Loader2 className="mr-2 h-4 w-4 animate-spin" />
+                        Creating Snapshot...
+                    </>
+                ) : (
+                    <>Create Snapshot</>
+                )}
+            </Button>
+            <AlertDialog>
+                <AlertDialogTrigger asChild>
+                    <Button
+                        variant="secondary"
+                        disabled={isCreatingSnapshot || isRestoringSnapshot || isImporting || isUpdating || isResetting}
+                    >
+                        {isRestoringSnapshot ? (
+                            <>
+                                <Loader2 className="mr-2 h-4 w-4 animate-spin" />
+                                Restoring...
+                            </>
+                        ) : (
+                            <>Restore Snapshot</>
+                        )}
+                    </Button>
+                </AlertDialogTrigger>
+                <AlertDialogContent>
+                    <AlertDialogHeader>
+                        <AlertDialogTitle>Restore test data snapshot?</AlertDialogTitle>
+                        <AlertDialogDescription>
+                            This will replace your current database with the test data snapshot. Any unsaved changes will be lost.
+                        </AlertDialogDescription>
+                    </AlertDialogHeader>
+                    <AlertDialogFooter>
+                        <AlertDialogCancel>Cancel</AlertDialogCancel>
+                        <AlertDialogAction onClick={handleRestoreSnapshot}>Continue</AlertDialogAction>
                     </AlertDialogFooter>
                 </AlertDialogContent>
             </AlertDialog>
         </div>
-        {isResetting && renderProgress(resetProgress)}
+        {snapshotProgress && (
+            <div>
+                <Progress value={Number(snapshotProgress.percentage)} className="mb-2" />
+                <p className="text-sm text-muted-foreground">
+                    {snapshotProgress.message || 'Processing snapshot...'}
+                </p>
+            </div>
+        )}
+        <div className="text-sm text-muted-foreground">
+            <p>The test data snapshot includes:</p>
+            <ul className="list-disc list-inside mt-2">
+                <li>~100 diverse products with associated data</li>
+                <li>Orders from the last 6 months</li>
+                <li>Purchase orders from the last 6 months</li>
+                <li>Categories and product relationships</li>
+            </ul>
+        </div>
     </CardContent>
 </Card>