Integrate config tables into existing scripts, add new config tables and settings pages
@@ -8,6 +8,8 @@ CREATE TABLE IF NOT EXISTS stock_thresholds (
critical_days INT NOT NULL DEFAULT 7,
reorder_days INT NOT NULL DEFAULT 14,
overstock_days INT NOT NULL DEFAULT 90,
low_stock_threshold INT NOT NULL DEFAULT 5,
min_reorder_quantity INT NOT NULL DEFAULT 1,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
@@ -15,7 +17,75 @@ CREATE TABLE IF NOT EXISTS stock_thresholds (
UNIQUE KEY unique_category_vendor (category_id, vendor)
);

-- Insert default thresholds with ID=1 if not exists
-- Lead time threshold configurations
CREATE TABLE IF NOT EXISTS lead_time_thresholds (
id INT NOT NULL,
category_id BIGINT, -- NULL means default/global threshold
vendor VARCHAR(100), -- NULL means applies to all vendors
target_days INT NOT NULL DEFAULT 14,
warning_days INT NOT NULL DEFAULT 21,
critical_days INT NOT NULL DEFAULT 30,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor)
);

-- Sales velocity window configurations
CREATE TABLE IF NOT EXISTS sales_velocity_config (
id INT NOT NULL,
category_id BIGINT, -- NULL means default/global threshold
vendor VARCHAR(100), -- NULL means applies to all vendors
daily_window_days INT NOT NULL DEFAULT 30,
weekly_window_days INT NOT NULL DEFAULT 7,
monthly_window_days INT NOT NULL DEFAULT 90,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor)
);

-- ABC Classification configurations
CREATE TABLE IF NOT EXISTS abc_classification_config (
id INT NOT NULL PRIMARY KEY,
a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0,
b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0,
classification_period_days INT NOT NULL DEFAULT 90,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);

-- Safety stock configurations
CREATE TABLE IF NOT EXISTS safety_stock_config (
id INT NOT NULL,
category_id BIGINT, -- NULL means default/global threshold
vendor VARCHAR(100), -- NULL means applies to all vendors
coverage_days INT NOT NULL DEFAULT 14,
service_level DECIMAL(5,2) NOT NULL DEFAULT 95.0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor)
);

-- Turnover rate configurations
CREATE TABLE IF NOT EXISTS turnover_config (
id INT NOT NULL,
category_id BIGINT, -- NULL means default/global threshold
vendor VARCHAR(100), -- NULL means applies to all vendors
calculation_period_days INT NOT NULL DEFAULT 30,
target_rate DECIMAL(10,2) NOT NULL DEFAULT 1.0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor)
);

-- Insert default global thresholds if not exists
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (1, NULL, NULL, 7, 14, 90)
ON DUPLICATE KEY UPDATE
@@ -23,6 +93,39 @@ ON DUPLICATE KEY UPDATE
reorder_days = VALUES(reorder_days),
overstock_days = VALUES(overstock_days);

INSERT INTO lead_time_thresholds (id, category_id, vendor, target_days, warning_days, critical_days)
VALUES (1, NULL, NULL, 14, 21, 30)
ON DUPLICATE KEY UPDATE
target_days = VALUES(target_days),
warning_days = VALUES(warning_days),
critical_days = VALUES(critical_days);

INSERT INTO sales_velocity_config (id, category_id, vendor, daily_window_days, weekly_window_days, monthly_window_days)
VALUES (1, NULL, NULL, 30, 7, 90)
ON DUPLICATE KEY UPDATE
daily_window_days = VALUES(daily_window_days),
weekly_window_days = VALUES(weekly_window_days),
monthly_window_days = VALUES(monthly_window_days);

INSERT INTO abc_classification_config (id, a_threshold, b_threshold, classification_period_days)
VALUES (1, 20.0, 50.0, 90)
ON DUPLICATE KEY UPDATE
a_threshold = VALUES(a_threshold),
b_threshold = VALUES(b_threshold),
classification_period_days = VALUES(classification_period_days);

INSERT INTO safety_stock_config (id, category_id, vendor, coverage_days, service_level)
VALUES (1, NULL, NULL, 14, 95.0)
ON DUPLICATE KEY UPDATE
coverage_days = VALUES(coverage_days),
service_level = VALUES(service_level);

INSERT INTO turnover_config (id, category_id, vendor, calculation_period_days, target_rate)
VALUES (1, NULL, NULL, 30, 1.0)
ON DUPLICATE KEY UPDATE
calculation_period_days = VALUES(calculation_period_days),
target_rate = VALUES(target_rate);

-- View to show thresholds with category names
CREATE OR REPLACE VIEW stock_thresholds_view AS
SELECT

@@ -1,17 +0,0 @@
-- Indexes for orders table
CREATE INDEX IF NOT EXISTS idx_orders_product_date ON orders(product_id, date);
CREATE INDEX IF NOT EXISTS idx_orders_date ON orders(date);

-- Indexes for purchase_orders table
CREATE INDEX IF NOT EXISTS idx_po_product_date ON purchase_orders(product_id, date);
CREATE INDEX IF NOT EXISTS idx_po_product_status ON purchase_orders(product_id, status);
CREATE INDEX IF NOT EXISTS idx_po_vendor ON purchase_orders(vendor);

-- Indexes for product_metrics table
CREATE INDEX IF NOT EXISTS idx_metrics_revenue ON product_metrics(total_revenue);

-- Indexes for stock_thresholds table
CREATE INDEX IF NOT EXISTS idx_thresholds_category_vendor ON stock_thresholds(category_id, vendor);

-- Indexes for product_categories table
CREATE INDEX IF NOT EXISTS idx_product_categories_both ON product_categories(product_id, category_id);
@@ -155,7 +155,142 @@ async function calculateMetrics() {
const metricsUpdates = [];
for (const product of products) {
try {
// Calculate sales metrics with trends
// Get configuration values for this product
const [configs] = await connection.query(`
WITH product_info AS (
SELECT
p.product_id,
p.vendor,
pc.category_id
FROM products p
LEFT JOIN product_categories pc ON p.product_id = pc.product_id
WHERE p.product_id = ?
),
threshold_options AS (
SELECT
st.*,
CASE
WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1 -- Category + vendor match
WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2 -- Category match
WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3 -- Vendor match
WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4 -- Default
ELSE 5
END as priority
FROM product_info pi
CROSS JOIN stock_thresholds st
WHERE (st.category_id = pi.category_id OR st.category_id IS NULL)
AND (st.vendor = pi.vendor OR st.vendor IS NULL)
),
velocity_options AS (
SELECT
sv.*,
CASE
WHEN sv.category_id = pi.category_id AND sv.vendor = pi.vendor THEN 1
WHEN sv.category_id = pi.category_id AND sv.vendor IS NULL THEN 2
WHEN sv.category_id IS NULL AND sv.vendor = pi.vendor THEN 3
WHEN sv.category_id IS NULL AND sv.vendor IS NULL THEN 4
ELSE 5
END as priority
FROM product_info pi
CROSS JOIN sales_velocity_config sv
WHERE (sv.category_id = pi.category_id OR sv.category_id IS NULL)
AND (sv.vendor = pi.vendor OR sv.vendor IS NULL)
),
safety_options AS (
SELECT
ss.*,
CASE
WHEN ss.category_id = pi.category_id AND ss.vendor = pi.vendor THEN 1
WHEN ss.category_id = pi.category_id AND ss.vendor IS NULL THEN 2
WHEN ss.category_id IS NULL AND ss.vendor = pi.vendor THEN 3
WHEN ss.category_id IS NULL AND ss.vendor IS NULL THEN 4
ELSE 5
END as priority
FROM product_info pi
CROSS JOIN safety_stock_config ss
WHERE (ss.category_id = pi.category_id OR ss.category_id IS NULL)
AND (ss.vendor = pi.vendor OR ss.vendor IS NULL)
)
SELECT
-- Stock thresholds
COALESCE(
(SELECT critical_days
FROM threshold_options
ORDER BY priority LIMIT 1),
7
) as critical_days,
COALESCE(
(SELECT reorder_days
FROM threshold_options
ORDER BY priority LIMIT 1),
14
) as reorder_days,
COALESCE(
(SELECT overstock_days
FROM threshold_options
ORDER BY priority LIMIT 1),
90
) as overstock_days,
COALESCE(
(SELECT low_stock_threshold
FROM threshold_options
ORDER BY priority LIMIT 1),
5
) as low_stock_threshold,
-- Sales velocity windows
COALESCE(
(SELECT daily_window_days
FROM velocity_options
ORDER BY priority LIMIT 1),
30
) as daily_window_days,
COALESCE(
(SELECT weekly_window_days
FROM velocity_options
ORDER BY priority LIMIT 1),
7
) as weekly_window_days,
COALESCE(
(SELECT monthly_window_days
FROM velocity_options
ORDER BY priority LIMIT 1),
90
) as monthly_window_days,
-- Safety stock config
COALESCE(
(SELECT coverage_days
FROM safety_options
ORDER BY priority LIMIT 1),
14
) as safety_stock_days,
COALESCE(
(SELECT service_level
FROM safety_options
ORDER BY priority LIMIT 1),
95.0
) as service_level,
-- ABC Classification
(SELECT a_threshold FROM abc_classification_config WHERE id = 1) as abc_a_threshold,
(SELECT b_threshold FROM abc_classification_config WHERE id = 1) as abc_b_threshold,
(SELECT classification_period_days FROM abc_classification_config WHERE id = 1) as abc_period_days
`, [product.product_id]);

const config = configs[0] || {
critical_days: 7,
reorder_days: 14,
overstock_days: 90,
low_stock_threshold: 5,
daily_window_days: 30,
weekly_window_days: 7,
monthly_window_days: 90,
safety_stock_days: 14,
service_level: 95.0,
abc_a_threshold: 20.0,
abc_b_threshold: 50.0,
abc_period_days: 90
};

// Calculate sales metrics with trends using configured windows
const [salesMetrics] = await connection.query(`
WITH sales_summary AS (
SELECT
@@ -166,9 +301,10 @@ async function calculateMetrics() {
MIN(o.date) as first_sale_date,
COUNT(DISTINCT o.order_number) as number_of_orders,
AVG(o.quantity) as avg_quantity_per_order,
-- Calculate rolling averages
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY) THEN o.quantity ELSE 0 END) as last_7_days_qty
-- Calculate rolling averages using configured windows
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_7_days_qty,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_month_qty
FROM orders o
JOIN products p ON o.product_id = p.product_id
WHERE o.canceled = 0 AND o.product_id = ?
@@ -182,11 +318,20 @@ async function calculateMetrics() {
first_sale_date,
number_of_orders,
avg_quantity_per_order,
last_30_days_qty / 30 as rolling_daily_avg,
last_7_days_qty / 7 as rolling_weekly_avg,
last_30_days_qty / ? as rolling_daily_avg,
last_7_days_qty / ? as rolling_weekly_avg,
last_month_qty / ? as rolling_monthly_avg,
total_quantity_sold as total_sales_to_date
FROM sales_summary
`, [product.product_id]).catch(err => {
`, [
config.daily_window_days,
config.weekly_window_days,
config.monthly_window_days,
product.product_id,
config.daily_window_days,
config.weekly_window_days,
config.monthly_window_days
]).catch(err => {
logError(err, `Failed to calculate sales metrics for product ${product.product_id}`);
throw err;
});
@@ -307,9 +452,14 @@ async function calculateMetrics() {

// Calculate stock status using configurable thresholds with proper handling of zero sales
const stock_status = daily_sales_avg === 0 ? 'New' :
stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) ? 'Critical' :
stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) ? 'Reorder' :
stock.stock_quantity > Math.max(1, daily_sales_avg * threshold.overstock_days) ? 'Overstocked' : 'Healthy';
stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * config.critical_days)) ? 'Critical' :
stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) ? 'Reorder' :
stock.stock_quantity > Math.max(1, daily_sales_avg * config.overstock_days) ? 'Overstocked' : 'Healthy';

// Calculate safety stock using configured values
const safety_stock = daily_sales_avg > 0 ?
Math.max(1, Math.ceil(daily_sales_avg * config.safety_stock_days * (config.service_level / 100))) :
null;

// Add to batch update
metricsUpdates.push([
@@ -323,8 +473,8 @@ async function calculateMetrics() {
metrics.last_sale_date || null,
daily_sales_avg > 0 ? stock.stock_quantity / daily_sales_avg : null,
weekly_sales_avg > 0 ? stock.stock_quantity / weekly_sales_avg : null,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) : null,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) : null,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) : null,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * config.critical_days)) : null,
margin_percent,
metrics.total_revenue || 0,
inventory_value || 0,
@@ -403,21 +553,21 @@ async function calculateMetrics() {
percentage: '100'
});

// Calculate ABC classification
// Calculate ABC classification using configured thresholds
await connection.query(`
WITH revenue_rankings AS (
SELECT
product_id,
total_revenue,
PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) as revenue_rank
PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) * 100 as revenue_percentile
FROM product_metrics
),
classification_update AS (
SELECT
product_id,
CASE
WHEN revenue_rank <= 0.2 THEN 'A'
WHEN revenue_rank <= 0.5 THEN 'B'
WHEN revenue_percentile <= ? THEN 'A'
WHEN revenue_percentile <= ? THEN 'B'
ELSE 'C'
END as abc_class
FROM revenue_rankings
@@ -426,7 +576,7 @@ async function calculateMetrics() {
JOIN classification_update cu ON pm.product_id = cu.product_id
SET pm.abc_class = cu.abc_class,
pm.last_calculated_at = NOW()
`);
`, [config.abc_a_threshold, config.abc_b_threshold]);

// Update progress for time-based aggregates
outputProgress({

@@ -33,6 +33,11 @@ const CORE_TABLES = [
'product_categories'
];

// Config tables that must be created
const CONFIG_TABLES = [
'stock_thresholds'
];

// Split SQL into individual statements
function splitSQLStatements(sql) {
// First, normalize line endings
@@ -361,6 +366,95 @@ async function resetDatabase() {
message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
});

// Read and execute config schema
outputProgress({
operation: 'Running config setup',
message: 'Creating configuration tables...'
});
const configSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/config-schema.sql'),
'utf8'
);

// Execute config schema statements one at a time
const configStatements = splitSQLStatements(configSchemaSQL);
outputProgress({
operation: 'Config SQL Execution',
message: {
totalStatements: configStatements.length,
statements: configStatements.map((stmt, i) => ({
number: i + 1,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
}))
}
});

for (let i = 0; i < configStatements.length; i++) {
const stmt = configStatements[i];
try {
const [result, fields] = await connection.query(stmt);

// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'Config SQL Warning',
statement: i + 1,
warnings: warnings
});
}

outputProgress({
operation: 'Config SQL Progress',
message: {
statement: i + 1,
total: configStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
affectedRows: result.affectedRows
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'Config SQL Error',
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
}

// Verify config tables were created
const [showConfigTables] = await connection.query('SHOW TABLES');
const existingConfigTables = showConfigTables.map(t => Object.values(t)[0]);

outputProgress({
operation: 'Config tables verification',
message: {
found: existingConfigTables,
expected: CONFIG_TABLES
}
});

const missingConfigTables = CONFIG_TABLES.filter(
t => !existingConfigTables.includes(t)
);

if (missingConfigTables.length > 0) {
throw new Error(
`Failed to create config tables: ${missingConfigTables.join(', ')}`
);
}

outputProgress({
operation: 'Config tables created',
message: `Successfully created tables: ${CONFIG_TABLES.join(', ')}`
});

// Read and execute metrics schema (metrics tables)
outputProgress({
operation: 'Running metrics setup',

@@ -24,6 +24,11 @@ const METRICS_TABLES = [
'vendor_metrics'
];

// Config tables that must exist
const CONFIG_TABLES = [
'stock_thresholds'
];

// Core tables that must exist
const REQUIRED_CORE_TABLES = [
'products',
@@ -129,10 +134,23 @@ async function resetMetrics() {
// the metrics tables and indexes in one shot
await connection.query(schemaSQL);

// Read and execute config schema
outputProgress({
status: 'running',
operation: 'Creating configuration tables',
percentage: '60'
});
const configSchemaPath = path.join(__dirname, '../db/config-schema.sql');
const configSchemaSQL = fs.readFileSync(configSchemaPath, 'utf8');

// Run the config schema
await connection.query(configSchemaSQL);

// Verify all tables were actually created using SHOW TABLES
const [verifyTables] = await connection.query('SHOW TABLES');
const tablesAfterCreation = verifyTables.map(t => Object.values(t)[0]);

// First verify metrics tables
outputProgress({
status: 'running',
operation: 'Verifying metrics tables',
@@ -142,13 +160,33 @@ async function resetMetrics() {
}
});

const missingTables = METRICS_TABLES.filter(
const missingMetricsTables = METRICS_TABLES.filter(
t => !tablesAfterCreation.includes(t)
);

if (missingTables.length > 0) {
if (missingMetricsTables.length > 0) {
throw new Error(
`Failed to create tables: ${missingTables.join(', ')}`
`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`
);
}

// Then verify config tables
outputProgress({
status: 'running',
operation: 'Verifying config tables',
message: {
found: tablesAfterCreation,
required: CONFIG_TABLES
}
});

const missingConfigTables = CONFIG_TABLES.filter(
t => !tablesAfterCreation.includes(t)
);

if (missingConfigTables.length > 0) {
throw new Error(
`Failed to create config tables: ${missingConfigTables.join(', ')}`
);
}

@@ -157,7 +195,7 @@ async function resetMetrics() {

outputProgress({
status: 'complete',
operation: 'Metrics tables have been reset',
operation: 'Metrics and config tables have been reset',
percentage: '100'
});

@@ -189,6 +189,21 @@ router.get('/stock', async (req, res) => {
try {
const pool = req.app.locals.pool;

// Get global configuration values
const [configs] = await pool.query(`
SELECT
st.low_stock_threshold,
tc.calculation_period_days as turnover_period
FROM stock_thresholds st
CROSS JOIN turnover_config tc
WHERE st.id = 1 AND tc.id = 1
`);

const config = configs[0] || {
low_stock_threshold: 5,
turnover_period: 30
};

// Get turnover by category
const [turnoverByCategory] = await pool.query(`
SELECT
@@ -200,48 +215,84 @@ router.get('/stock', async (req, res) => {
LEFT JOIN orders o ON p.product_id = o.product_id
JOIN product_categories pc ON p.product_id = pc.product_id
JOIN categories c ON pc.category_id = c.id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
GROUP BY c.name
HAVING turnoverRate > 0
ORDER BY turnoverRate DESC
LIMIT 10
`);
`, [config.turnover_period]);

// Get stock levels over time (last 30 days)
// Get stock levels over time
const [stockLevels] = await pool.query(`
SELECT
DATE_FORMAT(o.date, '%Y-%m-%d') as date,
SUM(CASE WHEN p.stock_quantity > 5 THEN 1 ELSE 0 END) as inStock,
SUM(CASE WHEN p.stock_quantity <= 5 AND p.stock_quantity > 0 THEN 1 ELSE 0 END) as lowStock,
SUM(CASE WHEN p.stock_quantity > ? THEN 1 ELSE 0 END) as inStock,
SUM(CASE WHEN p.stock_quantity <= ? AND p.stock_quantity > 0 THEN 1 ELSE 0 END) as lowStock,
SUM(CASE WHEN p.stock_quantity = 0 THEN 1 ELSE 0 END) as outOfStock
FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d')
ORDER BY date
`);
`, [
config.low_stock_threshold,
config.low_stock_threshold,
config.turnover_period
]);

// Get critical stock items
const [criticalItems] = await pool.query(`
WITH product_thresholds AS (
SELECT
p.product_id,
COALESCE(
(SELECT reorder_days
FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor = p.vendor LIMIT 1),
(SELECT reorder_days
FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id
WHERE pc.product_id = p.product_id
AND st.vendor IS NULL LIMIT 1),
(SELECT reorder_days
FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor = p.vendor LIMIT 1),
(SELECT reorder_days
FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor IS NULL LIMIT 1),
14
) as reorder_days
FROM products p
)
SELECT
p.title as product,
p.SKU as sku,
p.stock_quantity as stockQuantity,
GREATEST(ROUND(AVG(o.quantity) * 7), 5) as reorderPoint,
GREATEST(ROUND(AVG(o.quantity) * pt.reorder_days), ?) as reorderPoint,
ROUND(SUM(o.quantity) / NULLIF(p.stock_quantity, 0), 1) as turnoverRate,
CASE
WHEN p.stock_quantity = 0 THEN 0
ELSE ROUND(p.stock_quantity / NULLIF((SUM(o.quantity) / 30), 0))
ELSE ROUND(p.stock_quantity / NULLIF((SUM(o.quantity) / ?), 0))
END as daysUntilStockout
FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
JOIN product_thresholds pt ON p.product_id = pt.product_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
AND p.managing_stock = true
GROUP BY p.product_id
HAVING daysUntilStockout < 30 AND daysUntilStockout >= 0
HAVING daysUntilStockout < ? AND daysUntilStockout >= 0
ORDER BY daysUntilStockout
LIMIT 10
`);
`, [
config.low_stock_threshold,
config.turnover_period,
config.turnover_period,
config.turnover_period
]);

res.json({ turnoverByCategory, stockLevels, criticalItems });
} catch (error) {

@@ -267,12 +267,27 @@ router.get('/trending-products', async (req, res) => {
router.get('/inventory-metrics', async (req, res) => {
const pool = req.app.locals.pool;
try {
// Get global configuration values
const [configs] = await pool.query(`
SELECT
st.low_stock_threshold,
tc.calculation_period_days as turnover_period
FROM stock_thresholds st
CROSS JOIN turnover_config tc
WHERE st.id = 1 AND tc.id = 1
`);

const config = configs[0] || {
low_stock_threshold: 5,
turnover_period: 30
};

// Get stock levels by category
const [stockLevels] = await pool.query(`
SELECT
c.name as category,
SUM(CASE WHEN stock_quantity > 5 THEN 1 ELSE 0 END) as inStock,
SUM(CASE WHEN stock_quantity > 0 AND stock_quantity <= 5 THEN 1 ELSE 0 END) as lowStock,
SUM(CASE WHEN stock_quantity > ? THEN 1 ELSE 0 END) as inStock,
SUM(CASE WHEN stock_quantity > 0 AND stock_quantity <= ? THEN 1 ELSE 0 END) as lowStock,
SUM(CASE WHEN stock_quantity = 0 THEN 1 ELSE 0 END) as outOfStock
FROM products p
JOIN product_categories pc ON p.product_id = pc.product_id
@@ -280,7 +295,7 @@ router.get('/inventory-metrics', async (req, res) => {
WHERE visible = true
GROUP BY c.name
ORDER BY c.name ASC
`);
`, [config.low_stock_threshold, config.low_stock_threshold]);

// Get top vendors with product counts and average stock
const [topVendors] = await pool.query(`
@@ -298,7 +313,6 @@ router.get('/inventory-metrics', async (req, res) => {
`);

// Calculate stock turnover rate by category
// Turnover = Units sold in last 30 days / Average inventory level
const [stockTurnover] = await pool.query(`
WITH CategorySales AS (
SELECT
@@ -309,7 +323,7 @@ router.get('/inventory-metrics', async (req, res) => {
JOIN product_categories pc ON p.product_id = pc.product_id
JOIN categories c ON pc.category_id = c.id
WHERE o.canceled = false
AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
GROUP BY c.name
),
CategoryStock AS (
@@ -331,25 +345,9 @@ router.get('/inventory-metrics', async (req, res) => {
FROM CategorySales cs
JOIN CategoryStock cst ON cs.category = cst.category
ORDER BY rate DESC
`);
`, [config.turnover_period]);

res.json({
stockLevels: stockLevels.map(row => ({
...row,
inStock: parseInt(row.inStock || 0),
lowStock: parseInt(row.lowStock || 0),
outOfStock: parseInt(row.outOfStock || 0)
})),
topVendors: topVendors.map(row => ({
vendor: row.vendor,
productCount: parseInt(row.productCount || 0),
averageStockLevel: parseFloat(row.averageStockLevel || 0)
})),
stockTurnover: stockTurnover.map(row => ({
category: row.category,
rate: parseFloat(row.rate || 0)
}))
});
res.json({ stockLevels, topVendors, stockTurnover });
} catch (error) {
console.error('Error fetching inventory metrics:', error);
res.status(500).json({ error: 'Failed to fetch inventory metrics' });