Integrate config tables into existing scripts, add new config tables and settings pages
@@ -155,7 +155,142 @@ async function calculateMetrics() {
   const metricsUpdates = [];
   for (const product of products) {
     try {
-      // Calculate sales metrics with trends
+      // Get configuration values for this product
+      const [configs] = await connection.query(`
+        WITH product_info AS (
+          SELECT
+            p.product_id,
+            p.vendor,
+            pc.category_id
+          FROM products p
+          LEFT JOIN product_categories pc ON p.product_id = pc.product_id
+          WHERE p.product_id = ?
+        ),
+        threshold_options AS (
+          SELECT
+            st.*,
+            CASE
+              WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1 -- Category + vendor match
+              WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2 -- Category match
+              WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3 -- Vendor match
+              WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4 -- Default
+              ELSE 5
+            END as priority
+          FROM product_info pi
+          CROSS JOIN stock_thresholds st
+          WHERE (st.category_id = pi.category_id OR st.category_id IS NULL)
+            AND (st.vendor = pi.vendor OR st.vendor IS NULL)
+        ),
+        velocity_options AS (
+          SELECT
+            sv.*,
+            CASE
+              WHEN sv.category_id = pi.category_id AND sv.vendor = pi.vendor THEN 1
+              WHEN sv.category_id = pi.category_id AND sv.vendor IS NULL THEN 2
+              WHEN sv.category_id IS NULL AND sv.vendor = pi.vendor THEN 3
+              WHEN sv.category_id IS NULL AND sv.vendor IS NULL THEN 4
+              ELSE 5
+            END as priority
+          FROM product_info pi
+          CROSS JOIN sales_velocity_config sv
+          WHERE (sv.category_id = pi.category_id OR sv.category_id IS NULL)
+            AND (sv.vendor = pi.vendor OR sv.vendor IS NULL)
+        ),
+        safety_options AS (
+          SELECT
+            ss.*,
+            CASE
+              WHEN ss.category_id = pi.category_id AND ss.vendor = pi.vendor THEN 1
+              WHEN ss.category_id = pi.category_id AND ss.vendor IS NULL THEN 2
+              WHEN ss.category_id IS NULL AND ss.vendor = pi.vendor THEN 3
+              WHEN ss.category_id IS NULL AND ss.vendor IS NULL THEN 4
+              ELSE 5
+            END as priority
+          FROM product_info pi
+          CROSS JOIN safety_stock_config ss
+          WHERE (ss.category_id = pi.category_id OR ss.category_id IS NULL)
+            AND (ss.vendor = pi.vendor OR ss.vendor IS NULL)
+        )
+        SELECT
+          -- Stock thresholds
+          COALESCE(
+            (SELECT critical_days
+             FROM threshold_options
+             ORDER BY priority LIMIT 1),
+            7
+          ) as critical_days,
+          COALESCE(
+            (SELECT reorder_days
+             FROM threshold_options
+             ORDER BY priority LIMIT 1),
+            14
+          ) as reorder_days,
+          COALESCE(
+            (SELECT overstock_days
+             FROM threshold_options
+             ORDER BY priority LIMIT 1),
+            90
+          ) as overstock_days,
+          COALESCE(
+            (SELECT low_stock_threshold
+             FROM threshold_options
+             ORDER BY priority LIMIT 1),
+            5
+          ) as low_stock_threshold,
+          -- Sales velocity windows
+          COALESCE(
+            (SELECT daily_window_days
+             FROM velocity_options
+             ORDER BY priority LIMIT 1),
+            30
+          ) as daily_window_days,
+          COALESCE(
+            (SELECT weekly_window_days
+             FROM velocity_options
+             ORDER BY priority LIMIT 1),
+            7
+          ) as weekly_window_days,
+          COALESCE(
+            (SELECT monthly_window_days
+             FROM velocity_options
+             ORDER BY priority LIMIT 1),
+            90
+          ) as monthly_window_days,
+          -- Safety stock config
+          COALESCE(
+            (SELECT coverage_days
+             FROM safety_options
+             ORDER BY priority LIMIT 1),
+            14
+          ) as safety_stock_days,
+          COALESCE(
+            (SELECT service_level
+             FROM safety_options
+             ORDER BY priority LIMIT 1),
+            95.0
+          ) as service_level,
+          -- ABC Classification
+          (SELECT a_threshold FROM abc_classification_config WHERE id = 1) as abc_a_threshold,
+          (SELECT b_threshold FROM abc_classification_config WHERE id = 1) as abc_b_threshold,
+          (SELECT classification_period_days FROM abc_classification_config WHERE id = 1) as abc_period_days
+      `, [product.product_id]);
+
+      const config = configs[0] || {
+        critical_days: 7,
+        reorder_days: 14,
+        overstock_days: 90,
+        low_stock_threshold: 5,
+        daily_window_days: 30,
+        weekly_window_days: 7,
+        monthly_window_days: 90,
+        safety_stock_days: 14,
+        service_level: 95.0,
+        abc_a_threshold: 20.0,
+        abc_b_threshold: 50.0,
+        abc_period_days: 90
+      };
+
+      // Calculate sales metrics with trends using configured windows
       const [salesMetrics] = await connection.query(`
         WITH sales_summary AS (
           SELECT
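
Note on the hunk above: the three `*_options` CTEs implement the same specificity ladder. A row matching both category and vendor wins, then category-only, then vendor-only, then the global default row, with `COALESCE` supplying hardcoded fallbacks when no row matches at all. A plain-JavaScript sketch of that resolution logic follows; the row shapes are assumptions inferred from the columns the query reads, not the actual table definitions.

```js
// Minimal sketch of the priority resolution the SQL above performs.
// Row shapes are assumed from the columns the query references.
function resolveConfig(product, rows, defaults) {
  const priority = (row) => {
    const catMatch = row.category_id !== null && row.category_id === product.category_id;
    const vendorMatch = row.vendor !== null && row.vendor === product.vendor;
    if (catMatch && vendorMatch) return 1;                          // category + vendor
    if (catMatch && row.vendor === null) return 2;                  // category only
    if (row.category_id === null && vendorMatch) return 3;          // vendor only
    if (row.category_id === null && row.vendor === null) return 4;  // global default row
    return 5;                                                       // not applicable
  };
  const best = rows
    .filter(r => priority(r) < 5)
    .sort((a, b) => priority(a) - priority(b))[0];
  return best ?? defaults; // hardcoded fallbacks, like the COALESCE defaults
}

// Example: a vendor-level row beats the global default row.
const rows = [
  { category_id: null, vendor: null, critical_days: 7 },
  { category_id: null, vendor: 'Acme', critical_days: 3 }
];
console.log(resolveConfig({ category_id: 12, vendor: 'Acme' }, rows,
  { critical_days: 7 }).critical_days); // 3
```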
@@ -166,9 +301,10 @@ async function calculateMetrics() {
           MIN(o.date) as first_sale_date,
           COUNT(DISTINCT o.order_number) as number_of_orders,
           AVG(o.quantity) as avg_quantity_per_order,
-          -- Calculate rolling averages
-          SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
-          SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY) THEN o.quantity ELSE 0 END) as last_7_days_qty
+          -- Calculate rolling averages using configured windows
+          SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
+          SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_7_days_qty,
+          SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_month_qty
         FROM orders o
         JOIN products p ON o.product_id = p.product_id
         WHERE o.canceled = 0 AND o.product_id = ?
@@ -182,11 +318,20 @@ async function calculateMetrics() {
         first_sale_date,
         number_of_orders,
         avg_quantity_per_order,
-        last_30_days_qty / 30 as rolling_daily_avg,
-        last_7_days_qty / 7 as rolling_weekly_avg,
+        last_30_days_qty / ? as rolling_daily_avg,
+        last_7_days_qty / ? as rolling_weekly_avg,
+        last_month_qty / ? as rolling_monthly_avg,
         total_quantity_sold as total_sales_to_date
       FROM sales_summary
-      `, [product.product_id]).catch(err => {
+      `, [
+        config.daily_window_days,
+        config.weekly_window_days,
+        config.monthly_window_days,
+        product.product_id,
+        config.daily_window_days,
+        config.weekly_window_days,
+        config.monthly_window_days
+      ]).catch(err => {
         logError(err, `Failed to calculate sales metrics for product ${product.product_id}`);
         throw err;
       });
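
The two hunks above parameterize the same query, so the bind array must follow the order the `?` placeholders appear in the SQL text: three window lengths inside `sales_summary`, then `product_id` in its WHERE clause, then the same three windows again as divisors in the outer SELECT. If that duplication ever becomes fragile, mysql2 also supports a `namedPlaceholders` connection option where a name can be reused; a minimal sketch with assumed connection settings:

```js
const mysql = require('mysql2/promise');

async function demo() {
  const connection = await mysql.createConnection({
    host: 'localhost', user: 'app', database: 'shop', // assumed credentials
    namedPlaceholders: true
  });
  // :daily appears twice but is bound once; no positional bookkeeping.
  const [rows] = await connection.execute(
    `SELECT SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL :daily DAY)
                     THEN o.quantity ELSE 0 END) / :daily AS rolling_daily_avg
       FROM orders o
      WHERE o.canceled = 0 AND o.product_id = :productId`,
    { daily: 30, productId: 42 }
  );
  console.log(rows[0]);
  await connection.end();
}

demo().catch(console.error);
```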
@@ -307,9 +452,14 @@ async function calculateMetrics() {
 
       // Calculate stock status using configurable thresholds with proper handling of zero sales
       const stock_status = daily_sales_avg === 0 ? 'New' :
-        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) ? 'Critical' :
-        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) ? 'Reorder' :
-        stock.stock_quantity > Math.max(1, daily_sales_avg * threshold.overstock_days) ? 'Overstocked' : 'Healthy';
+        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * config.critical_days)) ? 'Critical' :
+        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) ? 'Reorder' :
+        stock.stock_quantity > Math.max(1, daily_sales_avg * config.overstock_days) ? 'Overstocked' : 'Healthy';
+
+      // Calculate safety stock using configured values
+      const safety_stock = daily_sales_avg > 0 ?
+        Math.max(1, Math.ceil(daily_sales_avg * config.safety_stock_days * (config.service_level / 100))) :
+        null;
 
       // Add to batch update
       metricsUpdates.push([
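
A worked example of the safety stock line added above. Note it is a linear proxy (average demand × coverage days × service-level fraction) rather than the classical z-score-based safety stock formula; the sample inputs below are made up.

```js
// Sample inputs (assumed values, for illustration only)
const daily_sales_avg = 4.2;   // units sold per day
const config = { safety_stock_days: 14, service_level: 95.0 };

// Same expression as the diff: demand x coverage x service fraction,
// rounded up and floored at 1 unit.
const safety_stock = daily_sales_avg > 0
  ? Math.max(1, Math.ceil(daily_sales_avg * config.safety_stock_days * (config.service_level / 100)))
  : null;

console.log(safety_stock); // ceil(4.2 * 14 * 0.95) = ceil(55.86) = 56
```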
@@ -323,8 +473,8 @@ async function calculateMetrics() {
         metrics.last_sale_date || null,
         daily_sales_avg > 0 ? stock.stock_quantity / daily_sales_avg : null,
         weekly_sales_avg > 0 ? stock.stock_quantity / weekly_sales_avg : null,
-        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) : null,
-        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) : null,
+        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) : null,
+        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * config.critical_days)) : null,
         margin_percent,
         metrics.total_revenue || 0,
         inventory_value || 0,
@@ -403,21 +553,21 @@ async function calculateMetrics() {
         percentage: '100'
       });
 
-      // Calculate ABC classification
+      // Calculate ABC classification using configured thresholds
       await connection.query(`
         WITH revenue_rankings AS (
           SELECT
             product_id,
             total_revenue,
-            PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) as revenue_rank
+            PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) * 100 as revenue_percentile
           FROM product_metrics
         ),
         classification_update AS (
           SELECT
             product_id,
             CASE
-              WHEN revenue_rank <= 0.2 THEN 'A'
-              WHEN revenue_rank <= 0.5 THEN 'B'
+              WHEN revenue_percentile <= ? THEN 'A'
+              WHEN revenue_percentile <= ? THEN 'B'
               ELSE 'C'
             END as abc_class
           FROM revenue_rankings
@@ -426,7 +576,7 @@ async function calculateMetrics() {
       JOIN classification_update cu ON pm.product_id = cu.product_id
       SET pm.abc_class = cu.abc_class,
           pm.last_calculated_at = NOW()
-      `);
+      `, [config.abc_a_threshold, config.abc_b_threshold]);
 
       // Update progress for time-based aggregates
       outputProgress({

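On the two ABC hunks above: `PERCENT_RANK()` yields `(rank - 1) / (rows - 1)` over revenue sorted descending, so multiplying by 100 turns it into a percentile that compares directly against thresholds stored as percentages (the defaults are 20 and 50). One caveat: `config` is resolved per product inside the loop, so the thresholds bound here come from whichever product was processed last; that is harmless if `abc_classification_config` is a single global row, as the `WHERE id = 1` lookups suggest. A plain-JS sketch of the classification (ties are ignored here, unlike `PERCENT_RANK()`):

```js
// Sketch of the percentile classification the UPDATE performs.
// The top earner gets percentile 0, the bottom earner 100.
function classifyABC(products, aThreshold = 20, bThreshold = 50) {
  const sorted = [...products].sort((a, b) => (b.total_revenue ?? 0) - (a.total_revenue ?? 0));
  const n = sorted.length;
  return sorted.map((p, i) => {
    const percentile = n > 1 ? (i / (n - 1)) * 100 : 0; // PERCENT_RANK() * 100
    return { ...p, abc_class: percentile <= aThreshold ? 'A' : percentile <= bThreshold ? 'B' : 'C' };
  });
}

console.log(classifyABC([
  { product_id: 1, total_revenue: 900 },
  { product_id: 2, total_revenue: 500 },
  { product_id: 3, total_revenue: 100 }
]).map(p => p.abc_class)); // [ 'A', 'B', 'C' ]
```
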
@@ -33,6 +33,11 @@ const CORE_TABLES = [
   'product_categories'
 ];
 
+// Config tables that must be created
+const CONFIG_TABLES = [
+  'stock_thresholds'
+];
+
 // Split SQL into individual statements
 function splitSQLStatements(sql) {
   // First, normalize line endings
@@ -361,6 +366,95 @@ async function resetDatabase() {
       message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
     });
 
+    // Read and execute config schema
+    outputProgress({
+      operation: 'Running config setup',
+      message: 'Creating configuration tables...'
+    });
+    const configSchemaSQL = fs.readFileSync(
+      path.join(__dirname, '../db/config-schema.sql'),
+      'utf8'
+    );
+
+    // Execute config schema statements one at a time
+    const configStatements = splitSQLStatements(configSchemaSQL);
+    outputProgress({
+      operation: 'Config SQL Execution',
+      message: {
+        totalStatements: configStatements.length,
+        statements: configStatements.map((stmt, i) => ({
+          number: i + 1,
+          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
+        }))
+      }
+    });
+
+    for (let i = 0; i < configStatements.length; i++) {
+      const stmt = configStatements[i];
+      try {
+        const [result, fields] = await connection.query(stmt);
+
+        // Check for warnings
+        const [warnings] = await connection.query('SHOW WARNINGS');
+        if (warnings && warnings.length > 0) {
+          outputProgress({
+            status: 'warning',
+            operation: 'Config SQL Warning',
+            statement: i + 1,
+            warnings: warnings
+          });
+        }
+
+        outputProgress({
+          operation: 'Config SQL Progress',
+          message: {
+            statement: i + 1,
+            total: configStatements.length,
+            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
+            affectedRows: result.affectedRows
+          }
+        });
+      } catch (sqlError) {
+        outputProgress({
+          status: 'error',
+          operation: 'Config SQL Error',
+          error: sqlError.message,
+          sqlState: sqlError.sqlState,
+          errno: sqlError.errno,
+          statement: stmt,
+          statementNumber: i + 1
+        });
+        throw sqlError;
+      }
+    }
+
+    // Verify config tables were created
+    const [showConfigTables] = await connection.query('SHOW TABLES');
+    const existingConfigTables = showConfigTables.map(t => Object.values(t)[0]);
+
+    outputProgress({
+      operation: 'Config tables verification',
+      message: {
+        found: existingConfigTables,
+        expected: CONFIG_TABLES
+      }
+    });
+
+    const missingConfigTables = CONFIG_TABLES.filter(
+      t => !existingConfigTables.includes(t)
+    );
+
+    if (missingConfigTables.length > 0) {
+      throw new Error(
+        `Failed to create config tables: ${missingConfigTables.join(', ')}`
+      );
+    }
+
+    outputProgress({
+      operation: 'Config tables created',
+      message: `Successfully created tables: ${CONFIG_TABLES.join(', ')}`
+    });
+
+    // Read and execute metrics schema (metrics tables)
     outputProgress({
       operation: 'Running metrics setup',

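The loop added above trades a single multi-statement `connection.query(wholeFile)` for statement-at-a-time execution, which pins errors and `SHOW WARNINGS` output to a specific statement. A condensed sketch of that pattern with a naive splitter; the real `splitSQLStatements` also normalizes line endings, and splitting on `;` breaks on semicolons inside string literals or trigger bodies, so treat this as illustrative only.

```js
// Run a multi-statement SQL file one statement at a time so that
// errors and warnings can be attributed to a specific statement.
async function runStatements(connection, sql) {
  const statements = sql.split(';').map(s => s.trim()).filter(Boolean); // naive split
  for (const [i, stmt] of statements.entries()) {
    const [result] = await connection.query(stmt);
    // SHOW WARNINGS reports notes/warnings from the statement just run
    const [warnings] = await connection.query('SHOW WARNINGS');
    if (warnings.length > 0) {
      console.warn(`statement ${i + 1}:`, warnings);
    }
    console.log(`statement ${i + 1}/${statements.length} ok`, result.affectedRows ?? '');
  }
}
```
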
@@ -24,6 +24,11 @@ const METRICS_TABLES = [
   'vendor_metrics'
 ];
 
+// Config tables that must exist
+const CONFIG_TABLES = [
+  'stock_thresholds'
+];
+
 // Core tables that must exist
 const REQUIRED_CORE_TABLES = [
   'products',
@@ -129,10 +134,23 @@ async function resetMetrics() {
     // the metrics tables and indexes in one shot
     await connection.query(schemaSQL);
 
+    // Read and execute config schema
+    outputProgress({
+      status: 'running',
+      operation: 'Creating configuration tables',
+      percentage: '60'
+    });
+    const configSchemaPath = path.join(__dirname, '../db/config-schema.sql');
+    const configSchemaSQL = fs.readFileSync(configSchemaPath, 'utf8');
+
+    // Run the config schema
+    await connection.query(configSchemaSQL);
+
     // Verify all tables were actually created using SHOW TABLES
     const [verifyTables] = await connection.query('SHOW TABLES');
     const tablesAfterCreation = verifyTables.map(t => Object.values(t)[0]);
 
+    // First verify metrics tables
     outputProgress({
       status: 'running',
       operation: 'Verifying metrics tables',
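
Neither script shows `db/config-schema.sql` itself, and `CONFIG_TABLES` lists only `stock_thresholds`, so the verification in the next hunk passes even if `sales_velocity_config`, `safety_stock_config`, or `abc_classification_config` fail to create. Purely as a hypothetical sketch (the real schema may differ), a `stock_thresholds` shape consistent with the columns the metrics query reads:

```js
// Hypothetical sketch only: config-schema.sql is not part of this diff.
// NULL category_id/vendor is what the priority CASE treats as a default.
const hypotheticalConfigSchema = `
CREATE TABLE IF NOT EXISTS stock_thresholds (
  id INT AUTO_INCREMENT PRIMARY KEY,
  category_id INT NULL,           -- NULL = applies to all categories
  vendor VARCHAR(255) NULL,       -- NULL = applies to all vendors
  critical_days INT NOT NULL DEFAULT 7,
  reorder_days INT NOT NULL DEFAULT 14,
  overstock_days INT NOT NULL DEFAULT 90,
  low_stock_threshold INT NOT NULL DEFAULT 5
);
INSERT INTO stock_thresholds (category_id, vendor) VALUES (NULL, NULL); -- seed global default row
`;
```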
@@ -142,13 +160,33 @@ async function resetMetrics() {
       }
     });
 
-    const missingTables = METRICS_TABLES.filter(
+    const missingMetricsTables = METRICS_TABLES.filter(
       t => !tablesAfterCreation.includes(t)
     );
 
-    if (missingTables.length > 0) {
+    if (missingMetricsTables.length > 0) {
       throw new Error(
-        `Failed to create tables: ${missingTables.join(', ')}`
+        `Failed to create metrics tables: ${missingMetricsTables.join(', ')}`
       );
     }
 
+    // Then verify config tables
+    outputProgress({
+      status: 'running',
+      operation: 'Verifying config tables',
+      message: {
+        found: tablesAfterCreation,
+        required: CONFIG_TABLES
+      }
+    });
+
+    const missingConfigTables = CONFIG_TABLES.filter(
+      t => !tablesAfterCreation.includes(t)
+    );
+
+    if (missingConfigTables.length > 0) {
+      throw new Error(
+        `Failed to create config tables: ${missingConfigTables.join(', ')}`
+      );
+    }
+
@@ -157,7 +195,7 @@ async function resetMetrics() {
 
     outputProgress({
       status: 'complete',
-      operation: 'Metrics tables have been reset',
+      operation: 'Metrics and config tables have been reset',
       percentage: '100'
     });
 