Add new dashboard backend
@@ -43,6 +43,9 @@ CREATE TABLE IF NOT EXISTS product_metrics (
    avg_margin_percent DECIMAL(10,3),
    total_revenue DECIMAL(10,3),
    inventory_value DECIMAL(10,3),
    cost_of_goods_sold DECIMAL(10,3),
    gross_profit DECIMAL(10,3),
    gmroi DECIMAL(10,3),
    -- Purchase metrics
    avg_lead_time_days INT,
    last_purchase_date DATE,
@@ -50,9 +53,18 @@ CREATE TABLE IF NOT EXISTS product_metrics (
    -- Classification
    abc_class CHAR(1),
    stock_status VARCHAR(20),
    -- Turnover metrics
    turnover_rate DECIMAL(10,3),
    -- Lead time metrics
    current_lead_time INT,
    target_lead_time INT,
    lead_time_status VARCHAR(20),
    PRIMARY KEY (product_id),
    FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
    INDEX idx_metrics_revenue (total_revenue)
    INDEX idx_metrics_revenue (total_revenue),
    INDEX idx_metrics_stock_status (stock_status),
    INDEX idx_metrics_lead_time (lead_time_status),
    INDEX idx_metrics_turnover (turnover_rate)
);

-- New table for time-based aggregates
@@ -71,6 +83,8 @@ CREATE TABLE IF NOT EXISTS product_time_aggregates (
    -- Calculated fields
    avg_price DECIMAL(10,3),
    profit_margin DECIMAL(10,3),
    inventory_value DECIMAL(10,3),
    gmroi DECIMAL(10,3),
    PRIMARY KEY (product_id, year, month),
    FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
    INDEX idx_date (year, month)
@@ -85,7 +99,10 @@ CREATE TABLE IF NOT EXISTS vendor_metrics (
    order_fill_rate DECIMAL(5,2),
    total_orders INT,
    total_late_orders INT,
    PRIMARY KEY (vendor)
    total_purchase_value DECIMAL(10,3),
    avg_order_value DECIMAL(10,3),
    PRIMARY KEY (vendor),
    INDEX idx_vendor_performance (on_time_delivery_rate)
);
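The added indexes line up with the dashboard queries introduced later in this commit: stock_status, lead_time_status and turnover_rate are used as filters or sort keys. A small sketch of the kind of lookup the new idx_metrics_stock_status index serves (illustrative only, mirroring the low-stock route added below):

// Assumes a mysql2-style pool, as used throughout this commit.
async function lowStockCandidates(pool) {
  // Filters on stock_status, which idx_metrics_stock_status covers.
  const [rows] = await pool.query(
    "SELECT product_id, stock_status FROM product_metrics WHERE stock_status IN ('Critical', 'Reorder')"
  );
  return rows;
}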

-- Re-enable foreign key checks

@@ -179,6 +179,294 @@ function cancelCalculation() {
// Handle SIGTERM signal for cancellation
process.on('SIGTERM', cancelCalculation);

// Calculate GMROI and other financial metrics
async function calculateFinancialMetrics(connection, startTime, totalProducts) {
  outputProgress({
    status: 'running',
    operation: 'Calculating financial metrics',
    current: Math.floor(totalProducts * 0.6),
    total: totalProducts,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.6), totalProducts),
    rate: calculateRate(startTime, Math.floor(totalProducts * 0.6)),
    percentage: '60'
  });

  await connection.query(`
    UPDATE product_metrics pm
    JOIN (
      SELECT
        p.product_id,
        p.cost_price * p.stock_quantity as inventory_value,
        SUM(o.quantity * o.price) as total_revenue,
        SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
        SUM(o.quantity * (o.price - p.cost_price)) as gross_profit
      FROM products p
      LEFT JOIN orders o ON p.product_id = o.product_id
      WHERE o.canceled = false
        AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
      GROUP BY p.product_id
    ) fin ON pm.product_id = fin.product_id
    SET
      pm.inventory_value = COALESCE(fin.inventory_value, 0),
      pm.total_revenue = COALESCE(fin.total_revenue, 0),
      pm.cost_of_goods_sold = COALESCE(fin.cost_of_goods_sold, 0),
      pm.gross_profit = COALESCE(fin.gross_profit, 0),
      pm.gmroi = CASE
        WHEN COALESCE(fin.inventory_value, 0) > 0
        THEN (COALESCE(fin.gross_profit, 0) / COALESCE(fin.inventory_value, 0)) * 100
        ELSE 0
      END
  `);

  // Update time-based aggregates with financial metrics
  await connection.query(`
    UPDATE product_time_aggregates pta
    JOIN (
      SELECT
        p.product_id,
        YEAR(o.date) as year,
        MONTH(o.date) as month,
        p.cost_price * p.stock_quantity as inventory_value,
        SUM((o.price - p.cost_price) * o.quantity) /
          NULLIF(p.cost_price * p.stock_quantity, 0) * 100 as gmroi
      FROM products p
      LEFT JOIN orders o ON p.product_id = o.product_id
      WHERE o.canceled = false
      GROUP BY p.product_id, YEAR(o.date), MONTH(o.date)
    ) fin ON pta.product_id = fin.product_id
      AND pta.year = fin.year
      AND pta.month = fin.month
    SET
      pta.inventory_value = COALESCE(fin.inventory_value, 0),
      pta.gmroi = COALESCE(fin.gmroi, 0)
  `);
}
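The GMROI update above boils down to gross profit over the cost of inventory on hand, expressed as a percentage. A minimal sketch of the same formula in plain JavaScript (the helper name and sample figures are illustrative, not part of the commit):

// Hypothetical helper mirroring the SQL CASE above: GMROI as a percentage.
function gmroiPercent(grossProfit, inventoryValue) {
  // Guard against division by zero, matching the SQL's ELSE 0 branch.
  return inventoryValue > 0 ? (grossProfit / inventoryValue) * 100 : 0;
}

// Example: $12,000 gross profit on $8,000 of inventory at cost.
console.log(gmroiPercent(12000, 8000)); // 150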

// Calculate vendor metrics
async function calculateVendorMetrics(connection, startTime, totalProducts) {
  outputProgress({
    status: 'running',
    operation: 'Calculating vendor metrics',
    current: Math.floor(totalProducts * 0.7),
    total: totalProducts,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.7), totalProducts),
    rate: calculateRate(startTime, Math.floor(totalProducts * 0.7)),
    percentage: '70'
  });

  await connection.query(`
    INSERT INTO vendor_metrics (
      vendor,
      last_calculated_at,
      avg_lead_time_days,
      on_time_delivery_rate,
      order_fill_rate,
      total_orders,
      total_late_orders,
      total_purchase_value,
      avg_order_value
    )
    SELECT
      po.vendor,
      NOW() as last_calculated_at,
      AVG(
        CASE
          WHEN po.received_date IS NOT NULL
          THEN DATEDIFF(po.received_date, po.date)
          ELSE NULL
        END
      ) as avg_lead_time_days,
      (
        COUNT(CASE WHEN po.received_date <= po.expected_date THEN 1 END) * 100.0 /
        NULLIF(COUNT(*), 0)
      ) as on_time_delivery_rate,
      (
        SUM(CASE WHEN po.received >= po.ordered THEN 1 ELSE 0 END) * 100.0 /
        NULLIF(COUNT(*), 0)
      ) as order_fill_rate,
      COUNT(*) as total_orders,
      COUNT(CASE WHEN po.received_date > po.expected_date THEN 1 END) as total_late_orders,
      SUM(po.ordered * po.cost_price) as total_purchase_value,
      AVG(po.ordered * po.cost_price) as avg_order_value
    FROM purchase_orders po
    WHERE po.status = 'closed'
      AND po.date >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
    GROUP BY po.vendor
    ON DUPLICATE KEY UPDATE
      last_calculated_at = VALUES(last_calculated_at),
      avg_lead_time_days = VALUES(avg_lead_time_days),
      on_time_delivery_rate = VALUES(on_time_delivery_rate),
      order_fill_rate = VALUES(order_fill_rate),
      total_orders = VALUES(total_orders),
      total_late_orders = VALUES(total_late_orders),
      total_purchase_value = VALUES(total_purchase_value),
      avg_order_value = VALUES(avg_order_value)
  `);
}
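On-time delivery and order fill rate are both simple ratios over a vendor's closed purchase orders in the last twelve months. A rough JavaScript equivalent of those two aggregates, with made-up purchase-order objects for illustration:

function vendorRates(purchaseOrders) {
  const total = purchaseOrders.length;
  if (total === 0) return { onTimeDeliveryRate: 0, orderFillRate: 0 };
  const onTime = purchaseOrders.filter(po => po.receivedDate <= po.expectedDate).length;
  const filled = purchaseOrders.filter(po => po.received >= po.ordered).length;
  return {
    onTimeDeliveryRate: (onTime / total) * 100, // matches received_date <= expected_date
    orderFillRate: (filled / total) * 100       // matches received >= ordered
  };
}

// Example: 2 of 3 orders arrived on time, all 3 were fully received.
console.log(vendorRates([
  { receivedDate: '2024-01-05', expectedDate: '2024-01-07', received: 10, ordered: 10 },
  { receivedDate: '2024-01-20', expectedDate: '2024-01-15', received: 5, ordered: 5 },
  { receivedDate: '2024-02-01', expectedDate: '2024-02-03', received: 8, ordered: 8 }
])); // { onTimeDeliveryRate: 66.66..., orderFillRate: 100 }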

// Calculate turnover rate metrics
async function calculateTurnoverMetrics(connection, startTime, totalProducts) {
  outputProgress({
    status: 'running',
    operation: 'Calculating turnover metrics',
    current: Math.floor(totalProducts * 0.75),
    total: totalProducts,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.75), totalProducts),
    rate: calculateRate(startTime, Math.floor(totalProducts * 0.75)),
    percentage: '75'
  });

  await connection.query(`
    WITH product_turnover AS (
      SELECT
        p.product_id,
        p.vendor,
        pc.category_id,
        COALESCE(
          (SELECT calculation_period_days
           FROM turnover_config tc
           WHERE tc.category_id = pc.category_id
             AND tc.vendor = p.vendor
           LIMIT 1),
          COALESCE(
            (SELECT calculation_period_days
             FROM turnover_config tc
             WHERE tc.category_id = pc.category_id
               AND tc.vendor IS NULL
             LIMIT 1),
            COALESCE(
              (SELECT calculation_period_days
               FROM turnover_config tc
               WHERE tc.category_id IS NULL
                 AND tc.vendor = p.vendor
               LIMIT 1),
              (SELECT calculation_period_days
               FROM turnover_config
               WHERE category_id IS NULL
                 AND vendor IS NULL
               LIMIT 1)
            )
          )
        ) as calculation_period_days,
        SUM(o.quantity) as total_quantity_sold,
        AVG(p.stock_quantity) as avg_stock_level
      FROM products p
      LEFT JOIN product_categories pc ON p.product_id = pc.product_id
      LEFT JOIN orders o ON p.product_id = o.product_id
      WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
        AND o.canceled = 0
      GROUP BY p.product_id, p.vendor, pc.category_id
    )
    UPDATE product_metrics pm
    JOIN product_turnover pt ON pm.product_id = pt.product_id
    SET
      pm.turnover_rate = CASE
        WHEN pt.avg_stock_level > 0
        THEN (pt.total_quantity_sold / pt.avg_stock_level) * (30.0 / pt.calculation_period_days)
        ELSE 0
      END,
      pm.last_calculated_at = NOW()
  `);
}
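The nested COALESCE chain above implements a four-level fallback for the turnover window: a category+vendor override wins, then a category-only row, then a vendor-only row, then the global default. The same lookup order in plain JavaScript, over an in-memory list of config rows (illustrative only):

function resolvePeriodDays(configRows, categoryId, vendor) {
  // Try the most specific match first, then progressively broader ones.
  const match =
    configRows.find(c => c.categoryId === categoryId && c.vendor === vendor) ||
    configRows.find(c => c.categoryId === categoryId && c.vendor === null) ||
    configRows.find(c => c.categoryId === null && c.vendor === vendor) ||
    configRows.find(c => c.categoryId === null && c.vendor === null);
  return match ? match.calculationPeriodDays : undefined;
}

// Example: no category+vendor row exists, so the category-level row (45 days) wins.
console.log(resolvePeriodDays([
  { categoryId: null, vendor: null, calculationPeriodDays: 30 },
  { categoryId: 7, vendor: null, calculationPeriodDays: 45 }
], 7, 'Acme')); // 45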

// Enhance lead time calculations
async function calculateLeadTimeMetrics(connection, startTime, totalProducts) {
  outputProgress({
    status: 'running',
    operation: 'Calculating lead time metrics',
    current: Math.floor(totalProducts * 0.8),
    total: totalProducts,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.8), totalProducts),
    rate: calculateRate(startTime, Math.floor(totalProducts * 0.8)),
    percentage: '80'
  });

  await connection.query(`
    WITH lead_time_stats AS (
      SELECT
        p.product_id,
        p.vendor,
        pc.category_id,
        AVG(DATEDIFF(po.received_date, po.date)) as current_lead_time,
        COALESCE(
          (SELECT target_days
           FROM lead_time_thresholds lt
           WHERE lt.category_id = pc.category_id
             AND lt.vendor = p.vendor
           LIMIT 1),
          COALESCE(
            (SELECT target_days
             FROM lead_time_thresholds lt
             WHERE lt.category_id = pc.category_id
               AND lt.vendor IS NULL
             LIMIT 1),
            COALESCE(
              (SELECT target_days
               FROM lead_time_thresholds lt
               WHERE lt.category_id IS NULL
                 AND lt.vendor = p.vendor
               LIMIT 1),
              (SELECT target_days
               FROM lead_time_thresholds
               WHERE category_id IS NULL
                 AND vendor IS NULL
               LIMIT 1)
            )
          )
        ) as target_lead_time,
        COALESCE(
          (SELECT warning_days
           FROM lead_time_thresholds lt
           WHERE lt.category_id = pc.category_id
             AND lt.vendor = p.vendor
           LIMIT 1),
          COALESCE(
            (SELECT warning_days
             FROM lead_time_thresholds lt
             WHERE lt.category_id = pc.category_id
               AND lt.vendor IS NULL
             LIMIT 1),
            COALESCE(
              (SELECT warning_days
               FROM lead_time_thresholds lt
               WHERE lt.category_id IS NULL
                 AND lt.vendor = p.vendor
               LIMIT 1),
              (SELECT warning_days
               FROM lead_time_thresholds
               WHERE category_id IS NULL
                 AND vendor IS NULL
               LIMIT 1)
            )
          )
        ) as warning_lead_time
      FROM products p
      LEFT JOIN product_categories pc ON p.product_id = pc.product_id
      LEFT JOIN purchase_orders po ON p.product_id = po.product_id
      WHERE po.status = 'completed'
        AND po.received_date IS NOT NULL
        AND po.date >= DATE_SUB(CURDATE(), INTERVAL 90 DAY)
      GROUP BY p.product_id, p.vendor, pc.category_id
    )
    UPDATE product_metrics pm
    JOIN lead_time_stats lt ON pm.product_id = lt.product_id
    SET
      pm.current_lead_time = lt.current_lead_time,
      pm.target_lead_time = lt.target_lead_time,
      pm.lead_time_status = CASE
        WHEN lt.current_lead_time <= lt.target_lead_time THEN 'On Target'
        WHEN lt.current_lead_time <= lt.warning_lead_time THEN 'Warning'
        ELSE 'Critical'
      END,
      pm.last_calculated_at = NOW()
  `);
}
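The status bucketing at the end is a two-threshold comparison: at or under the target is 'On Target', at or under the warning threshold is 'Warning', anything slower is 'Critical'. A small JavaScript sketch of the same decision (values are illustrative):

function leadTimeStatus(currentDays, targetDays, warningDays) {
  if (currentDays <= targetDays) return 'On Target';
  if (currentDays <= warningDays) return 'Warning';
  return 'Critical';
}

console.log(leadTimeStatus(9, 7, 10));  // 'Warning'
console.log(leadTimeStatus(12, 7, 10)); // 'Critical'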

// Update the main calculation function to include our new calculations
async function calculateMetrics() {
  let pool;
  const startTime = Date.now();
@@ -191,6 +479,17 @@ async function calculateMetrics() {
    const connection = await pool.getConnection();

    try {
      outputProgress({
        status: 'running',
        operation: 'Starting metrics calculation',
        current: 0,
        total: 100,
        elapsed: '0s',
        remaining: 'Calculating...',
        rate: 0,
        percentage: '0'
      });

      // Get total number of products
      const [countResult] = await connection.query('SELECT COUNT(*) as total FROM products')
        .catch(err => {
@@ -202,7 +501,7 @@ async function calculateMetrics() {
      // Initial progress with percentage
      outputProgress({
        status: 'running',
        operation: 'Processing products',
        operation: 'Processing sales and stock metrics',
        current: processedCount,
        total: totalProducts,
        elapsed: '0s',
@@ -467,64 +766,6 @@ async function calculateMetrics() {
          throw err;
        });

      // Get stock thresholds for this product's category/vendor
      const [thresholds] = await connection.query(`
        WITH product_info AS (
          SELECT
            p.product_id,
            p.vendor,
            pc.category_id
          FROM products p
          LEFT JOIN product_categories pc ON p.product_id = pc.product_id
          WHERE p.product_id = ?
        ),
        threshold_options AS (
          SELECT
            st.*,
            CASE
              WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1 -- Category + vendor match
              WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2 -- Category match
              WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3 -- Vendor match
              WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4 -- Default
              ELSE 5
            END as priority
          FROM product_info pi
          CROSS JOIN stock_thresholds st
          WHERE (st.category_id = pi.category_id OR st.category_id IS NULL)
            AND (st.vendor = pi.vendor OR st.vendor IS NULL)
        )
        SELECT
          COALESCE(
            (SELECT critical_days
             FROM threshold_options
             ORDER BY priority LIMIT 1),
            7
          ) as critical_days,
          COALESCE(
            (SELECT reorder_days
             FROM threshold_options
             ORDER BY priority LIMIT 1),
            14
          ) as reorder_days,
          COALESCE(
            (SELECT overstock_days
             FROM threshold_options
             ORDER BY priority LIMIT 1),
            90
          ) as overstock_days
      `, [product.product_id]).catch(err => {
        logError(err, `Failed to get thresholds for product ${product.product_id}`);
        throw err;
      });

      const threshold = thresholds[0] || {
        critical_days: 7,
        reorder_days: 14,
        overstock_days: 90,
        safety_stock_days: 14, // Add default safety stock days
        service_level: 95.0 // Add default service level
      };

      // Calculate metrics
      const metrics = salesMetrics[0] || {};
      const purchases = purchaseMetrics[0] || {};
@@ -544,13 +785,13 @@ async function calculateMetrics() {

      // Calculate stock status using configurable thresholds with proper handling of zero sales
      const stock_status = daily_sales_avg === 0 ? 'New' :
        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) ? 'Critical' :
        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) ? 'Reorder' :
        stock.stock_quantity > Math.max(1, daily_sales_avg * threshold.overstock_days) ? 'Overstocked' : 'Healthy';
        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * config.critical_days)) ? 'Critical' :
        stock.stock_quantity <= Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) ? 'Reorder' :
        stock.stock_quantity > Math.max(1, daily_sales_avg * config.overstock_days) ? 'Overstocked' : 'Healthy';

      // Calculate safety stock using configured values with proper defaults
      const safety_stock = daily_sales_avg > 0 ?
        Math.max(1, Math.ceil(daily_sales_avg * (threshold.safety_stock_days || 14) * ((threshold.service_level || 95.0) / 100))) :
        Math.max(1, Math.ceil(daily_sales_avg * (config.safety_stock_days || 14) * ((config.service_level || 95.0) / 100))) :
        null;
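A worked example of the formulas above, with made-up figures:

// Illustrative numbers only: 4 units/day, 14 safety-stock days, 95% service level.
const dailySalesAvg = 4;
const safetyStock = Math.max(1, Math.ceil(dailySalesAvg * 14 * (95.0 / 100))); // ceil(53.2) = 54 units
const criticalCutoff = Math.max(1, Math.ceil(dailySalesAvg * 7));              // 28 units on hand or fewer -> 'Critical'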

      // Add to batch update
@@ -565,8 +806,8 @@ async function calculateMetrics() {
        metrics.last_sale_date || null,
        daily_sales_avg > 0 ? stock.stock_quantity / daily_sales_avg : null,
        weekly_sales_avg > 0 ? stock.stock_quantity / weekly_sales_avg : null,
        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.reorder_days)) : null,
        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * threshold.critical_days)) : null,
        daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) : null,
        safety_stock,
        margin_percent,
        metrics.total_revenue || 0,
        inventory_value || 0,
@@ -633,31 +874,45 @@ async function calculateMetrics() {
        }
      }

      // Update progress for ABC classification
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
      outputProgress({
        status: 'running',
        operation: 'Starting ABC classification',
        current: Math.floor(totalProducts * 0.7), // Start from 70% after product processing
        operation: 'Starting financial metrics calculation',
        current: Math.floor(totalProducts * 0.6),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.6), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.6)),
        percentage: '60'
      });
      await calculateFinancialMetrics(connection, startTime, totalProducts);

      outputProgress({
        status: 'running',
        operation: 'Starting vendor metrics calculation',
        current: Math.floor(totalProducts * 0.7),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.7), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.7)),
        percentage: '70'
      });

      // Calculate ABC classification using configured thresholds
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
      const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
      const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 };
      await calculateVendorMetrics(connection, startTime, totalProducts);

      outputProgress({
        status: 'running',
        operation: 'Calculating ABC rankings',
        operation: 'Starting turnover metrics calculation',
        current: Math.floor(totalProducts * 0.75),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.75), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.75)),
        percentage: '75'
      });
      await calculateTurnoverMetrics(connection, startTime, totalProducts);

      outputProgress({
        status: 'running',
        operation: 'Starting lead time metrics calculation',
        current: Math.floor(totalProducts * 0.8),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
@@ -665,6 +920,25 @@ async function calculateMetrics() {
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.8)),
        percentage: '80'
      });
      await calculateLeadTimeMetrics(connection, startTime, totalProducts);

      // Calculate ABC classification
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
      outputProgress({
        status: 'running',
        operation: 'Starting ABC classification',
        current: Math.floor(totalProducts * 0.7),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.7), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.7)),
        percentage: '70'
      });

      const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
      const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 };

      await connection.query(`
        WITH revenue_rankings AS (
@@ -690,7 +964,7 @@ async function calculateMetrics() {
          pm.last_calculated_at = NOW()
      `, [abcThresholds.a_threshold, abcThresholds.b_threshold]);
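The body of the ABC query is mostly elided by this diff, but the defaults read just above (a_threshold: 20, b_threshold: 50) suggest a cumulative-revenue split: products covering the first a_threshold percent of revenue become class A, the next tier up to b_threshold percent become B, and the rest C. A hedged sketch of that style of classification in JavaScript, not the commit's actual query:

function classifyAbc(products, aThreshold = 20, bThreshold = 50) {
  // Sort by revenue descending and walk the cumulative share of total revenue.
  const sorted = [...products].sort((a, b) => b.revenue - a.revenue);
  const total = sorted.reduce((sum, p) => sum + p.revenue, 0) || 1;
  let cumulative = 0;
  return sorted.map(p => {
    cumulative += p.revenue;
    const pct = (cumulative / total) * 100;
    const abcClass = pct <= aThreshold ? 'A' : pct <= bThreshold ? 'B' : 'C';
    return { ...p, abcClass };
  });
}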

      // Update progress for time-based aggregates
      // Calculate time-based aggregates
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
@@ -705,23 +979,8 @@ async function calculateMetrics() {
        percentage: '85'
      });

      // Calculate time-based aggregates
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
      await connection.query('TRUNCATE TABLE product_time_aggregates;');

      outputProgress({
        status: 'running',
        operation: 'Calculating sales aggregates',
        current: Math.floor(totalProducts * 0.9),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.9), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.9)),
        percentage: '90'
      });

      await connection.query(`
        INSERT INTO product_time_aggregates (
          product_id,
@@ -806,83 +1065,6 @@ async function calculateMetrics() {
          WHERE s.product_id IS NULL
      `);

      outputProgress({
        status: 'running',
        operation: 'Time-based aggregates complete',
        current: totalProducts,
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, totalProducts, totalProducts),
        rate: calculateRate(startTime, totalProducts),
        percentage: '100'
      });

      // Update progress for vendor metrics
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
      outputProgress({
        status: 'running',
        operation: 'Starting vendor metrics calculation',
        current: Math.floor(totalProducts * 0.95),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.95), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.95)),
        percentage: '95'
      });

      // Calculate vendor metrics with fixed order fill rate calculation
      if (isCancelled) {
        throw new Error('Operation cancelled');
      }
      await connection.query(`
        INSERT INTO vendor_metrics (
          vendor,
          last_calculated_at,
          avg_lead_time_days,
          on_time_delivery_rate,
          order_fill_rate,
          total_orders,
          total_late_orders
        )
        SELECT
          vendor,
          NOW() as last_calculated_at,
          COALESCE(AVG(DATEDIFF(received_date, date)), 0) as avg_lead_time_days,
          COALESCE(
            (COUNT(CASE WHEN DATEDIFF(received_date, date) <= 14 THEN 1 END) * 100.0 / NULLIF(COUNT(*), 0)),
            0
          ) as on_time_delivery_rate,
          CASE
            WHEN SUM(ordered) = 0 THEN 0
            ELSE LEAST(100, GREATEST(0, (SUM(CASE WHEN received >= 0 THEN received ELSE 0 END) * 100.0 / SUM(ordered))))
          END as order_fill_rate,
          COUNT(DISTINCT po_id) as total_orders,
          COUNT(CASE WHEN DATEDIFF(received_date, date) > 14 THEN 1 END) as total_late_orders
        FROM purchase_orders
        WHERE status = 'closed'
        GROUP BY vendor
        ON DUPLICATE KEY UPDATE
          last_calculated_at = VALUES(last_calculated_at),
          avg_lead_time_days = VALUES(avg_lead_time_days),
          on_time_delivery_rate = VALUES(on_time_delivery_rate),
          order_fill_rate = VALUES(order_fill_rate),
          total_orders = VALUES(total_orders),
          total_late_orders = VALUES(total_late_orders)
      `);

      outputProgress({
        status: 'running',
        operation: 'Vendor metrics complete',
        current: Math.floor(totalProducts * 0.98),
        total: totalProducts,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.98), totalProducts),
        rate: calculateRate(startTime, Math.floor(totalProducts * 0.98)),
        percentage: '98'
      });

      // Final success message
      outputProgress({
        status: 'complete',

@@ -192,165 +192,125 @@ router.get('/sales-by-category', async (req, res) => {
  }
});

// Get trending products
router.get('/trending-products', async (req, res) => {
// Get inventory health summary
router.get('/inventory/health/summary', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // First check what statuses exist
    const [checkStatuses] = await pool.query(`
      SELECT DISTINCT stock_status
      FROM product_metrics
      WHERE stock_status IS NOT NULL
    `);
    console.log('Available stock statuses:', checkStatuses.map(row => row.stock_status));

    const [rows] = await pool.query(`
      WITH CurrentSales AS (
      WITH normalized_status AS (
        SELECT
          p.product_id,
          p.title,
          p.sku,
          p.stock_quantity,
          p.image,
          COALESCE(SUM(o.price * o.quantity), 0) as total_sales
        FROM products p
        LEFT JOIN orders o ON p.product_id = o.product_id
          AND o.canceled = false
          AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
        WHERE p.visible = true
        GROUP BY p.product_id, p.title, p.sku, p.stock_quantity, p.image
        HAVING total_sales > 0
      ),
      PreviousSales AS (
        SELECT
          p.product_id,
          COALESCE(SUM(o.price * o.quantity), 0) as previous_sales
        FROM products p
        LEFT JOIN orders o ON p.product_id = o.product_id
          AND o.canceled = false
          AND DATE(o.date) BETWEEN DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND DATE_SUB(CURDATE(), INTERVAL 30 DAY)
        WHERE p.visible = true
        GROUP BY p.product_id
          CASE
            WHEN stock_status = 'Overstocked' THEN 'Overstock'
            WHEN stock_status = 'New' THEN 'Healthy'
            ELSE stock_status
          END as status
        FROM product_metrics
        WHERE stock_status IS NOT NULL
      )
      SELECT
        cs.*,
        CASE
          WHEN COALESCE(ps.previous_sales, 0) = 0 THEN
            CASE WHEN cs.total_sales > 0 THEN 100 ELSE 0 END
          ELSE ((cs.total_sales - ps.previous_sales) / ps.previous_sales * 100)
        END as sales_growth
      FROM CurrentSales cs
      LEFT JOIN PreviousSales ps ON cs.product_id = ps.product_id
      ORDER BY cs.total_sales DESC
      LIMIT 5
        status as stock_status,
        COUNT(*) as count
      FROM normalized_status
      GROUP BY status
    `);

    console.log('Raw inventory health summary:', rows);

    // Convert array to object with lowercase keys
    const summary = {
      critical: 0,
      reorder: 0,
      healthy: 0,
      overstock: 0
    };

    console.log('Trending products query result:', rows);
    rows.forEach(row => {
      const key = row.stock_status.toLowerCase();
      if (key in summary) {
        summary[key] = parseInt(row.count);
      }
    });

    res.json(rows.map(row => ({
      product_id: row.product_id,
      title: row.title,
      sku: row.sku,
      total_sales: parseFloat(row.total_sales || 0),
      sales_growth: parseFloat(row.sales_growth || 0),
      stock_quantity: parseInt(row.stock_quantity || 0),
      image_url: row.image || null
    })));
    // Calculate total
    summary.total = Object.values(summary).reduce((a, b) => a + b, 0);

    console.log('Final inventory health summary:', summary);
    res.json(summary);
  } catch (error) {
    console.error('Error in trending products:', {
      message: error.message,
      stack: error.stack,
      code: error.code,
      sqlState: error.sqlState,
      sqlMessage: error.sqlMessage
    });
    res.status(500).json({
      error: 'Failed to fetch trending products',
      details: error.message
    });
    console.error('Error fetching inventory health summary:', error);
    res.status(500).json({ error: 'Failed to fetch inventory health summary' });
  }
});
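The handler returns one count per normalized status plus a total. A hedged usage sketch, assuming this router is the dashboard router mounted at /api/dashboard in the server changes below (the figures are invented):

fetch('/api/dashboard/inventory/health/summary')
  .then(res => res.json())
  .then(summary => {
    // e.g. { critical: 12, reorder: 34, healthy: 250, overstock: 18, total: 314 }
    console.log(summary);
  });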

// Get inventory metrics
router.get('/inventory-metrics', async (req, res) => {
// Get low stock alerts
router.get('/inventory/low-stock', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Get global configuration values
    const [configs] = await pool.query(`
    const [rows] = await pool.query(`
      SELECT
        st.low_stock_threshold,
        tc.calculation_period_days as turnover_period
      FROM stock_thresholds st
      CROSS JOIN turnover_config tc
      WHERE st.id = 1 AND tc.id = 1
        p.product_id,
        p.sku,
        p.title,
        p.stock_quantity,
        pm.reorder_point,
        pm.days_of_inventory,
        pm.daily_sales_avg,
        pm.stock_status
      FROM product_metrics pm
      JOIN products p ON pm.product_id = p.product_id
      WHERE pm.stock_status IN ('Critical', 'Reorder')
      ORDER BY
        CASE pm.stock_status
          WHEN 'Critical' THEN 1
          WHEN 'Reorder' THEN 2
          ELSE 3
        END,
        pm.days_of_inventory ASC
      LIMIT 50
    `);

    const config = configs[0] || {
      low_stock_threshold: 5,
      turnover_period: 30
    };
    res.json(rows);
  } catch (error) {
    console.error('Error fetching low stock alerts:', error);
    res.status(500).json({ error: 'Failed to fetch low stock alerts' });
  }
});

    // Get stock levels by category
    const [stockLevels] = await pool.query(`
      SELECT
        c.name as category,
        SUM(CASE WHEN stock_quantity > ? THEN 1 ELSE 0 END) as inStock,
        SUM(CASE WHEN stock_quantity > 0 AND stock_quantity <= ? THEN 1 ELSE 0 END) as lowStock,
        SUM(CASE WHEN stock_quantity = 0 THEN 1 ELSE 0 END) as outOfStock
      FROM products p
      JOIN product_categories pc ON p.product_id = pc.product_id
      JOIN categories c ON pc.category_id = c.id
      WHERE visible = true
      GROUP BY c.name
      ORDER BY c.name ASC
    `, [config.low_stock_threshold, config.low_stock_threshold]);

    // Get top vendors with product counts and average stock
    const [topVendors] = await pool.query(`
// Get vendor performance metrics
router.get('/vendors/metrics', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const [rows] = await pool.query(`
      SELECT
        vendor,
        COUNT(*) as productCount,
        AVG(stock_quantity) as averageStockLevel
      FROM products
      WHERE visible = true
        AND vendor IS NOT NULL
        AND vendor != ''
      GROUP BY vendor
      ORDER BY productCount DESC
      LIMIT 5
        avg_lead_time_days,
        on_time_delivery_rate,
        order_fill_rate,
        total_orders,
        total_late_orders,
        total_purchase_value,
        avg_order_value
      FROM vendor_metrics
      ORDER BY on_time_delivery_rate DESC
    `);

    // Calculate stock turnover rate by category
    const [stockTurnover] = await pool.query(`
      WITH CategorySales AS (
        SELECT
          c.name as category,
          SUM(o.quantity) as units_sold
        FROM products p
        LEFT JOIN orders o ON p.product_id = o.product_id
        JOIN product_categories pc ON p.product_id = pc.product_id
        JOIN categories c ON pc.category_id = c.id
        WHERE o.canceled = false
          AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
        GROUP BY c.name
      ),
      CategoryStock AS (
        SELECT
          c.name as category,
          AVG(p.stock_quantity) as avg_stock
        FROM products p
        JOIN product_categories pc ON p.product_id = pc.product_id
        JOIN categories c ON pc.category_id = c.id
        WHERE p.visible = true
        GROUP BY c.name
      )
      SELECT
        cs.category,
        CASE
          WHEN cst.avg_stock > 0 THEN (cs.units_sold / cst.avg_stock)
          ELSE 0
        END as rate
      FROM CategorySales cs
      JOIN CategoryStock cst ON cs.category = cst.category
      ORDER BY rate DESC
    `, [config.turnover_period]);

    res.json({ stockLevels, topVendors, stockTurnover });
    res.json(rows.map(row => ({
      ...row,
      avg_lead_time_days: parseFloat(row.avg_lead_time_days || 0),
      on_time_delivery_rate: parseFloat(row.on_time_delivery_rate || 0),
      order_fill_rate: parseFloat(row.order_fill_rate || 0),
      total_purchase_value: parseFloat(row.total_purchase_value || 0),
      avg_order_value: parseFloat(row.avg_order_value || 0)
    })));
  } catch (error) {
    console.error('Error fetching inventory metrics:', error);
    res.status(500).json({ error: 'Failed to fetch inventory metrics' });
    console.error('Error fetching vendor metrics:', error);
    res.status(500).json({ error: 'Failed to fetch vendor metrics' });
  }
});

inventory-server/src/routes/metrics.js (new file, 60 lines)
@@ -0,0 +1,60 @@
const express = require('express');
const router = express.Router();

// Get key metrics trends (revenue, inventory value, GMROI)
router.get('/trends', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const [rows] = await pool.query(`
      WITH MonthlyMetrics AS (
        SELECT
          DATE(CONCAT(pta.year, '-', LPAD(pta.month, 2, '0'), '-01')) as date,
          SUM(pta.total_revenue) as revenue,
          SUM(pta.total_cost) as cost,
          SUM(pm.inventory_value) as inventory_value,
          CASE
            WHEN SUM(pm.inventory_value) > 0
            THEN (SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value)) * 100
            ELSE 0
          END as gmroi
        FROM product_time_aggregates pta
        JOIN product_metrics pm ON pta.product_id = pm.product_id
        WHERE (pta.year * 100 + pta.month) >= DATE_FORMAT(DATE_SUB(CURDATE(), INTERVAL 12 MONTH), '%Y%m')
        GROUP BY pta.year, pta.month
        ORDER BY date ASC
      )
      SELECT
        DATE_FORMAT(date, '%b %y') as date,
        ROUND(revenue, 2) as revenue,
        ROUND(inventory_value, 2) as inventory_value,
        ROUND(gmroi, 2) as gmroi
      FROM MonthlyMetrics
    `);

    console.log('Raw metrics trends data:', rows);

    // Transform the data into the format expected by the frontend
    const transformedData = {
      revenue: rows.map(row => ({
        date: row.date,
        value: parseFloat(row.revenue || 0)
      })),
      inventory_value: rows.map(row => ({
        date: row.date,
        value: parseFloat(row.inventory_value || 0)
      })),
      gmroi: rows.map(row => ({
        date: row.date,
        value: parseFloat(row.gmroi || 0)
      }))
    };

    console.log('Transformed metrics data:', transformedData);
    res.json(transformedData);
  } catch (error) {
    console.error('Error fetching metrics trends:', error);
    res.status(500).json({ error: 'Failed to fetch metrics trends' });
  }
});

module.exports = router;
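Given the transformation above, a consumer of the new metrics router gets three parallel series keyed by metric. A hedged usage sketch, assuming the router is mounted at /api/metrics as in the server changes below:

fetch('/api/metrics/trends')
  .then(res => res.json())
  .then(({ revenue, inventory_value, gmroi }) => {
    // Each series is an array of { date: 'Jan 25', value: 12345.67 } points.
    console.log(revenue.length, inventory_value.length, gmroi.length);
  });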

@@ -139,6 +139,56 @@ router.get('/', async (req, res) => {
  }
});

// Get trending products
router.get('/trending', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // First check if we have any data
    const [checkData] = await pool.query(`
      SELECT COUNT(*) as count,
        MAX(total_revenue) as max_revenue,
        MAX(daily_sales_avg) as max_daily_sales,
        COUNT(DISTINCT product_id) as products_with_metrics
      FROM product_metrics
      WHERE total_revenue > 0 OR daily_sales_avg > 0
    `);
    console.log('Product metrics stats:', checkData[0]);

    if (checkData[0].count === 0) {
      console.log('No products with metrics found');
      return res.json([]);
    }

    // Get trending products
    const [rows] = await pool.query(`
      SELECT
        p.product_id,
        p.sku,
        p.title,
        COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
        COALESCE(pm.weekly_sales_avg, 0) as weekly_sales_avg,
        CASE
          WHEN pm.weekly_sales_avg > 0 AND pm.daily_sales_avg > 0
          THEN ((pm.daily_sales_avg - pm.weekly_sales_avg) / pm.weekly_sales_avg) * 100
          ELSE 0
        END as growth_rate,
        COALESCE(pm.total_revenue, 0) as total_revenue
      FROM products p
      INNER JOIN product_metrics pm ON p.product_id = pm.product_id
      WHERE (pm.total_revenue > 0 OR pm.daily_sales_avg > 0)
        AND p.visible = true
      ORDER BY growth_rate DESC
      LIMIT 50
    `);

    console.log('Trending products:', rows);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching trending products:', error);
    res.status(500).json({ error: 'Failed to fetch trending products' });
  }
});
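The growth_rate column compares the short-term daily average against the weekly average. A one-line illustration with made-up numbers:

const growthRate = ((6 - 5) / 5) * 100; // 20 (%), mirroring the SQL CASE above with daily_sales_avg = 6, weekly_sales_avg = 5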

// Get a single product
router.get('/:id', async (req, res) => {
  const pool = req.app.locals.pool;

@@ -13,6 +13,7 @@ const csvRouter = require('./routes/csv');
const analyticsRouter = require('./routes/analytics');
const purchaseOrdersRouter = require('./routes/purchase-orders');
const configRouter = require('./routes/config');
const metricsRouter = require('./routes/metrics');

// Get the absolute path to the .env file
const envPath = path.resolve(process.cwd(), '.env');
@@ -78,13 +79,14 @@ const pool = initPool({
app.locals.pool = pool;

// Routes
app.use('/api/products', productsRouter);
app.use('/api/dashboard/products', productsRouter);
app.use('/api/dashboard', dashboardRouter);
app.use('/api/orders', ordersRouter);
app.use('/api/csv', csvRouter);
app.use('/api/analytics', analyticsRouter);
app.use('/api/purchase-orders', purchaseOrdersRouter);
app.use('/api/config', configRouter);
app.use('/api/metrics', metricsRouter);

// Basic health check route
app.get('/health', (req, res) => {