Fix data in product detail
@@ -367,15 +367,129 @@ router.get('/trending', async (req, res) => {
router.get('/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Get basic product data with metrics
    const [rows] = await pool.query(
      `SELECT
        p.*,
        GROUP_CONCAT(DISTINCT c.name) as categories,
        pm.daily_sales_avg,
        pm.weekly_sales_avg,
        pm.monthly_sales_avg,
        pm.days_of_inventory,
        pm.reorder_point,
        pm.safety_stock,
        pm.avg_margin_percent,
        pm.total_revenue,
        pm.inventory_value,
        pm.turnover_rate,
        pm.abc_class,
        pm.stock_status,
        pm.avg_lead_time_days,
        pm.current_lead_time,
        pm.target_lead_time,
        pm.lead_time_status,
        pm.gmroi,
        pm.cost_of_goods_sold,
        pm.gross_profit
      FROM products p
      LEFT JOIN product_metrics pm ON p.product_id = pm.product_id
      LEFT JOIN product_categories pc ON p.product_id = pc.product_id
      LEFT JOIN categories c ON pc.category_id = c.id
      WHERE p.product_id = ? AND p.visible = true
      GROUP BY p.product_id`,
      [req.params.id]
    );

    if (rows.length === 0) {
      return res.status(404).json({ error: 'Product not found' });
    }

    // Get vendor performance metrics
    const [vendorMetrics] = await pool.query(
      `SELECT * FROM vendor_metrics WHERE vendor = ?`,
      [rows[0].vendor]
    );

    // Transform the data to match frontend expectations
    const product = {
      // Basic product info
      product_id: rows[0].product_id,
      title: rows[0].title,
      SKU: rows[0].SKU,
      barcode: rows[0].barcode,
      created_at: rows[0].created_at,
      updated_at: rows[0].updated_at,

      // Inventory fields
      stock_quantity: parseInt(rows[0].stock_quantity),
      moq: parseInt(rows[0].moq),
      uom: parseInt(rows[0].uom),
      managing_stock: Boolean(rows[0].managing_stock),
      replenishable: Boolean(rows[0].replenishable),

      // Pricing fields
      price: parseFloat(rows[0].price),
      regular_price: parseFloat(rows[0].regular_price),
      cost_price: parseFloat(rows[0].cost_price),
      landing_cost_price: parseFloat(rows[0].landing_cost_price),

      // Categorization
      categories: rows[0].categories ? rows[0].categories.split(',') : [],
      tags: rows[0].tags ? rows[0].tags.split(',') : [],
      options: rows[0].options ? JSON.parse(rows[0].options) : {},

      // Vendor info
      vendor: rows[0].vendor,
      vendor_reference: rows[0].vendor_reference,
      brand: rows[0].brand,

      // URLs
      permalink: rows[0].permalink,
      image: rows[0].image,

      // Metrics
      metrics: {
        // Sales metrics
        daily_sales_avg: parseFloat(rows[0].daily_sales_avg) || 0,
        weekly_sales_avg: parseFloat(rows[0].weekly_sales_avg) || 0,
        monthly_sales_avg: parseFloat(rows[0].monthly_sales_avg) || 0,

        // Inventory metrics
        days_of_inventory: parseInt(rows[0].days_of_inventory) || 0,
        reorder_point: parseInt(rows[0].reorder_point) || 0,
        safety_stock: parseInt(rows[0].safety_stock) || 0,
        stock_status: rows[0].stock_status || 'Unknown',
        abc_class: rows[0].abc_class || 'C',

        // Financial metrics
        avg_margin_percent: parseFloat(rows[0].avg_margin_percent) || 0,
        total_revenue: parseFloat(rows[0].total_revenue) || 0,
        inventory_value: parseFloat(rows[0].inventory_value) || 0,
        turnover_rate: parseFloat(rows[0].turnover_rate) || 0,
        gmroi: parseFloat(rows[0].gmroi) || 0,
        cost_of_goods_sold: parseFloat(rows[0].cost_of_goods_sold) || 0,
        gross_profit: parseFloat(rows[0].gross_profit) || 0,

        // Lead time metrics
        avg_lead_time_days: parseInt(rows[0].avg_lead_time_days) || 0,
        current_lead_time: parseInt(rows[0].current_lead_time) || 0,
        target_lead_time: parseInt(rows[0].target_lead_time) || 14,
        lead_time_status: rows[0].lead_time_status || 'Unknown'
      },

      // Vendor performance (if available)
      vendor_performance: vendorMetrics.length ? {
        avg_lead_time_days: parseFloat(vendorMetrics[0].avg_lead_time_days) || 0,
        on_time_delivery_rate: parseFloat(vendorMetrics[0].on_time_delivery_rate) || 0,
        order_fill_rate: parseFloat(vendorMetrics[0].order_fill_rate) || 0,
        total_orders: parseInt(vendorMetrics[0].total_orders) || 0,
        total_late_orders: parseInt(vendorMetrics[0].total_late_orders) || 0,
        total_purchase_value: parseFloat(vendorMetrics[0].total_purchase_value) || 0,
        avg_order_value: parseFloat(vendorMetrics[0].avg_order_value) || 0
      } : null
    };

    res.json(product);
  } catch (error) {
    console.error('Error fetching product:', error);
    res.status(500).json({ error: 'Failed to fetch product' });
@@ -458,4 +572,202 @@ router.put('/:id', async (req, res) => {
  }
});

// Get product metrics
router.get('/:id/metrics', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { id } = req.params;

    // Get metrics from product_metrics table with inventory health data
    const [metrics] = await pool.query(`
      WITH inventory_status AS (
        SELECT
          p.product_id,
          CASE
            WHEN pm.daily_sales_avg = 0 THEN 'New'
            WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * 7) THEN 'Critical'
            WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * 14) THEN 'Reorder'
            WHEN p.stock_quantity > (pm.daily_sales_avg * 90) THEN 'Overstocked'
            ELSE 'Healthy'
          END as calculated_status
        FROM products p
        LEFT JOIN product_metrics pm ON p.product_id = pm.product_id
        WHERE p.product_id = ?
      )
      SELECT
        COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
        COALESCE(pm.weekly_sales_avg, 0) as weekly_sales_avg,
        COALESCE(pm.monthly_sales_avg, 0) as monthly_sales_avg,
        COALESCE(pm.days_of_inventory, 0) as days_of_inventory,
        COALESCE(pm.reorder_point, CEIL(COALESCE(pm.daily_sales_avg, 0) * 14)) as reorder_point,
        COALESCE(pm.safety_stock, CEIL(COALESCE(pm.daily_sales_avg, 0) * 7)) as safety_stock,
        COALESCE(pm.avg_margin_percent,
          ((p.price - COALESCE(p.cost_price, 0)) / NULLIF(p.price, 0)) * 100
        ) as avg_margin_percent,
        COALESCE(pm.total_revenue, 0) as total_revenue,
        COALESCE(pm.inventory_value, p.stock_quantity * COALESCE(p.cost_price, 0)) as inventory_value,
        COALESCE(pm.turnover_rate, 0) as turnover_rate,
        COALESCE(pm.abc_class, 'C') as abc_class,
        COALESCE(pm.stock_status, inv.calculated_status) as stock_status,
        COALESCE(pm.avg_lead_time_days, 0) as avg_lead_time_days,
        COALESCE(pm.current_lead_time, 0) as current_lead_time,
        COALESCE(pm.target_lead_time, 14) as target_lead_time,
        COALESCE(pm.lead_time_status, 'Unknown') as lead_time_status
      FROM products p
      LEFT JOIN product_metrics pm ON p.product_id = pm.product_id
      LEFT JOIN inventory_status inv ON p.product_id = inv.product_id
      WHERE p.product_id = ?
    `, [id, id]);

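    // Illustrative example of the CASE thresholds in inventory_status above
    // (assumed numbers, not from the data): with daily_sales_avg = 2, stock of
    // 14 units or fewer is 'Critical', 28 or fewer is 'Reorder', more than 180
    // is 'Overstocked', and anything in between is 'Healthy'.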
    if (!metrics.length) {
      // Return default metrics structure if no data found
      return res.json({
        daily_sales_avg: 0,
        weekly_sales_avg: 0,
        monthly_sales_avg: 0,
        days_of_inventory: 0,
        reorder_point: 0,
        safety_stock: 0,
        avg_margin_percent: 0,
        total_revenue: 0,
        inventory_value: 0,
        turnover_rate: 0,
        abc_class: 'C',
        stock_status: 'New',
        avg_lead_time_days: 0,
        current_lead_time: 0,
        target_lead_time: 14,
        lead_time_status: 'Unknown'
      });
    }

    res.json(metrics[0]);
  } catch (error) {
    console.error('Error fetching product metrics:', error);
    res.status(500).json({ error: 'Failed to fetch product metrics' });
  }
});

// Get product time series data
router.get('/:id/time-series', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { id } = req.params;
    const months = parseInt(req.query.months) || 12;

    // Get monthly sales data with prior-month comparisons for growth rates
    const [monthlySales] = await pool.query(`
      WITH monthly_data AS (
        SELECT
          CONCAT(year, '-', LPAD(month, 2, '0')) as month,
          total_quantity_sold as quantity,
          total_revenue as revenue,
          total_cost as cost,
          avg_price,
          profit_margin,
          inventory_value
        FROM product_time_aggregates
        WHERE product_id = ?
        ORDER BY year DESC, month DESC
        LIMIT ?
      )
      SELECT
        month,
        quantity,
        revenue,
        cost,
        avg_price,
        profit_margin,
        inventory_value,
        LAG(quantity) OVER (ORDER BY month) as prev_month_quantity,
        LAG(revenue) OVER (ORDER BY month) as prev_month_revenue
      FROM monthly_data
      ORDER BY month ASC
    `, [id, months]);

    // Calculate growth rates and format data
    const formattedMonthlySales = monthlySales.map(row => ({
      month: row.month,
      quantity: parseInt(row.quantity) || 0,
      revenue: parseFloat(row.revenue) || 0,
      cost: parseFloat(row.cost) || 0,
      avg_price: parseFloat(row.avg_price) || 0,
      profit_margin: parseFloat(row.profit_margin) || 0,
      inventory_value: parseFloat(row.inventory_value) || 0,
      quantity_growth: row.prev_month_quantity ?
        ((row.quantity - row.prev_month_quantity) / row.prev_month_quantity) * 100 : 0,
      revenue_growth: row.prev_month_revenue ?
        ((row.revenue - row.prev_month_revenue) / row.prev_month_revenue) * 100 : 0
    }));

    // Get recent orders with customer info and status
    const [recentOrders] = await pool.query(`
      SELECT
        DATE_FORMAT(date, '%Y-%m-%d') as date,
        order_number,
        quantity,
        price,
        discount,
        tax,
        shipping,
        customer,
        status,
        payment_method
      FROM orders
      WHERE product_id = ?
        AND canceled = false
      ORDER BY date DESC
      LIMIT 10
    `, [id]);

    // Get recent purchase orders with detailed status
    const [recentPurchases] = await pool.query(`
      SELECT
        DATE_FORMAT(date, '%Y-%m-%d') as date,
        DATE_FORMAT(expected_date, '%Y-%m-%d') as expected_date,
        DATE_FORMAT(received_date, '%Y-%m-%d') as received_date,
        po_id,
        ordered,
        received,
        status,
        cost_price,
        notes,
        CASE
          WHEN received_date IS NOT NULL THEN
            DATEDIFF(received_date, date)
          WHEN expected_date < CURDATE() AND status != 'received' THEN
            DATEDIFF(CURDATE(), expected_date)
          ELSE NULL
        END as lead_time_days
      FROM purchase_orders
      WHERE product_id = ?
      ORDER BY date DESC
      LIMIT 10
    `, [id]);

    res.json({
      monthly_sales: formattedMonthlySales,
      recent_orders: recentOrders.map(order => ({
        ...order,
        price: parseFloat(order.price),
        discount: parseFloat(order.discount),
        tax: parseFloat(order.tax),
        shipping: parseFloat(order.shipping),
        quantity: parseInt(order.quantity)
      })),
      recent_purchases: recentPurchases.map(po => ({
        ...po,
        ordered: parseInt(po.ordered),
        received: parseInt(po.received),
        cost_price: parseFloat(po.cost_price),
        lead_time_days: po.lead_time_days ? parseInt(po.lead_time_days) : null
      }))
    });
  } catch (error) {
    console.error('Error fetching product time series:', error);
    res.status(500).json({ error: 'Failed to fetch product time series' });
  }
});

module.exports = router;
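
For context (not part of this commit): every handler above reads `req.app.locals.pool`, so the application is assumed to attach a `mysql2/promise` pool to `app.locals` and mount this router before serving requests. A minimal wiring sketch follows; the file path, the `/api/products` prefix, the database settings, and the port are illustrative assumptions rather than anything confirmed by the diff.

// Hypothetical app wiring (illustrative only; names and settings are assumptions)
const express = require('express');
const mysql = require('mysql2/promise');
const productsRouter = require('./routes/products'); // assumed path to this router file

const app = express();
app.use(express.json());

// The route handlers read req.app.locals.pool, so attach a mysql2 pool here.
app.locals.pool = mysql.createPool({
  host: process.env.DB_HOST || 'localhost',
  user: process.env.DB_USER || 'root',
  password: process.env.DB_PASSWORD || '',
  database: process.env.DB_NAME || 'inventory',
  waitForConnections: true,
  connectionLimit: 10
});

app.use('/api/products', productsRouter);

app.listen(process.env.PORT || 3000);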