Put back files
This commit is contained in:
40
inventory-server/src/middleware/cors.js
Normal file
40
inventory-server/src/middleware/cors.js
Normal file
@@ -0,0 +1,40 @@
|
||||
const cors = require('cors');

// Origins permitted to call the API. Strings must match the Origin header
// exactly; the RegExps admit private-LAN clients (192.168.x.x and 10.x.x.x,
// any port, http only).
const ALLOWED_ORIGINS = [
  'https://inventory.kent.pw',
  'http://localhost:5175',
  'https://acot.site',
  /^http:\/\/192\.168\.\d+\.\d+(:\d+)?$/,
  /^http:\/\/10\.\d+\.\d+\.\d+(:\d+)?$/
];

// Single CORS middleware for all endpoints.
// Uses an origin callback instead of passing the array directly: with a plain
// array the cors package silently omits CORS headers for disallowed origins,
// so the 'CORS not allowed' error that corsErrorHandler (below) reports would
// never be produced. The callback makes disallowed origins an explicit error
// that flows into the Express error-handling chain.
const corsMiddleware = cors({
  origin(origin, callback) {
    // Non-browser clients (curl, server-to-server) send no Origin header;
    // let them through — CORS only governs browsers.
    if (!origin) {
      return callback(null, true);
    }
    const isAllowed = ALLOWED_ORIGINS.some((entry) =>
      entry instanceof RegExp ? entry.test(origin) : entry === origin
    );
    return isAllowed
      ? callback(null, true)
      : callback(new Error('CORS not allowed'));
  },
  methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization'],
  exposedHeaders: ['Content-Type'],
  credentials: true
});
|
||||
|
||||
// Error handler for CORS failures.
// Rejections raised by the CORS middleware carry the message
// 'CORS not allowed'; those are logged with full request context and
// answered with a 403 JSON body. Any other error is forwarded untouched
// to the next error handler.
const corsErrorHandler = (err, req, res, next) => {
  if (err.message !== 'CORS not allowed') {
    next(err);
    return;
  }

  console.error('CORS Error:', {
    origin: req.get('Origin'),
    method: req.method,
    path: req.path,
    headers: req.headers
  });

  res.status(403).json({
    error: 'CORS not allowed',
    origin: req.get('Origin'),
    message: 'Origin not in allowed list: https://inventory.kent.pw, https://acot.site, localhost:5175, 192.168.x.x, or 10.x.x.x'
  });
};
|
||||
|
||||
// Public surface of this module: the configured CORS middleware plus its
// companion error handler (mount the handler after the middleware so that
// CORS rejections reach it).
module.exports = {
  corsMiddleware,
  corsErrorHandler
};
|
||||
319
inventory-server/src/routes/ai-prompts.js
Normal file
319
inventory-server/src/routes/ai-prompts.js
Normal file
@@ -0,0 +1,319 @@
|
||||
// Express router exposing CRUD endpoints for the ai_prompts table.
// The pg connection pool is taken from req.app.locals.pool in each handler.
const express = require('express');
const router = express.Router();
|
||||
|
||||
// Get all AI prompts
// GET / — return every ai_prompts row, ordered by type and then company.
// Responds 500 with a details field if the pool is missing or the query fails.
router.get('/', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const listSql = `
      SELECT * FROM ai_prompts
      ORDER BY prompt_type ASC, company ASC
    `;
    const { rows } = await pool.query(listSql);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching AI prompts:', error);
    res.status(500).json({
      error: 'Failed to fetch AI prompts',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Get prompt by type (general, system, company_specific)
// GET /by-type?type=...&company=...
// NOTE: this route is registered BEFORE '/:id'. Express matches routes in
// registration order, so if '/:id' were registered first, a request to
// '/by-type' would be captured with id === 'by-type' and this handler would
// never run.
router.get('/by-type', async (req, res) => {
  try {
    const { type, company } = req.query;
    const pool = req.app.locals.pool;

    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Validate prompt type
    if (!type || !['general', 'system', 'company_specific'].includes(type)) {
      return res.status(400).json({
        error: 'Valid type query parameter is required (general, system, or company_specific)'
      });
    }

    // For company_specific type, company ID is required
    if (type === 'company_specific' && !company) {
      return res.status(400).json({
        error: 'Company ID is required for company_specific prompt type'
      });
    }

    // For general and system types, company should not be provided
    if ((type === 'general' || type === 'system') && company) {
      return res.status(400).json({
        error: 'Company ID should not be provided for general or system prompt types'
      });
    }

    // Build the query based on the type
    let query, params;
    if (type === 'company_specific') {
      query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1 AND company = $2';
      params = [type, company];
    } else {
      query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1';
      params = [type];
    }

    // Execute the query
    const result = await pool.query(query, params);

    // Check if any prompt was found
    if (result.rows.length === 0) {
      let errorMessage;
      if (type === 'company_specific') {
        errorMessage = `AI prompt not found for company ${company}`;
      } else {
        errorMessage = `${type.charAt(0).toUpperCase() + type.slice(1)} AI prompt not found`;
      }
      return res.status(404).json({ error: errorMessage });
    }

    // Return the first matching prompt
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error fetching AI prompt by type:', error);
    res.status(500).json({
      error: 'Failed to fetch AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});

// Get prompt by ID
// GET /:id — fetch a single prompt row; 404 when the id does not exist.
router.get('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const result = await pool.query(`
      SELECT * FROM ai_prompts
      WHERE id = $1
    `, [id]);

    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'AI prompt not found' });
    }

    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error fetching AI prompt:', error);
    res.status(500).json({
      error: 'Failed to fetch AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Create new AI prompt
// POST / — insert a prompt. Body: { prompt_text, prompt_type, company }.
// prompt_type must be 'general', 'company_specific' or 'system'; company is
// required for company_specific and must be absent otherwise. Responds 201
// with the created row, 409 on unique-constraint conflicts, 500 otherwise.
router.post('/', async (req, res) => {
  try {
    const { prompt_text, prompt_type, company } = req.body;

    // Payload validation (kept in sync with the PUT handler).
    if (!prompt_text || !prompt_type) {
      return res.status(400).json({ error: 'Prompt text and type are required' });
    }
    if (!['general', 'company_specific', 'system'].includes(prompt_type)) {
      return res.status(400).json({ error: 'Prompt type must be either "general", "company_specific", or "system"' });
    }
    if (prompt_type === 'company_specific' && !company) {
      return res.status(400).json({ error: 'Company is required for company-specific prompts' });
    }
    if ((prompt_type === 'general' || prompt_type === 'system') && company) {
      return res.status(400).json({ error: 'Company should not be provided for general or system prompts' });
    }

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const insertSql = `
      INSERT INTO ai_prompts (
        prompt_text,
        prompt_type,
        company
      ) VALUES ($1, $2, $3)
      RETURNING *
    `;
    const result = await pool.query(insertSql, [prompt_text, prompt_type, company]);

    res.status(201).json(result.rows[0]);
  } catch (error) {
    console.error('Error creating AI prompt:', error);

    // Map unique-constraint violations onto specific 409 responses.
    if (error instanceof Error && error.message.includes('unique constraint')) {
      const conflictMessages = [
        ['unique_company_prompt', 'A prompt already exists for this company'],
        ['idx_unique_general_prompt', 'A general prompt already exists'],
        ['idx_unique_system_prompt', 'A system prompt already exists']
      ];
      for (const [constraintName, message] of conflictMessages) {
        if (error.message.includes(constraintName)) {
          return res.status(409).json({ error: message, details: error.message });
        }
      }
    }

    res.status(500).json({
      error: 'Failed to create AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Update AI prompt
// PUT /:id — full replacement of an existing prompt. Same payload rules as
// POST; responds 404 if the id does not exist, 409 on unique-constraint
// conflicts, 500 otherwise.
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const { prompt_text, prompt_type, company } = req.body;

    // Payload validation (kept in sync with the POST handler).
    if (!prompt_text || !prompt_type) {
      return res.status(400).json({ error: 'Prompt text and type are required' });
    }
    if (!['general', 'company_specific', 'system'].includes(prompt_type)) {
      return res.status(400).json({ error: 'Prompt type must be either "general", "company_specific", or "system"' });
    }
    if (prompt_type === 'company_specific' && !company) {
      return res.status(400).json({ error: 'Company is required for company-specific prompts' });
    }
    if ((prompt_type === 'general' || prompt_type === 'system') && company) {
      return res.status(400).json({ error: 'Company should not be provided for general or system prompts' });
    }

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Check if the prompt exists before attempting the update.
    const checkResult = await pool.query('SELECT * FROM ai_prompts WHERE id = $1', [id]);
    if (checkResult.rows.length === 0) {
      return res.status(404).json({ error: 'AI prompt not found' });
    }

    const updateSql = `
      UPDATE ai_prompts
      SET
        prompt_text = $1,
        prompt_type = $2,
        company = $3
      WHERE id = $4
      RETURNING *
    `;
    const result = await pool.query(updateSql, [prompt_text, prompt_type, company, id]);

    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error updating AI prompt:', error);

    // Map unique-constraint violations onto specific 409 responses.
    if (error instanceof Error && error.message.includes('unique constraint')) {
      const conflictMessages = [
        ['unique_company_prompt', 'A prompt already exists for this company'],
        ['idx_unique_general_prompt', 'A general prompt already exists'],
        ['idx_unique_system_prompt', 'A system prompt already exists']
      ];
      for (const [constraintName, message] of conflictMessages) {
        if (error.message.includes(constraintName)) {
          return res.status(409).json({ error: message, details: error.message });
        }
      }
    }

    res.status(500).json({
      error: 'Failed to update AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Delete AI prompt
// DELETE /:id — remove a prompt; 404 when no row matches the id.
router.delete('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // RETURNING * lets us distinguish "deleted" from "nothing matched".
    const deletion = await pool.query('DELETE FROM ai_prompts WHERE id = $1 RETURNING *', [id]);

    if (deletion.rows.length === 0) {
      return res.status(404).json({ error: 'AI prompt not found' });
    }

    res.json({ message: 'AI prompt deleted successfully' });
  } catch (error) {
    console.error('Error deleting AI prompt:', error);
    res.status(500).json({
      error: 'Failed to delete AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Error handling middleware
// Router-level catch-all: anything passed to next(err) within this router
// (and not handled above) is logged and answered with a generic 500.
// The 4-argument signature is what marks this as an error handler to Express.
router.use((err, req, res, next) => {
  console.error('AI prompts route error:', err);
  res.status(500).json({
    error: 'Internal server error',
    details: err.message
  });
});

module.exports = router;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1 +1,774 @@
|
||||
|
||||
// Express router for analytics/reporting endpoints (forecast, stats, profit,
// vendors, stock, pricing, categories). All handlers read the pg pool from
// req.app.locals.pool.
const express = require('express');
const router = express.Router();
|
||||
|
||||
// Forecasting: summarize sales for products received in a period by brand
// GET /forecast?brand=...&startDate=...&endDate=...&search=...
// Required: brand (exact match against COALESCE(products.brand,'Unbranded')).
// Optional: startDate/endDate (default: trailing 30-day window), search/q
// (ILIKE filter on product title). Returns up to 200 category rows, each with
// per-category sales aggregates and the contributing products.
router.get('/forecast', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    const brand = (req.query.brand || '').toString();
    // 'search' wins over legacy 'q'; empty/whitespace-only becomes null so the
    // SQL filter is skipped entirely.
    const titleSearch = (req.query.search || req.query.q || '').toString().trim() || null;
    const startDateStr = req.query.startDate;
    const endDateStr = req.query.endDate;

    if (!brand) {
      return res.status(400).json({ error: 'Missing required parameter: brand' });
    }

    // Default to last 30 days if no dates provided
    // (29 days back + today = a 30-day inclusive window).
    const endDate = endDateStr ? new Date(endDateStr) : new Date();
    const startDate = startDateStr ? new Date(startDateStr) : new Date(endDate.getTime() - 29 * 24 * 60 * 60 * 1000);

    // Normalize to date boundaries for consistency
    // (truncate both endpoints to UTC midnight before passing them as ::date).
    const startISO = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate())).toISOString();
    const endISO = new Date(Date.UTC(endDate.getUTCFullYear(), endDate.getUTCMonth(), endDate.getUTCDate())).toISOString();

    // Pipeline of CTEs:
    //   params                 — bind dates/brand/search once; days = inclusive window length
    //   category_path          — recursive 'A > B > C' breadcrumb per category
    //   product_first_received — first receiving date per product
    //   recent_products        — visible products of this brand first received in the window
    //   product_pick_category  — one category per product (deepest path, excluding
    //                            'Black Friday'/'Deals'), falling back to 'Uncategorized'
    //   product_sales          — units sold per product in the window (canceled orders excluded)
    // Final SELECT groups by category and JSON-aggregates the product details.
    const sql = `
      WITH params AS (
        SELECT
          $1::date AS start_date,
          $2::date AS end_date,
          $3::text AS brand,
          $4::text AS title_search,
          (($2::date - $1::date) + 1)::int AS days
      ),
      category_path AS (
        WITH RECURSIVE cp AS (
          SELECT c.cat_id, c.name, c.parent_id, c.name::text AS path
          FROM categories c WHERE c.parent_id IS NULL
          UNION ALL
          SELECT c.cat_id, c.name, c.parent_id, (cp.path || ' > ' || c.name)::text
          FROM categories c
          JOIN cp ON c.parent_id = cp.cat_id
        )
        SELECT * FROM cp
      ),
      product_first_received AS (
        SELECT
          p.pid,
          COALESCE(p.first_received::date, MIN(r.received_date)::date) AS first_received_date
        FROM products p
        LEFT JOIN receivings r ON r.pid = p.pid
        GROUP BY p.pid, p.first_received
      ),
      recent_products AS (
        SELECT p.pid
        FROM products p
        JOIN product_first_received fr ON fr.pid = p.pid
        JOIN params pr ON 1=1
        WHERE p.visible = true
          AND COALESCE(p.brand,'Unbranded') = pr.brand
          AND fr.first_received_date BETWEEN pr.start_date AND pr.end_date
          AND (pr.title_search IS NULL OR p.title ILIKE '%' || pr.title_search || '%')
      ),
      product_pick_category AS (
        (
          SELECT DISTINCT ON (pc.pid)
            pc.pid,
            c.name AS category_name,
            COALESCE(cp.path, c.name) AS path
          FROM product_categories pc
          JOIN categories c ON c.cat_id = pc.cat_id AND (c.type IS NULL OR c.type NOT IN (20,21))
          LEFT JOIN category_path cp ON cp.cat_id = c.cat_id
          WHERE pc.pid IN (SELECT pid FROM recent_products)
            AND (cp.path IS NULL OR (
              cp.path NOT ILIKE '%Black Friday%'
              AND cp.path NOT ILIKE '%Deals%'
            ))
            AND COALESCE(c.name, '') NOT IN ('Black Friday', 'Deals')
          ORDER BY pc.pid, length(COALESCE(cp.path,'')) DESC
        )
        UNION ALL
        (
          SELECT
            rp.pid,
            'Uncategorized'::text AS category_name,
            'Uncategorized'::text AS path
          FROM recent_products rp
          WHERE NOT EXISTS (
            SELECT 1
            FROM product_categories pc
            JOIN categories c ON c.cat_id = pc.cat_id AND (c.type IS NULL OR c.type NOT IN (20,21))
            LEFT JOIN category_path cp ON cp.cat_id = c.cat_id
            WHERE pc.pid = rp.pid
              AND (cp.path IS NULL OR (
                cp.path NOT ILIKE '%Black Friday%'
                AND cp.path NOT ILIKE '%Deals%'
              ))
              AND COALESCE(c.name, '') NOT IN ('Black Friday', 'Deals')
          )
        )
      ),
      product_sales AS (
        SELECT
          p.pid,
          p.title,
          p.sku,
          COALESCE(p.stock_quantity, 0) AS stock_quantity,
          COALESCE(p.price, 0) AS price,
          COALESCE(SUM(o.quantity), 0) AS total_sold
        FROM recent_products rp
        JOIN products p ON p.pid = rp.pid
        LEFT JOIN params pr ON true
        LEFT JOIN orders o ON o.pid = p.pid
          AND o.date::date BETWEEN pr.start_date AND pr.end_date
          AND (o.canceled IS DISTINCT FROM TRUE)
        GROUP BY p.pid, p.title, p.sku, p.stock_quantity, p.price
      )
      SELECT
        ppc.category_name,
        ppc.path,
        COUNT(ps.pid) AS num_products,
        SUM(ps.total_sold) AS total_sold,
        ROUND(AVG(COALESCE(ps.total_sold,0) / NULLIF(pr.days,0)), 2) AS avg_daily_sales,
        ROUND(AVG(COALESCE(ps.total_sold,0)), 2) AS avg_total_sold,
        MIN(ps.total_sold) AS min_total_sold,
        MAX(ps.total_sold) AS max_total_sold,
        JSON_AGG(
          JSON_BUILD_OBJECT(
            'pid', ps.pid,
            'title', ps.title,
            'sku', ps.sku,
            'total_sold', ps.total_sold,
            'categoryPath', ppc.path
          )
        ) AS products
      FROM product_sales ps
      JOIN product_pick_category ppc ON ppc.pid = ps.pid
      JOIN params pr ON true
      GROUP BY ppc.category_name, ppc.path
      HAVING SUM(ps.total_sold) >= 0
      ORDER BY (ppc.category_name = 'Uncategorized') ASC, avg_total_sold DESC NULLS LAST
      LIMIT 200;
    `;

    const { rows } = await pool.query(sql, [startISO, endISO, brand, titleSearch]);

    // Normalize/shape response keys to match front-end expectations
    // (pg returns numerics as strings; coerce each aggregate to Number).
    const shaped = rows.map(r => ({
      category_name: r.category_name,
      path: r.path,
      avg_daily_sales: Number(r.avg_daily_sales) || 0,
      total_sold: Number(r.total_sold) || 0,
      num_products: Number(r.num_products) || 0,
      avgTotalSold: Number(r.avg_total_sold) || 0,
      minSold: Number(r.min_total_sold) || 0,
      maxSold: Number(r.max_total_sold) || 0,
      products: Array.isArray(r.products) ? r.products : []
    }));

    res.json(shaped);
  } catch (error) {
    console.error('Error fetching forecast data:', error);
    res.status(500).json({ error: 'Failed to fetch forecast data' });
  }
});
|
||||
|
||||
// Get overall analytics stats
// GET /stats — single-row summary: average 30-day margin/markup/stock-turn/ASP
// across products with sales, plus distinct vendor and category counts.
router.get('/stats', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // The three CTEs each yield one row, so the CROSS JOINs produce exactly
    // one combined row, destructured directly into `results`.
    const { rows: [results] } = await pool.query(`
      WITH vendor_count AS (
        SELECT COUNT(DISTINCT vendor_name) AS count
        FROM vendor_metrics
      ),
      category_count AS (
        SELECT COUNT(DISTINCT category_id) AS count
        FROM category_metrics
      ),
      metrics_summary AS (
        SELECT
          AVG(margin_30d) AS avg_profit_margin,
          AVG(markup_30d) AS avg_markup,
          AVG(stockturn_30d) AS avg_stock_turnover,
          AVG(asp_30d) AS avg_order_value
        FROM product_metrics
        WHERE sales_30d > 0
      )
      SELECT
        COALESCE(ms.avg_profit_margin, 0) AS profitMargin,
        COALESCE(ms.avg_markup, 0) AS averageMarkup,
        COALESCE(ms.avg_stock_turnover, 0) AS stockTurnoverRate,
        COALESCE(vc.count, 0) AS vendorCount,
        COALESCE(cc.count, 0) AS categoryCount,
        COALESCE(ms.avg_order_value, 0) AS averageOrderValue
      FROM metrics_summary ms
      CROSS JOIN vendor_count vc
      CROSS JOIN category_count cc
    `);

    // Ensure all values are numbers
    // Postgres folds unquoted aliases to lowercase, so the camelCase aliases
    // above come back as all-lowercase keys (profitmargin, vendorcount, ...).
    const stats = {
      profitMargin: Number(results.profitmargin) || 0,
      averageMarkup: Number(results.averagemarkup) || 0,
      stockTurnoverRate: Number(results.stockturnoverrate) || 0,
      vendorCount: Number(results.vendorcount) || 0,
      categoryCount: Number(results.categorycount) || 0,
      averageOrderValue: Number(results.averageordervalue) || 0
    };

    res.json(stats);
  } catch (error) {
    console.error('Error fetching analytics stats:', error);
    res.status(500).json({ error: 'Failed to fetch analytics stats' });
  }
});
|
||||
|
||||
// Get profit analysis data
// GET /profit — three result sets: top-10 categories by 30d revenue, daily
// profit margin over the last 30 days, and top-10 products by 30d margin.
router.get('/profit', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Get profit margins by category with full path
    // (recursive CTE builds an 'A > B > C' breadcrumb per category).
    const { rows: byCategory } = await pool.query(`
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL

        UNION ALL

        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      )
      SELECT
        cm.category_name as category,
        COALESCE(cp.path, cm.category_name) as categorypath,
        cm.avg_margin_30d as profitmargin,
        cm.revenue_30d as revenue,
        cm.cogs_30d as cost
      FROM category_metrics cm
      LEFT JOIN category_path cp ON cm.category_id = cp.cat_id
      WHERE cm.revenue_30d > 0
      ORDER BY cm.revenue_30d DESC
      LIMIT 10
    `);

    // Get profit margin over time
    // (generate_series guarantees one row per day even with no snapshot data).
    const { rows: overTime } = await pool.query(`
      WITH time_series AS (
        SELECT
          date_trunc('day', generate_series(
            CURRENT_DATE - INTERVAL '30 days',
            CURRENT_DATE,
            '1 day'::interval
          ))::date AS date
      ),
      daily_profits AS (
        SELECT
          snapshot_date as date,
          SUM(net_revenue) as revenue,
          SUM(cogs) as cost,
          CASE
            WHEN SUM(net_revenue) > 0
            THEN (SUM(net_revenue - cogs) / SUM(net_revenue)) * 100
            ELSE 0
          END as profit_margin
        FROM daily_product_snapshots
        WHERE snapshot_date >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY snapshot_date
      )
      SELECT
        to_char(ts.date, 'YYYY-MM-DD') as date,
        COALESCE(dp.profit_margin, 0) as profitmargin,
        COALESCE(dp.revenue, 0) as revenue,
        COALESCE(dp.cost, 0) as cost
      FROM time_series ts
      LEFT JOIN daily_profits dp ON ts.date = dp.date
      ORDER BY ts.date
    `);

    // Get top performing products by profit margin
    // NOTE(review): the CTE below is named product_categories, the same name
    // as the real table it selects FROM; inside its own (non-recursive)
    // definition the table is meant, outside it the CTE — confirm this is
    // intentional before renaming.
    const { rows: topProducts } = await pool.query(`
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL

        UNION ALL

        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      ),
      product_categories AS (
        SELECT
          pc.pid,
          c.name as category,
          COALESCE(cp.path, c.name) as categorypath
        FROM product_categories pc
        JOIN categories c ON pc.cat_id = c.cat_id
        LEFT JOIN category_path cp ON c.cat_id = cp.cat_id
      )
      SELECT
        pm.title as product,
        COALESCE(pc.category, 'Uncategorized') as category,
        COALESCE(pc.categorypath, 'Uncategorized') as categorypath,
        pm.margin_30d as profitmargin,
        pm.revenue_30d as revenue,
        pm.cogs_30d as cost
      FROM product_metrics pm
      LEFT JOIN product_categories pc ON pm.pid = pc.pid
      WHERE pm.revenue_30d > 100
        AND pm.margin_30d > 0
      ORDER BY pm.margin_30d DESC
      LIMIT 10
    `);

    res.json({ byCategory, overTime, topProducts });
  } catch (error) {
    console.error('Error fetching profit analysis:', error);
    res.status(500).json({ error: 'Failed to fetch profit analysis' });
  }
});
|
||||
|
||||
// Get vendor performance data
// GET /vendors — { performance, comparison, trends } built from the
// vendor_metrics table. Responses are marked non-cacheable.
router.get('/vendors', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Set cache control headers to prevent 304
    res.set({
      'Cache-Control': 'no-cache, no-store, must-revalidate',
      'Pragma': 'no-cache',
      'Expires': '0'
    });

    console.log('Fetching vendor performance data...');

    // Get vendor performance metrics from the vendor_metrics table
    // (top 20 vendors by 30-day revenue).
    const { rows: rawPerformance } = await pool.query(`
      SELECT
        vendor_name as vendor,
        revenue_30d as sales_volume,
        avg_margin_30d as profit_margin,
        COALESCE(
          sales_30d / NULLIF(current_stock_units, 0),
          0
        ) as stock_turnover,
        product_count,
        -- Use actual growth metrics from the vendor_metrics table
        sales_growth_30d_vs_prev as growth
      FROM vendor_metrics
      WHERE revenue_30d > 0
      ORDER BY revenue_30d DESC
      LIMIT 20
    `);

    // Format the performance data
    // (pg returns numerics as strings; coerce everything to Number).
    const performance = rawPerformance.map(vendor => ({
      vendor: vendor.vendor,
      salesVolume: Number(vendor.sales_volume) || 0,
      profitMargin: Number(vendor.profit_margin) || 0,
      stockTurnover: Number(vendor.stock_turnover) || 0,
      productCount: Number(vendor.product_count) || 0,
      growth: Number(vendor.growth) || 0
    }));

    // Get vendor comparison metrics (sales per product vs margin)
    const { rows: rawComparison } = await pool.query(`
      SELECT
        vendor_name as vendor,
        CASE
          WHEN active_product_count > 0
          THEN revenue_30d / active_product_count
          ELSE 0
        END as sales_per_product,
        avg_margin_30d as average_margin,
        product_count as size
      FROM vendor_metrics
      WHERE active_product_count > 0
      ORDER BY sales_per_product DESC
      LIMIT 10
    `);

    // Transform comparison data
    const comparison = rawComparison.map(item => ({
      vendor: item.vendor,
      salesPerProduct: Number(item.sales_per_product) || 0,
      averageMargin: Number(item.average_margin) || 0,
      size: Number(item.size) || 0
    }));

    console.log('Performance data ready. Sending response...');

    // Return complete structure that the front-end expects
    res.json({
      performance,
      comparison,
      // Add empty trends array to complete the structure
      trends: []
    });
  } catch (error) {
    console.error('Error fetching vendor performance:', error);
    res.status(500).json({ error: 'Failed to fetch vendor performance data' });
  }
});
|
||||
|
||||
// Get stock analysis data
// GET /stock — { turnoverByCategory, stockLevels, criticalItems }:
// top-10 categories by 30d stock turn, daily in/low/out-of-stock counts for
// the last 30 days, and the 10 products closest to stockout.
router.get('/stock', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    console.log('Fetching stock analysis data...');

    // Use the new metrics tables to get data

    // Get turnover by category
    // (inner recursive CTE builds the category breadcrumb path).
    const { rows: turnoverByCategory } = await pool.query(`
      WITH category_metrics_with_path AS (
        WITH RECURSIVE category_path AS (
          SELECT
            c.cat_id,
            c.name,
            c.parent_id,
            c.name::text as path
          FROM categories c
          WHERE c.parent_id IS NULL

          UNION ALL

          SELECT
            c.cat_id,
            c.name,
            c.parent_id,
            (cp.path || ' > ' || c.name)::text
          FROM categories c
          JOIN category_path cp ON c.parent_id = cp.cat_id
        )
        SELECT
          cm.category_id,
          cm.category_name,
          cp.path as category_path,
          cm.current_stock_units,
          cm.sales_30d,
          cm.stock_turn_30d
        FROM category_metrics cm
        LEFT JOIN category_path cp ON cm.category_id = cp.cat_id
        WHERE cm.sales_30d > 0
      )
      SELECT
        category_name as category,
        COALESCE(stock_turn_30d, 0) as turnoverRate,
        current_stock_units as averageStock,
        sales_30d as totalSales
      FROM category_metrics_with_path
      ORDER BY stock_turn_30d DESC NULLS LAST
      LIMIT 10
    `);

    // Get stock levels over time (last 30 days)
    // Buckets: in stock (> 5 units), low stock (1-5), out of stock (0);
    // generate_series guarantees one row per day even without snapshots.
    const { rows: stockLevels } = await pool.query(`
      WITH date_range AS (
        SELECT generate_series(
          CURRENT_DATE - INTERVAL '30 days',
          CURRENT_DATE,
          '1 day'::interval
        )::date AS date
      ),
      daily_stock_counts AS (
        SELECT
          snapshot_date,
          COUNT(DISTINCT pid) as total_products,
          COUNT(DISTINCT CASE WHEN eod_stock_quantity > 5 THEN pid END) as in_stock,
          COUNT(DISTINCT CASE WHEN eod_stock_quantity <= 5 AND eod_stock_quantity > 0 THEN pid END) as low_stock,
          COUNT(DISTINCT CASE WHEN eod_stock_quantity = 0 THEN pid END) as out_of_stock
        FROM daily_product_snapshots
        WHERE snapshot_date >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY snapshot_date
      )
      SELECT
        to_char(dr.date, 'YYYY-MM-DD') as date,
        COALESCE(dsc.in_stock, 0) as inStock,
        COALESCE(dsc.low_stock, 0) as lowStock,
        COALESCE(dsc.out_of_stock, 0) as outOfStock
      FROM date_range dr
      LEFT JOIN daily_stock_counts dsc ON dr.date = dsc.snapshot_date
      ORDER BY dr.date
    `);

    // Get critical items (products that need reordering)
    // Replenishable, visible products with recent sales whose stock is at or
    // below 2x safety stock, ordered by estimated days until stockout
    // (999 = sentinel for "no sales velocity").
    const { rows: criticalItems } = await pool.query(`
      SELECT
        pm.title as product,
        pm.sku as sku,
        pm.current_stock as stockQuantity,
        COALESCE(pm.config_safety_stock, 0) as reorderPoint,
        COALESCE(pm.stockturn_30d, 0) as turnoverRate,
        CASE
          WHEN pm.sales_velocity_daily > 0
          THEN ROUND(pm.current_stock / pm.sales_velocity_daily)
          ELSE 999
        END as daysUntilStockout
      FROM product_metrics pm
      WHERE pm.is_visible = true
        AND pm.is_replenishable = true
        AND pm.sales_30d > 0
        AND pm.current_stock <= pm.config_safety_stock * 2
      ORDER BY
        CASE
          WHEN pm.sales_velocity_daily > 0
          THEN pm.current_stock / pm.sales_velocity_daily
          ELSE 999
        END ASC,
        pm.revenue_30d DESC
      LIMIT 10
    `);

    res.json({
      turnoverByCategory,
      stockLevels,
      criticalItems
    });
  } catch (error) {
    console.error('Error fetching stock analysis:', error);
    res.status(500).json({ error: 'Failed to fetch stock analysis', details: error.message });
  }
});
|
||||
|
||||
// Get price analysis data
// GET /pricing — { pricePoints, elasticity, recommendations } over the last
// 30 days of orders. Recommendations apply a simple heuristic: +10% price on
// fast movers (avg qty > 10), -10% on slow movers (avg qty < 2).
router.get('/pricing', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Get price points analysis
    // NOTE(review): the LEFT JOIN to orders combined with the WHERE filter on
    // o.date behaves like an INNER JOIN (rows with no orders are dropped) —
    // confirm whether products without sales were meant to appear.
    const { rows: pricePoints } = await pool.query(`
      SELECT
        CAST(p.price AS DECIMAL(15,3)) as price,
        CAST(SUM(o.quantity) AS DECIMAL(15,3)) as salesVolume,
        CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
        c.name as category
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY p.price, c.name
      HAVING SUM(o.quantity) > 0
      ORDER BY revenue DESC
      LIMIT 50
    `);

    // Get price elasticity data (price changes vs demand)
    // One row per day: average selling price vs total units sold.
    const { rows: elasticity } = await pool.query(`
      SELECT
        to_char(o.date, 'YYYY-MM-DD') as date,
        CAST(AVG(o.price) AS DECIMAL(15,3)) as price,
        CAST(SUM(o.quantity) AS DECIMAL(15,3)) as demand
      FROM orders o
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY to_char(o.date, 'YYYY-MM-DD')
      ORDER BY date
    `);

    // Get price optimization recommendations
    // HAVING keeps only products whose recommended price differs from the
    // current price; results are ordered by projected revenue uplift.
    const { rows: recommendations } = await pool.query(`
      SELECT
        p.title as product,
        CAST(p.price AS DECIMAL(15,3)) as currentPrice,
        CAST(
          ROUND(
            CASE
              WHEN AVG(o.quantity) > 10 THEN p.price * 1.1
              WHEN AVG(o.quantity) < 2 THEN p.price * 0.9
              ELSE p.price
            END, 2
          ) AS DECIMAL(15,3)
        ) as recommendedPrice,
        CAST(
          ROUND(
            SUM(o.price * o.quantity) *
            CASE
              WHEN AVG(o.quantity) > 10 THEN 1.15
              WHEN AVG(o.quantity) < 2 THEN 0.95
              ELSE 1
            END, 2
          ) AS DECIMAL(15,3)
        ) as potentialRevenue,
        CASE
          WHEN AVG(o.quantity) > 10 THEN 85
          WHEN AVG(o.quantity) < 2 THEN 75
          ELSE 65
        END as confidence
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY p.pid, p.price, p.title
      HAVING ABS(
        CAST(
          ROUND(
            CASE
              WHEN AVG(o.quantity) > 10 THEN p.price * 1.1
              WHEN AVG(o.quantity) < 2 THEN p.price * 0.9
              ELSE p.price
            END, 2
          ) AS DECIMAL(15,3)
        ) - CAST(p.price AS DECIMAL(15,3))
      ) > 0
      ORDER BY
        CAST(
          ROUND(
            SUM(o.price * o.quantity) *
            CASE
              WHEN AVG(o.quantity) > 10 THEN 1.15
              WHEN AVG(o.quantity) < 2 THEN 0.95
              ELSE 1
            END, 2
          ) AS DECIMAL(15,3)
        ) - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) DESC
      LIMIT 10
    `);

    res.json({ pricePoints, elasticity, recommendations });
  } catch (error) {
    console.error('Error fetching price analysis:', error);
    res.status(500).json({ error: 'Failed to fetch price analysis' });
  }
});
|
||||
|
||||
// GET /categories — category performance data. Returns:
//   performance:  top-10 categories by 60-day revenue, with profit, 30d-vs-prev
//                 growth %, and product count
//   distribution: top-6 categories by 30-day revenue share
//   trends:       monthly sales per category over the last 6 months
// NOTE(review): unquoted aliases (categoryPath, productCount) fold to lowercase
// in the result keys — confirm the frontend expects that spelling.
router.get('/categories', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Common CTE for category paths: walks the parent_id tree from the roots
    // and builds a 'Root > Child > Grandchild' display path per category.
    const categoryPathCTE = `
      WITH RECURSIVE category_path AS (
        SELECT 
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL
        
        UNION ALL
        
        SELECT 
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      )
    `;

    // Get category performance metrics with full path.
    // monthly_sales splits the 60-day window into current/previous 30-day
    // buckets so growth can be computed per (name, path).
    // NOTE(review): LEFT JOIN orders + WHERE o.date >= ... acts as an INNER
    // JOIN, so categories with no recent orders drop out — confirm intended.
    const { rows: performance } = await pool.query(`
      ${categoryPathCTE},
      monthly_sales AS (
        SELECT 
          c.name,
          cp.path,
          SUM(CASE 
            WHEN o.date >= CURRENT_DATE - INTERVAL '30 days' 
            THEN o.price * o.quantity 
            ELSE 0 
          END) as current_month,
          SUM(CASE 
            WHEN o.date >= CURRENT_DATE - INTERVAL '60 days' 
              AND o.date < CURRENT_DATE - INTERVAL '30 days' 
            THEN o.price * o.quantity 
            ELSE 0 
          END) as previous_month
        FROM products p
        LEFT JOIN orders o ON p.pid = o.pid
        JOIN product_categories pc ON p.pid = pc.pid
        JOIN categories c ON pc.cat_id = c.cat_id
        JOIN category_path cp ON c.cat_id = cp.cat_id
        WHERE o.date >= CURRENT_DATE - INTERVAL '60 days'
        GROUP BY c.name, cp.path
      )
      SELECT 
        c.name as category,
        cp.path as categoryPath,
        SUM(o.price * o.quantity) as revenue,
        SUM(o.price * o.quantity - p.cost_price * o.quantity) as profit,
        ROUND(
          ((ms.current_month / NULLIF(ms.previous_month, 0)) - 1) * 100,
          1
        ) as growth,
        COUNT(DISTINCT p.pid) as productCount
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      LEFT JOIN monthly_sales ms ON c.name = ms.name AND cp.path = ms.path
      WHERE o.date >= CURRENT_DATE - INTERVAL '60 days'
      GROUP BY c.name, cp.path, ms.current_month, ms.previous_month
      HAVING SUM(o.price * o.quantity) > 0
      ORDER BY revenue DESC
      LIMIT 10
    `);

    // Get category revenue distribution with full path (last 30 days, top 6).
    const { rows: distribution } = await pool.query(`
      ${categoryPathCTE}
      SELECT 
        c.name as category,
        cp.path as categoryPath,
        SUM(o.price * o.quantity) as value
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY c.name, cp.path
      HAVING SUM(o.price * o.quantity) > 0
      ORDER BY value DESC
      LIMIT 6
    `);

    // Get category sales trends with full path. Grouping on both the display
    // month ('Mon YYYY') and sortable month ('YYYY-MM') lets us order
    // chronologically while returning a human-readable label.
    const { rows: trends } = await pool.query(`
      ${categoryPathCTE}
      SELECT 
        c.name as category,
        cp.path as categoryPath,
        to_char(o.date, 'Mon YYYY') as month,
        SUM(o.price * o.quantity) as sales
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      WHERE o.date >= CURRENT_DATE - INTERVAL '6 months'
      GROUP BY 
        c.name,
        cp.path,
        to_char(o.date, 'Mon YYYY'),
        to_char(o.date, 'YYYY-MM')
      ORDER BY 
        c.name,
        to_char(o.date, 'YYYY-MM')
    `);

    res.json({ performance, distribution, trends });
  } catch (error) {
    console.error('Error fetching category performance:', error);
    res.status(500).json({ error: 'Failed to fetch category performance' });
  }
});

module.exports = router;
|
||||
|
||||
@@ -1 +1,284 @@
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
|
||||
|
||||
// --- Configuration & Helpers ---
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200;

// Tiny factories keep the whitelist below compact and uniform.
const numCol = (dbCol) => ({ dbCol, type: 'number' });
const strCol = (dbCol) => ({ dbCol, type: 'string' });

// Whitelist mapping API query keys to brand_metrics columns.
// Anything not listed here is rejected by getSafeColumnInfo.
const COLUMN_MAP = {
  brandName: strCol('bm.brand_name'),
  productCount: numCol('bm.product_count'),
  activeProductCount: numCol('bm.active_product_count'),
  replenishableProductCount: numCol('bm.replenishable_product_count'),
  currentStockUnits: numCol('bm.current_stock_units'),
  currentStockCost: numCol('bm.current_stock_cost'),
  currentStockRetail: numCol('bm.current_stock_retail'),
  sales7d: numCol('bm.sales_7d'),
  revenue7d: numCol('bm.revenue_7d'),
  sales30d: numCol('bm.sales_30d'),
  revenue30d: numCol('bm.revenue_30d'),
  profit30d: numCol('bm.profit_30d'),
  cogs30d: numCol('bm.cogs_30d'),
  sales365d: numCol('bm.sales_365d'),
  revenue365d: numCol('bm.revenue_365d'),
  lifetimeSales: numCol('bm.lifetime_sales'),
  lifetimeRevenue: numCol('bm.lifetime_revenue'),
  avgMargin30d: numCol('bm.avg_margin_30d'),
  // Growth metrics
  salesGrowth30dVsPrev: numCol('bm.sales_growth_30d_vs_prev'),
  revenueGrowth30dVsPrev: numCol('bm.revenue_growth_30d_vs_prev'),
  // 'name' is an alias for brandName used by some callers.
  name: strCol('bm.brand_name'),
  // 'status' targets the derived brand_status column (no table prefix:
  // it only exists in the wrapping subquery built by the list route).
  status: strCol('brand_status'),
};

/**
 * Resolve a query-string key to its whitelisted { dbCol, type } entry.
 * @param {string} queryParamKey - key as received from the client
 * @returns {{dbCol: string, type: string}|null} null for unknown keys
 */
function getSafeColumnInfo(queryParamKey) {
  return COLUMN_MAP[queryParamKey] ?? null;
}
|
||||
|
||||
// --- Route Handlers ---
|
||||
|
||||
// GET /brands-aggregate/filter-options
// Returns the option lists the UI needs to build brand filters:
// distinct brand names plus the distinct derived status values.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /brands-aggregate/filter-options');
  try {
    // Distinct brand names straight from the aggregate table.
    const brandResult = await pool.query(`
      SELECT DISTINCT brand_name FROM public.brand_metrics ORDER BY brand_name
    `);

    // Status is derived, not stored, so compute the distinct set with the
    // same CASE logic the list route uses.
    const statusResult = await pool.query(`
      SELECT DISTINCT
        CASE
          WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
          WHEN active_product_count > 0 THEN 'inactive'
          ELSE 'pending'
        END as status
      FROM public.brand_metrics
      ORDER BY status
    `);

    res.json({
      brands: brandResult.rows.map(({ brand_name }) => brand_name),
      statuses: statusResult.rows.map(({ status }) => status)
    });
  } catch(error) {
    console.error('Error fetching brand filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
|
||||
|
||||
// GET /brands-aggregate/stats (Overall brand stats)
// Returns headline totals across all rows of brand_metrics.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /brands-aggregate/stats');
  try {
    const { rows: [stats] } = await pool.query(`
      SELECT
        COUNT(*) AS total_brands,
        COUNT(CASE WHEN active_product_count > 0 THEN 1 END) AS active_brands,
        SUM(active_product_count) AS total_active_products,
        SUM(current_stock_cost) AS total_stock_value,
        -- Weighted Average Margin
        SUM(profit_30d) * 100.0 / NULLIF(SUM(revenue_30d), 0) AS overall_avg_margin_weighted
      FROM public.brand_metrics bm
    `);

    // pg returns COUNT/SUM aggregates as strings; parse explicitly.
    // FIX: parseInt now always gets a radix, and `??` replaces `||` so only
    // NULL aggregates (not other falsy values) fall back to zero.
    res.json({
      totalBrands: Number.parseInt(stats?.total_brands ?? '0', 10),
      activeBrands: Number.parseInt(stats?.active_brands ?? '0', 10),
      totalActiveProducts: Number.parseInt(stats?.total_active_products ?? '0', 10),
      totalValue: Number.parseFloat(stats?.total_stock_value ?? '0'),
      avgMargin: Number.parseFloat(stats?.overall_avg_margin_weighted ?? '0'),
    });
  } catch (error) {
    console.error('Error fetching brand stats:', error);
    res.status(500).json({ error: 'Failed to fetch brand stats.' });
  }
});
|
||||
|
||||
// GET /brands-aggregate/ (List brands)
//
// Query params:
//   page, limit          - pagination (limit capped at MAX_PAGE_LIMIT)
//   sort, order          - COLUMN_MAP key and 'asc'/'desc'
//   <field>[_op]=value   - filters; op is one of eq|ne|gt|gte|lt|lte|like|ilike|between|in
// Responds with { brands, pagination }.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /brands-aggregate received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10) || 1;
    let limit = parseInt(req.query.limit, 10) || DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT);
    const offset = (page - 1) * limit;

    // --- Sorting (column comes from the whitelist, never raw user input) ---
    const sortQueryKey = req.query.sort || 'brandName'; // Default sort
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
    const sortColumn = sortColumnInfo ? sortColumnInfo.dbCol : 'bm.brand_name';
    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
    const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
    const sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;

    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;
    // Build conditions based on req.query, using COLUMN_MAP and parseValue
    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order'].includes(key)) continue;

      let filterKey = key;
      let operator = '='; // Default operator
      const value = req.query[key];

      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1];
        operator = operatorMatch[2];
      }

      const columnInfo = getSafeColumnInfo(filterKey);
      if (columnInfo) {
        const dbColumn = columnInfo.dbCol;
        const valueType = columnInfo.type;
        // BUGFIX: these were declared inside the try block, so the catch
        // below hit a ReferenceError on `needsParam` whenever a filter value
        // failed to parse (turning a skippable filter into a 500). Snapshots
        // let the catch roll back partial placeholder/param state exactly.
        let conditionFragment = '';
        let needsParam = true;
        const savedParamCounter = paramCounter;
        const savedParamsLength = params.length;
        try {
          switch (operator.toLowerCase()) { // Normalize operator
            case 'eq': operator = '='; break;
            case 'ne': operator = '<>'; break;
            case 'gt': operator = '>'; break;
            case 'gte': operator = '>='; break;
            case 'lt': operator = '<'; break;
            case 'lte': operator = '<='; break;
            case 'like': operator = 'LIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'ilike': operator = 'ILIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'between': {
              const [val1, val2] = String(value).split(',');
              if (val1 !== undefined && val2 !== undefined) {
                conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
                params.push(parseValue(val1, valueType), parseValue(val2, valueType));
                needsParam = false;
              } else continue;
              break;
            }
            case 'in': {
              const inValues = String(value).split(',');
              if (inValues.length > 0) {
                const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
                conditionFragment = `${dbColumn} IN (${placeholders})`;
                params.push(...inValues.map(v => parseValue(v, valueType)));
                needsParam = false;
              } else continue;
              break;
            }
            default: operator = '='; break;
          }

          if (needsParam) {
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
            params.push(parseValue(value, valueType));
          } else if (!conditionFragment) { // For LIKE/ILIKE (param already pushed above)
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
          }

          if (conditionFragment) {
            conditions.push(`(${conditionFragment})`);
          }
        } catch (parseError) {
          console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
          // BUGFIX: restore both counter and params; the old `paramCounter--`
          // referenced an out-of-scope variable and under-corrected for
          // 'between' (two placeholders, one decrement).
          paramCounter = savedParamCounter;
          params.length = savedParamsLength;
        }
      } else {
        console.warn(`Invalid filter key ignored: ${key}`);
      }
    }

    // --- Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Status calculation similar to vendors
    const statusCase = `
      CASE
        WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
        WHEN active_product_count > 0 THEN 'inactive'
        ELSE 'pending'
      END as brand_status
    `;

    const baseSql = `
      FROM (
        SELECT
          bm.*,
          ${statusCase}
        FROM public.brand_metrics bm
      ) bm
      ${whereClause}
    `;

    const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
    const dataSql = `
      WITH brand_data AS (
        SELECT
          bm.*,
          ${statusCase}
        FROM public.brand_metrics bm
      )
      SELECT bm.*
      FROM brand_data bm
      ${whereClause}
      ${sortClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];

    console.log("Count SQL:", countSql, params);
    console.log("Data SQL:", dataSql, dataParams);

    const [countResult, dataResult] = await Promise.all([
      pool.query(countSql, params),
      pool.query(dataSql, dataParams)
    ]);

    const total = parseInt(countResult.rows[0].total, 10);
    // Add camelCase aliases alongside the snake_case columns so the frontend
    // can use either spelling.
    const brands = dataResult.rows.map(row => {
      const transformedRow = { ...row }; // Start with original data

      for (const key in row) {
        // Skip null/undefined values; the copy already carries them.
        if (row[key] === null || row[key] === undefined) {
          continue;
        }

        // First handle cases like sales_7d -> sales7d
        let camelKey = key.replace(/_(\d+[a-z])/g, '$1');

        // Then handle regular snake_case -> camelCase
        camelKey = camelKey.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
        if (camelKey !== key) { // Only add if different from original
          transformedRow[camelKey] = row[key];
        }
      }
      return transformedRow;
    });

    // --- Respond ---
    res.json({
      brands,
      pagination: { total, pages: Math.ceil(total / limit), currentPage: page, limit },
    });

  } catch (error) {
    console.error('Error fetching brand metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch brand metrics.' });
  }
});

// GET /brands-aggregate/:name (Get single brand metric)
// Implement if needed, remember to URL-decode the name parameter

module.exports = router;
|
||||
363
inventory-server/src/routes/categoriesAggregate.js
Normal file
363
inventory-server/src/routes/categoriesAggregate.js
Normal file
@@ -0,0 +1,363 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
|
||||
|
||||
// --- Configuration & Helpers ---
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 5000; // High cap so all categories fit in one request

// Tiny factories keep the whitelist below compact and uniform.
const intCol = (dbCol) => ({ dbCol, type: 'integer' });
const numCol = (dbCol) => ({ dbCol, type: 'number' });
const strCol = (dbCol) => ({ dbCol, type: 'string' });

// Whitelist mapping API query keys to columns on category_metrics (cm),
// the parent categories row (p), or the source categories row (c).
const COLUMN_MAP = {
  categoryId: intCol('cm.category_id'),
  categoryName: strCol('cm.category_name'), // From aggregate table
  categoryType: intCol('cm.category_type'), // From aggregate table
  parentId: intCol('cm.parent_id'), // From aggregate table
  parentName: strCol('p.name'), // Requires JOIN to categories
  productCount: numCol('cm.product_count'),
  activeProductCount: numCol('cm.active_product_count'),
  replenishableProductCount: numCol('cm.replenishable_product_count'),
  currentStockUnits: numCol('cm.current_stock_units'),
  currentStockCost: numCol('cm.current_stock_cost'),
  currentStockRetail: numCol('cm.current_stock_retail'),
  sales7d: numCol('cm.sales_7d'),
  revenue7d: numCol('cm.revenue_7d'),
  sales30d: numCol('cm.sales_30d'),
  revenue30d: numCol('cm.revenue_30d'),
  profit30d: numCol('cm.profit_30d'),
  cogs30d: numCol('cm.cogs_30d'),
  sales365d: numCol('cm.sales_365d'),
  revenue365d: numCol('cm.revenue_365d'),
  lifetimeSales: numCol('cm.lifetime_sales'),
  lifetimeRevenue: numCol('cm.lifetime_revenue'),
  avgMargin30d: numCol('cm.avg_margin_30d'),
  stockTurn30d: numCol('cm.stock_turn_30d'),
  // Growth metrics
  salesGrowth30dVsPrev: numCol('cm.sales_growth_30d_vs_prev'),
  revenueGrowth30dVsPrev: numCol('cm.revenue_growth_30d_vs_prev'),
  // Status lives on the source categories table, not the aggregate.
  status: strCol('c.status'),
};

/**
 * Resolve a query-string key to its whitelisted { dbCol, type } entry.
 * @param {string} queryParamKey - key as received from the client
 * @returns {{dbCol: string, type: string}|null} null for unknown keys
 */
function getSafeColumnInfo(queryParamKey) {
  return COLUMN_MAP[queryParamKey] ?? null;
}

// Human-readable labels for the numeric category_type codes.
// (Consider moving to a shared config or fetching from the DB.)
const TYPE_LABELS = {
  10: 'Section', 11: 'Category', 12: 'Subcategory', 13: 'Sub-subcategory',
  1: 'Company', 2: 'Line', 3: 'Subline', 40: 'Artist', // From old schema comments
  20: 'Theme', 21: 'Subtheme' // Additional types from categories.js
};
|
||||
|
||||
// --- Route Handlers ---
|
||||
|
||||
// GET /categories-aggregate/filter-options
// Returns the option lists the UI needs to build category filters:
// labelled type options, distinct statuses, and per-type counts.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /categories-aggregate/filter-options');
  try {
    // Distinct category types present in the aggregate table.
    const typeResult = await pool.query(`
      SELECT DISTINCT category_type 
      FROM public.category_metrics 
      ORDER BY category_type
    `);

    const typeOptions = typeResult.rows.map(({ category_type }) => ({
      value: category_type,
      label: TYPE_LABELS[category_type] || `Type ${category_type}` // Add labels
    }));

    // Status values come from the source categories table.
    const statusResult = await pool.query(`
      SELECT DISTINCT status FROM public.categories ORDER BY status
    `);

    // How many categories exist per raw type.
    const countResult = await pool.query(`
      SELECT 
        type,
        COUNT(*)::integer as count
      FROM categories
      GROUP BY type
      ORDER BY type
    `);

    res.json({
      types: typeOptions,
      statuses: statusResult.rows.map(({ status }) => status),
      typeCounts: countResult.rows.map(({ type, count }) => ({ type, count }))
    });
  } catch (error) {
    console.error('Error fetching category filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
|
||||
|
||||
// GET /categories-aggregate/stats
// Returns headline totals computed from category_metrics, joined to
// categories so "active" reflects the source table's status column.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /categories-aggregate/stats');
  try {
    // Calculate stats directly from the aggregate table
    const { rows: [stats] } = await pool.query(`
      SELECT
        COUNT(*) AS total_categories,
        -- Count active based on the source categories table status
        COUNT(CASE WHEN c.status = 'active' THEN cm.category_id END) AS active_categories,
        SUM(cm.active_product_count) AS total_active_products, -- Sum from aggregates
        SUM(cm.current_stock_cost) AS total_stock_value, -- Sum from aggregates
        -- Weighted Average Margin (Revenue as weight)
        SUM(cm.profit_30d) * 100.0 / NULLIF(SUM(cm.revenue_30d), 0) AS overall_avg_margin_weighted,
        -- Simple Average Margin (less accurate if categories vary greatly in size)
        AVG(NULLIF(cm.avg_margin_30d, 0)) AS overall_avg_margin_simple
      FROM public.category_metrics cm
      JOIN public.categories c ON cm.category_id = c.cat_id -- Join to check category status
    `);

    // pg returns aggregates as strings; parse explicitly.
    // FIX: parseInt now always gets a radix, and `??` replaces `||` so only
    // NULL aggregates (not other falsy values) fall through.
    res.json({
      totalCategories: Number.parseInt(stats?.total_categories ?? '0', 10),
      activeCategories: Number.parseInt(stats?.active_categories ?? '0', 10), // Based on categories.status
      totalActiveProducts: Number.parseInt(stats?.total_active_products ?? '0', 10),
      totalValue: Number.parseFloat(stats?.total_stock_value ?? '0'),
      // Prefer the revenue-weighted margin; fall back to the simple average.
      avgMargin: Number.parseFloat(stats?.overall_avg_margin_weighted ?? stats?.overall_avg_margin_simple ?? '0')
    });
  } catch (error) {
    console.error('Error fetching category stats:', error);
    res.status(500).json({ error: 'Failed to fetch category stats.' });
  }
});
|
||||
|
||||
// GET /categories-aggregate/ (List categories)
//
// Query params mirror the brands-aggregate route:
//   page, limit          - pagination (limit capped at MAX_PAGE_LIMIT)
//   sort, order          - COLUMN_MAP key and 'asc'/'desc'; default is a
//                          hierarchical type-then-name order
//   <field>[_op]=value   - filters; op is one of eq|ne|gt|gte|lt|lte|like|ilike|between|in
// Responds with { categories, pagination }.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /categories-aggregate received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10) || 1;
    let limit = parseInt(req.query.limit, 10) || DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT);
    const offset = (page - 1) * limit;

    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'categoryName';
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);

    // Hierarchical sorting logic from categories.js
    const hierarchicalSortOrder = `
      ORDER BY 
        CASE 
          WHEN cm.category_type = 10 THEN 1 -- sections first
          WHEN cm.category_type = 11 THEN 2 -- categories second
          WHEN cm.category_type = 12 THEN 3 -- subcategories third
          WHEN cm.category_type = 13 THEN 4 -- subsubcategories fourth
          WHEN cm.category_type = 20 THEN 5 -- themes fifth
          WHEN cm.category_type = 21 THEN 6 -- subthemes last
          ELSE 7
        END,
        cm.category_name ASC
    `;

    // Use hierarchical sort as default
    let sortClause = hierarchicalSortOrder;

    // Override with custom sort if specified
    if (sortColumnInfo && sortQueryKey !== 'categoryName') {
      const sortColumn = sortColumnInfo.dbCol;
      const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
      const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
      sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;
    }

    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;

    console.log("Starting to process filters from query:", req.query);

    // Add filters based on req.query using COLUMN_MAP and parseValue
    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order'].includes(key)) continue;

      let filterKey = key;
      let operator = '='; // Default operator
      const value = req.query[key];

      console.log(`Processing filter key: "${key}" with value: "${value}"`);

      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1];
        operator = operatorMatch[2];
        console.log(`Parsed filter key: "${filterKey}" with operator: "${operator}"`);
      }

      // parentName filters (p.name) work because the parent join below is
      // unconditional.
      const columnInfo = getSafeColumnInfo(filterKey);

      if (columnInfo) {
        console.log(`Column info for "${filterKey}":`, columnInfo);
        const dbColumn = columnInfo.dbCol;
        const valueType = columnInfo.type;
        // BUGFIX: these were declared inside the try block, so the catch
        // below hit a ReferenceError on `needsParam` whenever a filter value
        // failed to parse. Snapshots let the catch roll back exactly —
        // here parseValue throws BEFORE the counter increments, so the old
        // unconditional `paramCounter--` corrupted later placeholders.
        let conditionFragment = '';
        let needsParam = true;
        const savedParamCounter = paramCounter;
        const savedParamsLength = params.length;
        try {
          switch (operator.toLowerCase()) {
            case 'eq': operator = '='; break;
            case 'ne': operator = '<>'; break;
            case 'gt': operator = '>'; break;
            case 'gte': operator = '>='; break;
            case 'lt': operator = '<'; break;
            case 'lte': operator = '<='; break;
            case 'like': operator = 'LIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'ilike': operator = 'ILIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'between': {
              const [val1, val2] = String(value).split(',');
              if (val1 !== undefined && val2 !== undefined) {
                conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
                params.push(parseValue(val1, valueType), parseValue(val2, valueType));
                needsParam = false;
              } else continue;
              break;
            }
            case 'in': {
              const inValues = String(value).split(',');
              if (inValues.length > 0) {
                const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
                conditionFragment = `${dbColumn} IN (${placeholders})`;
                params.push(...inValues.map(v => parseValue(v, valueType)));
                needsParam = false;
              } else continue;
              break;
            }
            default: operator = '='; break;
          }

          if (needsParam) {
            try {
              // Special handling for categoryType to ensure it works
              if (filterKey === 'categoryType') {
                console.log(`Special handling for categoryType: ${value}`);
                // Force conversion to integer
                const numericValue = parseInt(value, 10);
                if (!isNaN(numericValue)) {
                  console.log(`Successfully converted categoryType to integer: ${numericValue}`);
                  conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
                  params.push(numericValue);
                } else {
                  console.error(`Failed to convert categoryType to integer: "${value}"`);
                  throw new Error(`Invalid categoryType value: "${value}"`);
                }
              } else {
                // Normal handling for other fields
                const parsedValue = parseValue(value, valueType);
                console.log(`Parsed "${value}" as ${valueType}: ${parsedValue}`);
                conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
                params.push(parsedValue);
              }
            } catch (innerError) {
              console.error(`Failed to parse "${value}" as ${valueType}:`, innerError);
              throw innerError;
            }
          } else if (!conditionFragment) { // For LIKE/ILIKE where needsParam is false
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`; // param already pushed in the switch
          }

          if (conditionFragment) {
            console.log(`Adding condition: ${conditionFragment}`);
            conditions.push(`(${conditionFragment})`);
          }
        } catch (parseError) {
          console.error(`Skipping filter for key "${key}" due to parsing error:`, parseError);
          // BUGFIX: restore both counter and pushed params for the failed filter.
          paramCounter = savedParamCounter;
          params.length = savedParamsLength;
        }
      } else {
        console.warn(`Invalid filter key ignored: "${key}", not found in COLUMN_MAP`);
      }
    }

    // --- Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Always include the category and parent joins for status and parent_name
    const joinSql = `
      JOIN public.categories c ON cm.category_id = c.cat_id
      LEFT JOIN public.categories p ON cm.parent_id = p.cat_id
    `;

    const baseSql = `
      FROM public.category_metrics cm
      ${joinSql}
      ${whereClause}
    `;

    const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
    const dataSql = `
      SELECT
        cm.*,
        c.status,
        c.description,
        p.name as parent_name,
        p.type as parent_type
      ${baseSql}
      ${sortClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];

    console.log("Count SQL:", countSql, params);
    console.log("Data SQL:", dataSql, dataParams);

    const [countResult, dataResult] = await Promise.all([
      pool.query(countSql, params),
      pool.query(dataSql, dataParams)
    ]);

    const total = parseInt(countResult.rows[0].total, 10);
    // Add camelCase aliases alongside the snake_case columns so the frontend
    // can use either spelling.
    const categories = dataResult.rows.map(row => {
      const transformedRow = { ...row }; // Start with original data

      for (const key in row) {
        // Skip null/undefined values; the copy already carries them.
        if (row[key] === null || row[key] === undefined) {
          continue;
        }

        // First handle cases like sales_7d -> sales7d
        let camelKey = key.replace(/_(\d+[a-z])/g, '$1');

        // Then handle regular snake_case -> camelCase
        camelKey = camelKey.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
        if (camelKey !== key) { // Only add if different from original
          transformedRow[camelKey] = row[key];
        }
      }
      return transformedRow;
    });

    // --- Respond ---
    res.json({
      categories,
      pagination: { total, pages: Math.ceil(total / limit), currentPage: page, limit },
    });

  } catch (error) {
    console.error('Error fetching category metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch category metrics.' });
  }
});

module.exports = router;
|
||||
325
inventory-server/src/routes/config.js
Normal file
325
inventory-server/src/routes/config.js
Normal file
@@ -0,0 +1,325 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// Debug middleware
|
||||
// Log every request that reaches the config router (debugging aid).
router.use((req, res, next) => {
  const { method, path } = req;
  console.log(`[Config Route] ${method} ${path}`);
  next();
});
|
||||
|
||||
// ===== GLOBAL SETTINGS =====
|
||||
|
||||
// Get all global settings
|
||||
// GET /global — return every global setting row, ordered by key.
router.get('/global', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Fetching global settings...');

    const result = await pool.query('SELECT * FROM settings_global ORDER BY setting_key');
    const rows = result.rows;

    console.log('[Config Route] Sending global settings:', rows);
    res.json(rows);
  } catch (error) {
    console.error('[Config Route] Error fetching global settings:', error);
    res.status(500).json({ error: 'Failed to fetch global settings', details: error.message });
  }
});
|
||||
|
||||
// Update global settings
|
||||
// PUT /global — bulk-update global settings inside one transaction.
// Body: an array of { setting_key, setting_value } objects. Either every
// update commits or none do (BEGIN/COMMIT, ROLLBACK on any failure).
router.put('/global', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Updating global settings:', req.body);

    // Validate request shape before touching the database.
    if (!Array.isArray(req.body)) {
      return res.status(400).json({ error: 'Request body must be an array of settings' });
    }

    // Begin transaction
    const client = await pool.connect();
    try {
      await client.query('BEGIN');

      for (const setting of req.body) {
        // Null check (not truthiness) so legitimate falsy values such as
        // 0, false or '' are accepted as setting values.
        if (!setting.setting_key || setting.setting_value == null) {
          throw new Error('Each setting must have a key and value');
        }

        await client.query(
          `UPDATE settings_global
           SET setting_value = $1,
               updated_at = CURRENT_TIMESTAMP
           WHERE setting_key = $2`,
          [setting.setting_value, setting.setting_key]
        );
      }

      await client.query('COMMIT');
      res.json({ success: true });
    } catch (error) {
      await client.query('ROLLBACK');
      throw error;
    } finally {
      // Always return the client to the pool, commit or rollback.
      client.release();
    }
  } catch (error) {
    console.error('[Config Route] Error updating global settings:', error);
    res.status(500).json({ error: 'Failed to update global settings', details: error.message });
  }
});
|
||||
|
||||
// ===== PRODUCT SETTINGS =====
|
||||
|
||||
// Get product settings with pagination and search
|
||||
// GET /products — paginated, optionally-searched list of per-product
// settings joined with the product title.
// Query params: page (default 1), pageSize (default 10), search.
router.get('/products', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Fetching product settings...');

    // Parse pagination with an explicit radix and clamp to >= 1 so
    // page=0, negatives, or garbage cannot produce a negative OFFSET.
    const page = Math.max(1, Number.parseInt(req.query.page, 10) || 1);
    const pageSize = Math.max(1, Number.parseInt(req.query.pageSize, 10) || 10);
    const offset = (page - 1) * pageSize;
    const search = req.query.search || '';

    // Get total count for pagination
    const countQuery = search
      ? `SELECT COUNT(*) FROM settings_product sp
         JOIN products p ON sp.pid::text = p.pid::text
         WHERE sp.pid::text ILIKE $1 OR p.title ILIKE $1`
      : 'SELECT COUNT(*) FROM settings_product';

    const countParams = search ? [`%${search}%`] : [];
    const { rows: countResult } = await pool.query(countQuery, countParams);
    const total = Number.parseInt(countResult[0].count, 10);

    // Get paginated settings (search matches pid or product title)
    const query = search
      ? `SELECT sp.*, p.title as product_name
         FROM settings_product sp
         JOIN products p ON sp.pid::text = p.pid::text
         WHERE sp.pid::text ILIKE $1 OR p.title ILIKE $1
         ORDER BY sp.pid
         LIMIT $2 OFFSET $3`
      : `SELECT sp.*, p.title as product_name
         FROM settings_product sp
         JOIN products p ON sp.pid::text = p.pid::text
         ORDER BY sp.pid
         LIMIT $1 OFFSET $2`;

    const queryParams = search
      ? [`%${search}%`, pageSize, offset]
      : [pageSize, offset];

    const { rows } = await pool.query(query, queryParams);

    const response = {
      items: rows,
      total,
      page,
      pageSize
    };

    console.log(`[Config Route] Sending ${rows.length} product settings`);
    res.json(response);
  } catch (error) {
    console.error('[Config Route] Error fetching product settings:', error);
    res.status(500).json({ error: 'Failed to fetch product settings', details: error.message });
  }
});
|
||||
|
||||
// Update product settings
|
||||
// PUT /products/:pid — create or update the settings row for one product.
router.put('/products/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { pid } = req.params;
    const { lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast } = req.body;

    console.log(`[Config Route] Updating product settings for ${pid}:`, req.body);

    // Shared parameter list for both the INSERT and UPDATE branches.
    const values = [pid, lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast];

    // NOTE(review): check-then-write is not atomic; presumably fine under
    // low write concurrency — confirm whether an ON CONFLICT upsert fits.
    const { rows: checkProduct } = await pool.query(
      'SELECT 1 FROM settings_product WHERE pid::text = $1',
      [pid]
    );

    if (checkProduct.length > 0) {
      // Row already present: update it in place.
      await pool.query(
        `UPDATE settings_product
         SET lead_time_days = $2,
             days_of_stock = $3,
             safety_stock = $4,
             forecast_method = $5,
             exclude_from_forecast = $6,
             updated_at = CURRENT_TIMESTAMP
         WHERE pid::text = $1`,
        values
      );
    } else {
      // No row yet: create one.
      await pool.query(
        `INSERT INTO settings_product
         (pid, lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast)
         VALUES ($1, $2, $3, $4, $5, $6)`,
        values
      );
    }

    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error updating product settings for ${req.params.pid}:`, error);
    res.status(500).json({ error: 'Failed to update product settings', details: error.message });
  }
});
|
||||
|
||||
// Reset product settings to defaults
|
||||
// POST /products/:pid/reset — restore a product's settings to defaults:
// no overrides (NULL), zero safety stock, included in forecasting.
router.post('/products/:pid/reset', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const pid = req.params.pid;
    console.log(`[Config Route] Resetting product settings for ${pid}`);

    await pool.query(
      `UPDATE settings_product
       SET lead_time_days = NULL,
           days_of_stock = NULL,
           safety_stock = 0,
           forecast_method = NULL,
           exclude_from_forecast = false,
           updated_at = CURRENT_TIMESTAMP
       WHERE pid::text = $1`,
      [pid]
    );

    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error resetting product settings for ${req.params.pid}:`, error);
    res.status(500).json({ error: 'Failed to reset product settings', details: error.message });
  }
});
|
||||
|
||||
// ===== VENDOR SETTINGS =====
|
||||
|
||||
// Get vendor settings with pagination and search
|
||||
// GET /vendors — paginated, optionally-searched list of vendor settings.
// Query params: page (default 1), pageSize (default 10), search.
router.get('/vendors', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Fetching vendor settings...');

    // Explicit radix plus a lower bound of 1 prevent a negative OFFSET
    // when the client sends page=0, negatives, or non-numeric input.
    const page = Math.max(1, Number.parseInt(req.query.page, 10) || 1);
    const pageSize = Math.max(1, Number.parseInt(req.query.pageSize, 10) || 10);
    const offset = (page - 1) * pageSize;
    const search = req.query.search || '';

    // Get total count for pagination
    const countQuery = search
      ? 'SELECT COUNT(*) FROM settings_vendor WHERE vendor ILIKE $1'
      : 'SELECT COUNT(*) FROM settings_vendor';

    const countParams = search ? [`%${search}%`] : [];
    const { rows: countResult } = await pool.query(countQuery, countParams);
    const total = Number.parseInt(countResult[0].count, 10);

    // Get paginated settings
    const query = search
      ? `SELECT * FROM settings_vendor
         WHERE vendor ILIKE $1
         ORDER BY vendor
         LIMIT $2 OFFSET $3`
      : `SELECT * FROM settings_vendor
         ORDER BY vendor
         LIMIT $1 OFFSET $2`;

    const queryParams = search
      ? [`%${search}%`, pageSize, offset]
      : [pageSize, offset];

    const { rows } = await pool.query(query, queryParams);

    const response = {
      items: rows,
      total,
      page,
      pageSize
    };

    console.log(`[Config Route] Sending ${rows.length} vendor settings`);
    res.json(response);
  } catch (error) {
    console.error('[Config Route] Error fetching vendor settings:', error);
    res.status(500).json({ error: 'Failed to fetch vendor settings', details: error.message });
  }
});
|
||||
|
||||
// Update vendor settings
|
||||
// PUT /vendors/:vendor — create or update the default settings for a vendor.
router.put('/vendors/:vendor', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const vendor = req.params.vendor;
    const { default_lead_time_days, default_days_of_stock } = req.body;

    console.log(`[Config Route] Updating vendor settings for ${vendor}:`, req.body);

    // Shared parameter list for both branches below.
    const values = [vendor, default_lead_time_days, default_days_of_stock];

    // Check if vendor exists
    const { rows: checkVendor } = await pool.query(
      'SELECT 1 FROM settings_vendor WHERE vendor = $1',
      [vendor]
    );

    if (checkVendor.length > 0) {
      // Row already present: update it in place.
      await pool.query(
        `UPDATE settings_vendor
         SET default_lead_time_days = $2,
             default_days_of_stock = $3,
             updated_at = CURRENT_TIMESTAMP
         WHERE vendor = $1`,
        values
      );
    } else {
      // No row yet: create one.
      await pool.query(
        `INSERT INTO settings_vendor
         (vendor, default_lead_time_days, default_days_of_stock)
         VALUES ($1, $2, $3)`,
        values
      );
    }

    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error updating vendor settings for ${req.params.vendor}:`, error);
    res.status(500).json({ error: 'Failed to update vendor settings', details: error.message });
  }
});
|
||||
|
||||
// Reset vendor settings to defaults
|
||||
// POST /vendors/:vendor/reset — clear a vendor's overrides back to NULL.
router.post('/vendors/:vendor/reset', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const vendor = req.params.vendor;
    console.log(`[Config Route] Resetting vendor settings for ${vendor}`);

    await pool.query(
      `UPDATE settings_vendor
       SET default_lead_time_days = NULL,
           default_days_of_stock = NULL,
           updated_at = CURRENT_TIMESTAMP
       WHERE vendor = $1`,
      [vendor]
    );

    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error resetting vendor settings for ${req.params.vendor}:`, error);
    res.status(500).json({ error: 'Failed to reset vendor settings', details: error.message });
  }
});
|
||||
|
||||
// Export the router
|
||||
module.exports = router;
|
||||
File diff suppressed because it is too large
Load Diff
440
inventory-server/src/routes/data-management.js
Normal file
440
inventory-server/src/routes/data-management.js
Normal file
@@ -0,0 +1,440 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { spawn } = require('child_process');
|
||||
const path = require('path');
|
||||
const db = require('../utils/db');
|
||||
|
||||
// Debug middleware MUST be first
|
||||
// Trace every request hitting this router before any other handling.
router.use((req, res, next) => {
  const { method, path } = req;
  console.log(`[CSV Route Debug] ${method} ${path}`);
  next();
});
|
||||
|
||||
// Store active processes and their progress
let activeImport = null;       // presumably the CSV-import child process — not referenced in the handlers below; confirm against the import routes
let importProgress = null;     // presumably the last import progress payload — likewise not referenced below
let activeFullUpdate = null;   // spawned full-update child process while one is running, else null
let activeFullReset = null;    // spawned full-reset child process while one is running, else null

// SSE clients for progress updates
// Each Set holds open HTTP response objects subscribed to one operation's
// progress stream; sendProgressToClients() writes frames to a set and
// prunes any client whose write throws. Only the full-update/full-reset
// sets are used by the handlers visible in this file.
const updateClients = new Set();
const importClients = new Set();
const resetClients = new Set();
const resetMetricsClients = new Set();
const calculateMetricsClients = new Set();
const fullUpdateClients = new Set();
const fullResetClients = new Set();
|
||||
|
||||
// Helper to send progress to specific clients
|
||||
// Write one SSE `data:` frame to every client in the set.
// Strings are sent verbatim; anything else is JSON-serialised first.
// Clients whose write throws are dropped from the set.
function sendProgressToClients(clients, data) {
  const payload = typeof data === 'string' ? data : JSON.stringify(data);
  const message = `data: ${payload}\n\n`;

  for (const client of clients) {
    try {
      client.write(message);
      // Push the frame out immediately when the response supports flushing.
      if (typeof client.flush === 'function') {
        client.flush();
      }
    } catch (error) {
      // Dead connection — remove it quietly.
      clients.delete(client);
    }
  }
}
|
||||
|
||||
// Helper to run a script and stream progress
|
||||
// Spawn a node script, stream its stdout/stderr lines to the given SSE
// clients, and settle when the script reports a final JSON status line.
// `type` ('update' or 'reset') selects which module-level slot tracks the
// child so /status and /cancel can see it.
// Fixes: the original declared a local `activeProcess` that was assigned
// in a switch but never read (dead code), and duplicated the update/reset
// switch in five places; both are consolidated into a single helper.
function runScript(scriptPath, type, clients) {
  return new Promise((resolve, reject) => {
    // Only one process per type may run at a time: kill any leftover one.
    // Best-effort — the old process may already have exited.
    if (type === 'update' && activeFullUpdate) {
      try { activeFullUpdate.kill(); } catch (e) { }
    } else if (type === 'reset' && activeFullReset) {
      try { activeFullReset.kill(); } catch (e) { }
    }

    const child = spawn('node', [scriptPath], {
      stdio: ['inherit', 'pipe', 'pipe']
    });

    // Record/clear the active child process for this operation type.
    const setActive = (proc) => {
      if (type === 'update') {
        activeFullUpdate = proc;
      } else if (type === 'reset') {
        activeFullReset = proc;
      }
    };
    setActive(child);

    let output = '';

    child.stdout.on('data', (data) => {
      const text = data.toString();
      output += text;

      // Scripts emit one JSON object per line; forward every line to the
      // SSE clients and settle the promise on a final status message.
      const lines = text.split('\n');
      lines.filter(line => line.trim()).forEach(line => {
        try {
          const jsonData = JSON.parse(line);
          if (jsonData.status === 'complete' || jsonData.status === 'error' || jsonData.status === 'cancelled') {
            if (jsonData.status === 'complete' && !jsonData.operation?.includes('complete')) {
              // Intermediate completion message — keep streaming.
              sendProgressToClients(clients, line);
              return;
            }
            // Final completion/error/cancellation: clear the slot and settle.
            setActive(null);
            if (jsonData.status === 'error') {
              reject(new Error(jsonData.error || 'Unknown error'));
            } else {
              resolve({ output });
            }
          }
        } catch (e) {
          // Not JSON — plain progress text; fall through and forward it.
        }
        // Always send the raw line
        sendProgressToClients(clients, line);
      });
    });

    child.stderr.on('data', (data) => {
      const text = data.toString();
      console.error(text);
      // Send stderr output directly too
      sendProgressToClients(clients, text);
    });

    child.on('close', (code) => {
      setActive(null);

      if (code !== 0) {
        const error = `Script ${scriptPath} exited with code ${code}`;
        sendProgressToClients(clients, error);
        reject(new Error(error));
      }
      // Don't resolve here — the completion message on stdout resolves.
    });

    child.on('error', (err) => {
      setActive(null);
      sendProgressToClients(clients, err.message);
      reject(err);
    });
  });
}
|
||||
|
||||
// Progress endpoints
|
||||
// GET /:type/progress — SSE stream of progress events for a full update
// or full reset. Rejects any type other than 'update' / 'reset'.
router.get('/:type/progress', (req, res) => {
  const { type } = req.params;
  if (!['update', 'reset'].includes(type)) {
    return res.status(400).json({ error: 'Invalid operation type' });
  }

  // Standard SSE headers; echo the origin so credentialed CORS works.
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Credentials': 'true'
  });

  // Subscribe this response to the matching operation's client set.
  const subscribers = type === 'update' ? fullUpdateClients : fullResetClients;
  subscribers.add(res);

  // Tell the client the stream is live.
  sendProgressToClients(new Set([res]), JSON.stringify({
    status: 'running',
    operation: 'Initializing connection...'
  }));

  // Unsubscribe when the client disconnects.
  req.on('close', () => {
    subscribers.delete(res);
  });
});
|
||||
|
||||
// GET /status - Check for active processes
|
||||
// GET /status — report whether a full update or full reset is running.
router.get('/status', (req, res) => {
  try {
    const updating = activeFullUpdate !== null;
    const resetting = activeFullReset !== null;

    // Nothing running: short-circuit with an inactive payload.
    if (!updating && !resetting) {
      return res.json({ active: false, progress: null });
    }

    res.json({
      active: true,
      progress: {
        status: 'running',
        operation: updating ? 'Full update in progress' : 'Full reset in progress',
        type: updating ? 'update' : 'reset'
      }
    });
  } catch (error) {
    console.error('Error checking status:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// Route to cancel active process
|
||||
// POST /cancel?type=update|reset — kill the active process of that type.
// Fix: the original treated any value other than 'update' (including a
// missing type) as 'reset', which could SIGTERM the wrong process; the
// type is now validated exactly like the /:type/progress endpoint.
router.post('/cancel', (req, res) => {
  const { type } = req.query;
  if (!['update', 'reset'].includes(type)) {
    return res.status(400).json({ error: 'Invalid operation type' });
  }

  const clients = type === 'update' ? fullUpdateClients : fullResetClients;
  const activeProcess = type === 'update' ? activeFullUpdate : activeFullReset;

  let killed = false;
  if (activeProcess) {
    try {
      activeProcess.kill('SIGTERM');
      // Clear the slot so /status reports inactive.
      if (type === 'update') {
        activeFullUpdate = null;
      } else {
        activeFullReset = null;
      }
      killed = true;
      sendProgressToClients(clients, JSON.stringify({
        status: 'cancelled',
        operation: 'Operation cancelled'
      }));
    } catch (err) {
      console.error(`Error killing ${type} process:`, err);
    }
  }

  if (killed) {
    res.json({ success: true });
  } else {
    res.status(404).json({ error: 'No active process to cancel' });
  }
});
|
||||
|
||||
// POST /csv/full-update - Run full update script
|
||||
// POST /full-update — start the full update script. Responds 202 right
// away; progress and completion are reported over the SSE channel.
router.post('/full-update', async (req, res) => {
  try {
    const scriptPath = path.join(__dirname, '../../scripts/full-update.js');
    // Fire and forget — failures surface via the progress stream and log.
    runScript(scriptPath, 'update', fullUpdateClients).catch((error) => {
      console.error('Update failed:', error);
    });
    res.status(202).json({ message: 'Update started' });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// POST /csv/full-reset - Run full reset script
|
||||
// POST /full-reset — start the full reset script. Responds 202 right
// away; progress and completion are reported over the SSE channel.
router.post('/full-reset', async (req, res) => {
  try {
    const scriptPath = path.join(__dirname, '../../scripts/full-reset.js');
    // Fire and forget — failures surface via the progress stream and log.
    runScript(scriptPath, 'reset', fullResetClients).catch((error) => {
      console.error('Reset failed:', error);
    });
    res.status(202).json({ message: 'Reset started' });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// GET /history/import - Get recent import history
|
||||
// GET /history/import — last 20 import runs, newest first.
// Some deployments predate the records_deleted / records_skipped /
// total_processed columns, so the SELECT list is assembled from what
// information_schema reports, substituting a zero literal when absent
// so the response shape stays stable.
router.get('/history/import', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Discover which optional columns exist in this database.
    const { rows: columns } = await pool.query(`
      SELECT column_name
      FROM information_schema.columns
      WHERE table_name = 'import_history'
      AND column_name IN ('records_deleted', 'records_skipped', 'total_processed')
    `);
    const present = new Set(columns.map((col) => col.column_name));

    // Real column when present, zero literal otherwise.
    const optional = (name) =>
      present.has(name) ? `${name}::integer,` : `0 as ${name},`;

    const query = `
      SELECT
        id,
        start_time,
        end_time,
        status,
        error_message,
        records_added::integer,
        records_updated::integer,
        ${optional('records_deleted')}
        ${optional('records_skipped')}
        ${optional('total_processed')}
        is_incremental,
        additional_info,
        EXTRACT(EPOCH FROM (COALESCE(end_time, NOW()) - start_time)) / 60 as duration_minutes
      FROM import_history
      ORDER BY start_time DESC
      LIMIT 20
    `;

    const { rows } = await pool.query(query);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching import history:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// GET /history/calculate - Get recent calculation history
|
||||
// GET /history/calculate — last 20 metric-calculation runs, newest first.
router.get('/history/calculate', async (req, res) => {
  const sql = `
    SELECT
      id,
      start_time,
      end_time,
      EXTRACT(EPOCH FROM (COALESCE(end_time, NOW()) - start_time)) / 60 as duration_minutes,
      duration_seconds,
      status,
      error_message,
      total_products,
      total_orders,
      total_purchase_orders,
      processed_products,
      processed_orders,
      processed_purchase_orders,
      additional_info
    FROM calculate_history
    ORDER BY start_time DESC
    LIMIT 20
  `;
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(sql);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching calculate history:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// GET /status/modules - Get module calculation status
|
||||
// GET /status/modules — last calculation timestamp per metrics module.
router.get('/status/modules', async (req, res) => {
  const sql = `
    SELECT
      module_name,
      last_calculation_timestamp::timestamp
    FROM calculate_status
    ORDER BY module_name
  `;
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(sql);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching module status:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// GET /status/tables - Get table sync status
|
||||
// GET /status/tables — last sync timestamp per synced table.
router.get('/status/tables', async (req, res) => {
  const sql = `
    SELECT
      table_name,
      last_sync_timestamp::timestamp
    FROM sync_status
    ORDER BY table_name
  `;
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(sql);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching table status:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// GET /status/table-counts - Get record counts for all tables
|
||||
// GET /status/table-counts — row counts for every tracked table, grouped
// by category. Fix: the original duplicated the table names between the
// flat `tables` array and three hard-coded filter lists, which could
// silently drift apart; both are now derived from one grouping map.
router.get('/status/table-counts', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Single source of truth for table groupings.
    const tableGroups = {
      core: ['products', 'categories', 'product_categories', 'orders', 'purchase_orders', 'receivings'],
      metrics: ['product_metrics', 'daily_product_snapshots', 'brand_metrics', 'category_metrics', 'vendor_metrics'],
      config: ['settings_global', 'settings_vendor', 'settings_product']
    };
    const tables = Object.values(tableGroups).flat();

    // Count each table in parallel. Table names come only from the
    // hard-coded whitelist above, so interpolation into SQL is safe.
    // A failing table yields { count: null, error } instead of rejecting.
    const counts = await Promise.all(
      tables.map(table =>
        pool.query(`SELECT COUNT(*) as count FROM ${table}`)
          .then(result => ({
            table_name: table,
            count: Number.parseInt(result.rows[0].count, 10)
          }))
          .catch(err => ({
            table_name: table,
            count: null,
            error: err.message
          }))
      )
    );

    // Group results using the same map the flat list came from.
    const groupedCounts = Object.fromEntries(
      Object.entries(tableGroups).map(([group, names]) => [
        group,
        counts.filter(c => names.includes(c.table_name))
      ])
    );

    res.json(groupedCounts);
  } catch (error) {
    console.error('Error fetching table counts:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
1152
inventory-server/src/routes/import.js
Normal file
1152
inventory-server/src/routes/import.js
Normal file
File diff suppressed because it is too large
Load Diff
590
inventory-server/src/routes/metrics.js
Normal file
590
inventory-server/src/routes/metrics.js
Normal file
@@ -0,0 +1,590 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { Pool } = require('pg'); // Assuming pg driver
|
||||
|
||||
// --- Configuration & Helpers ---
|
||||
|
||||
const DEFAULT_PAGE_LIMIT = 50;
|
||||
const MAX_PAGE_LIMIT = 200; // Prevent excessive data requests
|
||||
|
||||
// Define direct mapping from frontend column names to database columns
|
||||
// This simplifies the code by eliminating conversion logic
|
||||
const COLUMN_MAP = {
|
||||
// Product Info
|
||||
pid: 'pm.pid',
|
||||
sku: 'pm.sku',
|
||||
title: 'pm.title',
|
||||
brand: 'pm.brand',
|
||||
vendor: 'pm.vendor',
|
||||
imageUrl: 'pm.image_url',
|
||||
isVisible: 'pm.is_visible',
|
||||
isReplenishable: 'pm.is_replenishable',
|
||||
// Additional Product Fields
|
||||
barcode: 'pm.barcode',
|
||||
harmonizedTariffCode: 'pm.harmonized_tariff_code',
|
||||
vendorReference: 'pm.vendor_reference',
|
||||
notionsReference: 'pm.notions_reference',
|
||||
line: 'pm.line',
|
||||
subline: 'pm.subline',
|
||||
artist: 'pm.artist',
|
||||
moq: 'pm.moq',
|
||||
rating: 'pm.rating',
|
||||
reviews: 'pm.reviews',
|
||||
weight: 'pm.weight',
|
||||
length: 'pm.length',
|
||||
width: 'pm.width',
|
||||
height: 'pm.height',
|
||||
countryOfOrigin: 'pm.country_of_origin',
|
||||
location: 'pm.location',
|
||||
baskets: 'pm.baskets',
|
||||
notifies: 'pm.notifies',
|
||||
preorderCount: 'pm.preorder_count',
|
||||
notionsInvCount: 'pm.notions_inv_count',
|
||||
// Current Status
|
||||
currentPrice: 'pm.current_price',
|
||||
currentRegularPrice: 'pm.current_regular_price',
|
||||
currentCostPrice: 'pm.current_cost_price',
|
||||
currentLandingCostPrice: 'pm.current_landing_cost_price',
|
||||
currentStock: 'pm.current_stock',
|
||||
currentStockCost: 'pm.current_stock_cost',
|
||||
currentStockRetail: 'pm.current_stock_retail',
|
||||
currentStockGross: 'pm.current_stock_gross',
|
||||
onOrderQty: 'pm.on_order_qty',
|
||||
onOrderCost: 'pm.on_order_cost',
|
||||
onOrderRetail: 'pm.on_order_retail',
|
||||
earliestExpectedDate: 'pm.earliest_expected_date',
|
||||
// Historical Dates
|
||||
dateCreated: 'pm.date_created',
|
||||
dateFirstReceived: 'pm.date_first_received',
|
||||
dateLastReceived: 'pm.date_last_received',
|
||||
dateFirstSold: 'pm.date_first_sold',
|
||||
dateLastSold: 'pm.date_last_sold',
|
||||
ageDays: 'pm.age_days',
|
||||
// Rolling Period Metrics
|
||||
sales7d: 'pm.sales_7d',
|
||||
revenue7d: 'pm.revenue_7d',
|
||||
sales14d: 'pm.sales_14d',
|
||||
revenue14d: 'pm.revenue_14d',
|
||||
sales30d: 'pm.sales_30d',
|
||||
revenue30d: 'pm.revenue_30d',
|
||||
cogs30d: 'pm.cogs_30d',
|
||||
profit30d: 'pm.profit_30d',
|
||||
returnsUnits30d: 'pm.returns_units_30d',
|
||||
returnsRevenue30d: 'pm.returns_revenue_30d',
|
||||
discounts30d: 'pm.discounts_30d',
|
||||
grossRevenue30d: 'pm.gross_revenue_30d',
|
||||
grossRegularRevenue30d: 'pm.gross_regular_revenue_30d',
|
||||
stockoutDays30d: 'pm.stockout_days_30d',
|
||||
sales365d: 'pm.sales_365d',
|
||||
revenue365d: 'pm.revenue_365d',
|
||||
avgStockUnits30d: 'pm.avg_stock_units_30d',
|
||||
avgStockCost30d: 'pm.avg_stock_cost_30d',
|
||||
avgStockRetail30d: 'pm.avg_stock_retail_30d',
|
||||
avgStockGross30d: 'pm.avg_stock_gross_30d',
|
||||
receivedQty30d: 'pm.received_qty_30d',
|
||||
receivedCost30d: 'pm.received_cost_30d',
|
||||
// Lifetime Metrics
|
||||
lifetimeSales: 'pm.lifetime_sales',
|
||||
lifetimeRevenue: 'pm.lifetime_revenue',
|
||||
// First Period Metrics
|
||||
first7DaysSales: 'pm.first_7_days_sales',
|
||||
first7DaysRevenue: 'pm.first_7_days_revenue',
|
||||
first30DaysSales: 'pm.first_30_days_sales',
|
||||
first30DaysRevenue: 'pm.first_30_days_revenue',
|
||||
first60DaysSales: 'pm.first_60_days_sales',
|
||||
first60DaysRevenue: 'pm.first_60_days_revenue',
|
||||
first90DaysSales: 'pm.first_90_days_sales',
|
||||
first90DaysRevenue: 'pm.first_90_days_revenue',
|
||||
// Calculated KPIs
|
||||
asp30d: 'pm.asp_30d',
|
||||
acp30d: 'pm.acp_30d',
|
||||
avgRos30d: 'pm.avg_ros_30d',
|
||||
avgSalesPerDay30d: 'pm.avg_sales_per_day_30d',
|
||||
avgSalesPerMonth30d: 'pm.avg_sales_per_month_30d',
|
||||
margin30d: 'pm.margin_30d',
|
||||
markup30d: 'pm.markup_30d',
|
||||
gmroi30d: 'pm.gmroi_30d',
|
||||
stockturn30d: 'pm.stockturn_30d',
|
||||
returnRate30d: 'pm.return_rate_30d',
|
||||
discountRate30d: 'pm.discount_rate_30d',
|
||||
stockoutRate30d: 'pm.stockout_rate_30d',
|
||||
markdown30d: 'pm.markdown_30d',
|
||||
markdownRate30d: 'pm.markdown_rate_30d',
|
||||
sellThrough30d: 'pm.sell_through_30d',
|
||||
avgLeadTimeDays: 'pm.avg_lead_time_days',
|
||||
// Forecasting & Replenishment
|
||||
abcClass: 'pm.abc_class',
|
||||
salesVelocityDaily: 'pm.sales_velocity_daily',
|
||||
configLeadTime: 'pm.config_lead_time',
|
||||
configDaysOfStock: 'pm.config_days_of_stock',
|
||||
configSafetyStock: 'pm.config_safety_stock',
|
||||
planningPeriodDays: 'pm.planning_period_days',
|
||||
leadTimeForecastUnits: 'pm.lead_time_forecast_units',
|
||||
daysOfStockForecastUnits: 'pm.days_of_stock_forecast_units',
|
||||
planningPeriodForecastUnits: 'pm.planning_period_forecast_units',
|
||||
leadTimeClosingStock: 'pm.lead_time_closing_stock',
|
||||
daysOfStockClosingStock: 'pm.days_of_stock_closing_stock',
|
||||
replenishmentNeededRaw: 'pm.replenishment_needed_raw',
|
||||
replenishmentUnits: 'pm.replenishment_units',
|
||||
replenishmentCost: 'pm.replenishment_cost',
|
||||
replenishmentRetail: 'pm.replenishment_retail',
|
||||
replenishmentProfit: 'pm.replenishment_profit',
|
||||
toOrderUnits: 'pm.to_order_units',
|
||||
forecastLostSalesUnits: 'pm.forecast_lost_sales_units',
|
||||
forecastLostRevenue: 'pm.forecast_lost_revenue',
|
||||
stockCoverInDays: 'pm.stock_cover_in_days',
|
||||
poCoverInDays: 'pm.po_cover_in_days',
|
||||
sellsOutInDays: 'pm.sells_out_in_days',
|
||||
replenishDate: 'pm.replenish_date',
|
||||
overstockedUnits: 'pm.overstocked_units',
|
||||
overstockedCost: 'pm.overstocked_cost',
|
||||
overstockedRetail: 'pm.overstocked_retail',
|
||||
isOldStock: 'pm.is_old_stock',
|
||||
// Yesterday
|
||||
yesterdaySales: 'pm.yesterday_sales',
|
||||
// Map status column - directly mapped now instead of calculated on frontend
|
||||
status: 'pm.status',
|
||||
|
||||
// Growth Metrics (P3)
|
||||
salesGrowth30dVsPrev: 'pm.sales_growth_30d_vs_prev',
|
||||
revenueGrowth30dVsPrev: 'pm.revenue_growth_30d_vs_prev',
|
||||
salesGrowthYoy: 'pm.sales_growth_yoy',
|
||||
revenueGrowthYoy: 'pm.revenue_growth_yoy',
|
||||
|
||||
// Demand Variability Metrics (P3)
|
||||
salesVariance30d: 'pm.sales_variance_30d',
|
||||
salesStdDev30d: 'pm.sales_std_dev_30d',
|
||||
salesCv30d: 'pm.sales_cv_30d',
|
||||
demandPattern: 'pm.demand_pattern',
|
||||
|
||||
// Service Level Metrics (P5)
|
||||
fillRate30d: 'pm.fill_rate_30d',
|
||||
stockoutIncidents30d: 'pm.stockout_incidents_30d',
|
||||
serviceLevel30d: 'pm.service_level_30d',
|
||||
lostSalesIncidents30d: 'pm.lost_sales_incidents_30d',
|
||||
|
||||
// Seasonality Metrics (P5)
|
||||
seasonalityIndex: 'pm.seasonality_index',
|
||||
seasonalPattern: 'pm.seasonal_pattern',
|
||||
peakSeason: 'pm.peak_season',
|
||||
|
||||
// Lifetime Revenue Quality
|
||||
lifetimeRevenueQuality: 'pm.lifetime_revenue_quality'
|
||||
};
|
||||
|
||||
// Define column types for use in sorting/filtering
|
||||
// This helps apply correct comparison operators and sorting logic
|
||||
// Define column types for use in sorting/filtering.
// This helps apply correct comparison operators and sorting logic.
// NOTE: keys are TYPE NAMES mapped to arrays of frontend column keys —
// to find a column's type you must search these arrays (see
// getColumnType), not index this object by column name.
const COLUMN_TYPES = {
  // Numeric columns (use numeric operators and sorting)
  numeric: [
    'pid', 'currentPrice', 'currentRegularPrice', 'currentCostPrice', 'currentLandingCostPrice',
    'currentStock', 'currentStockCost', 'currentStockRetail', 'currentStockGross',
    'onOrderQty', 'onOrderCost', 'onOrderRetail', 'ageDays',
    'sales7d', 'revenue7d', 'sales14d', 'revenue14d', 'sales30d', 'revenue30d',
    'cogs30d', 'profit30d', 'returnsUnits30d', 'returnsRevenue30d', 'discounts30d',
    'grossRevenue30d', 'grossRegularRevenue30d', 'stockoutDays30d', 'sales365d', 'revenue365d',
    'avgStockUnits30d', 'avgStockCost30d', 'avgStockRetail30d', 'avgStockGross30d',
    'receivedQty30d', 'receivedCost30d', 'lifetimeSales', 'lifetimeRevenue',
    'first7DaysSales', 'first7DaysRevenue', 'first30DaysSales', 'first30DaysRevenue',
    'first60DaysSales', 'first60DaysRevenue', 'first90DaysSales', 'first90DaysRevenue',
    'asp30d', 'acp30d', 'avgRos30d', 'avgSalesPerDay30d', 'avgSalesPerMonth30d',
    'margin30d', 'markup30d', 'gmroi30d', 'stockturn30d', 'returnRate30d', 'discountRate30d',
    'stockoutRate30d', 'markdown30d', 'markdownRate30d', 'sellThrough30d', 'avgLeadTimeDays',
    'salesVelocityDaily', 'configLeadTime', 'configDaysOfStock', 'configSafetyStock',
    'planningPeriodDays', 'leadTimeForecastUnits', 'daysOfStockForecastUnits',
    'planningPeriodForecastUnits', 'leadTimeClosingStock', 'daysOfStockClosingStock',
    'replenishmentNeededRaw', 'replenishmentUnits', 'replenishmentCost', 'replenishmentRetail',
    'replenishmentProfit', 'toOrderUnits', 'forecastLostSalesUnits', 'forecastLostRevenue',
    'stockCoverInDays', 'poCoverInDays', 'sellsOutInDays', 'overstockedUnits',
    'overstockedCost', 'overstockedRetail', 'yesterdaySales',
    // New numeric columns
    'moq', 'rating', 'reviews', 'weight', 'length', 'width', 'height',
    'baskets', 'notifies', 'preorderCount', 'notionsInvCount',
    // Growth metrics
    'salesGrowth30dVsPrev', 'revenueGrowth30dVsPrev', 'salesGrowthYoy', 'revenueGrowthYoy',
    // Demand variability metrics
    'salesVariance30d', 'salesStdDev30d', 'salesCv30d',
    // Service level metrics
    'fillRate30d', 'stockoutIncidents30d', 'serviceLevel30d', 'lostSalesIncidents30d',
    // Seasonality metrics
    'seasonalityIndex'
  ],
  // Date columns (use date operators and sorting)
  date: [
    'dateCreated', 'dateFirstReceived', 'dateLastReceived', 'dateFirstSold', 'dateLastSold',
    'earliestExpectedDate', 'replenishDate', 'forecastedOutOfStockDate'
  ],
  // String columns (use string operators and sorting)
  string: [
    'sku', 'title', 'brand', 'vendor', 'imageUrl', 'abcClass', 'status',
    // New string columns
    'barcode', 'harmonizedTariffCode', 'vendorReference', 'notionsReference',
    'line', 'subline', 'artist', 'countryOfOrigin', 'location',
    // New string columns for patterns
    'demandPattern', 'seasonalPattern', 'peakSeason', 'lifetimeRevenueQuality'
  ],
  // Boolean columns (use boolean operators and sorting)
  boolean: ['isVisible', 'isReplenishable', 'isOldStock']
};
|
||||
|
||||
// Special sort handling for certain columns
|
||||
// Special sort handling for certain columns.
// Values:
//   true       -> force numeric cast when building ORDER BY
//   'abs'      -> sort by absolute value (sign-carrying growth/profit columns)
//   'priority' -> use the hand-rolled status CASE ordering in the list handler
const SPECIAL_SORT_COLUMNS = {
  // Percentage columns where we want to sort by the numeric value
  margin30d: true,
  markup30d: true,
  sellThrough30d: true,
  discountRate30d: true,
  stockoutRate30d: true,
  returnRate30d: true,
  markdownRate30d: true,

  // Columns where we may want to sort by absolute value
  profit30d: 'abs',

  // Velocity columns
  salesVelocityDaily: true,

  // Growth rate columns
  salesGrowth30dVsPrev: 'abs',
  revenueGrowth30dVsPrev: 'abs',
  salesGrowthYoy: 'abs',
  revenueGrowthYoy: 'abs',

  // Status column needs special ordering
  status: 'priority'
};
|
||||
|
||||
// Status priority for sorting (lower number = higher priority)
|
||||
// Status priority for sorting (lower number = higher priority).
// NOTE(review): within the code visible here this map is never read —
// the GET / list handler inlines the same priorities in a SQL CASE
// expression. If both are kept, they must stay in sync; verify whether
// other parts of the file reference this map.
const STATUS_PRIORITY = {
  'Critical': 1,
  'At Risk': 2,
  'Reorder': 3,
  'Overstocked': 4,
  'Healthy': 5,
  'New': 6
  // Any other status will be sorted alphabetically after these
};
|
||||
|
||||
/**
 * Resolve a frontend column key to its database column expression.
 *
 * @param {string} frontendColumn - Column key as sent by the client.
 * @returns {string} The mapped `pm.*` expression, or 'pm.title' when the
 *   key is unknown (safe default for sorting/filtering).
 */
function getDbColumn(frontendColumn) {
  const mapped = COLUMN_MAP[frontendColumn];
  if (mapped) {
    return mapped;
  }
  return 'pm.title'; // Unknown key: fall back to sorting/filtering by title
}
|
||||
|
||||
/**
 * Get the type of a frontend column for proper sorting/filtering.
 *
 * BUGFIX: COLUMN_TYPES is keyed by TYPE NAME ('numeric', 'date',
 * 'string', 'boolean') with arrays of column keys as values. The old
 * implementation did `COLUMN_TYPES[frontendColumn]`, which was always
 * undefined, so every column was treated as a string. We must search the
 * arrays instead.
 *
 * Returns 'number' (not 'numeric') for numeric columns because the
 * consumers — the ORDER BY builder (`columnType === 'number'`) and
 * parseValue() (`case 'number'`) — test for that spelling.
 *
 * @param {string} frontendColumn - Column key as sent by the client.
 * @returns {string} 'number' | 'date' | 'string' | 'boolean'
 */
function getColumnType(frontendColumn) {
  for (const [type, columns] of Object.entries(COLUMN_TYPES)) {
    if (columns.includes(frontendColumn)) {
      return type === 'numeric' ? 'number' : type;
    }
  }
  return 'string'; // Unknown columns default to string comparison
}
|
||||
|
||||
// --- Route Handlers ---
|
||||
|
||||
// GET /metrics/filter-options - Provide distinct values for filter dropdowns
// (vendors, brands, ABC classes). All three lookups run concurrently.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /metrics/filter-options');
  try {
    const vendorPromise = pool.query(`SELECT DISTINCT vendor FROM public.product_metrics WHERE vendor IS NOT NULL AND vendor <> '' ORDER BY vendor`);
    const brandPromise = pool.query(`SELECT DISTINCT COALESCE(brand, 'Unbranded') as brand FROM public.product_metrics WHERE brand IS NOT NULL AND brand <> '' ORDER BY brand`);
    const abcPromise = pool.query(`SELECT DISTINCT abc_class FROM public.product_metrics WHERE abc_class IS NOT NULL ORDER BY abc_class`);
    // Add queries for other distinct options if needed (e.g., categories if stored on pm)

    const [vendorRes, brandRes, abcClassRes] = await Promise.all([
      vendorPromise,
      brandPromise,
      abcPromise
    ]);

    res.json({
      vendors: vendorRes.rows.map((row) => row.vendor),
      brands: brandRes.rows.map((row) => row.brand),
      abcClasses: abcClassRes.rows.map((row) => row.abc_class),
    });
  } catch (error) {
    console.error('Error fetching filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
|
||||
|
||||
|
||||
// GET /metrics/ - List all product metrics with filtering, sorting, pagination.
//
// Query params:
//   page, limit               - pagination (limit capped at MAX_PAGE_LIMIT)
//   sort, order               - frontend column key + 'asc'/'desc'
//   showInvisible, showNonReplenishable - 'true' disables default filters
//   stock_status              - matches pm.status (e.g. 'at-risk' -> 'At Risk')
//   <column>[_op]             - filters, op in eq|ne|gt|gte|lt|lte|like|ilike|between|in
//
// BUGFIX: `conditionFragment`/`needsParam` were previously declared inside
// the per-filter try block, so the catch clause reading `needsParam` threw
// a ReferenceError whenever parseValue failed — masking the real error and
// failing the whole request. They are now declared before the try, and the
// placeholder counter / params array are snapshotted so a mid-build parse
// error (e.g. in 'between') rolls back cleanly instead of leaving $n
// numbering out of sync with the params array.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /metrics received query:', req.query);

  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10);
    let limit = parseInt(req.query.limit, 10);
    if (isNaN(page) || page < 1) page = 1;
    if (isNaN(limit) || limit < 1) limit = DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT); // Cap the limit
    const offset = (page - 1) * limit;

    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'title'; // Default sort field key
    const dbColumn = getDbColumn(sortQueryKey);
    const columnType = getColumnType(sortQueryKey);

    console.log(`Sorting request: ${sortQueryKey} -> ${dbColumn} (${columnType})`);

    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';

    // Always put nulls last regardless of sort direction or column type
    const nullsOrder = 'NULLS LAST';

    // Build the ORDER BY clause based on column type and special handling.
    // dbColumn comes from COLUMN_MAP (trusted), never from raw user input.
    let orderByClause;

    if (SPECIAL_SORT_COLUMNS[sortQueryKey] === 'abs') {
      // Sort by absolute value for columns where negative values matter
      orderByClause = `ABS(${dbColumn}::numeric) ${sortDirection} ${nullsOrder}`;
    } else if (columnType === 'number' || SPECIAL_SORT_COLUMNS[sortQueryKey] === true) {
      // For numeric columns, cast to numeric to ensure proper sorting
      orderByClause = `${dbColumn}::numeric ${sortDirection} ${nullsOrder}`;
    } else if (columnType === 'date') {
      // For date columns, cast to timestamp to ensure proper sorting
      orderByClause = `CASE WHEN ${dbColumn} IS NULL THEN 1 ELSE 0 END, ${dbColumn}::timestamp ${sortDirection}`;
    } else if (columnType === 'status' || SPECIAL_SORT_COLUMNS[sortQueryKey] === 'priority') {
      // Special handling for status column, using priority for known statuses
      // (keep in sync with STATUS_PRIORITY)
      orderByClause = `
        CASE WHEN ${dbColumn} IS NULL THEN 999
             WHEN ${dbColumn} = 'Critical' THEN 1
             WHEN ${dbColumn} = 'At Risk' THEN 2
             WHEN ${dbColumn} = 'Reorder' THEN 3
             WHEN ${dbColumn} = 'Overstocked' THEN 4
             WHEN ${dbColumn} = 'Healthy' THEN 5
             WHEN ${dbColumn} = 'New' THEN 6
             ELSE 100
        END ${sortDirection} ${nullsOrder},
        ${dbColumn} ${sortDirection}`;
    } else {
      // For string and boolean columns, no special casting needed
      orderByClause = `CASE WHEN ${dbColumn} IS NULL THEN 1 ELSE 0 END, ${dbColumn} ${sortDirection}`;
    }

    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1; // Next $n placeholder index; must stay in sync with params

    // Add default visibility/replenishable filters unless overridden
    if (req.query.showInvisible !== 'true') conditions.push(`pm.is_visible = true`);
    if (req.query.showNonReplenishable !== 'true') conditions.push(`pm.is_replenishable = true`);

    // Special handling for stock_status
    if (req.query.stock_status) {
      const status = req.query.stock_status;
      // Handle special case for "at-risk" which is stored as "At Risk" in the database
      if (status.toLowerCase() === 'at-risk') {
        conditions.push(`pm.status = $${paramCounter++}`);
        params.push('At Risk');
      } else {
        // Capitalize first letter to match database values
        conditions.push(`pm.status = $${paramCounter++}`);
        params.push(status.charAt(0).toUpperCase() + status.slice(1));
      }
    }

    // Process other filters from query parameters
    for (const key in req.query) {
      // Skip control params
      if (['page', 'limit', 'sort', 'order', 'showInvisible', 'showNonReplenishable', 'stock_status'].includes(key)) continue;

      let filterKey = key;
      let operator = '='; // Default operator
      let value = req.query[key];

      // Check for operator suffixes (e.g., sales30d_gt, title_like)
      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1]; // e.g., "sales30d"
        operator = operatorMatch[2]; // e.g., "gt"
      }

      // Get the database column for this filter key
      const filterColumn = getDbColumn(filterKey);
      const valueType = getColumnType(filterKey);

      if (!filterColumn) {
        console.warn(`Invalid filter key ignored: ${key}`);
        continue; // Skip if the key doesn't map to a known column
      }

      // --- Build WHERE clause fragment ---
      // Declared OUTSIDE the try so the catch can read them safely, and
      // snapshot counter/params so a parse error can be rolled back.
      let conditionFragment = '';
      let needsParam = true; // Most operators need a single parameter
      const savedParamCounter = paramCounter;
      const savedParamsLength = params.length;

      try {
        switch (operator.toLowerCase()) {
          case 'eq': operator = '='; break;
          case 'ne': operator = '<>'; break;
          case 'gt': operator = '>'; break;
          case 'gte': operator = '>='; break;
          case 'lt': operator = '<'; break;
          case 'lte': operator = '<='; break;
          case 'like': operator = 'LIKE'; value = `%${value}%`; break; // Add wildcards for LIKE
          case 'ilike': operator = 'ILIKE'; value = `%${value}%`; break; // Add wildcards for ILIKE
          case 'between': {
            const [val1, val2] = String(value).split(',');
            if (val1 !== undefined && val2 !== undefined) {
              conditionFragment = `${filterColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
              params.push(parseValue(val1, valueType), parseValue(val2, valueType));
              needsParam = false; // Params added manually
            } else {
              console.warn(`Invalid 'between' value for ${key}: ${value}`);
              continue; // Skip this filter
            }
            break;
          }
          case 'in': {
            const inValues = String(value).split(',');
            if (inValues.length > 0) {
              const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
              conditionFragment = `${filterColumn} IN (${placeholders})`;
              params.push(...inValues.map(v => parseValue(v, valueType))); // Add all parsed values
              needsParam = false; // Params added manually
            } else {
              console.warn(`Invalid 'in' value for ${key}: ${value}`);
              continue; // Skip this filter
            }
            break;
          }
          // Add other operators as needed (IS NULL, IS NOT NULL, etc.)
          case '=': // Keep default '='
          default: operator = '='; break; // Ensure default is handled
        }

        if (needsParam) {
          conditionFragment = `${filterColumn} ${operator} $${paramCounter++}`;
          params.push(parseValue(value, valueType));
        }

        if (conditionFragment) {
          conditions.push(`(${conditionFragment})`); // Wrap condition in parentheses
        }

      } catch (parseError) {
        console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
        // Roll back placeholder numbering and any params pushed before the error
        paramCounter = savedParamCounter;
        params.length = savedParamsLength;
      }
    }

    // --- Construct and Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Debug log of conditions and parameters
    console.log('Constructed WHERE conditions:', conditions);
    console.log('Parameters:', params);

    // Count Query
    const countSql = `SELECT COUNT(*) AS total FROM public.product_metrics pm ${whereClause}`;
    console.log('Executing Count Query:', countSql, params);
    const countPromise = pool.query(countSql, params);

    // Data Query (Select all columns from metrics table for now)
    const dataSql = `
      SELECT pm.*
      FROM public.product_metrics pm
      ${whereClause}
      ORDER BY ${orderByClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];

    // Log detailed query information for debugging
    console.log('Executing Data Query:');
    console.log(' - Sort Column:', dbColumn);
    console.log(' - Column Type:', columnType);
    console.log(' - Sort Direction:', sortDirection);
    console.log(' - Order By Clause:', orderByClause);
    console.log(' - Full SQL:', dataSql);
    console.log(' - Parameters:', dataParams);

    const dataPromise = pool.query(dataSql, dataParams);

    // Execute queries in parallel
    const [countResult, dataResult] = await Promise.all([countPromise, dataPromise]);

    const total = parseInt(countResult.rows[0].total, 10);
    const metrics = dataResult.rows;
    console.log(`Total: ${total}, Fetched: ${metrics.length} for page ${page}`);

    // --- Respond ---
    res.json({
      metrics,
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit,
      },
      // Optionally include applied filters/sort for frontend confirmation
      appliedQuery: {
        filters: req.query, // Send back raw query filters
        sort: sortQueryKey,
        order: sortDirection.toLowerCase()
      }
    });

  } catch (error) {
    console.error('Error fetching metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch product metrics list.' });
  }
});
|
||||
|
||||
// GET /metrics/:pid - Get metrics for a single product.
// Responds 400 for a non-numeric pid, 404 when no metrics row exists.
router.get('/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  const pid = parseInt(req.params.pid, 10);

  // Reject non-numeric product IDs up front
  if (Number.isNaN(pid)) {
    return res.status(400).json({ error: 'Invalid Product ID.' });
  }

  console.log(`GET /metrics/${pid}`);
  try {
    const result = await pool.query(
      `SELECT * FROM public.product_metrics WHERE pid = $1`,
      [pid]
    );

    if (result.rows.length === 0) {
      console.log(`Metrics not found for PID: ${pid}`);
      return res.status(404).json({ error: 'Metrics not found for this product.' });
    }

    console.log(`Metrics found for PID: ${pid}`);
    // Metrics are pre-calculated; pid yields at most one row
    res.json(result.rows[0]);

  } catch (error) {
    console.error(`Error fetching metrics for PID ${pid}:`, error);
    res.status(500).json({ error: 'Failed to fetch product metrics.' });
  }
});
|
||||
|
||||
|
||||
/**
 * Parses a raw query-string value according to its expected column type.
 *
 * @param {*} value - Raw value from the query string (may be null/undefined).
 * @param {string} type - 'number' | 'boolean' | 'date' | 'string'.
 * @returns {number|boolean|string|null} Parsed value, or null for empty input.
 * @throws {Error} When a number or boolean value is malformed.
 */
function parseValue(value, type) {
  // Empty input (null, undefined, '') is treated as "no value"
  if (value == null || value === '') return null;

  if (type === 'number') {
    const parsed = parseFloat(value);
    if (Number.isNaN(parsed)) throw new Error(`Invalid number format: "${value}"`);
    return parsed;
  }

  if (type === 'boolean') {
    const normalized = String(value).toLowerCase();
    if (normalized === 'true') return true;
    if (normalized === 'false') return false;
    throw new Error(`Invalid boolean format: "${value}"`);
  }

  // 'date', 'string', and anything else: pass through as a string and let
  // the database handle actual conversion/validation.
  return String(value);
}
|
||||
|
||||
module.exports = router;
|
||||
261
inventory-server/src/routes/orders.js
Normal file
261
inventory-server/src/routes/orders.js
Normal file
@@ -0,0 +1,261 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// Get all orders with pagination, filtering, and sorting.
//
// Fixes in this revision:
// - SECURITY: sortColumn came straight from req.query and was interpolated
//   into the ORDER BY clause — a SQL injection vector. It is now validated
//   against a whitelist of sortable columns (unknown values fall back to
//   'date'). sortDirection was already constrained to ASC/DESC.
// - BUGFIX: the min/max amount filters reference totals.total_amount, but
//   the data query did not join the "totals" subquery (only the count
//   query did), so any amount filter produced a SQL error. Both queries
//   now share the same join.
// - parseInt is called with an explicit radix.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const page = parseInt(req.query.page, 10) || 1;
    const limit = parseInt(req.query.limit, 10) || 50;
    const offset = (page - 1) * limit;
    const search = req.query.search || '';
    const status = req.query.status || 'all';
    const fromDate = req.query.fromDate ? new Date(req.query.fromDate) : null;
    const toDate = req.query.toDate ? new Date(req.query.toDate) : null;
    const minAmount = parseFloat(req.query.minAmount) || 0;
    const maxAmount = req.query.maxAmount ? parseFloat(req.query.maxAmount) : null;

    // Whitelist of columns the client may sort by (prevents SQL injection,
    // since the column name is interpolated into the query text below).
    const allowedSortColumns = new Set([
      'order_number', 'customer', 'date', 'status',
      'payment_method', 'shipping_method', 'items_count', 'total_amount'
    ]);
    const requestedSort = req.query.sortColumn || 'date';
    const sortColumn = allowedSortColumns.has(requestedSort) ? requestedSort : 'date';
    const sortDirection = req.query.sortDirection === 'desc' ? 'DESC' : 'ASC';

    // Build the WHERE clause
    const conditions = ['o1.canceled = false'];
    const params = [];
    let paramCounter = 1;

    if (search) {
      conditions.push(`(o1.order_number ILIKE $${paramCounter} OR o1.customer ILIKE $${paramCounter})`);
      params.push(`%${search}%`);
      paramCounter++;
    }

    if (status !== 'all') {
      conditions.push(`o1.status = $${paramCounter}`);
      params.push(status);
      paramCounter++;
    }

    if (fromDate) {
      conditions.push(`DATE(o1.date) >= DATE($${paramCounter})`);
      params.push(fromDate.toISOString());
      paramCounter++;
    }

    if (toDate) {
      conditions.push(`DATE(o1.date) <= DATE($${paramCounter})`);
      params.push(toDate.toISOString());
      paramCounter++;
    }

    // Amount filters reference the per-order total from the "totals"
    // subquery, so every query using these conditions must join it.
    if (minAmount > 0) {
      conditions.push(`totals.total_amount >= $${paramCounter}`);
      params.push(minAmount);
      paramCounter++;
    }

    if (maxAmount) {
      conditions.push(`totals.total_amount <= $${paramCounter}`);
      params.push(maxAmount);
      paramCounter++;
    }

    // Shared join providing the per-order total_amount used by the filters
    const totalsJoin = `
      LEFT JOIN (
        SELECT order_number, SUM(price * quantity) as total_amount
        FROM orders
        GROUP BY order_number
      ) totals ON o1.order_number = totals.order_number
    `;

    // Get total count for pagination
    const { rows: [countResult] } = await pool.query(`
      SELECT COUNT(DISTINCT o1.order_number) as total
      FROM orders o1
      ${totalsJoin}
      WHERE ${conditions.join(' AND ')}
    `, params);

    const total = countResult.total;

    // Get paginated results (one row per order, line items aggregated)
    const query = `
      SELECT
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        COUNT(o2.pid) as items_count,
        ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount
      FROM orders o1
      JOIN orders o2 ON o1.order_number = o2.order_number
      ${totalsJoin}
      WHERE ${conditions.join(' AND ')}
      GROUP BY
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method
      ORDER BY ${
        sortColumn === 'items_count' || sortColumn === 'total_amount'
          ? `${sortColumn} ${sortDirection}`
          : `o1.${sortColumn} ${sortDirection}`
      }
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;

    params.push(limit, offset);
    const { rows } = await pool.query(query, params);

    // Get order statistics for the last 30 days vs. the 30 days before.
    // (The previously unused OrderValues CTE has been removed.)
    const { rows: [orderStats] } = await pool.query(`
      WITH CurrentStats AS (
        SELECT
          COUNT(DISTINCT order_number) as total_orders,
          ROUND(SUM(price * quantity)::numeric, 3) as total_revenue
        FROM orders
        WHERE canceled = false
        AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days'
      ),
      PreviousStats AS (
        SELECT
          COUNT(DISTINCT order_number) as prev_orders,
          ROUND(SUM(price * quantity)::numeric, 3) as prev_revenue
        FROM orders
        WHERE canceled = false
        AND DATE(date) BETWEEN CURRENT_DATE - INTERVAL '60 days' AND CURRENT_DATE - INTERVAL '30 days'
      )
      SELECT
        cs.total_orders,
        cs.total_revenue,
        CASE
          WHEN ps.prev_orders > 0
          THEN ROUND(((cs.total_orders - ps.prev_orders)::numeric / ps.prev_orders * 100), 1)
          ELSE 0
        END as order_growth,
        CASE
          WHEN ps.prev_revenue > 0
          THEN ROUND(((cs.total_revenue - ps.prev_revenue)::numeric / ps.prev_revenue * 100), 1)
          ELSE 0
        END as revenue_growth,
        CASE
          WHEN cs.total_orders > 0
          THEN ROUND((cs.total_revenue::numeric / cs.total_orders), 3)
          ELSE 0
        END as average_order_value,
        CASE
          WHEN ps.prev_orders > 0
          THEN ROUND((ps.prev_revenue::numeric / ps.prev_orders), 3)
          ELSE 0
        END as prev_average_order_value
      FROM CurrentStats cs
      CROSS JOIN PreviousStats ps
    `);

    res.json({
      orders: rows.map(row => ({
        ...row,
        total_amount: parseFloat(row.total_amount) || 0,
        items_count: parseInt(row.items_count, 10) || 0,
        date: row.date
      })),
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit
      },
      stats: {
        totalOrders: parseInt(orderStats.total_orders, 10) || 0,
        totalRevenue: parseFloat(orderStats.total_revenue) || 0,
        orderGrowth: parseFloat(orderStats.order_growth) || 0,
        revenueGrowth: parseFloat(orderStats.revenue_growth) || 0,
        averageOrderValue: parseFloat(orderStats.average_order_value) || 0,
        aovGrowth: orderStats.prev_average_order_value > 0
          ? ((orderStats.average_order_value - orderStats.prev_average_order_value) / orderStats.prev_average_order_value * 100)
          : 0,
        conversionRate: 2.5, // Placeholder - would need actual visitor data
        conversionGrowth: 0.5 // Placeholder - would need actual visitor data
      }
    });
  } catch (error) {
    console.error('Error fetching orders:', error);
    res.status(500).json({ error: 'Failed to fetch orders' });
  }
});
|
||||
|
||||
// Get a single order with its items.
// Responds 404 when the order does not exist (or is canceled).
router.get('/:orderNumber', async (req, res) => {
  const pool = req.app.locals.pool;
  const { orderNumber } = req.params;
  try {
    // Fetch the order header, aggregated over its line items
    const orderResult = await pool.query(`
      SELECT DISTINCT
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        o1.shipping_address,
        o1.billing_address,
        COUNT(o2.pid) as items_count,
        ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount
      FROM orders o1
      JOIN orders o2 ON o1.order_number = o2.order_number
      WHERE o1.order_number = $1 AND o1.canceled = false
      GROUP BY
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        o1.shipping_address,
        o1.billing_address
    `, [orderNumber]);

    if (orderResult.rows.length === 0) {
      return res.status(404).json({ error: 'Order not found' });
    }

    // Fetch the individual line items joined with product details
    const itemsResult = await pool.query(`
      SELECT
        o.pid,
        p.title,
        p.SKU,
        o.quantity,
        o.price,
        ROUND((o.price * o.quantity)::numeric, 3) as total
      FROM orders o
      JOIN products p ON o.pid = p.pid
      WHERE o.order_number = $1 AND o.canceled = false
    `, [orderNumber]);

    const [header] = orderResult.rows;

    // Normalize numeric strings coming back from Postgres into JS numbers
    const order = {
      ...header,
      total_amount: parseFloat(header.total_amount) || 0,
      items_count: parseInt(header.items_count) || 0,
      items: itemsResult.rows.map((item) => ({
        ...item,
        price: parseFloat(item.price) || 0,
        total: parseFloat(item.total) || 0,
        quantity: parseInt(item.quantity) || 0
      }))
    };

    res.json(order);
  } catch (error) {
    console.error('Error fetching order:', error);
    res.status(500).json({ error: 'Failed to fetch order' });
  }
});
||||
|
||||
module.exports = router;
|
||||
747
inventory-server/src/routes/products.js
Normal file
747
inventory-server/src/routes/products.js
Normal file
@@ -0,0 +1,747 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const multer = require('multer');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { importProductsFromCSV } = require('../utils/csvImporter');
|
||||
const { PurchaseOrderStatus, ReceivingStatus } = require('../types/status-codes');
|
||||
|
||||
// Configure multer for file uploads without silent fallbacks.
// UPLOADS_DIR may be absolute, or relative to the package root (two
// directories above this file); when unset, <package root>/uploads is used.
const configuredUploadsDir = process.env.UPLOADS_DIR;
const uploadsDir = configuredUploadsDir
  ? (path.isAbsolute(configuredUploadsDir)
      ? configuredUploadsDir
      : path.resolve(__dirname, '../../', configuredUploadsDir))
  : path.resolve(__dirname, '../../uploads');

// Fail fast at module load if the uploads directory cannot be created —
// rethrowing aborts startup instead of surfacing as upload-time errors.
try {
  fs.mkdirSync(uploadsDir, { recursive: true });
} catch (error) {
  console.error(`Failed to initialize uploads directory at ${uploadsDir}:`, error);
  throw error;
}

// Multer instance storing uploads on disk in uploadsDir (used by routes below)
const upload = multer({ dest: uploadsDir });
|
||||
|
||||
// Get unique brands across visible products (NULL brand -> 'Unbranded').
router.get('/brands', async (req, res) => {
  console.log('Brands endpoint hit:', {
    url: req.url,
    method: req.method,
    headers: req.headers,
    path: req.path
  });

  try {
    const pool = req.app.locals.pool;
    console.log('Fetching brands from database...');

    const result = await pool.query(`
      SELECT DISTINCT COALESCE(p.brand, 'Unbranded') as brand
      FROM products p
      WHERE p.visible = true
      ORDER BY COALESCE(p.brand, 'Unbranded')
    `);

    console.log(`Found ${result.rows.length} brands:`, result.rows.slice(0, 3));
    res.json(result.rows.map((row) => row.brand));
  } catch (error) {
    console.error('Error fetching brands:', error);
    res.status(500).json({ error: 'Failed to fetch brands' });
  }
});
|
||||
|
||||
// Get all products with pagination, filtering, and sorting
//
// Query parameters:
//   page, limit              - pagination (defaults: 1, 50)
//   sort, order              - sort column and direction ('desc' -> DESC)
//   search                   - substring match on title / SKU / barcode
//   <text field>             - substring match on one column (see textFilters)
//   <numeric field>          - numeric comparison; <field>_operator selects the
//                              operator, or 'between' with a JSON [min, max]
//   <date field>             - prefix match ('2023-01' = month, full date = day)
//   vendor/brand/...         - exact-match select filters
//
// Responds with { products, pagination, filters }.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const page = parseInt(req.query.page) || 1;
    const limit = parseInt(req.query.limit) || 50;
    const offset = (page - 1) * limit;

    // SECURITY: the sort column and direction are interpolated directly into
    // the SQL string below, so they must never carry attacker-controlled text.
    // Accept only a plain identifier (optionally table-qualified, e.g.
    // "pm.daily_sales_avg"); anything else falls back to the default column.
    const requestedSort = req.query.sort || 'title';
    const sortColumn = /^[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?$/.test(requestedSort)
      ? requestedSort
      : 'title';
    const sortDirection = req.query.order === 'desc' ? 'DESC' : 'ASC';

    const conditions = ['p.visible = true'];
    const params = [];
    let paramCounter = 1;

    // Add default replenishable filter unless explicitly showing non-replenishable
    if (req.query.showNonReplenishable !== 'true') {
      conditions.push('p.replenishable = true');
    }

    // Free-text search across title, SKU and barcode (single shared parameter).
    if (req.query.search) {
      conditions.push(`(p.title ILIKE $${paramCounter} OR p.SKU ILIKE $${paramCounter} OR p.barcode ILIKE $${paramCounter})`);
      const searchTerm = `%${req.query.search}%`;
      params.push(searchTerm);
      paramCounter++;
    }

    // Substring (ILIKE) filters on individual text columns:
    // query-string key -> SQL column.
    const textFilters = {
      barcode: 'p.barcode',
      vendor_reference: 'p.vendor_reference',
      description: 'p.description',
      harmonized_tariff_code: 'p.harmonized_tariff_code',
      notions_reference: 'p.notions_reference',
      line: 'p.line',
      subline: 'p.subline',
      artist: 'p.artist',
      country_of_origin: 'p.country_of_origin',
      location: 'p.location'
    };
    for (const [param, column] of Object.entries(textFilters)) {
      if (req.query[param]) {
        conditions.push(`${column} ILIKE $${paramCounter}`);
        params.push(`%${req.query[param]}%`);
        paramCounter++;
      }
    }

    // Numeric filters with operators: query-string key -> SQL column.
    const numericFields = {
      stock: 'p.stock_quantity',
      price: 'p.price',
      costPrice: 'p.cost_price',
      landingCost: 'p.landing_cost_price',
      dailySalesAvg: 'pm.daily_sales_avg',
      weeklySalesAvg: 'pm.weekly_sales_avg',
      monthlySalesAvg: 'pm.monthly_sales_avg',
      avgQuantityPerOrder: 'pm.avg_quantity_per_order',
      numberOfOrders: 'pm.number_of_orders',
      margin: 'pm.avg_margin_percent',
      gmroi: 'pm.gmroi',
      inventoryValue: 'pm.inventory_value',
      costOfGoodsSold: 'pm.cost_of_goods_sold',
      grossProfit: 'pm.gross_profit',
      turnoverRate: 'pm.turnover_rate',
      leadTime: 'pm.current_lead_time',
      currentLeadTime: 'pm.current_lead_time',
      targetLeadTime: 'pm.target_lead_time',
      stockCoverage: 'pm.days_of_inventory',
      daysOfStock: 'pm.days_of_inventory',
      weeksOfStock: 'pm.weeks_of_inventory',
      reorderPoint: 'pm.reorder_point',
      safetyStock: 'pm.safety_stock',
      // Add new numeric fields
      preorderCount: 'p.preorder_count',
      notionsInvCount: 'p.notions_inv_count',
      rating: 'p.rating',
      reviews: 'p.reviews',
      weight: 'p.weight',
      totalSold: 'p.total_sold',
      baskets: 'p.baskets',
      notifies: 'p.notifies'
    };

    // SECURITY: the comparison operator is interpolated into the SQL string,
    // so only this fixed whitelist is ever accepted.
    const allowedOperators = new Set(['=', '!=', '<>', '<', '<=', '>', '>=']);

    Object.entries(req.query).forEach(([key, value]) => {
      const field = numericFields[key];
      if (!field) return;

      const operator = req.query[`${key}_operator`] || '=';
      if (operator === 'between') {
        try {
          const [min, max] = JSON.parse(value);
          conditions.push(`${field} BETWEEN $${paramCounter} AND $${paramCounter + 1}`);
          params.push(min, max);
          paramCounter += 2;
        } catch (e) {
          console.error(`Invalid between value for ${key}:`, value);
        }
      } else if (allowedOperators.has(operator)) {
        const numericValue = parseFloat(value);
        // Skip non-numeric values instead of sending NaN to Postgres
        // (which would fail the whole query with a 500).
        if (Number.isFinite(numericValue)) {
          conditions.push(`${field} ${operator} $${paramCounter}`);
          params.push(numericValue);
          paramCounter++;
        } else {
          console.error(`Invalid numeric value for ${key}:`, value);
        }
      } else {
        console.error(`Rejected numeric filter operator for ${key}:`, operator);
      }
    });

    // Date filters: prefix match on the text form of the date column.
    const dateFields = {
      firstSaleDate: 'pm.first_sale_date',
      lastSaleDate: 'pm.last_sale_date',
      lastPurchaseDate: 'pm.last_purchase_date',
      firstReceivedDate: 'pm.first_received_date',
      lastReceivedDate: 'pm.last_received_date'
    };

    Object.entries(req.query).forEach(([key, value]) => {
      const field = dateFields[key];
      if (field) {
        conditions.push(`${field}::TEXT LIKE $${paramCounter}`);
        params.push(`${value}%`); // Format like '2023-01%' to match by month or '2023-01-01' for exact date
        paramCounter++;
      }
    });

    // Handle select filters
    if (req.query.vendor) {
      conditions.push(`p.vendor = $${paramCounter}`);
      params.push(req.query.vendor);
      paramCounter++;
    }

    if (req.query.brand) {
      conditions.push(`p.brand = $${paramCounter}`);
      params.push(req.query.brand);
      paramCounter++;
    }

    if (req.query.category) {
      conditions.push(`p.categories ILIKE $${paramCounter}`);
      params.push(`%${req.query.category}%`);
      paramCounter++;
    }

    if (req.query.stockStatus && req.query.stockStatus !== 'all') {
      conditions.push(`pm.stock_status = $${paramCounter}`);
      params.push(req.query.stockStatus);
      paramCounter++;
    }

    if (req.query.abcClass) {
      conditions.push(`pm.abc_class = $${paramCounter}`);
      params.push(req.query.abcClass);
      paramCounter++;
    }

    if (req.query.leadTimeStatus) {
      conditions.push(`pm.lead_time_status = $${paramCounter}`);
      params.push(req.query.leadTimeStatus);
      paramCounter++;
    }

    if (req.query.replenishable !== undefined) {
      conditions.push(`p.replenishable = $${paramCounter}`);
      params.push(req.query.replenishable === 'true');
      paramCounter++;
    }

    if (req.query.managingStock !== undefined) {
      conditions.push(`p.managing_stock = $${paramCounter}`);
      params.push(req.query.managingStock === 'true');
      paramCounter++;
    }

    // Combine all conditions with AND
    const whereClause = conditions.length > 0 ? 'WHERE ' + conditions.join(' AND ') : '';

    // Get total count for pagination
    const countQuery = `
      SELECT COUNT(DISTINCT p.pid) as total
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      ${whereClause}
    `;
    const { rows: [countResult] } = await pool.query(countQuery, params);
    const total = countResult.total;

    // Get available filter option lists for the client UI.
    const { rows: categories } = await pool.query(
      'SELECT name FROM categories ORDER BY name'
    );
    const { rows: vendors } = await pool.query(
      'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != \'\' ORDER BY vendor'
    );
    const { rows: brands } = await pool.query(
      'SELECT DISTINCT COALESCE(brand, \'Unbranded\') as brand FROM products WHERE visible = true ORDER BY brand'
    );

    // Main query with all fields
    const query = `
      WITH RECURSIVE
      category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL

        UNION ALL

        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      ),
      product_thresholds AS (
        SELECT
          p.pid,
          COALESCE(
            (SELECT overstock_days FROM stock_thresholds st
             WHERE st.category_id IN (
               SELECT pc.cat_id
               FROM product_categories pc
               WHERE pc.pid = p.pid
             )
             AND (st.vendor = p.vendor OR st.vendor IS NULL)
             ORDER BY st.vendor IS NULL
             LIMIT 1),
            (SELECT overstock_days FROM stock_thresholds st
             WHERE st.category_id IS NULL
             AND (st.vendor = p.vendor OR st.vendor IS NULL)
             ORDER BY st.vendor IS NULL
             LIMIT 1),
            90
          ) as target_days
        FROM products p
      ),
      product_leaf_categories AS (
        SELECT DISTINCT pc.cat_id
        FROM product_categories pc
        WHERE NOT EXISTS (
          SELECT 1
          FROM categories child
          JOIN product_categories child_pc ON child.cat_id = child_pc.cat_id
          WHERE child.parent_id = pc.cat_id
          AND child_pc.pid = pc.pid
        )
      )
      SELECT
        p.*,
        COALESCE(p.brand, 'Unbranded') as brand,
        string_agg(DISTINCT (c.cat_id || ':' || c.name), ',') as categories,
        pm.daily_sales_avg,
        pm.weekly_sales_avg,
        pm.monthly_sales_avg,
        pm.avg_quantity_per_order,
        pm.number_of_orders,
        pm.first_sale_date,
        pm.last_sale_date,
        pm.days_of_inventory,
        pm.weeks_of_inventory,
        pm.reorder_point,
        pm.safety_stock,
        pm.avg_margin_percent,
        CAST(pm.total_revenue AS DECIMAL(15,3)) as total_revenue,
        CAST(pm.inventory_value AS DECIMAL(15,3)) as inventory_value,
        CAST(pm.cost_of_goods_sold AS DECIMAL(15,3)) as cost_of_goods_sold,
        CAST(pm.gross_profit AS DECIMAL(15,3)) as gross_profit,
        pm.gmroi,
        pm.avg_lead_time_days,
        pm.last_purchase_date,
        pm.last_received_date,
        pm.abc_class,
        pm.stock_status,
        pm.turnover_rate,
        p.date_last_sold
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      LEFT JOIN product_categories pc ON p.pid = pc.pid
      LEFT JOIN categories c ON pc.cat_id = c.cat_id
      ${whereClause}
      GROUP BY p.pid, pm.pid
      ORDER BY ${sortColumn} ${sortDirection}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;

    params.push(limit, offset);
    const { rows: products } = await pool.query(query, params);

    res.json({
      products,
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit
      },
      filters: {
        categories: categories.map(c => c.name),
        vendors: vendors.map(v => v.vendor),
        brands: brands.map(b => b.brand)
      }
    });
  } catch (error) {
    console.error('Error fetching products:', error);
    res.status(500).json({ error: 'Failed to fetch products' });
  }
});
||||
|
||||
// Get trending products
//
// Returns up to 50 visible products with sales activity, ordered by a
// growth-rate figure derived from daily vs weekly sales averages.
router.get('/trending', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // First check if we have any data
    const statsResult = await pool.query(`
      SELECT COUNT(*) as count,
        MAX(total_revenue) as max_revenue,
        MAX(daily_sales_avg) as max_daily_sales,
        COUNT(DISTINCT pid) as products_with_metrics
      FROM product_metrics
      WHERE total_revenue > 0 OR daily_sales_avg > 0
    `);
    const stats = statsResult.rows[0];
    console.log('Product metrics stats:', stats);

    if (parseInt(stats.count) === 0) {
      console.log('No products with metrics found');
      return res.json([]);
    }

    // Get trending products
    const trendingResult = await pool.query(`
      SELECT
        p.pid,
        p.sku,
        p.title,
        COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
        COALESCE(pm.weekly_sales_avg, 0) as weekly_sales_avg,
        CASE
          WHEN pm.weekly_sales_avg > 0 AND pm.daily_sales_avg > 0
          THEN ((pm.daily_sales_avg - pm.weekly_sales_avg) / pm.weekly_sales_avg) * 100
          ELSE 0
        END as growth_rate,
        COALESCE(pm.total_revenue, 0) as total_revenue
      FROM products p
      INNER JOIN product_metrics pm ON p.pid = pm.pid
      WHERE (pm.total_revenue > 0 OR pm.daily_sales_avg > 0)
      AND p.visible = true
      ORDER BY growth_rate DESC
      LIMIT 50
    `);
    const trendingProducts = trendingResult.rows;

    console.log('Trending products:', trendingProducts);
    res.json(trendingProducts);
  } catch (error) {
    console.error('Error fetching trending products:', error);
    res.status(500).json({ error: 'Failed to fetch trending products' });
  }
});
||||
|
||||
// Get a single product
//
// Responds with the product row joined to its metrics, plus its leaf
// categories ("catId:name") and their full "A > B > C" paths. Numeric
// columns arrive from pg as strings and are coerced to JS numbers.
// 404 when the id is not numeric or the product does not exist.
router.get('/:id', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const id = parseInt(req.params.id);

    // Reject non-numeric ids up front instead of letting Postgres error out.
    if (Number.isNaN(id)) {
      return res.status(404).json({ error: 'Product not found' });
    }

    // Get product details with metrics
    const { rows: productRows } = await pool.query(`
      SELECT
        p.*,
        pm.daily_sales_avg,
        pm.weekly_sales_avg,
        pm.monthly_sales_avg,
        pm.days_of_inventory,
        pm.reorder_point,
        pm.safety_stock,
        pm.stock_status,
        pm.abc_class,
        pm.avg_margin_percent,
        pm.total_revenue,
        pm.inventory_value,
        pm.turnover_rate,
        pm.gmroi,
        pm.cost_of_goods_sold,
        pm.gross_profit,
        pm.avg_lead_time_days,
        pm.current_lead_time,
        pm.target_lead_time,
        pm.lead_time_status,
        pm.reorder_qty,
        pm.overstocked_amt
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      WHERE p.pid = $1
    `, [id]);

    if (!productRows.length) {
      return res.status(404).json({ error: 'Product not found' });
    }

    // Get categories and their paths separately to avoid GROUP BY issues
    const { rows: categoryRows } = await pool.query(`
      WITH RECURSIVE
      category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL

        UNION ALL

        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      ),
      product_leaf_categories AS (
        -- Find categories assigned to this product that aren't parents
        -- of other categories assigned to this product
        SELECT pc.cat_id
        FROM product_categories pc
        WHERE pc.pid = $1
        AND NOT EXISTS (
          -- Check if there are any child categories also assigned to this product
          SELECT 1
          FROM categories child
          JOIN product_categories child_pc ON child.cat_id = child_pc.cat_id
          WHERE child.parent_id = pc.cat_id
          AND child_pc.pid = pc.pid
        )
      )
      SELECT
        c.cat_id,
        c.name as category_name,
        cp.path as full_path
      FROM product_categories pc
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      JOIN product_leaf_categories plc ON c.cat_id = plc.cat_id
      WHERE pc.pid = $2
      ORDER BY cp.path
    `, [id, id]);

    // Map "catId:name" -> full path; cat_id in the key differentiates
    // categories that share a name.
    const categoryPathMap = categoryRows.reduce((acc, row) => {
      acc[`${row.cat_id}:${row.category_name}`] = row.full_path;
      return acc;
    }, {});

    const base = productRows[0];
    const product = {
      ...base,
      // Include cat_id in categories array to match the keys in categoryPathMap
      categories: categoryRows.map(row => `${row.cat_id}:${row.category_name}`),
      category_paths: categoryPathMap,
      price: parseFloat(base.price),
      regular_price: parseFloat(base.regular_price),
      cost_price: parseFloat(base.cost_price),
      landing_cost_price: parseFloat(base.landing_cost_price),
      stock_quantity: parseInt(base.stock_quantity),
      moq: parseInt(base.moq),
      uom: parseInt(base.uom),
      managing_stock: Boolean(base.managing_stock),
      replenishable: Boolean(base.replenishable),
      // Format new fields
      preorder_count: parseInt(base.preorder_count || 0),
      notions_inv_count: parseInt(base.notions_inv_count || 0),
      harmonized_tariff_code: base.harmonized_tariff_code || '',
      notions_reference: base.notions_reference || '',
      line: base.line || '',
      subline: base.subline || '',
      artist: base.artist || '',
      rating: parseFloat(base.rating || 0),
      reviews: parseInt(base.reviews || 0),
      weight: parseFloat(base.weight || 0),
      dimensions: {
        length: parseFloat(base.length || 0),
        width: parseFloat(base.width || 0),
        height: parseFloat(base.height || 0),
      },
      country_of_origin: base.country_of_origin || '',
      location: base.location || '',
      total_sold: parseInt(base.total_sold || 0),
      baskets: parseInt(base.baskets || 0),
      notifies: parseInt(base.notifies || 0),
      date_last_sold: base.date_last_sold || null,
      // Format existing analytics fields
      daily_sales_avg: parseFloat(base.daily_sales_avg) || 0,
      weekly_sales_avg: parseFloat(base.weekly_sales_avg) || 0,
      monthly_sales_avg: parseFloat(base.monthly_sales_avg) || 0,
      avg_quantity_per_order: parseFloat(base.avg_quantity_per_order) || 0,
      number_of_orders: parseInt(base.number_of_orders) || 0,
      first_sale_date: base.first_sale_date || null,
      last_sale_date: base.last_sale_date || null,
      days_of_inventory: parseFloat(base.days_of_inventory) || 0,
      weeks_of_inventory: parseFloat(base.weeks_of_inventory) || 0,
      reorder_point: parseFloat(base.reorder_point) || 0,
      safety_stock: parseFloat(base.safety_stock) || 0,
      avg_margin_percent: parseFloat(base.avg_margin_percent) || 0,
      total_revenue: parseFloat(base.total_revenue) || 0,
      inventory_value: parseFloat(base.inventory_value) || 0,
      cost_of_goods_sold: parseFloat(base.cost_of_goods_sold) || 0,
      gross_profit: parseFloat(base.gross_profit) || 0,
      gmroi: parseFloat(base.gmroi) || 0,
      avg_lead_time_days: parseFloat(base.avg_lead_time_days) || 0,
      current_lead_time: parseFloat(base.current_lead_time) || 0,
      target_lead_time: parseFloat(base.target_lead_time) || 0,
      lead_time_status: base.lead_time_status || null,
      reorder_qty: parseInt(base.reorder_qty) || 0,
      overstocked_amt: parseInt(base.overstocked_amt) || 0
    };

    res.json(product);
  } catch (error) {
    console.error('Error fetching product:', error);
    res.status(500).json({ error: 'Failed to fetch product' });
  }
});
||||
|
||||
// Get product time series data
//
// Returns three datasets for one product (pid = :id):
//   - monthly_sales:    last 12 calendar months of aggregated, non-canceled
//                       order lines (order count, units, revenue)
//   - recent_orders:    the 10 most recent non-canceled order lines
//   - recent_purchases: the 10 most recent non-canceled purchase orders,
//                       with a derived lead_time_days
router.get('/:id/time-series', async (req, res) => {
  const { id } = req.params;
  try {
    const pool = req.app.locals.pool;

    // Get monthly sales data (grouped by calendar month, newest first)
    const { rows: monthlySales } = await pool.query(`
      SELECT
        TO_CHAR(date, 'YYYY-MM') as month,
        COUNT(DISTINCT order_number) as order_count,
        SUM(quantity) as units_sold,
        ROUND(SUM(price * quantity)::numeric, 3) as revenue
      FROM orders
      WHERE pid = $1
      AND canceled = false
      GROUP BY TO_CHAR(date, 'YYYY-MM')
      ORDER BY month DESC
      LIMIT 12
    `, [id]);

    // Format monthly sales data: pg returns numerics as strings, so coerce
    // each aggregate to a JS number for the JSON response.
    const formattedMonthlySales = monthlySales.map(month => ({
      month: month.month,
      order_count: parseInt(month.order_count),
      units_sold: parseInt(month.units_sold),
      revenue: parseFloat(month.revenue),
      profit: 0 // Set to 0 since we don't have cost data in orders table
    }));

    // Get recent orders
    const { rows: recentOrders } = await pool.query(`
      SELECT
        TO_CHAR(date, 'YYYY-MM-DD') as date,
        order_number,
        quantity,
        price,
        discount,
        tax,
        shipping,
        customer_name as customer,
        status
      FROM orders
      WHERE pid = $1
      AND canceled = false
      ORDER BY date DESC
      LIMIT 10
    `, [id]);

    // Get recent purchase orders with detailed status.
    // lead_time_days: actual receipt delay when received, days overdue when
    // past the expected date and not yet receiving, otherwise NULL.
    // NOTE(review): PurchaseOrderStatus is defined elsewhere in this file;
    // $2 gates the "overdue" branch and $3 excludes canceled POs — confirm
    // the enum values against that definition.
    const { rows: recentPurchases } = await pool.query(`
      SELECT
        TO_CHAR(date, 'YYYY-MM-DD') as date,
        TO_CHAR(expected_date, 'YYYY-MM-DD') as expected_date,
        TO_CHAR(received_date, 'YYYY-MM-DD') as received_date,
        po_id,
        ordered,
        received,
        status,
        receiving_status,
        cost_price,
        notes,
        CASE
          WHEN received_date IS NOT NULL THEN
            (received_date - date)
          WHEN expected_date < CURRENT_DATE AND status < $2 THEN
            (CURRENT_DATE - expected_date)
          ELSE NULL
        END as lead_time_days
      FROM purchase_orders
      WHERE pid = $1
      AND status != $3
      ORDER BY date DESC
      LIMIT 10
    `, [id, PurchaseOrderStatus.ReceivingStarted, PurchaseOrderStatus.Canceled]);

    // Coerce string numerics to JS numbers before responding.
    res.json({
      monthly_sales: formattedMonthlySales,
      recent_orders: recentOrders.map(order => ({
        ...order,
        price: parseFloat(order.price),
        discount: parseFloat(order.discount),
        tax: parseFloat(order.tax),
        shipping: parseFloat(order.shipping),
        quantity: parseInt(order.quantity)
      })),
      recent_purchases: recentPurchases.map(po => ({
        ...po,
        ordered: parseInt(po.ordered),
        received: parseInt(po.received),
        status: parseInt(po.status),
        receiving_status: parseInt(po.receiving_status),
        cost_price: parseFloat(po.cost_price),
        lead_time_days: po.lead_time_days ? parseInt(po.lead_time_days) : null
      }))
    });
  } catch (error) {
    console.error('Error fetching product time series:', error);
    res.status(500).json({ error: 'Failed to fetch product time series' });
  }
});

module.exports = router;
|
||||
1188
inventory-server/src/routes/purchase-orders.js
Normal file
1188
inventory-server/src/routes/purchase-orders.js
Normal file
File diff suppressed because it is too large
Load Diff
396
inventory-server/src/routes/reusable-images.js
Normal file
396
inventory-server/src/routes/reusable-images.js
Normal file
@@ -0,0 +1,396 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const multer = require('multer');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
// Create reusable uploads directory if it doesn't exist
|
||||
const uploadsDir = path.join('/var/www/html/inventory/uploads/reusable');
|
||||
fs.mkdirSync(uploadsDir, { recursive: true });
|
||||
|
||||
// Configure multer for file uploads: files land in uploadsDir under a unique
// "reusable-<timestamp>-<random><ext>" name, capped at 5MB, images only.
const storage = multer.diskStorage({
  destination(req, file, cb) {
    console.log(`Saving reusable image to: ${uploadsDir}`);
    cb(null, uploadsDir);
  },
  filename(req, file, cb) {
    // Create unique filename with original extension
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);

    // Make sure we preserve the original file extension
    let fileExt = path.extname(file.originalname).toLowerCase();

    // When the original name has no extension, derive one from the mimetype
    // (unknown types default to .jpg).
    if (!fileExt) {
      const extByMime = {
        'image/jpeg': '.jpg',
        'image/png': '.png',
        'image/gif': '.gif',
        'image/webp': '.webp'
      };
      fileExt = extByMime[file.mimetype] || '.jpg';
    }

    const fileName = `reusable-${uniqueSuffix}${fileExt}`;
    console.log(`Generated filename: ${fileName} with mimetype: ${file.mimetype}`);
    cb(null, fileName);
  }
});

const upload = multer({
  storage: storage,
  limits: {
    fileSize: 5 * 1024 * 1024, // 5MB max file size
  },
  fileFilter(req, file, cb) {
    // Accept only image files — both the mimetype and the extension must match.
    const allowedTypes = /jpeg|jpg|png|gif|webp/;
    const mimetypeOk = allowedTypes.test(file.mimetype);
    const extnameOk = allowedTypes.test(path.extname(file.originalname).toLowerCase());

    if (mimetypeOk && extnameOk) {
      return cb(null, true);
    }
    cb(new Error('Only image files are allowed'));
  }
});
||||
|
||||
// Get all reusable images
//
// Returns every reusable_images row, newest first.
router.get('/', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      ORDER BY created_at DESC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching reusable images:', error);
    res.status(500).json({
      error: 'Failed to fetch reusable images',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
||||
|
||||
// Get images by company or global images
//
// Returns rows that are either global or owned by :companyId, newest first.
router.get('/by-company/:companyId', async (req, res) => {
  try {
    const { companyId } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Get images that are either global or belong to this company
    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      WHERE is_global = true OR company = $1
      ORDER BY created_at DESC
    `, [companyId]);

    res.json(rows);
  } catch (error) {
    console.error('Error fetching reusable images by company:', error);
    res.status(500).json({
      error: 'Failed to fetch reusable images by company',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
||||
|
||||
// Get global images only
//
// Returns rows flagged is_global, newest first.
router.get('/global', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      WHERE is_global = true
      ORDER BY created_at DESC
    `);

    res.json(rows);
  } catch (error) {
    console.error('Error fetching global reusable images:', error);
    res.status(500).json({
      error: 'Failed to fetch global reusable images',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
||||
|
||||
// Get a single image by ID
//
// 404 when no reusable_images row matches :id.
router.get('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      WHERE id = $1
    `, [id]);

    const image = rows[0];
    if (!image) {
      return res.status(404).json({ error: 'Reusable image not found' });
    }

    res.json(image);
  } catch (error) {
    console.error('Error fetching reusable image:', error);
    res.status(500).json({
      error: 'Failed to fetch reusable image',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
||||
|
||||
// Upload a new reusable image
//
// Multipart form: 'image' file plus name, is_global, and (for non-global
// images) company. Stores the file on disk, records it in reusable_images,
// and returns the created row. On failure the uploaded file is removed so
// failed requests do not leave orphans on disk.
router.post('/upload', upload.single('image'), async (req, res) => {
  try {
    if (!req.file) {
      return res.status(400).json({ error: 'No image file provided' });
    }

    const { name, is_global, company } = req.body;

    // Validate required fields
    if (!name) {
      return res.status(400).json({ error: 'Image name is required' });
    }

    // Convert is_global from string to boolean (multipart fields are strings)
    const isGlobal = is_global === 'true' || is_global === true;

    // Validate company is provided for non-global images
    if (!isGlobal && !company) {
      return res.status(400).json({ error: 'Company is required for non-global images' });
    }

    // Log file information
    console.log('Reusable image uploaded:', {
      filename: req.file.filename,
      originalname: req.file.originalname,
      mimetype: req.file.mimetype,
      size: req.file.size,
      path: req.file.path
    });

    // Ensure the file exists
    const filePath = path.join(uploadsDir, req.file.filename);
    if (!fs.existsSync(filePath)) {
      return res.status(500).json({ error: 'File was not saved correctly' });
    }

    // Public URL for the uploaded file; PUBLIC_BASE_URL overrides the
    // default production host.
    const baseUrl = process.env.PUBLIC_BASE_URL || 'https://acot.site';
    const imageUrl = `${baseUrl}/uploads/reusable/${req.file.filename}`;

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Insert record into database
    const result = await pool.query(`
      INSERT INTO reusable_images (
        name,
        filename,
        file_path,
        image_url,
        is_global,
        company,
        mime_type,
        file_size
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
      RETURNING *
    `, [
      name,
      req.file.filename,
      filePath,
      imageUrl,
      isGlobal,
      isGlobal ? null : company,
      req.file.mimetype,
      req.file.size
    ]);

    // Return success response with image data
    res.status(201).json({
      success: true,
      image: result.rows[0],
      message: 'Image uploaded successfully'
    });

  } catch (error) {
    console.error('Error uploading reusable image:', error);
    // Best-effort cleanup: multer already wrote the file to disk, so remove
    // it when the request fails after upload (e.g. DB insert error).
    if (req.file) {
      fs.unlink(path.join(uploadsDir, req.file.filename), () => {});
    }
    res.status(500).json({ error: error.message || 'Failed to upload image' });
  }
});
||||
|
||||
// Update image details (name, is_global, company)
//
// Only metadata changes; the stored file is untouched. 400 on missing
// name or missing company for non-global images; 404 when :id is unknown.
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const { name, is_global, company } = req.body;

    // Validate required fields
    if (!name) {
      return res.status(400).json({ error: 'Image name is required' });
    }

    // Convert is_global from string to boolean if necessary
    const isGlobal = typeof is_global === 'string' ? is_global === 'true' : !!is_global;

    // Validate company is provided for non-global images
    if (!isGlobal && !company) {
      return res.status(400).json({ error: 'Company is required for non-global images' });
    }

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Check if the image exists
    const existing = await pool.query('SELECT * FROM reusable_images WHERE id = $1', [id]);
    if (existing.rows.length === 0) {
      return res.status(404).json({ error: 'Reusable image not found' });
    }

    const updated = await pool.query(`
      UPDATE reusable_images
      SET
        name = $1,
        is_global = $2,
        company = $3
      WHERE id = $4
      RETURNING *
    `, [name, isGlobal, isGlobal ? null : company, id]);

    res.json(updated.rows[0]);
  } catch (error) {
    console.error('Error updating reusable image:', error);
    res.status(500).json({
      error: 'Failed to update reusable image',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
||||
|
||||
// Delete a reusable image
router.delete('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Look the record up first so we know which file to remove afterwards.
    const found = await pool.query('SELECT * FROM reusable_images WHERE id = $1', [id]);
    if (found.rows.length === 0) {
      return res.status(404).json({ error: 'Reusable image not found' });
    }
    const image = found.rows[0];

    // Remove the database row before touching the filesystem.
    await pool.query('DELETE FROM reusable_images WHERE id = $1', [id]);

    // Remove the stored file; skip silently if it is already gone.
    const filePath = path.join(uploadsDir, image.filename);
    if (fs.existsSync(filePath)) {
      fs.unlinkSync(filePath);
    }

    res.json({
      message: 'Reusable image deleted successfully',
      image
    });
  } catch (error) {
    console.error('Error deleting reusable image:', error);
    res.status(500).json({
      error: 'Failed to delete reusable image',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Check if file exists and permissions (diagnostic endpoint).
// Returns existence/readability/stat info for a file in the uploads dir.
router.get('/check-file/:filename', (req, res) => {
  const { filename } = req.params;

  // Prevent directory traversal: reject parent-directory references and any
  // path separator. The original check missed backslashes, which act as
  // path separators on Windows hosts.
  if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
    return res.status(400).json({ error: 'Invalid filename' });
  }

  const filePath = path.join(uploadsDir, filename);

  try {
    // Check if file exists
    if (!fs.existsSync(filePath)) {
      return res.status(404).json({
        error: 'File not found',
        path: filePath,
        exists: false,
        readable: false
      });
    }

    // Throws if the process lacks read permission on the file.
    fs.accessSync(filePath, fs.constants.R_OK);

    // Get file stats
    const stats = fs.statSync(filePath);

    return res.json({
      filename,
      path: filePath,
      exists: true,
      readable: true,
      isFile: stats.isFile(),
      isDirectory: stats.isDirectory(),
      size: stats.size,
      created: stats.birthtime,
      modified: stats.mtime,
      permissions: stats.mode.toString(8) // e.g. '100644'
    });
  } catch (error) {
    return res.status(500).json({
      error: error.message,
      path: filePath,
      exists: fs.existsSync(filePath),
      readable: false
    });
  }
});
|
||||
|
||||
// Error handling middleware
// Final catch-all for errors thrown by routes on this router: logs the
// error server-side and returns a generic 500 with the message as detail.
router.use((err, req, res, next) => {
  console.error('Reusable images route error:', err);
  res.status(500).json({
    error: 'Internal server error',
    details: err.message
  });
});

module.exports = router;
|
||||
283
inventory-server/src/routes/templates.js
Normal file
283
inventory-server/src/routes/templates.js
Normal file
@@ -0,0 +1,283 @@
|
||||
const express = require('express');
|
||||
const { getPool } = require('../utils/db');
|
||||
const dotenv = require('dotenv');
|
||||
const path = require('path');
|
||||
|
||||
dotenv.config({ path: path.join(__dirname, "../../.env") });
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Get all templates
// Returns every template row ordered by company, then product type.
router.get('/', async (req, res) => {
  try {
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(`
      SELECT * FROM templates
      ORDER BY company ASC, product_type ASC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching templates:', error);
    res.status(500).json({
      error: 'Failed to fetch templates',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Get template by company and product type
// 404s when no template matches the (company, product_type) pair.
router.get('/:company/:productType', async (req, res) => {
  try {
    const { company, productType } = req.params;
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(`
      SELECT * FROM templates
      WHERE company = $1 AND product_type = $2
    `, [company, productType]);

    if (rows.length === 0) {
      return res.status(404).json({ error: 'Template not found' });
    }

    res.json(rows[0]);
  } catch (error) {
    console.error('Error fetching template:', error);
    res.status(500).json({
      error: 'Failed to fetch template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Create new template
// Inserts a template row; (company, product_type) must be unique.
// Responds 201 with the created row, 409 on a duplicate, 400 on missing keys.
router.post('/', async (req, res) => {
  try {
    const {
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions
    } = req.body;

    // Validate required fields
    if (!company || !product_type) {
      return res.status(400).json({ error: 'Company and Product Type are required' });
    }

    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const result = await pool.query(`
      INSERT INTO templates (
        company,
        product_type,
        supplier,
        msrp,
        cost_each,
        qty_per_unit,
        case_qty,
        hts_code,
        description,
        weight,
        length,
        width,
        height,
        tax_cat,
        size_cat,
        categories,
        ship_restrictions
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
      RETURNING *
    `, [
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions
    ]);

    res.status(201).json(result.rows[0]);
  } catch (error) {
    console.error('Error creating template:', error);
    // Detect unique-constraint violations by SQLSTATE 23505 (node-postgres
    // sets error.code); keep the message check as a fallback since matching
    // on message text alone is fragile.
    if (error?.code === '23505' || (error instanceof Error && error.message.includes('unique constraint'))) {
      return res.status(409).json({
        error: 'Template already exists for this company and product type',
        details: error.message
      });
    }
    res.status(500).json({
      error: 'Failed to create template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Update template
// Replaces every editable column of the template identified by :id.
// Responds with the updated row, 404 when the id is unknown, 409 when the
// new (company, product_type) pair collides with another template.
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const {
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions
    } = req.body;

    // Validate required fields
    if (!company || !product_type) {
      return res.status(400).json({ error: 'Company and Product Type are required' });
    }

    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const result = await pool.query(`
      UPDATE templates
      SET
        company = $1,
        product_type = $2,
        supplier = $3,
        msrp = $4,
        cost_each = $5,
        qty_per_unit = $6,
        case_qty = $7,
        hts_code = $8,
        description = $9,
        weight = $10,
        length = $11,
        width = $12,
        height = $13,
        tax_cat = $14,
        size_cat = $15,
        categories = $16,
        ship_restrictions = $17
      WHERE id = $18
      RETURNING *
    `, [
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions,
      id
    ]);

    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Template not found' });
    }

    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error updating template:', error);
    // Detect unique-constraint violations by SQLSTATE 23505 (node-postgres
    // sets error.code); keep the message check as a fallback since matching
    // on message text alone is fragile.
    if (error?.code === '23505' || (error instanceof Error && error.message.includes('unique constraint'))) {
      return res.status(409).json({
        error: 'Template already exists for this company and product type',
        details: error.message
      });
    }
    res.status(500).json({
      error: 'Failed to update template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Delete template
// Removes the template identified by :id; 404 when no such row exists.
router.delete('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // RETURNING * lets us distinguish "deleted" from "no such row".
    const { rows: deleted } = await pool.query('DELETE FROM templates WHERE id = $1 RETURNING *', [id]);
    if (deleted.length === 0) {
      return res.status(404).json({ error: 'Template not found' });
    }

    res.json({ message: 'Template deleted successfully' });
  } catch (error) {
    console.error('Error deleting template:', error);
    res.status(500).json({
      error: 'Failed to delete template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Error handling middleware
// Final catch-all for errors thrown by routes on this router: logs the
// error server-side and returns a generic 500 with the message as detail.
router.use((err, req, res, next) => {
  console.error('Template route error:', err);
  res.status(500).json({
    error: 'Internal server error',
    details: err.message
  });
});

module.exports = router;
|
||||
@@ -1 +1,323 @@
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
|
||||
|
||||
// --- Configuration & Helpers ---

// Pagination defaults and caps for the list endpoint.
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200;

// Maps query keys to DB columns in vendor_metrics. Acting only through this
// whitelist keeps user-supplied sort/filter keys out of raw SQL.
const COLUMN_MAP = {
  vendorName: { dbCol: 'vm.vendor_name', type: 'string' },
  productCount: { dbCol: 'vm.product_count', type: 'number' },
  activeProductCount: { dbCol: 'vm.active_product_count', type: 'number' },
  replenishableProductCount: { dbCol: 'vm.replenishable_product_count', type: 'number' },
  currentStockUnits: { dbCol: 'vm.current_stock_units', type: 'number' },
  currentStockCost: { dbCol: 'vm.current_stock_cost', type: 'number' },
  currentStockRetail: { dbCol: 'vm.current_stock_retail', type: 'number' },
  onOrderUnits: { dbCol: 'vm.on_order_units', type: 'number' },
  onOrderCost: { dbCol: 'vm.on_order_cost', type: 'number' },
  poCount365d: { dbCol: 'vm.po_count_365d', type: 'number' },
  avgLeadTimeDays: { dbCol: 'vm.avg_lead_time_days', type: 'number' },
  sales7d: { dbCol: 'vm.sales_7d', type: 'number' },
  revenue7d: { dbCol: 'vm.revenue_7d', type: 'number' },
  sales30d: { dbCol: 'vm.sales_30d', type: 'number' },
  revenue30d: { dbCol: 'vm.revenue_30d', type: 'number' },
  profit30d: { dbCol: 'vm.profit_30d', type: 'number' },
  cogs30d: { dbCol: 'vm.cogs_30d', type: 'number' },
  sales365d: { dbCol: 'vm.sales_365d', type: 'number' },
  revenue365d: { dbCol: 'vm.revenue_365d', type: 'number' },
  lifetimeSales: { dbCol: 'vm.lifetime_sales', type: 'number' },
  lifetimeRevenue: { dbCol: 'vm.lifetime_revenue', type: 'number' },
  avgMargin30d: { dbCol: 'vm.avg_margin_30d', type: 'number' },
  // Growth metrics
  salesGrowth30dVsPrev: { dbCol: 'vm.sales_growth_30d_vs_prev', type: 'number' },
  revenueGrowth30dVsPrev: { dbCol: 'vm.revenue_growth_30d_vs_prev', type: 'number' },
  // Aliases kept for frontend compatibility
  name: { dbCol: 'vm.vendor_name', type: 'string' },
  leadTime: { dbCol: 'vm.avg_lead_time_days', type: 'number' },
  // Derived status column (computed in the list query's subselect)
  status: { dbCol: 'vendor_status', type: 'string' },
};

/**
 * Look up the column mapping for a client-supplied query key.
 * @param {string} queryParamKey - key from the query string
 * @returns {{dbCol: string, type: string}|null} mapping, or null when unknown
 */
function getSafeColumnInfo(queryParamKey) {
  const info = COLUMN_MAP[queryParamKey];
  return info ?? null;
}
|
||||
|
||||
// --- Route Handlers ---
|
||||
|
||||
// GET /vendors-aggregate/filter-options (Just vendors list for now)
// Returns the distinct vendor names and derived status values used to
// populate filter dropdowns on the frontend.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /vendors-aggregate/filter-options');
  try {
    // Distinct vendor names straight from the aggregate table.
    const vendorResult = await pool.query(`
      SELECT DISTINCT vendor_name FROM public.vendor_metrics ORDER BY vendor_name
    `);

    // Status is derived, so compute the distinct set with the same CASE the
    // list endpoint uses.
    const statusResult = await pool.query(`
      SELECT DISTINCT
        CASE
          WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
          WHEN po_count_365d > 0 THEN 'inactive'
          ELSE 'pending'
        END as status
      FROM public.vendor_metrics
      ORDER BY status
    `);

    res.json({
      vendors: vendorResult.rows.map((row) => row.vendor_name),
      statuses: statusResult.rows.map((row) => row.status)
    });
  } catch (error) {
    console.error('Error fetching vendor filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
|
||||
|
||||
// GET /vendors-aggregate/stats (Overall vendor stats)
// Aggregates headline numbers across all vendors: counts, stock/on-order
// value, average lead time, and overall PO cost metrics.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /vendors-aggregate/stats');
  try {
    // Totals across the whole vendor_metrics table.
    const totalsResult = await pool.query(`
      SELECT
        COUNT(*) AS total_vendors,
        SUM(active_product_count) AS total_active_products,
        SUM(current_stock_cost) AS total_stock_value,
        SUM(on_order_cost) AS total_on_order_value,
        AVG(NULLIF(avg_lead_time_days, 0)) AS overall_avg_lead_time
      FROM public.vendor_metrics vm
    `);
    const stats = totalsResult.rows[0];

    // "Active" = at least one PO in the last 365 days.
    const activeResult = await pool.query(`
      SELECT
        COUNT(DISTINCT CASE
          WHEN po_count_365d > 0
          THEN vendor_name
        END) as active_vendors
      FROM public.vendor_metrics
    `);
    const activeStats = activeResult.rows[0];

    // Overall cost metrics from purchase orders.
    const costResult = await pool.query(`
      SELECT
        ROUND((SUM(ordered * po_cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
        ROUND(SUM(ordered * po_cost_price)::numeric, 3) as total_spend
      FROM purchase_orders
      WHERE po_cost_price IS NOT NULL
        AND ordered > 0
        AND vendor IS NOT NULL AND vendor != ''
    `);
    const overallCostMetrics = costResult.rows[0];

    res.json({
      totalVendors: parseInt(stats?.total_vendors || 0),
      activeVendors: parseInt(activeStats?.active_vendors || 0),
      totalActiveProducts: parseInt(stats?.total_active_products || 0),
      totalValue: parseFloat(stats?.total_stock_value || 0),
      totalOnOrderValue: parseFloat(stats?.total_on_order_value || 0),
      avgLeadTime: parseFloat(stats?.overall_avg_lead_time || 0),
      avgUnitCost: parseFloat(overallCostMetrics?.avg_unit_cost || 0),
      totalSpend: parseFloat(overallCostMetrics?.total_spend || 0)
    });
  } catch (error) {
    console.error('Error fetching vendor stats:', error);
    res.status(500).json({ error: 'Failed to fetch vendor stats.' });
  }
});
|
||||
|
||||
// GET /vendors-aggregate/ (List vendors)
// Supports pagination (page/limit), sorting (sort/order) and ad-hoc column
// filters of the form <key>_<op>=<value> (op: eq, ne, gt, gte, lt, lte,
// like, ilike, between, in). Only keys present in COLUMN_MAP are honored.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /vendors-aggregate received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10) || 1;
    let limit = parseInt(req.query.limit, 10) || DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT);
    const offset = (page - 1) * limit;

    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'vendorName'; // Default sort
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
    const sortColumn = sortColumnInfo ? sortColumnInfo.dbCol : 'vm.vendor_name';
    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
    const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
    const sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;

    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;
    // Build conditions based on req.query, using COLUMN_MAP and parseValue
    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order'].includes(key)) continue;

      let filterKey = key;
      let operator = '='; // Default operator
      const value = req.query[key];

      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1];
        operator = operatorMatch[2];
      }

      const columnInfo = getSafeColumnInfo(filterKey);
      if (columnInfo) {
        const dbColumn = columnInfo.dbCol;
        const valueType = columnInfo.type;
        // Snapshot placeholder/parameter state so a parse failure rolls back
        // cleanly. (The previous catch referenced `needsParam`, a variable
        // declared inside the try block and therefore out of scope in the
        // catch — a ReferenceError — and its paramCounter-- rollback could
        // leave paramCounter and params out of sync.)
        const paramsSnapshot = params.length;
        const counterSnapshot = paramCounter;
        try {
          let conditionFragment = '';
          let needsParam = true;
          switch (operator.toLowerCase()) { // Normalize operator
            case 'eq': operator = '='; break;
            case 'ne': operator = '<>'; break;
            case 'gt': operator = '>'; break;
            case 'gte': operator = '>='; break;
            case 'lt': operator = '<'; break;
            case 'lte': operator = '<='; break;
            case 'like': operator = 'LIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'ilike': operator = 'ILIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'between': {
              const [val1, val2] = String(value).split(',');
              if (val1 !== undefined && val2 !== undefined) {
                conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
                params.push(parseValue(val1, valueType), parseValue(val2, valueType));
                needsParam = false;
              } else continue;
              break;
            }
            case 'in': {
              const inValues = String(value).split(',');
              if (inValues.length > 0) {
                const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
                conditionFragment = `${dbColumn} IN (${placeholders})`;
                params.push(...inValues.map(v => parseValue(v, valueType)));
                needsParam = false;
              } else continue;
              break;
            }
            default: operator = '='; break;
          }

          if (needsParam) {
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
            params.push(parseValue(value, valueType));
          } else if (!conditionFragment) { // For LIKE/ILIKE
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
          }

          if (conditionFragment) {
            conditions.push(`(${conditionFragment})`);
          }
        } catch (parseError) {
          console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
          // Roll back anything this filter added so $N numbering stays aligned.
          params.length = paramsSnapshot;
          paramCounter = counterSnapshot;
        }
      } else {
        console.warn(`Invalid filter key ignored: ${key}`);
      }
    }

    // --- Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Status calculation from vendors.js
    const statusCase = `
      CASE
        WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
        WHEN po_count_365d > 0 THEN 'inactive'
        ELSE 'pending'
      END as vendor_status
    `;

    const baseSql = `
      FROM (
        SELECT
          vm.*,
          ${statusCase}
        FROM public.vendor_metrics vm
      ) vm
      ${whereClause}
    `;

    const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
    const dataSql = `
      WITH vendor_data AS (
        SELECT
          vm.*,
          ${statusCase}
        FROM public.vendor_metrics vm
      )
      SELECT
        vm.*,
        COALESCE(po.avg_unit_cost, 0) as avg_unit_cost,
        COALESCE(po.total_spend, 0) as total_spend
      FROM vendor_data vm
      LEFT JOIN (
        SELECT
          vendor,
          ROUND((SUM(ordered * po_cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
          ROUND(SUM(ordered * po_cost_price)::numeric, 3) as total_spend
        FROM purchase_orders
        WHERE po_cost_price IS NOT NULL AND ordered > 0
        GROUP BY vendor
      ) po ON vm.vendor_name = po.vendor
      ${whereClause}
      ${sortClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];

    console.log("Count SQL:", countSql, params);
    console.log("Data SQL:", dataSql, dataParams);

    const [countResult, dataResult] = await Promise.all([
      pool.query(countSql, params),
      pool.query(dataSql, dataParams)
    ]);

    const total = parseInt(countResult.rows[0].total, 10);
    const vendors = dataResult.rows.map(row => {
      // Create a new object with both snake_case and camelCase keys
      const transformedRow = { ...row }; // Start with original data

      for (const key in row) {
        // Skip null/undefined values
        if (row[key] === null || row[key] === undefined) {
          continue; // Original already has the null value
        }

        // Transform keys to match frontend expectations (add camelCase versions)
        // First handle cases like sales_7d -> sales7d
        let camelKey = key.replace(/_(\d+[a-z])/g, '$1');

        // Then handle regular snake_case -> camelCase
        camelKey = camelKey.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
        if (camelKey !== key) { // Only add if different from original
          transformedRow[camelKey] = row[key];
        }
      }
      return transformedRow;
    });

    // --- Respond ---
    res.json({
      vendors,
      pagination: { total, pages: Math.ceil(total / limit), currentPage: page, limit },
    });

  } catch (error) {
    console.error('Error fetching vendor metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch vendor metrics.' });
  }
});
|
||||
|
||||
// GET /vendors-aggregate/:name (Get single vendor metric)
// Not implemented yet. If added, remember to URL-decode the name parameter
// before using it in the query.

module.exports = router;
|
||||
225
inventory-server/src/server.js
Normal file
225
inventory-server/src/server.js
Normal file
@@ -0,0 +1,225 @@
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const { spawn } = require('child_process');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { corsMiddleware, corsErrorHandler } = require('./middleware/cors');
|
||||
const { initPool } = require('./utils/db');
|
||||
const productsRouter = require('./routes/products');
|
||||
const dashboardRouter = require('./routes/dashboard');
|
||||
const ordersRouter = require('./routes/orders');
|
||||
const csvRouter = require('./routes/data-management');
|
||||
const analyticsRouter = require('./routes/analytics');
|
||||
const purchaseOrdersRouter = require('./routes/purchase-orders');
|
||||
const configRouter = require('./routes/config');
|
||||
const metricsRouter = require('./routes/metrics');
|
||||
const importRouter = require('./routes/import');
|
||||
const aiValidationRouter = require('./routes/ai-validation');
|
||||
const templatesRouter = require('./routes/templates');
|
||||
const aiPromptsRouter = require('./routes/ai-prompts');
|
||||
const reusableImagesRouter = require('./routes/reusable-images');
|
||||
const categoriesAggregateRouter = require('./routes/categoriesAggregate');
|
||||
const vendorsAggregateRouter = require('./routes/vendorsAggregate');
|
||||
const brandsAggregateRouter = require('./routes/brandsAggregate');
|
||||
|
||||
// Get the absolute path to the .env file.
// NOTE(review): this path is hard-coded to the production deployment
// location — confirm it is valid in every environment this server runs in.
const envPath = '/var/www/html/inventory/.env';
console.log('Looking for .env file at:', envPath);
console.log('.env file exists:', fs.existsSync(envPath));

try {
  require('dotenv').config({ path: envPath });
  console.log('.env file loaded successfully');
  // Log which settings are present (the password value itself is never printed).
  console.log('Environment check:', {
    NODE_ENV: process.env.NODE_ENV || 'not set',
    PORT: process.env.PORT || 'not set',
    DB_HOST: process.env.DB_HOST || 'not set',
    DB_USER: process.env.DB_USER || 'not set',
    DB_NAME: process.env.DB_NAME || 'not set',
    DB_PASSWORD: process.env.DB_PASSWORD ? '[password set]' : 'not set',
    DB_PORT: process.env.DB_PORT || 'not set',
    DB_SSL: process.env.DB_SSL || 'not set'
  });
} catch (error) {
  // A missing/unreadable .env is not fatal; the process falls back to
  // whatever is already in process.env.
  console.error('Error loading .env file:', error);
}
|
||||
|
||||
// Resolve important directories relative to the project root
const serverRoot = path.resolve(__dirname, '..');
const configuredUploadsDir = process.env.UPLOADS_DIR;

// UPLOADS_DIR may be absolute, relative to the project root, or unset
// (in which case <root>/uploads is used).
const resolveUploadsDir = () => {
  if (!configuredUploadsDir) {
    return path.resolve(serverRoot, 'uploads');
  }
  if (path.isAbsolute(configuredUploadsDir)) {
    return configuredUploadsDir;
  }
  return path.resolve(serverRoot, configuredUploadsDir);
};
const uploadsDir = resolveUploadsDir();

// Persist the resolved uploads directory so downstream modules share the same path
process.env.UPLOADS_DIR = uploadsDir;

// Create required directories up front so later writes cannot fail on a
// missing parent.
const requiredDirs = [path.resolve(serverRoot, 'logs'), uploadsDir];
for (const dir of requiredDirs) {
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
}
|
||||
|
||||
const app = express();

// Debug aid: log basic details for every incoming request.
const logRequestDetails = (req, res, next) => {
  const details = {
    method: req.method,
    url: req.url,
    origin: req.get('Origin'),
    headers: req.headers
  };
  console.log('Request details:', details);
  next();
};
app.use(logRequestDetails);

// CORS must be applied before any other middleware so preflights are handled.
app.use(corsMiddleware);

// Body parsers with a generous payload cap for large imports.
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
||||
|
||||
// Initialize database pool and start server.
// Routes are mounted only after the pool is ready so every handler can rely
// on app.locals.pool; on any startup failure the process exits non-zero.
async function startServer() {
  try {
    // Initialize database pool
    const pool = await initPool({
      host: process.env.DB_HOST,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME,
      port: process.env.DB_PORT || 5432,
      // Larger pool in production, smaller for local development.
      max: process.env.NODE_ENV === 'production' ? 20 : 10,
      idleTimeoutMillis: 30000,
      connectionTimeoutMillis: 2000,
      ssl: process.env.DB_SSL === 'true' ? {
        rejectUnauthorized: false
      } : false
    });

    // Make pool available to routes
    app.locals.pool = pool;

    // Set up routes after pool is initialized
    app.use('/api/products', productsRouter);
    app.use('/api/dashboard', dashboardRouter);
    app.use('/api/orders', ordersRouter);
    app.use('/api/csv', csvRouter);
    app.use('/api/analytics', analyticsRouter);
    app.use('/api/purchase-orders', purchaseOrdersRouter);
    app.use('/api/config', configRouter);
    app.use('/api/metrics', metricsRouter);
    // Use only the aggregate routes for vendors and categories
    app.use('/api/vendors', vendorsAggregateRouter);
    app.use('/api/categories', categoriesAggregateRouter);
    // Keep the aggregate-specific endpoints for backward compatibility
    app.use('/api/categories-aggregate', categoriesAggregateRouter);
    app.use('/api/vendors-aggregate', vendorsAggregateRouter);
    app.use('/api/brands-aggregate', brandsAggregateRouter);
    app.use('/api/import', importRouter);
    app.use('/api/ai-validation', aiValidationRouter);
    app.use('/api/templates', templatesRouter);
    app.use('/api/ai-prompts', aiPromptsRouter);
    app.use('/api/reusable-images', reusableImagesRouter);

    // Basic health check route
    app.get('/health', (req, res) => {
      res.json({
        status: 'ok',
        timestamp: new Date().toISOString(),
        environment: process.env.NODE_ENV
      });
    });

    // CORS error handler - must be before other error handlers
    app.use(corsErrorHandler);

    // Error handling middleware - MUST be after routes and CORS error handler
    app.use((err, req, res, next) => {
      console.error(`[${new Date().toISOString()}] Error:`, err);

      // Send detailed error in development, generic in production
      const error = process.env.NODE_ENV === 'production'
        ? 'An internal server error occurred'
        : err.message || err;

      res.status(err.status || 500).json({ error });
    });

    const PORT = process.env.PORT || 3000;
    app.listen(PORT, () => {
      console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
    });
  } catch (error) {
    // Without a working pool/listener the server is useless: exit non-zero.
    console.error('Failed to start server:', error);
    process.exit(1);
  }
}
|
||||
|
||||
// Handle uncaught exceptions.
// A synchronous error that escaped all handlers leaves the process in an
// unknown state, so exit with a failure code immediately.
process.on('uncaughtException', (err) => {
  console.error(`[${new Date().toISOString()}] Uncaught Exception:`, err);
  process.exit(1);
});

// Unhandled promise rejections are logged but do not terminate the process.
process.on('unhandledRejection', (reason, promise) => {
  console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
});
|
||||
|
||||
// Initialize client sets for SSE.
// Each Set holds open Express response objects for one progress-stream
// type; setupSSE adds clients and removes them on connection close.
const importClients = new Set();
const updateClients = new Set();
const resetClients = new Set();
const resetMetricsClients = new Set();
|
||||
|
||||
/**
 * Broadcasts a progress payload to every connected SSE client in a set.
 * A client whose write throws is logged and skipped so one broken
 * connection cannot block delivery to the others.
 *
 * @param {Set<object>} clients - Open SSE response objects.
 * @param {object} data - JSON-serializable progress payload.
 */
const sendProgressToClients = (clients, data) => {
  const frame = `data: ${JSON.stringify(data)}\n\n`;
  for (const client of clients) {
    try {
      client.write(frame);
    } catch (error) {
      console.error('Error sending SSE update:', error);
    }
  }
};
|
||||
|
||||
/**
 * Registers an HTTP request as a server-sent-events subscriber.
 * req.params.type selects which progress stream ('import', 'update',
 * 'reset', 'reset-metrics') the client joins; an unknown type still gets
 * the SSE headers and initial message but is not added to any stream.
 */
const setupSSE = (req, res) => {
  const { type } = req.params;

  // Standard SSE response headers; CORS mirrors the caller's origin so
  // credentialed EventSource connections work.
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Credentials': 'true'
  });

  // Confirm the subscription immediately.
  res.write('data: {"status":"connected"}\n\n');

  // Route the client into the set matching its stream type.
  let clientSet = null;
  if (type === 'import') {
    clientSet = importClients;
  } else if (type === 'update') {
    clientSet = updateClients;
  } else if (type === 'reset') {
    clientSet = resetClients;
  } else if (type === 'reset-metrics') {
    clientSet = resetMetricsClients;
  }

  if (clientSet) {
    clientSet.add(res);
    // Drop the client once the connection closes so we never write to a
    // dead socket.
    req.on('close', () => {
      clientSet.delete(res);
    });
  }
};
|
||||
|
||||
// Start the server (async; the catch inside startServer exits the process
// with code 1 on fatal startup errors).
startServer();
|
||||
79
inventory-server/src/types/status-codes.js
Normal file
79
inventory-server/src/types/status-codes.js
Normal file
@@ -0,0 +1,79 @@
|
||||
// Purchase Order Status Codes.
// Frozen so the shared code tables cannot be mutated by consumers.
const PurchaseOrderStatus = Object.freeze({
  Canceled: 0,
  Created: 1,
  ElectronicallyReadySend: 10,
  Ordered: 11,
  Preordered: 12,
  ElectronicallySent: 13,
  ReceivingStarted: 15,
  Done: 50
});

// Receiving Status Codes.
const ReceivingStatus = Object.freeze({
  Canceled: 0,
  Created: 1,
  PartialReceived: 30,
  FullReceived: 40,
  Paid: 50
});

// Status Code Display Names (keyed by the numeric codes above).
const PurchaseOrderStatusLabels = Object.freeze({
  [PurchaseOrderStatus.Canceled]: 'Canceled',
  [PurchaseOrderStatus.Created]: 'Created',
  [PurchaseOrderStatus.ElectronicallyReadySend]: 'Ready to Send',
  [PurchaseOrderStatus.Ordered]: 'Ordered',
  [PurchaseOrderStatus.Preordered]: 'Preordered',
  [PurchaseOrderStatus.ElectronicallySent]: 'Sent',
  [PurchaseOrderStatus.ReceivingStarted]: 'Receiving Started',
  [PurchaseOrderStatus.Done]: 'Done'
});

const ReceivingStatusLabels = Object.freeze({
  [ReceivingStatus.Canceled]: 'Canceled',
  [ReceivingStatus.Created]: 'Created',
  [ReceivingStatus.PartialReceived]: 'Partially Received',
  [ReceivingStatus.FullReceived]: 'Fully Received',
  [ReceivingStatus.Paid]: 'Paid'
});
|
||||
|
||||
// Helper functions

/** Maps a purchase-order status code to its display label ('Unknown' if unmapped). */
function getPurchaseOrderStatusLabel(status) {
  const label = PurchaseOrderStatusLabels[status];
  return label || 'Unknown';
}
|
||||
|
||||
/** Maps a receiving status code to its display label ('Unknown' if unmapped). */
function getReceivingStatusLabel(status) {
  const label = ReceivingStatusLabels[status];
  return label || 'Unknown';
}
|
||||
|
||||
// Status checks

/** True once goods have started arriving: partial (30), full (40), or paid (50). */
function isReceivingComplete(status) {
  return ReceivingStatus.PartialReceived <= status;
}
|
||||
|
||||
/** True only for the terminal Done (50) purchase-order state. */
function isPurchaseOrderComplete(status) {
  return PurchaseOrderStatus.Done === status;
}
|
||||
|
||||
/** True only for the Canceled (0) purchase-order state. */
function isPurchaseOrderCanceled(status) {
  return PurchaseOrderStatus.Canceled === status;
}
|
||||
|
||||
/** True only for the Canceled (0) receiving state. */
function isReceivingCanceled(status) {
  return ReceivingStatus.Canceled === status;
}
|
||||
|
||||
// Public API: raw status-code tables, display-label maps, and predicates.
module.exports = {
  PurchaseOrderStatus,
  ReceivingStatus,
  PurchaseOrderStatusLabels,
  ReceivingStatusLabels,
  getPurchaseOrderStatusLabel,
  getReceivingStatusLabel,
  isReceivingComplete,
  isPurchaseOrderComplete,
  isPurchaseOrderCanceled,
  isReceivingCanceled
};
|
||||
45
inventory-server/src/utils/apiHelpers.js
Normal file
45
inventory-server/src/utils/apiHelpers.js
Normal file
@@ -0,0 +1,45 @@
|
||||
/**
 * Parses a query parameter value based on its expected type.
 *
 * @param {string|null|undefined} value - Raw query-parameter value.
 * @param {'number'|'integer'|'boolean'|'date'|'string'} type - Expected type.
 * @returns {number|boolean|string|null} Parsed value, or null for empty input.
 * @throws {Error} If the value cannot be parsed as the requested type.
 */
function parseValue(value, type) {
  // Treat missing/empty parameters as "no value".
  if (value === null || value === undefined || value === '') return null;

  switch (type) {
    case 'number': {
      const num = Number.parseFloat(value);
      if (Number.isNaN(num)) {
        console.error(`Invalid number format: "${value}"`);
        throw new Error(`Invalid number format: "${value}"`);
      }
      return num;
    }
    case 'integer': { // Specific type for integer IDs etc.
      const int = Number.parseInt(value, 10);
      if (Number.isNaN(int)) {
        console.error(`Invalid integer format: "${value}"`);
        throw new Error(`Invalid integer format: "${value}"`);
      }
      return int;
    }
    case 'boolean': {
      const normalized = String(value).toLowerCase();
      if (normalized === 'true') return true;
      if (normalized === 'false') return false;
      console.error(`Invalid boolean format: "${value}"`);
      throw new Error(`Invalid boolean format: "${value}"`);
    }
    case 'date':
      // Basic ISO date format validation (YYYY-MM-DD); warn-only by design —
      // malformed dates still pass through as strings.
      if (!/^\d{4}-\d{2}-\d{2}$/.test(String(value))) {
        console.warn(`Potentially invalid date format passed: "${value}"`);
      }
      return String(value); // Send as string, let DB handle casting/comparison
    case 'string':
    default:
      return String(value);
  }
}
|
||||
|
||||
// Public API of this helper module.
module.exports = { parseValue };
|
||||
63
inventory-server/src/utils/csvImporter.js
Normal file
63
inventory-server/src/utils/csvImporter.js
Normal file
@@ -0,0 +1,63 @@
|
||||
const fs = require('fs');
|
||||
const { parse } = require('csv-parse');
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
|
||||
/**
 * Imports product rows from a CSV file into the products table, creating a
 * zero-quantity inventory_levels row per product, all in one transaction.
 *
 * Expected CSV columns: sku, name, description (optional), category (optional).
 *
 * @param {string} filePath - Path to the CSV file to read.
 * @param {object} pool - Connection pool exposing getConnection() (mysql2/promise style).
 * @returns {Promise<{imported: number}>} Count of inserted products.
 */
async function importProductsFromCSV(filePath, pool) {
  return new Promise((resolve, reject) => {
    const products = [];

    fs.createReadStream(filePath)
      .pipe(parse({
        columns: true,
        skip_empty_lines: true
      }))
      // FIX: keep this handler synchronous — an async callback here would
      // turn any thrown error into an unhandled promise rejection instead
      // of surfacing through the outer Promise's reject.
      .on('data', (row) => {
        products.push({
          id: uuidv4(),
          sku: row.sku,
          name: row.name,
          description: row.description || null,
          category: row.category || null
        });
      })
      .on('end', async () => {
        try {
          const connection = await pool.getConnection();

          try {
            // All-or-nothing: either every row imports or none do.
            await connection.beginTransaction();

            for (const product of products) {
              await connection.query(
                'INSERT INTO products (id, sku, name, description, category) VALUES (?, ?, ?, ?, ?)',
                [product.id, product.sku, product.name, product.description, product.category]
              );

              // Initialize inventory level for the product
              await connection.query(
                'INSERT INTO inventory_levels (id, product_id, quantity) VALUES (?, ?, 0)',
                [uuidv4(), product.id]
              );
            }

            await connection.commit();
            resolve({ imported: products.length });
          } catch (error) {
            await connection.rollback();
            reject(error);
          } finally {
            // Always return the connection to the pool.
            connection.release();
          }
        } catch (error) {
          reject(error);
        }
      })
      .on('error', (error) => {
        reject(error);
      });
  });
}
|
||||
|
||||
// Public API of this module.
module.exports = {
  importProductsFromCSV
};
|
||||
21
inventory-server/src/utils/db.js
Normal file
21
inventory-server/src/utils/db.js
Normal file
@@ -0,0 +1,21 @@
|
||||
const { Pool } = require('pg');
|
||||
|
||||
// Lazily-initialized shared pg Pool for this process.
let pool;

/**
 * Creates (or replaces) the shared pg Pool.
 * @param {object} config - pg Pool configuration.
 * @returns {object} The newly created pool.
 */
function initPool(config) {
  pool = new Pool(config);
  return pool;
}

/**
 * Checks out a client from the shared pool.
 * @returns {Promise<object>} A pooled pg client; caller must release it.
 * @throws {Error} If initPool has not been called yet.
 */
async function getConnection() {
  if (pool) {
    return pool.connect();
  }
  throw new Error('Database pool not initialized');
}
|
||||
|
||||
// Public API: pool lifecycle helpers. getPool exposes the raw pool
// (undefined until initPool has been called).
module.exports = {
  initPool,
  getConnection,
  getPool: () => pool
};
|
||||
239
inventory-server/src/utils/dbConnection.js
Normal file
239
inventory-server/src/utils/dbConnection.js
Normal file
@@ -0,0 +1,239 @@
|
||||
const { Client } = require('ssh2');
|
||||
const mysql = require('mysql2/promise');
|
||||
const fs = require('fs');
|
||||
|
||||
// Connection pooling and cache configuration.
// Process-wide singleton: one SSH tunnel + one MySQL connection, plus an
// in-memory TTL cache of query results. Mutated by getDbConnection,
// getCachedQuery, clearQueryCache, and closeAllConnections.
const connectionCache = {
  ssh: null,               // active ssh2 client, or null
  dbConnection: null,      // active MySQL connection riding the tunnel, or null
  lastUsed: 0,             // epoch ms of last use (0 = never connected)
  isConnecting: false,     // true while a connection attempt is in flight
  connectionPromise: null, // shared promise for the in-flight attempt
  // Cache expiration time in milliseconds (5 minutes)
  expirationTime: 5 * 60 * 1000,
  // Cache for query results (key: query string, value: {data, timestamp})
  queryCache: new Map(),
  // Cache duration for different query types in milliseconds
  cacheDuration: {
    'field-options': 30 * 60 * 1000, // 30 minutes for field options
    'product-lines': 10 * 60 * 1000, // 10 minutes for product lines
    'sublines': 10 * 60 * 1000, // 10 minutes for sublines
    'taxonomy': 30 * 60 * 1000, // 30 minutes for taxonomy data
    'default': 60 * 1000 // 1 minute default
  }
};
|
||||
|
||||
/**
 * Get a database connection with connection pooling.
 *
 * Reuses the cached SSH tunnel + MySQL connection while it has been used
 * within connectionCache.expirationTime; otherwise tears the old pair down
 * and builds a fresh one. Concurrent callers share a single in-flight
 * attempt via connectionCache.connectionPromise.
 *
 * @returns {Promise<{ssh: object, connection: object}>} The SSH and database connection
 */
async function getDbConnection() {
  const now = Date.now();

  // Check if we need to refresh the connection due to inactivity
  const needsRefresh = !connectionCache.ssh ||
    !connectionCache.dbConnection ||
    (now - connectionCache.lastUsed > connectionCache.expirationTime);

  // If connection is still valid, update last used time and return existing connection
  if (!needsRefresh) {
    connectionCache.lastUsed = now;
    return {
      ssh: connectionCache.ssh,
      connection: connectionCache.dbConnection
    };
  }

  // If another request is already establishing a connection, wait for that promise
  if (connectionCache.isConnecting && connectionCache.connectionPromise) {
    try {
      await connectionCache.connectionPromise;
      return {
        ssh: connectionCache.ssh,
        connection: connectionCache.dbConnection
      };
    } catch (error) {
      // If that connection attempt failed, we'll try again below
      console.error('Error waiting for existing connection:', error);
    }
  }

  // Close existing connections if they exist. Best-effort: failures are
  // logged but must not abort the refresh.
  if (connectionCache.dbConnection) {
    try {
      await connectionCache.dbConnection.end();
    } catch (error) {
      console.error('Error closing existing database connection:', error);
    }
  }

  if (connectionCache.ssh) {
    try {
      connectionCache.ssh.end();
    } catch (error) {
      console.error('Error closing existing SSH connection:', error);
    }
  }

  // Mark that we're establishing a new connection
  connectionCache.isConnecting = true;

  // Create a new promise for this connection attempt; storing it on the
  // cache lets concurrent callers above await the same attempt.
  connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
    const { ssh, stream, dbConfig } = tunnel;

    // Drive the MySQL connection over the forwarded SSH stream.
    return mysql.createConnection({
      ...dbConfig,
      stream
    }).then(connection => {
      // Store the new connections
      connectionCache.ssh = ssh;
      connectionCache.dbConnection = connection;
      connectionCache.lastUsed = Date.now();
      connectionCache.isConnecting = false;

      return {
        ssh,
        connection
      };
    });
  }).catch(error => {
    // NOTE(review): on failure isConnecting is cleared but the rejected
    // promise remains in connectionCache.connectionPromise; the next caller
    // skips the isConnecting branch and retries from scratch — confirm this
    // retention is intentional.
    connectionCache.isConnecting = false;
    throw error;
  });

  // Wait for the connection to be established
  return connectionCache.connectionPromise;
}
|
||||
|
||||
/**
 * Returns cached results for a query, executing and caching on a miss.
 *
 * @param {string} cacheKey - Unique key to identify the query
 * @param {string} queryType - Type of query (field-options, product-lines, etc.); selects the TTL
 * @param {Function} queryFn - Function to execute if cache miss
 * @returns {Promise<any>} The query result
 */
async function getCachedQuery(cacheKey, queryType, queryFn) {
  // Pick the TTL for this query type, falling back to the default bucket.
  const ttl = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;

  const entry = connectionCache.queryCache.get(cacheKey);
  const now = Date.now();

  // Serve from cache while the entry is younger than its TTL.
  if (entry && (now - entry.timestamp < ttl)) {
    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
    return entry.data;
  }

  // Stale or absent — run the query and refresh the cache.
  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
  const data = await queryFn();
  connectionCache.queryCache.set(cacheKey, {
    data,
    timestamp: now
  });

  return data;
}
|
||||
|
||||
/**
 * Setup SSH tunnel to production database.
 * @private - Should only be used by getDbConnection
 * @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
 * @throws If the SSH connection or the port-forward fails.
 */
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };

  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z'
  };

  return new Promise((resolve, reject) => {
    const ssh = new Client();

    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });

    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          // FIX: previously fell through to resolve() after reject(err),
          // attempting to resolve with an undefined stream.
          if (err) {
            reject(err);
            return;
          }
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
|
||||
|
||||
/**
 * Clears cached query results.
 * @param {string} [cacheKey] - Specific cache key to clear (clears all if not provided)
 */
function clearQueryCache(cacheKey) {
  if (!cacheKey) {
    // No key given: flush the whole cache.
    connectionCache.queryCache.clear();
    console.log('Cleared all query cache');
    return;
  }
  connectionCache.queryCache.delete(cacheKey);
  console.log(`Cleared cache for key: ${cacheKey}`);
}
|
||||
|
||||
/**
 * Force-closes the cached database and SSH connections and resets the
 * connection-cache bookkeeping. Useful for server shutdown or a manual
 * connection reset. Close failures are logged but never thrown.
 */
async function closeAllConnections() {
  const db = connectionCache.dbConnection;
  if (db) {
    try {
      await db.end();
      console.log('Closed database connection');
    } catch (error) {
      console.error('Error closing database connection:', error);
    }
    connectionCache.dbConnection = null;
  }

  const ssh = connectionCache.ssh;
  if (ssh) {
    try {
      ssh.end();
      console.log('Closed SSH connection');
    } catch (error) {
      console.error('Error closing SSH connection:', error);
    }
    connectionCache.ssh = null;
  }

  // Reset bookkeeping so the next getDbConnection starts from scratch.
  connectionCache.lastUsed = 0;
  connectionCache.isConnecting = false;
  connectionCache.connectionPromise = null;
}
|
||||
|
||||
// Public API: pooled tunnel access, query-result caching, and teardown.
module.exports = {
  getDbConnection,
  getCachedQuery,
  clearQueryCache,
  closeAllConnections
};
|
||||
Reference in New Issue
Block a user