Clean up routes
@@ -51,83 +51,67 @@ router.get('/:id', async (req, res) => {
    }
});

// Get prompt by company
router.get('/company/:companyId', async (req, res) => {
// Get prompt by type (general, system, company_specific)
router.get('/by-type', async (req, res) => {
    try {
        const { companyId } = req.params;
        const { type, company } = req.query;
        const pool = req.app.locals.pool;

        if (!pool) {
            throw new Error('Database pool not initialized');
        }

        const result = await pool.query(`
            SELECT * FROM ai_prompts
            WHERE company = $1
        `, [companyId]);

        if (result.rows.length === 0) {
            return res.status(404).json({ error: 'AI prompt not found for this company' });
        }

        res.json(result.rows[0]);
    } catch (error) {
        console.error('Error fetching AI prompt by company:', error);
        res.status(500).json({
            error: 'Failed to fetch AI prompt by company',
            details: error instanceof Error ? error.message : 'Unknown error'
        });
    }
});

// Get general prompt
router.get('/type/general', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        if (!pool) {
            throw new Error('Database pool not initialized');
        // Validate prompt type
        if (!type || !['general', 'system', 'company_specific'].includes(type)) {
            return res.status(400).json({
                error: 'Valid type query parameter is required (general, system, or company_specific)'
            });
        }

        const result = await pool.query(`
            SELECT * FROM ai_prompts
            WHERE prompt_type = 'general'
        `);

        if (result.rows.length === 0) {
            return res.status(404).json({ error: 'General AI prompt not found' });
        }

        res.json(result.rows[0]);
    } catch (error) {
        console.error('Error fetching general AI prompt:', error);
        res.status(500).json({
            error: 'Failed to fetch general AI prompt',
            details: error instanceof Error ? error.message : 'Unknown error'
        });
    }
});

// Get system prompt
router.get('/type/system', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        if (!pool) {
            throw new Error('Database pool not initialized');
        // For company_specific type, company ID is required
        if (type === 'company_specific' && !company) {
            return res.status(400).json({
                error: 'Company ID is required for company_specific prompt type'
            });
        }

        const result = await pool.query(`
            SELECT * FROM ai_prompts
            WHERE prompt_type = 'system'
        `);

        if (result.rows.length === 0) {
            return res.status(404).json({ error: 'System AI prompt not found' });
        // For general and system types, company should not be provided
        if ((type === 'general' || type === 'system') && company) {
            return res.status(400).json({
                error: 'Company ID should not be provided for general or system prompt types'
            });
        }


        // Build the query based on the type
        let query, params;
        if (type === 'company_specific') {
            query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1 AND company = $2';
            params = [type, company];
        } else {
            query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1';
            params = [type];
        }

        // Execute the query
        const result = await pool.query(query, params);

        // Check if any prompt was found
        if (result.rows.length === 0) {
            let errorMessage;
            if (type === 'company_specific') {
                errorMessage = `AI prompt not found for company ${company}`;
            } else {
                errorMessage = `${type.charAt(0).toUpperCase() + type.slice(1)} AI prompt not found`;
            }
            return res.status(404).json({ error: errorMessage });
        }

        // Return the first matching prompt
        res.json(result.rows[0]);
    } catch (error) {
        console.error('Error fetching system AI prompt:', error);
        console.error('Error fetching AI prompt by type:', error);
        res.status(500).json({
            error: 'Failed to fetch system AI prompt',
            error: 'Failed to fetch AI prompt',
            details: error instanceof Error ? error.message : 'Unknown error'
        });
    }
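
A minimal sketch of calling the consolidated route from a client; the /api/prompts mount prefix is an assumption (only the /by-type suffix and its type/company query parameters appear in this diff):

// Hypothetical client usage (mount prefix assumed):
//   GET /api/prompts/by-type?type=general
//   GET /api/prompts/by-type?type=company_specific&company=42
const response = await fetch('/api/prompts/by-type?type=system');
if (response.ok) {
    const prompt = await response.json(); // first matching ai_prompts row
}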

@@ -6,6 +6,7 @@ const path = require("path");
const dotenv = require("dotenv");
const mysql = require('mysql2/promise');
const { Client } = require('ssh2');
const { getDbConnection } = require('../utils/dbConnection'); // Import the optimized connection function

// Ensure environment variables are loaded
dotenv.config({ path: path.join(__dirname, "../../.env") });
@@ -18,50 +19,6 @@ if (!process.env.OPENAI_API_KEY) {
    console.error("Warning: OPENAI_API_KEY is not set in environment variables");
}

// Helper function to setup SSH tunnel to production database
async function setupSshTunnel() {
    const sshConfig = {
        host: process.env.PROD_SSH_HOST,
        port: process.env.PROD_SSH_PORT || 22,
        username: process.env.PROD_SSH_USER,
        privateKey: process.env.PROD_SSH_KEY_PATH
            ? require('fs').readFileSync(process.env.PROD_SSH_KEY_PATH)
            : undefined,
        compress: true
    };

    const dbConfig = {
        host: process.env.PROD_DB_HOST || 'localhost',
        user: process.env.PROD_DB_USER,
        password: process.env.PROD_DB_PASSWORD,
        database: process.env.PROD_DB_NAME,
        port: process.env.PROD_DB_PORT || 3306,
        timezone: 'Z'
    };

    return new Promise((resolve, reject) => {
        const ssh = new Client();

        ssh.on('error', (err) => {
            console.error('SSH connection error:', err);
            reject(err);
        });

        ssh.on('ready', () => {
            ssh.forwardOut(
                '127.0.0.1',
                0,
                dbConfig.host,
                dbConfig.port,
                (err, stream) => {
                    if (err) reject(err);
                    resolve({ ssh, stream, dbConfig });
                }
            );
        }).connect(sshConfig);
    });
}
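
The call sites below replace setupSshTunnel with a shared getDbConnection() from ../utils/dbConnection, whose implementation is not part of this diff. A minimal sketch of the shape implied by the { connection, ssh } destructuring at those call sites (an assumption, not the actual utility):

// Assumed sketch of ../utils/dbConnection (not shown in this commit):
async function getDbConnection() {
    const tunnel = await setupSshTunnel(); // same SSH forwarding as above
    const connection = await mysql.createConnection({
        ...tunnel.dbConfig,
        stream: tunnel.stream
    });
    return { connection, ssh: tunnel.ssh }; // callers close both in their finally blocks
}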

// Debug endpoint for viewing prompt
router.post("/debug", async (req, res) => {
    try {
@@ -195,16 +152,12 @@ async function generateDebugResponse(productsToUse, res) {
    // Load taxonomy data first
    console.log("Loading taxonomy data...");
    try {
        // Setup MySQL connection via SSH tunnel
        const tunnel = await setupSshTunnel();
        ssh = tunnel.ssh;
        // Use optimized database connection
        const { connection, ssh: connSsh } = await getDbConnection();
        mysqlConnection = connection;
        ssh = connSsh;

        mysqlConnection = await mysql.createConnection({
            ...tunnel.dbConfig,
            stream: tunnel.stream
        });

        console.log("MySQL connection established successfully");
        console.log("MySQL connection established successfully using optimized connection");

        taxonomy = await getTaxonomyData(mysqlConnection);
        console.log("Successfully loaded taxonomy data");
@@ -218,10 +171,6 @@ async function generateDebugResponse(productsToUse, res) {
            errno: taxonomyError.errno || null,
            sql: taxonomyError.sql || null,
        });
    } finally {
        // Make sure we close the connection
        if (mysqlConnection) await mysqlConnection.end();
        if (ssh) ssh.end();
    }

    // Verify the taxonomy data structure
@@ -282,11 +231,8 @@ async function generateDebugResponse(productsToUse, res) {
    console.log("Loading prompt...");

    // Setup a new connection for loading the prompt
    const promptTunnel = await setupSshTunnel();
    const promptConnection = await mysql.createConnection({
        ...promptTunnel.dbConfig,
        stream: promptTunnel.stream
    });
    // Use optimized connection instead of creating a new one
    const { connection: promptConnection } = await getDbConnection();

    try {
        // Get the local PostgreSQL pool to fetch prompts
@@ -296,7 +242,7 @@ async function generateDebugResponse(productsToUse, res) {
            throw new Error("Database connection not available");
        }

        // First, fetch the system prompt
        // First, fetch the system prompt using the consolidated endpoint approach
        const systemPromptResult = await pool.query(`
            SELECT * FROM ai_prompts
            WHERE prompt_type = 'system'
@@ -311,7 +257,7 @@ async function generateDebugResponse(productsToUse, res) {
            console.warn("⚠️ No system prompt found in database, will use default");
        }

        // Then, fetch the general prompt
        // Then, fetch the general prompt using the consolidated endpoint approach
        const generalPromptResult = await pool.query(`
            SELECT * FROM ai_prompts
            WHERE prompt_type = 'general'
@@ -458,7 +404,6 @@ async function generateDebugResponse(productsToUse, res) {
        return response;
    } finally {
        if (promptConnection) await promptConnection.end();
        if (promptTunnel.ssh) promptTunnel.ssh.end();
    }
} catch (error) {
    console.error("Error generating debug response:", error);
@@ -645,7 +590,7 @@ async function loadPrompt(connection, productsToValidate = null, appPool = null)
        throw new Error("Database connection not available");
    }

    // Fetch the system prompt
    // Fetch the system prompt using the consolidated endpoint approach
    const systemPromptResult = await pool.query(`
        SELECT * FROM ai_prompts
        WHERE prompt_type = 'system'
@@ -662,7 +607,7 @@ async function loadPrompt(connection, productsToValidate = null, appPool = null)
        console.warn("⚠️ No system prompt found in database, using default");
    }

    // Fetch the general prompt
    // Fetch the general prompt using the consolidated endpoint approach
    const generalPromptResult = await pool.query(`
        SELECT * FROM ai_prompts
        WHERE prompt_type = 'general'
@@ -926,15 +871,11 @@ router.post("/validate", async (req, res) => {
    let promptLength = 0; // Track prompt length for performance metrics

    try {
        // Setup MySQL connection via SSH tunnel
        console.log("🔄 Setting up connection to production database...");
        const tunnel = await setupSshTunnel();
        ssh = tunnel.ssh;

        connection = await mysql.createConnection({
            ...tunnel.dbConfig,
            stream: tunnel.stream
        });
        // Use the optimized connection utility instead of direct SSH tunnel
        console.log("🔄 Setting up connection to production database using optimized connection...");
        const { ssh: connSsh, connection: connDB } = await getDbConnection();
        ssh = connSsh;
        connection = connDB;

        console.log("🔄 MySQL connection established successfully");

@@ -1238,14 +1179,11 @@ router.get("/test-taxonomy", async (req, res) => {
    let connection = null;

    try {
        // Setup MySQL connection via SSH tunnel
        const tunnel = await setupSshTunnel();
        ssh = tunnel.ssh;

        connection = await mysql.createConnection({
            ...tunnel.dbConfig,
            stream: tunnel.stream
        });
        // Use the optimized connection utility instead of direct SSH tunnel
        console.log("🔄 Setting up connection to production database using optimized connection...");
        const { ssh: connSsh, connection: connDB } = await getDbConnection();
        ssh = connSsh;
        connection = connDB;

        console.log("MySQL connection established successfully for test");

@@ -612,99 +612,4 @@ router.get('/categories', async (req, res) => {
    }
});

// Forecast endpoint
router.get('/forecast', async (req, res) => {
    try {
        const { brand, startDate, endDate } = req.query;
        const pool = req.app.locals.pool;

        const [results] = await pool.query(`
            WITH RECURSIVE category_path AS (
                SELECT
                    c.cat_id,
                    c.name,
                    c.parent_id,
                    CAST(c.name AS CHAR(1000)) as path
                FROM categories c
                WHERE c.parent_id IS NULL

                UNION ALL

                SELECT
                    c.cat_id,
                    c.name,
                    c.parent_id,
                    CONCAT(cp.path, ' > ', c.name)
                FROM categories c
                JOIN category_path cp ON c.parent_id = cp.cat_id
            ),
            category_metrics AS (
                SELECT
                    c.cat_id,
                    c.name as category_name,
                    cp.path,
                    p.brand,
                    COUNT(DISTINCT p.pid) as num_products,
                    CAST(COALESCE(ROUND(SUM(o.quantity) / DATEDIFF(?, ?), 2), 0) AS DECIMAL(15,3)) as avg_daily_sales,
                    COALESCE(SUM(o.quantity), 0) as total_sold,
                    CAST(COALESCE(ROUND(SUM(o.quantity) / COUNT(DISTINCT p.pid), 2), 0) AS DECIMAL(15,3)) as avgTotalSold,
                    CAST(COALESCE(ROUND(AVG(o.price), 2), 0) AS DECIMAL(15,3)) as avg_price
                FROM categories c
                JOIN product_categories pc ON c.cat_id = pc.cat_id
                JOIN products p ON pc.pid = p.pid
                JOIN category_path cp ON c.cat_id = cp.cat_id
                LEFT JOIN product_metrics pmet ON p.pid = pmet.pid
                LEFT JOIN orders o ON p.pid = o.pid
                    AND o.date BETWEEN ? AND ?
                    AND o.canceled = false
                WHERE p.brand = ?
                    AND pmet.first_received_date BETWEEN ? AND ?
                GROUP BY c.cat_id, c.name, cp.path, p.brand
            ),
            product_details AS (
                SELECT
                    p.pid,
                    p.title,
                    p.SKU,
                    p.stock_quantity,
                    pc.cat_id,
                    pmet.first_received_date,
                    COALESCE(SUM(o.quantity), 0) as total_sold,
                    CAST(COALESCE(ROUND(AVG(o.price), 2), 0) AS DECIMAL(15,3)) as avg_price
                FROM products p
                JOIN product_categories pc ON p.pid = pc.pid
                JOIN product_metrics pmet ON p.pid = pmet.pid
                LEFT JOIN orders o ON p.pid = o.pid
                    AND o.date BETWEEN ? AND ?
                    AND o.canceled = false
                WHERE p.brand = ?
                    AND pmet.first_received_date BETWEEN ? AND ?
                GROUP BY p.pid, p.title, p.SKU, p.stock_quantity, pc.cat_id, pmet.first_received_date
            )
            SELECT
                cm.*,
                JSON_ARRAYAGG(
                    JSON_OBJECT(
                        'pid', pd.pid,
                        'title', pd.title,
                        'SKU', pd.SKU,
                        'stock_quantity', pd.stock_quantity,
                        'total_sold', pd.total_sold,
                        'avg_price', pd.avg_price,
                        'first_received_date', DATE_FORMAT(pd.first_received_date, '%Y-%m-%d')
                    )
                ) as products
            FROM category_metrics cm
            JOIN product_details pd ON cm.cat_id = pd.cat_id
            GROUP BY cm.cat_id, cm.category_name, cm.path, cm.brand, cm.num_products, cm.avg_daily_sales, cm.total_sold, cm.avgTotalSold, cm.avg_price
            ORDER BY cm.total_sold DESC
        `, [endDate, startDate, startDate, endDate, brand, startDate, endDate, startDate, endDate, brand, startDate, endDate]);

        res.json(results);
    } catch (error) {
        console.error('Error fetching forecast data:', error);
        res.status(500).json({ error: 'Failed to fetch forecast data' });
    }
});

module.exports = router;
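
The removed forecast query fills its twelve positional placeholders in source order: the DATEDIFF(?, ?) pair, then each CTE's order-date range, brand, and first-received-date range. A hedged sketch of the request it served (mount path assumed):

// Hypothetical: GET /forecast?brand=Acme&startDate=2024-01-01&endDate=2024-03-31
// Response rows: { cat_id, category_name, path, brand, num_products,
//                  avg_daily_sales, total_sold, avgTotalSold, avg_price, products: [...] }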
@@ -321,169 +321,5 @@ router.post('/vendors/:vendor/reset', async (req, res) => {
    }
});

// ===== LEGACY ENDPOINTS =====
// These are kept for backward compatibility but will be removed in future versions

// Get all configuration values
router.get('/', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        console.log('[Config Route] Fetching configuration values...');

        const { rows: stockThresholds } = await pool.query('SELECT * FROM stock_thresholds WHERE id = 1');
        console.log('[Config Route] Stock thresholds:', stockThresholds);

        const { rows: leadTimeThresholds } = await pool.query('SELECT * FROM lead_time_thresholds WHERE id = 1');
        console.log('[Config Route] Lead time thresholds:', leadTimeThresholds);

        const { rows: salesVelocityConfig } = await pool.query('SELECT * FROM sales_velocity_config WHERE id = 1');
        console.log('[Config Route] Sales velocity config:', salesVelocityConfig);

        const { rows: abcConfig } = await pool.query('SELECT * FROM abc_classification_config WHERE id = 1');
        console.log('[Config Route] ABC config:', abcConfig);

        const { rows: safetyStockConfig } = await pool.query('SELECT * FROM safety_stock_config WHERE id = 1');
        console.log('[Config Route] Safety stock config:', safetyStockConfig);

        const { rows: turnoverConfig } = await pool.query('SELECT * FROM turnover_config WHERE id = 1');
        console.log('[Config Route] Turnover config:', turnoverConfig);

        const response = {
            stockThresholds: stockThresholds[0],
            leadTimeThresholds: leadTimeThresholds[0],
            salesVelocityConfig: salesVelocityConfig[0],
            abcConfig: abcConfig[0],
            safetyStockConfig: safetyStockConfig[0],
            turnoverConfig: turnoverConfig[0]
        };

        console.log('[Config Route] Sending response:', response);
        res.json(response);
    } catch (error) {
        console.error('[Config Route] Error fetching configuration:', error);
        res.status(500).json({ error: 'Failed to fetch configuration', details: error.message });
    }
});

// Update stock thresholds
router.put('/stock-thresholds/:id', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        const { critical_days, reorder_days, overstock_days, low_stock_threshold, min_reorder_quantity } = req.body;
        const { rows } = await pool.query(
            `UPDATE stock_thresholds
             SET critical_days = $1,
                 reorder_days = $2,
                 overstock_days = $3,
                 low_stock_threshold = $4,
                 min_reorder_quantity = $5
             WHERE id = $6`,
            [critical_days, reorder_days, overstock_days, low_stock_threshold, min_reorder_quantity, req.params.id]
        );
        res.json({ success: true });
    } catch (error) {
        console.error('[Config Route] Error updating stock thresholds:', error);
        res.status(500).json({ error: 'Failed to update stock thresholds' });
    }
});

// Update lead time thresholds
router.put('/lead-time-thresholds/:id', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        const { target_days, warning_days, critical_days } = req.body;
        const { rows } = await pool.query(
            `UPDATE lead_time_thresholds
             SET target_days = $1,
                 warning_days = $2,
                 critical_days = $3
             WHERE id = $4`,
            [target_days, warning_days, critical_days, req.params.id]
        );
        res.json({ success: true });
    } catch (error) {
        console.error('[Config Route] Error updating lead time thresholds:', error);
        res.status(500).json({ error: 'Failed to update lead time thresholds' });
    }
});

// Update sales velocity config
router.put('/sales-velocity/:id', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        const { daily_window_days, weekly_window_days, monthly_window_days } = req.body;
        const { rows } = await pool.query(
            `UPDATE sales_velocity_config
             SET daily_window_days = $1,
                 weekly_window_days = $2,
                 monthly_window_days = $3
             WHERE id = $4`,
            [daily_window_days, weekly_window_days, monthly_window_days, req.params.id]
        );
        res.json({ success: true });
    } catch (error) {
        console.error('[Config Route] Error updating sales velocity config:', error);
        res.status(500).json({ error: 'Failed to update sales velocity config' });
    }
});

// Update ABC classification config
router.put('/abc-classification/:id', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        const { a_threshold, b_threshold, classification_period_days } = req.body;
        const { rows } = await pool.query(
            `UPDATE abc_classification_config
             SET a_threshold = $1,
                 b_threshold = $2,
                 classification_period_days = $3
             WHERE id = $4`,
            [a_threshold, b_threshold, classification_period_days, req.params.id]
        );
        res.json({ success: true });
    } catch (error) {
        console.error('[Config Route] Error updating ABC classification config:', error);
        res.status(500).json({ error: 'Failed to update ABC classification config' });
    }
});

// Update safety stock config
router.put('/safety-stock/:id', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        const { coverage_days, service_level } = req.body;
        const { rows } = await pool.query(
            `UPDATE safety_stock_config
             SET coverage_days = $1,
                 service_level = $2
             WHERE id = $3`,
            [coverage_days, service_level, req.params.id]
        );
        res.json({ success: true });
    } catch (error) {
        console.error('[Config Route] Error updating safety stock config:', error);
        res.status(500).json({ error: 'Failed to update safety stock config' });
    }
});

// Update turnover config
router.put('/turnover/:id', async (req, res) => {
    const pool = req.app.locals.pool;
    try {
        const { calculation_period_days, target_rate } = req.body;
        const { rows } = await pool.query(
            `UPDATE turnover_config
             SET calculation_period_days = $1,
                 target_rate = $2
             WHERE id = $3`,
            [calculation_period_days, target_rate, req.params.id]
        );
        res.json({ success: true });
    } catch (error) {
        console.error('[Config Route] Error updating turnover config:', error);
        res.status(500).json({ error: 'Failed to update turnover config' });
    }
});

// Export the router
module.exports = router;
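
Each remaining PUT endpoint above follows the same parameterized UPDATE pattern. A sketch of a client call against one of them (the /config mount prefix is an assumption; the body keys match the destructuring in the handler):

// Hypothetical client call:
await fetch('/config/stock-thresholds/1', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        critical_days: 7,
        reorder_days: 14,
        overstock_days: 90,
        low_stock_threshold: 5,
        min_reorder_quantity: 10
    })
});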
@@ -1,881 +0,0 @@
const express = require('express');
const router = express.Router();
const { spawn } = require('child_process');
const path = require('path');
const db = require('../utils/db');

// Debug middleware MUST be first
router.use((req, res, next) => {
    console.log(`[CSV Route Debug] ${req.method} ${req.path}`);
    next();
});

// Store active processes and their progress
let activeImport = null;
let importProgress = null;
let activeFullUpdate = null;
let activeFullReset = null;

// SSE clients for progress updates
const updateClients = new Set();
const importClients = new Set();
const resetClients = new Set();
const resetMetricsClients = new Set();
const calculateMetricsClients = new Set();
const fullUpdateClients = new Set();
const fullResetClients = new Set();

// Helper to send progress to specific clients
function sendProgressToClients(clients, data) {
    // If data is a string, send it directly
    // If it's an object, convert it to JSON
    const message = typeof data === 'string'
        ? `data: ${data}\n\n`
        : `data: ${JSON.stringify(data)}\n\n`;

    clients.forEach(client => {
        try {
            client.write(message);
            // Immediately flush the response
            if (typeof client.flush === 'function') {
                client.flush();
            }
        } catch (error) {
            // Silently remove failed client
            clients.delete(client);
        }
    });
}
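
The data: ...\n\n frames written here are standard server-sent events. A browser-side sketch of consuming the stream (the /csv mount prefix is taken from the route comments later in this file):

// Hypothetical client for the progress stream defined below:
const source = new EventSource('/csv/update/progress');
source.onmessage = (event) => {
    let payload;
    try { payload = JSON.parse(event.data); } catch { payload = { progress: event.data }; }
    if (['complete', 'error', 'cancelled'].includes(payload.status)) {
        source.close();
    }
};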

// Helper to run a script and stream progress
function runScript(scriptPath, type, clients) {
    return new Promise((resolve, reject) => {
        // Kill any existing process of this type
        let activeProcess;
        switch (type) {
            case 'update':
                if (activeFullUpdate) {
                    try { activeFullUpdate.kill(); } catch (e) { }
                }
                activeProcess = activeFullUpdate;
                break;
            case 'reset':
                if (activeFullReset) {
                    try { activeFullReset.kill(); } catch (e) { }
                }
                activeProcess = activeFullReset;
                break;
        }

        const child = spawn('node', [scriptPath], {
            stdio: ['inherit', 'pipe', 'pipe']
        });

        switch (type) {
            case 'update':
                activeFullUpdate = child;
                break;
            case 'reset':
                activeFullReset = child;
                break;
        }

        let output = '';

        child.stdout.on('data', (data) => {
            const text = data.toString();
            output += text;

            // Split by lines to handle multiple JSON outputs
            const lines = text.split('\n');
            lines.filter(line => line.trim()).forEach(line => {
                try {
                    // Try to parse as JSON but don't let it affect the display
                    const jsonData = JSON.parse(line);
                    // Only end the process if we get a final status
                    if (jsonData.status === 'complete' || jsonData.status === 'error' || jsonData.status === 'cancelled') {
                        if (jsonData.status === 'complete' && !jsonData.operation?.includes('complete')) {
                            // Don't close for intermediate completion messages
                            sendProgressToClients(clients, line);
                            return;
                        }
                        // Close only on final completion/error/cancellation
                        switch (type) {
                            case 'update':
                                activeFullUpdate = null;
                                break;
                            case 'reset':
                                activeFullReset = null;
                                break;
                        }
                        if (jsonData.status === 'error') {
                            reject(new Error(jsonData.error || 'Unknown error'));
                        } else {
                            resolve({ output });
                        }
                    }
                } catch (e) {
                    // Not JSON, just display as is
                }
                // Always send the raw line
                sendProgressToClients(clients, line);
            });
        });

        child.stderr.on('data', (data) => {
            const text = data.toString();
            console.error(text);
            // Send stderr output directly too
            sendProgressToClients(clients, text);
        });

        child.on('close', (code) => {
            switch (type) {
                case 'update':
                    activeFullUpdate = null;
                    break;
                case 'reset':
                    activeFullReset = null;
                    break;
            }

            if (code !== 0) {
                const error = `Script ${scriptPath} exited with code ${code}`;
                sendProgressToClients(clients, error);
                reject(new Error(error));
            }
            // Don't resolve here - let the completion message from the script trigger the resolve
        });

        child.on('error', (err) => {
            switch (type) {
                case 'update':
                    activeFullUpdate = null;
                    break;
                case 'reset':
                    activeFullReset = null;
                    break;
            }
            sendProgressToClients(clients, err.message);
            reject(err);
        });
    });
}
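
For reference, this is exactly how the /full-update and /full-reset routes below invoke the helper: fire-and-forget, with the eventual rejection logged rather than awaited.

// Mirrors the usage in the routes below:
runScript(path.join(__dirname, '../../scripts/full-update.js'), 'update', fullUpdateClients)
    .catch(error => console.error('Update failed:', error));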

// Progress endpoints
router.get('/:type/progress', (req, res) => {
    const { type } = req.params;
    if (!['update', 'reset'].includes(type)) {
        return res.status(400).json({ error: 'Invalid operation type' });
    }

    res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'Access-Control-Allow-Origin': req.headers.origin || '*',
        'Access-Control-Allow-Credentials': 'true'
    });

    // Add this client to the correct set
    const clients = type === 'update' ? fullUpdateClients : fullResetClients;
    clients.add(res);

    // Send initial connection message
    sendProgressToClients(new Set([res]), JSON.stringify({
        status: 'running',
        operation: 'Initializing connection...'
    }));

    // Handle client disconnect
    req.on('close', () => {
        clients.delete(res);
    });
});

// Debug endpoint to verify route registration
router.get('/test', (req, res) => {
    console.log('CSV test endpoint hit');
    res.json({ message: 'CSV routes are working' });
});

// Route to check import status
router.get('/status', (req, res) => {
    console.log('CSV status endpoint hit');
    res.json({
        active: !!activeImport,
        progress: importProgress
    });
});

// Add calculate-metrics status endpoint
router.get('/calculate-metrics/status', (req, res) => {
    const calculateMetrics = require('../../scripts/calculate-metrics');
    const progress = calculateMetrics.getProgress();

    // Only consider it active if both the process is running and we have progress
    const isActive = !!activeImport && !!progress;

    res.json({
        active: isActive,
        progress: isActive ? progress : null
    });
});

// Route to update CSV files
router.post('/update', async (req, res, next) => {
    if (activeImport) {
        return res.status(409).json({ error: 'Import already in progress' });
    }

    try {
        const scriptPath = path.join(__dirname, '..', '..', 'scripts', 'update-csv.js');

        if (!require('fs').existsSync(scriptPath)) {
            return res.status(500).json({ error: 'Update script not found' });
        }

        activeImport = spawn('node', [scriptPath]);

        activeImport.stdout.on('data', (data) => {
            const output = data.toString().trim();

            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(output);
                sendProgressToClients(updateClients, {
                    status: 'running',
                    ...jsonData
                });
            } catch (e) {
                // If not JSON, send as plain progress
                sendProgressToClients(updateClients, {
                    status: 'running',
                    progress: output
                });
            }
        });

        activeImport.stderr.on('data', (data) => {
            const error = data.toString().trim();
            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(error);
                sendProgressToClients(updateClients, {
                    status: 'error',
                    ...jsonData
                });
            } catch {
                sendProgressToClients(updateClients, {
                    status: 'error',
                    error
                });
            }
        });

        await new Promise((resolve, reject) => {
            activeImport.on('close', (code) => {
                // Don't treat cancellation (code 143/SIGTERM) as an error
                if (code === 0 || code === 143) {
                    sendProgressToClients(updateClients, {
                        status: 'complete',
                        operation: code === 143 ? 'Operation cancelled' : 'Update complete'
                    });
                    resolve();
                } else {
                    const errorMsg = `Update process exited with code ${code}`;
                    sendProgressToClients(updateClients, {
                        status: 'error',
                        error: errorMsg
                    });
                    reject(new Error(errorMsg));
                }
                activeImport = null;
                importProgress = null;
            });
        });

        res.json({ success: true });
    } catch (error) {
        console.error('Error updating CSV files:', error);
        activeImport = null;
        importProgress = null;
        sendProgressToClients(updateClients, {
            status: 'error',
            error: error.message
        });
        next(error);
    }
});

// Route to import CSV files
router.post('/import', async (req, res) => {
    if (activeImport) {
        return res.status(409).json({ error: 'Import already in progress' });
    }

    try {
        const scriptPath = path.join(__dirname, '..', '..', 'scripts', 'import-csv.js');

        if (!require('fs').existsSync(scriptPath)) {
            return res.status(500).json({ error: 'Import script not found' });
        }

        // Get test limits from request body
        const { products = 0, orders = 10000, purchaseOrders = 10000 } = req.body;

        // Create environment variables for the script
        const env = {
            ...process.env,
            PRODUCTS_TEST_LIMIT: products.toString(),
            ORDERS_TEST_LIMIT: orders.toString(),
            PURCHASE_ORDERS_TEST_LIMIT: purchaseOrders.toString()
        };

        activeImport = spawn('node', [scriptPath], { env });

        activeImport.stdout.on('data', (data) => {
            const output = data.toString().trim();

            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(output);
                sendProgressToClients(importClients, {
                    status: 'running',
                    ...jsonData
                });
            } catch {
                // If not JSON, send as plain progress
                sendProgressToClients(importClients, {
                    status: 'running',
                    progress: output
                });
            }
        });

        activeImport.stderr.on('data', (data) => {
            const error = data.toString().trim();
            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(error);
                sendProgressToClients(importClients, {
                    status: 'error',
                    ...jsonData
                });
            } catch {
                sendProgressToClients(importClients, {
                    status: 'error',
                    error
                });
            }
        });

        await new Promise((resolve, reject) => {
            activeImport.on('close', (code) => {
                // Don't treat cancellation (code 143/SIGTERM) as an error
                if (code === 0 || code === 143) {
                    sendProgressToClients(importClients, {
                        status: 'complete',
                        operation: code === 143 ? 'Operation cancelled' : 'Import complete'
                    });
                    resolve();
                } else {
                    sendProgressToClients(importClients, {
                        status: 'error',
                        error: `Process exited with code ${code}`
                    });
                    reject(new Error(`Import process exited with code ${code}`));
                }
                activeImport = null;
                importProgress = null;
            });
        });

        res.json({ success: true });
    } catch (error) {
        console.error('Error importing CSV files:', error);
        activeImport = null;
        importProgress = null;
        sendProgressToClients(importClients, {
            status: 'error',
            error: error.message
        });
        res.status(500).json({ error: 'Failed to import CSV files', details: error.message });
    }
});
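
A sketch of starting an import with test caps; the body keys match the destructuring in the handler above, and the /csv prefix comes from the route comments in this file:

// Hypothetical client call:
await fetch('/csv/import', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ products: 100, orders: 5000, purchaseOrders: 5000 })
});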

// Route to cancel active process
router.post('/cancel', (req, res) => {
    let killed = false;

    // Get the operation type from the request
    const { type } = req.query;
    const clients = type === 'update' ? fullUpdateClients : fullResetClients;
    const activeProcess = type === 'update' ? activeFullUpdate : activeFullReset;

    if (activeProcess) {
        try {
            activeProcess.kill('SIGTERM');
            if (type === 'update') {
                activeFullUpdate = null;
            } else {
                activeFullReset = null;
            }
            killed = true;
            sendProgressToClients(clients, JSON.stringify({
                status: 'cancelled',
                operation: 'Operation cancelled'
            }));
        } catch (err) {
            console.error(`Error killing ${type} process:`, err);
        }
    }

    if (killed) {
        res.json({ success: true });
    } else {
        res.status(404).json({ error: 'No active process to cancel' });
    }
});

// Route to reset database
router.post('/reset', async (req, res) => {
    if (activeImport) {
        return res.status(409).json({ error: 'Import already in progress' });
    }

    try {
        const scriptPath = path.join(__dirname, '..', '..', 'scripts', 'reset-db.js');

        if (!require('fs').existsSync(scriptPath)) {
            return res.status(500).json({ error: 'Reset script not found' });
        }

        activeImport = spawn('node', [scriptPath]);

        activeImport.stdout.on('data', (data) => {
            const output = data.toString().trim();

            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(output);
                sendProgressToClients(resetClients, {
                    status: 'running',
                    ...jsonData
                });
            } catch (e) {
                // If not JSON, send as plain progress
                sendProgressToClients(resetClients, {
                    status: 'running',
                    progress: output
                });
            }
        });

        activeImport.stderr.on('data', (data) => {
            const error = data.toString().trim();
            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(error);
                sendProgressToClients(resetClients, {
                    status: 'error',
                    ...jsonData
                });
            } catch {
                sendProgressToClients(resetClients, {
                    status: 'error',
                    error
                });
            }
        });

        await new Promise((resolve, reject) => {
            activeImport.on('close', (code) => {
                // Don't treat cancellation (code 143/SIGTERM) as an error
                if (code === 0 || code === 143) {
                    sendProgressToClients(resetClients, {
                        status: 'complete',
                        operation: code === 143 ? 'Operation cancelled' : 'Reset complete'
                    });
                    resolve();
                } else {
                    const errorMsg = `Reset process exited with code ${code}`;
                    sendProgressToClients(resetClients, {
                        status: 'error',
                        error: errorMsg
                    });
                    reject(new Error(errorMsg));
                }
                activeImport = null;
                importProgress = null;
            });
        });

        res.json({ success: true });
    } catch (error) {
        console.error('Error resetting database:', error);
        activeImport = null;
        importProgress = null;
        sendProgressToClients(resetClients, {
            status: 'error',
            error: error.message
        });
        res.status(500).json({ error: 'Failed to reset database', details: error.message });
    }
});

// Add reset-metrics endpoint
router.post('/reset-metrics', async (req, res) => {
    if (activeImport) {
        res.status(400).json({ error: 'Operation already in progress' });
        return;
    }

    try {
        // Set active import to prevent concurrent operations
        activeImport = {
            type: 'reset-metrics',
            status: 'running',
            operation: 'Starting metrics reset'
        };

        // Send initial response
        res.status(200).json({ message: 'Reset metrics started' });

        // Send initial progress through SSE
        sendProgressToClients(resetMetricsClients, {
            status: 'running',
            operation: 'Starting metrics reset'
        });

        // Run the reset metrics script
        const resetMetrics = require('../../scripts/reset-metrics');
        await resetMetrics();

        // Send completion through SSE
        sendProgressToClients(resetMetricsClients, {
            status: 'complete',
            operation: 'Metrics reset completed'
        });

        activeImport = null;
    } catch (error) {
        console.error('Error during metrics reset:', error);

        // Send error through SSE
        sendProgressToClients(resetMetricsClients, {
            status: 'error',
            error: error.message || 'Failed to reset metrics'
        });

        activeImport = null;
        res.status(500).json({ error: error.message || 'Failed to reset metrics' });
    }
});

// Add calculate-metrics endpoint
router.post('/calculate-metrics', async (req, res) => {
    if (activeImport) {
        return res.status(409).json({ error: 'Import already in progress' });
    }

    try {
        const scriptPath = path.join(__dirname, '..', '..', 'scripts', 'calculate-metrics.js');

        if (!require('fs').existsSync(scriptPath)) {
            return res.status(500).json({ error: 'Calculate metrics script not found' });
        }

        activeImport = spawn('node', [scriptPath]);
        let wasCancelled = false;

        activeImport.stdout.on('data', (data) => {
            const output = data.toString().trim();

            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(output);
                importProgress = {
                    status: 'running',
                    ...jsonData.progress
                };
                sendProgressToClients(calculateMetricsClients, importProgress);
            } catch (e) {
                // If not JSON, send as plain progress
                importProgress = {
                    status: 'running',
                    progress: output
                };
                sendProgressToClients(calculateMetricsClients, importProgress);
            }
        });

        activeImport.stderr.on('data', (data) => {
            if (wasCancelled) return; // Don't send errors if cancelled

            const error = data.toString().trim();
            try {
                // Try to parse as JSON
                const jsonData = JSON.parse(error);
                importProgress = {
                    status: 'error',
                    ...jsonData.progress
                };
                sendProgressToClients(calculateMetricsClients, importProgress);
            } catch {
                importProgress = {
                    status: 'error',
                    error
                };
                sendProgressToClients(calculateMetricsClients, importProgress);
            }
        });

        await new Promise((resolve, reject) => {
            activeImport.on('close', (code, signal) => {
                wasCancelled = signal === 'SIGTERM' || code === 143;
                activeImport = null;

                if (code === 0 || wasCancelled) {
                    if (wasCancelled) {
                        importProgress = {
                            status: 'cancelled',
                            operation: 'Operation cancelled'
                        };
                        sendProgressToClients(calculateMetricsClients, importProgress);
                    } else {
                        importProgress = {
                            status: 'complete',
                            operation: 'Metrics calculation complete'
                        };
                        sendProgressToClients(calculateMetricsClients, importProgress);
                    }
                    resolve();
                } else {
                    importProgress = null;
                    reject(new Error(`Metrics calculation process exited with code ${code}`));
                }
            });
        });

        res.json({ success: true });
    } catch (error) {
        console.error('Error calculating metrics:', error);
        activeImport = null;
        importProgress = null;

        // Only send error if it wasn't a cancellation
        if (!error.message?.includes('code 143') && !error.message?.includes('SIGTERM')) {
            sendProgressToClients(calculateMetricsClients, {
                status: 'error',
                error: error.message
            });
            res.status(500).json({ error: 'Failed to calculate metrics', details: error.message });
        } else {
            res.json({ success: true });
        }
    }
});

// Route to import from production database
router.post('/import-from-prod', async (req, res) => {
    if (activeImport) {
        return res.status(409).json({ error: 'Import already in progress' });
    }

    try {
        const importFromProd = require('../../scripts/import-from-prod');

        // Set up progress handler
        const progressHandler = (data) => {
            importProgress = data;
            sendProgressToClients(importClients, data);
        };

        // Start the import process
        importFromProd.outputProgress = progressHandler;
        activeImport = importFromProd; // Store the module for cancellation

        // Run the import in the background
        importFromProd.main().catch(error => {
            console.error('Error in import process:', error);
            activeImport = null;
            importProgress = {
                status: error.message === 'Import cancelled' ? 'cancelled' : 'error',
                operation: 'Import process',
                error: error.message
            };
            sendProgressToClients(importClients, importProgress);
        }).finally(() => {
            activeImport = null;
        });

        res.json({ message: 'Import from production started' });
    } catch (error) {
        console.error('Error starting production import:', error);
        activeImport = null;
        res.status(500).json({ error: error.message || 'Failed to start production import' });
    }
});

// POST /csv/full-update - Run full update script
router.post('/full-update', async (req, res) => {
    try {
        const scriptPath = path.join(__dirname, '../../scripts/full-update.js');
        runScript(scriptPath, 'update', fullUpdateClients)
            .catch(error => {
                console.error('Update failed:', error);
            });
        res.status(202).json({ message: 'Update started' });
    } catch (error) {
        res.status(500).json({ error: error.message });
    }
});

// POST /csv/full-reset - Run full reset script
router.post('/full-reset', async (req, res) => {
    try {
        const scriptPath = path.join(__dirname, '../../scripts/full-reset.js');
        runScript(scriptPath, 'reset', fullResetClients)
            .catch(error => {
                console.error('Reset failed:', error);
            });
        res.status(202).json({ message: 'Reset started' });
    } catch (error) {
        res.status(500).json({ error: error.message });
    }
});

// GET /history/import - Get recent import history
router.get('/history/import', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        const { rows } = await pool.query(`
            SELECT
                id,
                start_time,
                end_time,
                status,
                error_message,
                records_added::integer,
                records_updated::integer
            FROM import_history
            ORDER BY start_time DESC
            LIMIT 20
        `);
        res.json(rows || []);
    } catch (error) {
        console.error('Error fetching import history:', error);
        res.status(500).json({ error: error.message });
    }
});

// GET /history/calculate - Get recent calculation history
router.get('/history/calculate', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        const { rows } = await pool.query(`
            SELECT
                id,
                start_time,
                end_time,
                duration_minutes,
                status,
                error_message,
                total_products,
                total_orders,
                total_purchase_orders,
                processed_products,
                processed_orders,
                processed_purchase_orders,
                additional_info
            FROM calculate_history
            ORDER BY start_time DESC
            LIMIT 20
        `);
        res.json(rows || []);
    } catch (error) {
        console.error('Error fetching calculate history:', error);
        res.status(500).json({ error: error.message });
    }
});

// GET /status/modules - Get module calculation status
router.get('/status/modules', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        const { rows } = await pool.query(`
            SELECT
                module_name,
                last_calculation_timestamp::timestamp
            FROM calculate_status
            ORDER BY module_name
        `);
        res.json(rows || []);
    } catch (error) {
        console.error('Error fetching module status:', error);
        res.status(500).json({ error: error.message });
    }
});

// GET /status/tables - Get table sync status
router.get('/status/tables', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        const { rows } = await pool.query(`
            SELECT
                table_name,
                last_sync_timestamp::timestamp
            FROM sync_status
            ORDER BY table_name
        `);
        res.json(rows || []);
    } catch (error) {
        console.error('Error fetching table status:', error);
        res.status(500).json({ error: error.message });
    }
});

// GET /status/table-counts - Get record counts for all tables
router.get('/status/table-counts', async (req, res) => {
    try {
        const pool = req.app.locals.pool;
        const tables = [
            // Core tables
            'products', 'categories', 'product_categories', 'orders', 'purchase_orders',
            // New metrics tables
            'product_metrics', 'daily_product_snapshots', 'brand_metrics', 'category_metrics', 'vendor_metrics',
            // Config tables
            'settings_global', 'settings_vendor', 'settings_product'
        ];

        const counts = await Promise.all(
            tables.map(table =>
                pool.query(`SELECT COUNT(*) as count FROM ${table}`)
                    .then(result => ({
                        table_name: table,
                        count: parseInt(result.rows[0].count)
                    }))
                    .catch(err => ({
                        table_name: table,
                        count: null,
                        error: err.message
                    }))
            )
        );

        // Group tables by type
        const groupedCounts = {
            core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)),
            metrics: counts.filter(c => ['product_metrics', 'daily_product_snapshots', 'brand_metrics', 'category_metrics', 'vendor_metrics'].includes(c.table_name)),
            config: counts.filter(c => ['settings_global', 'settings_vendor', 'settings_product'].includes(c.table_name))
        };

        res.json(groupedCounts);
    } catch (error) {
        console.error('Error fetching table counts:', error);
        res.status(500).json({ error: error.message });
    }
});

module.exports = router;
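
For orientation, the groupedCounts payload returned by /status/table-counts has this shape (values illustrative):

// {
//   core:    [{ table_name: 'products', count: 12345 }, ...],
//   metrics: [{ table_name: 'product_metrics', count: 9876 }, ...],
//   config:  [{ table_name: 'settings_global', count: 1 }, ...]
// }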

inventory-server/src/routes/data-management.js (new file, 390 lines)
@@ -0,0 +1,390 @@
const express = require('express');
const router = express.Router();
const { spawn } = require('child_process');
const path = require('path');
const db = require('../utils/db');

// Debug middleware MUST be first
router.use((req, res, next) => {
    console.log(`[CSV Route Debug] ${req.method} ${req.path}`);
    next();
});

// Store active processes and their progress
let activeImport = null;
let importProgress = null;
let activeFullUpdate = null;
let activeFullReset = null;

// SSE clients for progress updates
const updateClients = new Set();
const importClients = new Set();
const resetClients = new Set();
const resetMetricsClients = new Set();
const calculateMetricsClients = new Set();
const fullUpdateClients = new Set();
const fullResetClients = new Set();

// Helper to send progress to specific clients
function sendProgressToClients(clients, data) {
    // If data is a string, send it directly
    // If it's an object, convert it to JSON
    const message = typeof data === 'string'
        ? `data: ${data}\n\n`
        : `data: ${JSON.stringify(data)}\n\n`;

    clients.forEach(client => {
        try {
            client.write(message);
            // Immediately flush the response
            if (typeof client.flush === 'function') {
                client.flush();
            }
        } catch (error) {
            // Silently remove failed client
            clients.delete(client);
        }
    });
}

// Helper to run a script and stream progress
function runScript(scriptPath, type, clients) {
    return new Promise((resolve, reject) => {
        // Kill any existing process of this type
        let activeProcess;
        switch (type) {
            case 'update':
                if (activeFullUpdate) {
                    try { activeFullUpdate.kill(); } catch (e) { }
                }
                activeProcess = activeFullUpdate;
                break;
            case 'reset':
                if (activeFullReset) {
                    try { activeFullReset.kill(); } catch (e) { }
                }
                activeProcess = activeFullReset;
                break;
        }

        const child = spawn('node', [scriptPath], {
            stdio: ['inherit', 'pipe', 'pipe']
        });

        switch (type) {
            case 'update':
                activeFullUpdate = child;
                break;
            case 'reset':
                activeFullReset = child;
                break;
        }

        let output = '';

        child.stdout.on('data', (data) => {
            const text = data.toString();
            output += text;

            // Split by lines to handle multiple JSON outputs
            const lines = text.split('\n');
            lines.filter(line => line.trim()).forEach(line => {
                try {
                    // Try to parse as JSON but don't let it affect the display
                    const jsonData = JSON.parse(line);
                    // Only end the process if we get a final status
                    if (jsonData.status === 'complete' || jsonData.status === 'error' || jsonData.status === 'cancelled') {
                        if (jsonData.status === 'complete' && !jsonData.operation?.includes('complete')) {
                            // Don't close for intermediate completion messages
                            sendProgressToClients(clients, line);
                            return;
                        }
                        // Close only on final completion/error/cancellation
                        switch (type) {
                            case 'update':
                                activeFullUpdate = null;
                                break;
                            case 'reset':
                                activeFullReset = null;
                                break;
                        }
                        if (jsonData.status === 'error') {
                            reject(new Error(jsonData.error || 'Unknown error'));
                        } else {
                            resolve({ output });
                        }
                    }
                } catch (e) {
                    // Not JSON, just display as is
                }
                // Always send the raw line
                sendProgressToClients(clients, line);
            });
        });

        child.stderr.on('data', (data) => {
            const text = data.toString();
            console.error(text);
            // Send stderr output directly too
            sendProgressToClients(clients, text);
        });

        child.on('close', (code) => {
            switch (type) {
                case 'update':
                    activeFullUpdate = null;
                    break;
                case 'reset':
                    activeFullReset = null;
                    break;
            }

            if (code !== 0) {
                const error = `Script ${scriptPath} exited with code ${code}`;
                sendProgressToClients(clients, error);
                reject(new Error(error));
            }
            // Don't resolve here - let the completion message from the script trigger the resolve
        });

        child.on('error', (err) => {
            switch (type) {
                case 'update':
                    activeFullUpdate = null;
                    break;
                case 'reset':
                    activeFullReset = null;
                    break;
            }
            sendProgressToClients(clients, err.message);
            reject(err);
        });
    });
}
|
||||
|
||||
// Progress endpoints
|
||||
router.get('/:type/progress', (req, res) => {
|
||||
const { type } = req.params;
|
||||
if (!['update', 'reset'].includes(type)) {
|
||||
return res.status(400).json({ error: 'Invalid operation type' });
|
||||
}
|
||||
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Connection': 'keep-alive',
|
||||
'Access-Control-Allow-Origin': req.headers.origin || '*',
|
||||
'Access-Control-Allow-Credentials': 'true'
|
||||
});
|
||||
|
||||
// Add this client to the correct set
|
||||
const clients = type === 'update' ? fullUpdateClients : fullResetClients;
|
||||
clients.add(res);
|
||||
|
||||
// Send initial connection message
|
||||
sendProgressToClients(new Set([res]), JSON.stringify({
|
||||
status: 'running',
|
||||
operation: 'Initializing connection...'
|
||||
}));
|
||||
|
||||
// Handle client disconnect
|
||||
req.on('close', () => {
|
||||
clients.delete(res);
|
||||
});
|
||||
});
|
||||
|
||||
// Route to cancel active process
|
||||
router.post('/cancel', (req, res) => {
|
||||
let killed = false;
|
||||
|
||||
// Get the operation type from the request
|
||||
const { type } = req.query;
|
||||
const clients = type === 'update' ? fullUpdateClients : fullResetClients;
|
||||
const activeProcess = type === 'update' ? activeFullUpdate : activeFullReset;
|
||||
|
||||
if (activeProcess) {
|
||||
try {
|
||||
activeProcess.kill('SIGTERM');
|
||||
if (type === 'update') {
|
||||
activeFullUpdate = null;
|
||||
} else {
|
||||
activeFullReset = null;
|
||||
}
|
||||
killed = true;
|
||||
sendProgressToClients(clients, JSON.stringify({
|
||||
status: 'cancelled',
|
||||
operation: 'Operation cancelled'
|
||||
}));
|
||||
} catch (err) {
|
||||
console.error(`Error killing ${type} process:`, err);
|
||||
}
|
||||
}
|
||||
|
||||
if (killed) {
|
||||
res.json({ success: true });
|
||||
} else {
|
||||
res.status(404).json({ error: 'No active process to cancel' });
|
||||
}
|
||||
});
|
||||

// POST /csv/full-update - Run full update script
router.post('/full-update', async (req, res) => {
  try {
    const scriptPath = path.join(__dirname, '../../scripts/full-update.js');
    runScript(scriptPath, 'update', fullUpdateClients)
      .catch(error => {
        console.error('Update failed:', error);
      });
    res.status(202).json({ message: 'Update started' });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});

// POST /csv/full-reset - Run full reset script
router.post('/full-reset', async (req, res) => {
  try {
    const scriptPath = path.join(__dirname, '../../scripts/full-reset.js');
    runScript(scriptPath, 'reset', fullResetClients)
      .catch(error => {
        console.error('Reset failed:', error);
      });
    res.status(202).json({ message: 'Reset started' });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
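
// Both routes return 202 before runScript settles: the client starts the job,
// then follows progress over SSE. A typical client sequence (illustrative
// sketch, paths as in the route comments above):
//
//   await fetch('/csv/full-update', { method: 'POST' });   // 202 immediately
//   const es = new EventSource('/csv/update/progress');    // stream progress
//   // ... and to abort a running job:
//   await fetch('/csv/cancel?type=update', { method: 'POST' });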

// GET /history/import - Get recent import history
router.get('/history/import', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        id,
        start_time,
        end_time,
        status,
        error_message,
        records_added::integer,
        records_updated::integer
      FROM import_history
      ORDER BY start_time DESC
      LIMIT 20
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching import history:', error);
    res.status(500).json({ error: error.message });
  }
});

// GET /history/calculate - Get recent calculation history
router.get('/history/calculate', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        id,
        start_time,
        end_time,
        duration_minutes,
        status,
        error_message,
        total_products,
        total_orders,
        total_purchase_orders,
        processed_products,
        processed_orders,
        processed_purchase_orders,
        additional_info
      FROM calculate_history
      ORDER BY start_time DESC
      LIMIT 20
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching calculate history:', error);
    res.status(500).json({ error: error.message });
  }
});

// GET /status/modules - Get module calculation status
router.get('/status/modules', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        module_name,
        last_calculation_timestamp::timestamp
      FROM calculate_status
      ORDER BY module_name
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching module status:', error);
    res.status(500).json({ error: error.message });
  }
});

// GET /status/tables - Get table sync status
router.get('/status/tables', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        table_name,
        last_sync_timestamp::timestamp
      FROM sync_status
      ORDER BY table_name
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching table status:', error);
    res.status(500).json({ error: error.message });
  }
});

// GET /status/table-counts - Get record counts for all tables
router.get('/status/table-counts', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const tables = [
      // Core tables
      'products', 'categories', 'product_categories', 'orders', 'purchase_orders',
      // New metrics tables
      'product_metrics', 'daily_product_snapshots', 'brand_metrics', 'category_metrics', 'vendor_metrics',
      // Config tables
      'settings_global', 'settings_vendor', 'settings_product'
    ];

    const counts = await Promise.all(
      tables.map(table =>
        // Interpolating the table name is safe here: names come from the fixed
        // whitelist above, never from user input
        pool.query(`SELECT COUNT(*) as count FROM ${table}`)
          .then(result => ({
            table_name: table,
            count: parseInt(result.rows[0].count, 10)
          }))
          .catch(err => ({
            table_name: table,
            count: null,
            error: err.message
          }))
      )
    );

    // Group tables by type
    const groupedCounts = {
      core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)),
      metrics: counts.filter(c => ['product_metrics', 'daily_product_snapshots', 'brand_metrics', 'category_metrics', 'vendor_metrics'].includes(c.table_name)),
      config: counts.filter(c => ['settings_global', 'settings_vendor', 'settings_product'].includes(c.table_name))
    };

    res.json(groupedCounts);
  } catch (error) {
    console.error('Error fetching table counts:', error);
    res.status(500).json({ error: error.message });
  }
});
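
// Example response shape (illustrative values only):
//
//   {
//     "core":    [{ "table_name": "products", "count": 1532 }, ...],
//     "metrics": [{ "table_name": "product_metrics", "count": 1532 }, ...],
//     "config":  [{ "table_name": "settings_global", "count": 3 }, ...]
//   }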

module.exports = router;

@@ -23,10 +23,7 @@ router.get('/brands', async (req, res) => {
    const { rows } = await pool.query(`
      SELECT DISTINCT COALESCE(p.brand, 'Unbranded') as brand
      FROM products p
      JOIN purchase_orders po ON p.pid = po.pid
      WHERE p.visible = true
      GROUP BY COALESCE(p.brand, 'Unbranded')
      HAVING SUM(po.cost_price * po.received) >= 500
      ORDER BY COALESCE(p.brand, 'Unbranded')
    `);

@@ -629,163 +626,6 @@ router.get('/:id', async (req, res) => {
  }
});

// Import products from CSV
router.post('/import', upload.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }

  try {
    const result = await importProductsFromCSV(req.file.path, req.app.locals.pool);
    res.json(result);
  } catch (error) {
    console.error('Error importing products:', error);
    res.status(500).json({ error: 'Failed to import products' });
  } finally {
    // Clean up the uploaded file whether or not the import succeeded
    require('fs').unlinkSync(req.file.path);
  }
});
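
// Example upload (illustrative; the field name must match upload.single('file'),
// and the /products prefix is an assumption about where this router is mounted):
//
//   curl -X POST -F "file=@products.csv" http://localhost:3000/products/import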

// Update a product
router.put('/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const {
      title,
      sku,
      stock_quantity,
      price,
      regular_price,
      cost_price,
      vendor,
      brand,
      categories,
      visible,
      managing_stock
    } = req.body;

    const { rowCount } = await pool.query(
      `UPDATE products
       SET title = $1,
           sku = $2,
           stock_quantity = $3,
           price = $4,
           regular_price = $5,
           cost_price = $6,
           vendor = $7,
           brand = $8,
           categories = $9,
           visible = $10,
           managing_stock = $11
       WHERE pid = $12`,
      [
        title,
        sku,
        stock_quantity,
        price,
        regular_price,
        cost_price,
        vendor,
        brand,
        categories,
        visible,
        managing_stock,
        req.params.id
      ]
    );

    if (rowCount === 0) {
      return res.status(404).json({ error: 'Product not found' });
    }

    res.json({ message: 'Product updated successfully' });
  } catch (error) {
    console.error('Error updating product:', error);
    res.status(500).json({ error: 'Failed to update product' });
  }
});
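
// Example PUT body (illustrative values; all eleven fields above are expected):
//
//   { "title": "Widget", "sku": "WID-1", "stock_quantity": 25, "price": 19.99,
//     "regular_price": 24.99, "cost_price": 9.5, "vendor": "Acme", "brand": "Acme",
//     "categories": "Hardware", "visible": true, "managing_stock": true }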

// Get product metrics
router.get('/:id/metrics', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { id } = req.params;

    // Get metrics from product_metrics table with inventory health data.
    // The CTE is aliased as "inv" below because "is" is a reserved word in
    // PostgreSQL and would break the query.
    const { rows: metrics } = await pool.query(`
      WITH inventory_status AS (
        SELECT
          p.pid,
          CASE
            WHEN pm.daily_sales_avg = 0 THEN 'New'
            WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * 7) THEN 'Critical'
            WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * 14) THEN 'Reorder'
            WHEN p.stock_quantity > (pm.daily_sales_avg * 90) THEN 'Overstocked'
            ELSE 'Healthy'
          END as calculated_status
        FROM products p
        LEFT JOIN product_metrics pm ON p.pid = pm.pid
        WHERE p.pid = $1
      )
      SELECT
        COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
        COALESCE(pm.weekly_sales_avg, 0) as weekly_sales_avg,
        COALESCE(pm.monthly_sales_avg, 0) as monthly_sales_avg,
        COALESCE(pm.days_of_inventory, 0) as days_of_inventory,
        COALESCE(pm.reorder_point, CEIL(COALESCE(pm.daily_sales_avg, 0) * 14)) as reorder_point,
        COALESCE(pm.safety_stock, CEIL(COALESCE(pm.daily_sales_avg, 0) * 7)) as safety_stock,
        COALESCE(pm.avg_margin_percent,
          ((p.price - COALESCE(p.cost_price, 0)) / NULLIF(p.price, 0)) * 100
        ) as avg_margin_percent,
        COALESCE(pm.total_revenue, 0) as total_revenue,
        COALESCE(pm.inventory_value, p.stock_quantity * COALESCE(p.cost_price, 0)) as inventory_value,
        COALESCE(pm.turnover_rate, 0) as turnover_rate,
        COALESCE(pm.abc_class, 'C') as abc_class,
        COALESCE(pm.stock_status, inv.calculated_status) as stock_status,
        COALESCE(pm.avg_lead_time_days, 0) as avg_lead_time_days,
        COALESCE(pm.current_lead_time, 0) as current_lead_time,
        COALESCE(pm.target_lead_time, 14) as target_lead_time,
        COALESCE(pm.lead_time_status, 'Unknown') as lead_time_status,
        COALESCE(pm.reorder_qty, 0) as reorder_qty,
        COALESCE(pm.overstocked_amt, 0) as overstocked_amt
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      LEFT JOIN inventory_status inv ON p.pid = inv.pid
      WHERE p.pid = $2
    `, [id, id]);

    if (!metrics.length) {
      // Return default metrics structure if no data found
      res.json({
        daily_sales_avg: 0,
        weekly_sales_avg: 0,
        monthly_sales_avg: 0,
        days_of_inventory: 0,
        reorder_point: 0,
        safety_stock: 0,
        avg_margin_percent: 0,
        total_revenue: 0,
        inventory_value: 0,
        turnover_rate: 0,
        abc_class: 'C',
        stock_status: 'New',
        avg_lead_time_days: 0,
        current_lead_time: 0,
        target_lead_time: 14,
        lead_time_status: 'Unknown',
        reorder_qty: 0,
        overstocked_amt: 0
      });
      return;
    }

    res.json(metrics[0]);
  } catch (error) {
    console.error('Error fetching product metrics:', error);
    res.status(500).json({ error: 'Failed to fetch product metrics' });
  }
});
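
// Worked example of the inventory_status thresholds above, with illustrative
// numbers: at daily_sales_avg = 2, the critical threshold is CEIL(2 * 7) = 14
// and the reorder threshold is CEIL(2 * 14) = 28, so a stock_quantity of 10 is
// 'Critical', 25 is 'Reorder', 300 (> 2 * 90 = 180) is 'Overstocked', and 50
// falls through to 'Healthy'.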

// Get product time series data
router.get('/:id/time-series', async (req, res) => {
  const { id } = req.params;