Product editor tweaks

This commit is contained in:
2026-01-30 22:21:44 -05:00
parent 003e1ddd61
commit ac39257a51
195 changed files with 1233 additions and 66705 deletions

View File

@@ -5,7 +5,7 @@ const corsMiddleware = cors({
origin: [
'https://inventory.kent.pw',
'http://localhost:5175',
'https://acot.site',
'https://tools.acherryontop.com',
'https://tools.acherryontop.com',
/^http:\/\/192\.168\.\d+\.\d+(:\d+)?$/,
/^http:\/\/10\.\d+\.\d+\.\d+(:\d+)?$/
@@ -28,7 +28,7 @@ const corsErrorHandler = (err, req, res, next) => {
res.status(403).json({
error: 'CORS not allowed',
origin: req.get('Origin'),
message: 'Origin not in allowed list: https://inventory.kent.pw, https://acot.site, https://tools.acherryontop.com, localhost:5175, 192.168.x.x, or 10.x.x.x'
message: 'Origin not in allowed list: https://inventory.kent.pw, https://tools.acherryontop.com, https://tools.acherryontop.com, localhost:5175, 192.168.x.x, or 10.x.x.x'
});
} else {
next(err);

View File

@@ -1,310 +0,0 @@
const express = require('express');
const router = express.Router();
// List every AI prompt, ordered by prompt_type and then company.
router.get('/', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM ai_prompts
      ORDER BY prompt_type ASC, company ASC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching AI prompts:', error);
    res.status(500).json({
      error: 'Failed to fetch AI prompts',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Get prompt by type (any prompt_type value - extensible)
//
// NOTE: this route MUST be registered before the '/:id' route below.
// Express matches routes in registration order, so if '/:id' were first,
// a request for GET /by-type would be captured with id === 'by-type' and
// this handler would be unreachable.
//
// Query params:
//   type    - required prompt_type value
//   company - required iff type ends with '_company_specific'
router.get('/by-type', async (req, res) => {
  try {
    const { type, company } = req.query;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // Validate type is provided
    if (!type || typeof type !== 'string' || type.trim().length === 0) {
      return res.status(400).json({
        error: 'Valid type query parameter is required'
      });
    }
    // For company_specific types, company ID is required
    const isCompanySpecificType = type.endsWith('_company_specific');
    if (isCompanySpecificType && !company) {
      return res.status(400).json({
        error: 'Company ID is required for company_specific prompt types'
      });
    }
    // For non-company-specific types, company should not be provided
    if (!isCompanySpecificType && company) {
      return res.status(400).json({
        error: 'Company ID should not be provided for non-company-specific prompt types'
      });
    }
    // Build the query based on whether company is provided
    let query, params;
    if (company) {
      query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1 AND company = $2';
      params = [type.trim(), company];
    } else {
      query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1 AND company IS NULL';
      params = [type.trim()];
    }
    // Execute the query
    const result = await pool.query(query, params);
    // Check if any prompt was found
    if (result.rows.length === 0) {
      const errorMessage = company
        ? `AI prompt '${type}' not found for company ${company}`
        : `AI prompt '${type}' not found`;
      return res.status(404).json({ error: errorMessage });
    }
    // Return the first matching prompt
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error fetching AI prompt by type:', error);
    res.status(500).json({
      error: 'Failed to fetch AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Get a single prompt by its primary-key id.
router.get('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      SELECT * FROM ai_prompts
      WHERE id = $1
    `, [id]);
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'AI prompt not found' });
    }
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error fetching AI prompt:', error);
    res.status(500).json({
      error: 'Failed to fetch AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Create new AI prompt.
// Body: { prompt_text, prompt_type, company? }
// Responds 201 with the inserted row, 400 on validation failure,
// 409 when a singleton-uniqueness constraint is violated.
router.post('/', async (req, res) => {
  try {
    const {
      prompt_text,
      prompt_type,
      company
    } = req.body;
    // Validate required fields
    if (!prompt_text || !prompt_type) {
      return res.status(400).json({ error: 'Prompt text and type are required' });
    }
    // Validate prompt_type is a non-empty string (no hardcoded list - extensible)
    if (typeof prompt_type !== 'string' || prompt_type.trim().length === 0) {
      return res.status(400).json({ error: 'Prompt type must be a non-empty string' });
    }
    // For company-specific types (ending with _company_specific), require company
    const isCompanySpecificType = prompt_type.endsWith('_company_specific');
    if (isCompanySpecificType && !company) {
      return res.status(400).json({ error: 'Company is required for company-specific prompt types' });
    }
    // For non-company-specific types, company should not be provided
    if (!isCompanySpecificType && company) {
      return res.status(400).json({ error: 'Company should not be provided for non-company-specific prompt types' });
    }
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      INSERT INTO ai_prompts (
        prompt_text,
        prompt_type,
        company
      ) VALUES ($1, $2, $3)
      RETURNING *
    `, [
      prompt_text,
      prompt_type.trim(),
      company || null
    ]);
    res.status(201).json(result.rows[0]);
  } catch (error) {
    console.error('Error creating AI prompt:', error);
    // Detect unique-constraint violations. Prefer the Postgres error code
    // ('23505' = unique_violation) and the reported constraint name over
    // sniffing the human-readable message, which is fragile; the message
    // check is kept as a fallback for compatibility.
    const isUniqueViolation = (error != null && error.code === '23505') ||
      (error instanceof Error && error.message.includes('unique'));
    if (isUniqueViolation && error instanceof Error) {
      const detail = `${error.constraint || ''} ${error.message}`;
      if (detail.includes('idx_singleton_with_company')) {
        return res.status(409).json({
          error: 'A prompt of this type already exists for this company',
          details: error.message
        });
      } else if (detail.includes('idx_singleton_no_company')) {
        return res.status(409).json({
          error: 'A prompt of this type already exists',
          details: error.message
        });
      }
    }
    res.status(500).json({
      error: 'Failed to create AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Update AI prompt.
// Body: { prompt_text, prompt_type, company? }
// Responds with the updated row, 404 if the id does not exist,
// 409 on a singleton-uniqueness violation.
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const {
      prompt_text,
      prompt_type,
      company
    } = req.body;
    // Validate required fields
    if (!prompt_text || !prompt_type) {
      return res.status(400).json({ error: 'Prompt text and type are required' });
    }
    // Validate prompt_type is a non-empty string (no hardcoded list - extensible)
    if (typeof prompt_type !== 'string' || prompt_type.trim().length === 0) {
      return res.status(400).json({ error: 'Prompt type must be a non-empty string' });
    }
    // For company-specific types, require company
    const isCompanySpecificType = prompt_type.endsWith('_company_specific');
    if (isCompanySpecificType && !company) {
      return res.status(400).json({ error: 'Company is required for company-specific prompt types' });
    }
    // For non-company-specific types, company should not be provided
    if (!isCompanySpecificType && company) {
      return res.status(400).json({ error: 'Company should not be provided for non-company-specific prompt types' });
    }
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      UPDATE ai_prompts
      SET
        prompt_text = $1,
        prompt_type = $2,
        company = $3,
        updated_at = CURRENT_TIMESTAMP
      WHERE id = $4
      RETURNING *
    `, [
      prompt_text,
      prompt_type.trim(),
      company || null,
      id
    ]);
    // UPDATE ... RETURNING yields zero rows when the id does not exist, so a
    // separate existence SELECT (and its check-then-act race) is unnecessary.
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'AI prompt not found' });
    }
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error updating AI prompt:', error);
    // Detect unique-constraint violations via the Postgres error code
    // ('23505' = unique_violation) and constraint name; the message check
    // is kept as a fallback for compatibility.
    const isUniqueViolation = (error != null && error.code === '23505') ||
      (error instanceof Error && error.message.includes('unique'));
    if (isUniqueViolation && error instanceof Error) {
      const detail = `${error.constraint || ''} ${error.message}`;
      if (detail.includes('idx_singleton_with_company')) {
        return res.status(409).json({
          error: 'A prompt of this type already exists for this company',
          details: error.message
        });
      } else if (detail.includes('idx_singleton_no_company')) {
        return res.status(409).json({
          error: 'A prompt of this type already exists',
          details: error.message
        });
      }
    }
    res.status(500).json({
      error: 'Failed to update AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Delete an AI prompt by id; 404 when no row matches.
router.delete('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // RETURNING * lets us distinguish "deleted" from "not found".
    const deleted = await pool.query('DELETE FROM ai_prompts WHERE id = $1 RETURNING *', [id]);
    if (deleted.rows.length === 0) {
      return res.status(404).json({ error: 'AI prompt not found' });
    }
    res.json({ message: 'AI prompt deleted successfully' });
  } catch (error) {
    console.error('Error deleting AI prompt:', error);
    res.status(500).json({
      error: 'Failed to delete AI prompt',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Catch-all error handler for this router.
// Express identifies error-handling middleware by its 4-argument signature,
// so the unused 'next' parameter must stay in the list.
router.use((err, req, res, next) => {
  console.error('AI prompts route error:', err);
  res.status(500).json({
    error: 'Internal server error',
    details: err.message
  });
});
module.exports = router;

File diff suppressed because it is too large Load Diff

View File

@@ -1,434 +0,0 @@
/**
* AI Routes
*
* API endpoints for AI-powered product validation features.
* Provides embedding generation and similarity-based suggestions.
*/
const express = require('express');
const router = express.Router();
const aiService = require('../services/ai');
const { getDbConnection, closeAllConnections } = require('../utils/dbConnection');
// Tracks the in-flight initialization attempt so concurrent requests share it.
let initializationPromise = null;
/**
 * Ensure AI service is initialized.
 * Uses lazy initialization on first request; concurrent callers await the
 * same in-flight attempt.
 *
 * If an attempt fails, the cached promise is cleared so a later request can
 * retry. (Previously the settled promise was cached forever, so one failed
 * startup left the service permanently unavailable.)
 *
 * @returns {Promise<boolean>} true when aiService reports ready
 */
async function ensureInitialized() {
  if (aiService.isReady()) {
    return true;
  }
  if (!initializationPromise) {
    initializationPromise = (async () => {
      try {
        console.log('[AI Routes] Initializing AI service...');
        // Get database connection for taxonomy
        const { connection } = await getDbConnection();
        const result = await aiService.initialize({
          openaiApiKey: process.env.OPENAI_API_KEY,
          groqApiKey: process.env.GROQ_API_KEY,
          mysqlConnection: connection,
          pool: null, // Will be set by setPool()
          logger: console
        });
        if (!result.success) {
          console.error('[AI Routes] AI service initialization failed:', result.message);
          return false;
        }
        console.log('[AI Routes] AI service initialized:', {
          ...result.stats,
          groqEnabled: result.groqEnabled
        });
        return true;
      } catch (error) {
        console.error('[AI Routes] Failed to initialize AI service:', error);
        return false;
      }
    })();
  }
  try {
    await initializationPromise;
  } finally {
    if (!aiService.isReady()) {
      // Failed attempt: drop the cached promise so the next request retries.
      initializationPromise = null;
    }
  }
  return aiService.isReady();
}
/**
 * GET /api/ai/status
 * Report the AI service's current status.
 */
router.get('/status', async (req, res) => {
  try {
    res.json(aiService.getStatus());
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/initialize
 * Manually trigger initialization (also happens automatically on first use).
 * Returns { success, ...status }.
 */
router.post('/initialize', async (req, res) => {
  try {
    const ready = await ensureInitialized();
    res.json({
      success: ready,
      ...aiService.getStatus()
    });
  } catch (error) {
    console.error('[AI Routes] Initialize error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * GET /api/ai/taxonomy
 * Get all taxonomy data (categories, themes, colors) without embeddings.
 */
router.get('/taxonomy', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    res.json(aiService.getTaxonomyData());
  } catch (error) {
    console.error('[AI Routes] Taxonomy error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/embedding
 * Generate the embedding for a single product.
 *
 * Body: { product: { name, description, company_name, line_name } }
 * Returns: { embedding: number[], latencyMs: number }
 */
router.post('/embedding', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    const { product } = req.body;
    if (!product) {
      return res.status(400).json({ error: 'Product is required' });
    }
    res.json(await aiService.getProductEmbedding(product));
  } catch (error) {
    console.error('[AI Routes] Embedding error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/embeddings
 * Generate embeddings for multiple products in one call.
 *
 * Body: { products: Array<{ name, description, company_name, line_name }> }
 * Returns: { embeddings: Array<{ index, embedding }>, latencyMs }
 */
router.post('/embeddings', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    const { products } = req.body;
    if (!Array.isArray(products)) {
      return res.status(400).json({ error: 'Products array is required' });
    }
    res.json(await aiService.getProductEmbeddings(products));
  } catch (error) {
    console.error('[AI Routes] Embeddings error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/suggestions
 * Get category/theme/color suggestions for one product: generates its
 * embedding and finds the most similar taxonomy items.
 *
 * Body: { product: { name, description, company_name, line_name },
 *         options?: { topCategories, topThemes, topColors } }
 * Returns: { categories: Array, themes: Array, colors: Array, latencyMs }
 */
router.post('/suggestions', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    const { product, options } = req.body;
    if (!product) {
      return res.status(400).json({ error: 'Product is required' });
    }
    res.json(await aiService.getSuggestionsForProduct(product, options));
  } catch (error) {
    console.error('[AI Routes] Suggestions error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/suggestions/batch
 * Get suggestions for many products at once — embeddings are generated in
 * one batch call, so this is cheaper than hitting /suggestions per product.
 *
 * Body: { products: Array, options?: { topCategories, topThemes, topColors } }
 * Returns: { results: Array<{ index, categories, themes, colors }>, latencyMs }
 */
router.post('/suggestions/batch', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    const { products, options } = req.body;
    if (!Array.isArray(products)) {
      return res.status(400).json({ error: 'Products array is required' });
    }
    const startTime = Date.now();
    // One batched embedding call, then an in-memory similarity search per item.
    const { embeddings, latencyMs: embeddingLatency } = await aiService.getProductEmbeddings(products);
    const results = [];
    for (const { index, embedding } of embeddings) {
      results.push({
        index,
        ...aiService.findSimilarTaxonomy(embedding, options)
      });
    }
    const totalLatency = Date.now() - startTime;
    res.json({
      results,
      latencyMs: totalLatency,
      embeddingLatencyMs: embeddingLatency,
      searchLatencyMs: totalLatency - embeddingLatency,
      productCount: products.length,
      embeddingCount: embeddings.length
    });
  } catch (error) {
    console.error('[AI Routes] Batch suggestions error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/similar
 * Find similar taxonomy items for a pre-computed embedding — useful when the
 * frontend has cached the embedding.
 *
 * Body: { embedding: number[], options?: { topCategories, topThemes, topColors } }
 * Returns: { categories, themes, colors, latencyMs }
 */
router.post('/similar', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    const { embedding, options } = req.body;
    if (!embedding || !Array.isArray(embedding)) {
      return res.status(400).json({ error: 'Embedding array is required' });
    }
    const startTime = Date.now();
    const matches = aiService.findSimilarTaxonomy(embedding, options);
    res.json({
      ...matches,
      latencyMs: Date.now() - startTime
    });
  } catch (error) {
    console.error('[AI Routes] Similar error:', error);
    res.status(500).json({ error: error.message });
  }
});
// ============================================================================
// INLINE AI VALIDATION ENDPOINTS (Groq-powered)
// ============================================================================
/**
 * POST /api/ai/validate/inline/name
 * Validate a single product name for spelling, grammar, and naming conventions.
 *
 * Body: { product: { name, company_name, company_id, line_name, description } }
 * Returns: { isValid, suggestion?, issues[], latencyMs }
 */
router.post('/validate/inline/name', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    if (!aiService.hasChatCompletion()) {
      return res.status(503).json({
        error: 'Chat completion not available - GROQ_API_KEY not configured'
      });
    }
    const { product } = req.body;
    if (!product) {
      return res.status(400).json({ error: 'Product is required' });
    }
    const result = await aiService.runTask(aiService.TASK_IDS.VALIDATE_NAME, {
      product,
      pool: req.app.locals.pool // pool is placed on app.locals by server.js
    });
    if (result.success) {
      return res.json(result);
    }
    res.status(500).json({
      error: result.error || 'Validation failed',
      code: result.code
    });
  } catch (error) {
    console.error('[AI Routes] Name validation error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/validate/inline/description
 * Validate a single product description for quality and guideline compliance.
 *
 * Body: { product: { name, description, company_name, company_id, categories } }
 * Returns: { isValid, suggestion?, issues[], latencyMs }
 */
router.post('/validate/inline/description', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    if (!aiService.hasChatCompletion()) {
      return res.status(503).json({
        error: 'Chat completion not available - GROQ_API_KEY not configured'
      });
    }
    const { product } = req.body;
    if (!product) {
      return res.status(400).json({ error: 'Product is required' });
    }
    const result = await aiService.runTask(aiService.TASK_IDS.VALIDATE_DESCRIPTION, {
      product,
      pool: req.app.locals.pool // pool is placed on app.locals by server.js
    });
    if (result.success) {
      return res.json(result);
    }
    res.status(500).json({
      error: result.error || 'Validation failed',
      code: result.code
    });
  } catch (error) {
    console.error('[AI Routes] Description validation error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * POST /api/ai/validate/sanity-check
 * Run a consistency/sanity check on a batch of products.
 *
 * Body: { products: Array<product data> }
 * Returns: { issues: Array<{ productIndex, field, issue, suggestion? }>, summary, latencyMs }
 */
router.post('/validate/sanity-check', async (req, res) => {
  try {
    if (!(await ensureInitialized())) {
      return res.status(503).json({ error: 'AI service not available' });
    }
    if (!aiService.hasChatCompletion()) {
      return res.status(503).json({
        error: 'Chat completion not available - GROQ_API_KEY not configured'
      });
    }
    const { products } = req.body;
    if (!Array.isArray(products) || products.length === 0) {
      return res.status(400).json({ error: 'Products array is required' });
    }
    const result = await aiService.runTask(aiService.TASK_IDS.SANITY_CHECK, {
      products,
      pool: req.app.locals.pool // pool is placed on app.locals by server.js
    });
    if (result.success) {
      return res.json(result);
    }
    res.status(500).json({
      error: result.error || 'Sanity check failed',
      code: result.code
    });
  } catch (error) {
    console.error('[AI Routes] Sanity check error:', error);
    res.status(500).json({ error: error.message });
  }
});
module.exports = router;

View File

@@ -1,774 +1 @@
const express = require('express');
const router = express.Router();
// Forecasting: summarize sales for products received in a period by brand
//
// GET /forecast?brand=...&startDate=...&endDate=...&search=...
//
// For every product of the given brand first received inside the window,
// the query picks one "best" category (deepest path, excluding promo
// categories) and aggregates sales over the same window per category.
// Responds with an array of category summaries, each carrying its product
// detail rows; 400 when brand is missing.
router.get('/forecast', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const brand = (req.query.brand || '').toString();
    // Title filter may arrive as ?search= or ?q=; empty string becomes null
    // so the SQL can skip the ILIKE clause entirely.
    const titleSearch = (req.query.search || req.query.q || '').toString().trim() || null;
    const startDateStr = req.query.startDate;
    const endDateStr = req.query.endDate;
    if (!brand) {
      return res.status(400).json({ error: 'Missing required parameter: brand' });
    }
    // Default to last 30 days if no dates provided
    const endDate = endDateStr ? new Date(endDateStr) : new Date();
    const startDate = startDateStr ? new Date(startDateStr) : new Date(endDate.getTime() - 29 * 24 * 60 * 60 * 1000);
    // Normalize to date boundaries for consistency (UTC midnight, ISO form;
    // the SQL casts both to ::date anyway).
    const startISO = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate())).toISOString();
    const endISO = new Date(Date.UTC(endDate.getUTCFullYear(), endDate.getUTCMonth(), endDate.getUTCDate())).toISOString();
    // CTE walk-through:
    //   params                 - binds the four inputs plus window length in days
    //   category_path          - recursive 'Parent > Child' path per category
    //   product_first_received - first receiving date per product (column or MIN of receivings)
    //   recent_products        - visible products of this brand first received in the window
    //   product_pick_category  - one category per product (deepest non-promo path),
    //                            falling back to a literal 'Uncategorized' row
    //   product_sales          - per-product units sold in the window (cancelled orders excluded)
    // Final SELECT groups by chosen category and emits per-category stats + product JSON.
    const sql = `
      WITH params AS (
        SELECT
          $1::date AS start_date,
          $2::date AS end_date,
          $3::text AS brand,
          $4::text AS title_search,
          (($2::date - $1::date) + 1)::int AS days
      ),
      category_path AS (
        WITH RECURSIVE cp AS (
          SELECT c.cat_id, c.name, c.parent_id, c.name::text AS path
          FROM categories c WHERE c.parent_id IS NULL
          UNION ALL
          SELECT c.cat_id, c.name, c.parent_id, (cp.path || ' > ' || c.name)::text
          FROM categories c
          JOIN cp ON c.parent_id = cp.cat_id
        )
        SELECT * FROM cp
      ),
      product_first_received AS (
        SELECT
          p.pid,
          COALESCE(p.first_received::date, MIN(r.received_date)::date) AS first_received_date
        FROM products p
        LEFT JOIN receivings r ON r.pid = p.pid
        GROUP BY p.pid, p.first_received
      ),
      recent_products AS (
        SELECT p.pid
        FROM products p
        JOIN product_first_received fr ON fr.pid = p.pid
        JOIN params pr ON 1=1
        WHERE p.visible = true
          AND COALESCE(p.brand,'Unbranded') = pr.brand
          AND fr.first_received_date BETWEEN pr.start_date AND pr.end_date
          AND (pr.title_search IS NULL OR p.title ILIKE '%' || pr.title_search || '%')
      ),
      product_pick_category AS (
        (
          SELECT DISTINCT ON (pc.pid)
            pc.pid,
            c.name AS category_name,
            COALESCE(cp.path, c.name) AS path
          FROM product_categories pc
          JOIN categories c ON c.cat_id = pc.cat_id AND (c.type IS NULL OR c.type NOT IN (20,21))
          LEFT JOIN category_path cp ON cp.cat_id = c.cat_id
          WHERE pc.pid IN (SELECT pid FROM recent_products)
            AND (cp.path IS NULL OR (
              cp.path NOT ILIKE '%Black Friday%'
              AND cp.path NOT ILIKE '%Deals%'
            ))
            AND COALESCE(c.name, '') NOT IN ('Black Friday', 'Deals')
          ORDER BY pc.pid, length(COALESCE(cp.path,'')) DESC
        )
        UNION ALL
        (
          SELECT
            rp.pid,
            'Uncategorized'::text AS category_name,
            'Uncategorized'::text AS path
          FROM recent_products rp
          WHERE NOT EXISTS (
            SELECT 1
            FROM product_categories pc
            JOIN categories c ON c.cat_id = pc.cat_id AND (c.type IS NULL OR c.type NOT IN (20,21))
            LEFT JOIN category_path cp ON cp.cat_id = c.cat_id
            WHERE pc.pid = rp.pid
              AND (cp.path IS NULL OR (
                cp.path NOT ILIKE '%Black Friday%'
                AND cp.path NOT ILIKE '%Deals%'
              ))
              AND COALESCE(c.name, '') NOT IN ('Black Friday', 'Deals')
          )
        )
      ),
      product_sales AS (
        SELECT
          p.pid,
          p.title,
          p.sku,
          COALESCE(p.stock_quantity, 0) AS stock_quantity,
          COALESCE(p.price, 0) AS price,
          COALESCE(SUM(o.quantity), 0) AS total_sold
        FROM recent_products rp
        JOIN products p ON p.pid = rp.pid
        LEFT JOIN params pr ON true
        LEFT JOIN orders o ON o.pid = p.pid
          AND o.date::date BETWEEN pr.start_date AND pr.end_date
          AND (o.canceled IS DISTINCT FROM TRUE)
        GROUP BY p.pid, p.title, p.sku, p.stock_quantity, p.price
      )
      SELECT
        ppc.category_name,
        ppc.path,
        COUNT(ps.pid) AS num_products,
        SUM(ps.total_sold) AS total_sold,
        ROUND(AVG(COALESCE(ps.total_sold,0) / NULLIF(pr.days,0)), 2) AS avg_daily_sales,
        ROUND(AVG(COALESCE(ps.total_sold,0)), 2) AS avg_total_sold,
        MIN(ps.total_sold) AS min_total_sold,
        MAX(ps.total_sold) AS max_total_sold,
        JSON_AGG(
          JSON_BUILD_OBJECT(
            'pid', ps.pid,
            'title', ps.title,
            'sku', ps.sku,
            'total_sold', ps.total_sold,
            'categoryPath', ppc.path
          )
        ) AS products
      FROM product_sales ps
      JOIN product_pick_category ppc ON ppc.pid = ps.pid
      JOIN params pr ON true
      GROUP BY ppc.category_name, ppc.path
      HAVING SUM(ps.total_sold) >= 0
      ORDER BY (ppc.category_name = 'Uncategorized') ASC, avg_total_sold DESC NULLS LAST
      LIMIT 200;
    `;
    const { rows } = await pool.query(sql, [startISO, endISO, brand, titleSearch]);
    // Normalize/shape response keys to match front-end expectations
    // (numeric columns come back as strings from pg; coerce with Number()).
    const shaped = rows.map(r => ({
      category_name: r.category_name,
      path: r.path,
      avg_daily_sales: Number(r.avg_daily_sales) || 0,
      total_sold: Number(r.total_sold) || 0,
      num_products: Number(r.num_products) || 0,
      avgTotalSold: Number(r.avg_total_sold) || 0,
      minSold: Number(r.min_total_sold) || 0,
      maxSold: Number(r.max_total_sold) || 0,
      products: Array.isArray(r.products) ? r.products : []
    }));
    res.json(shaped);
  } catch (error) {
    console.error('Error fetching forecast data:', error);
    res.status(500).json({ error: 'Failed to fetch forecast data' });
  }
});
// Get overall analytics stats
//
// GET /stats — returns one JSON object of headline metrics aggregated from
// the pre-computed vendor_metrics / category_metrics / product_metrics tables.
router.get('/stats', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows: [results] } = await pool.query(`
      WITH vendor_count AS (
        SELECT COUNT(DISTINCT vendor_name) AS count
        FROM vendor_metrics
      ),
      category_count AS (
        SELECT COUNT(DISTINCT category_id) AS count
        FROM category_metrics
      ),
      metrics_summary AS (
        SELECT
          AVG(margin_30d) AS avg_profit_margin,
          AVG(markup_30d) AS avg_markup,
          AVG(stockturn_30d) AS avg_stock_turnover,
          AVG(asp_30d) AS avg_order_value
        FROM product_metrics
        WHERE sales_30d > 0
      )
      SELECT
        COALESCE(ms.avg_profit_margin, 0) AS profitMargin,
        COALESCE(ms.avg_markup, 0) AS averageMarkup,
        COALESCE(ms.avg_stock_turnover, 0) AS stockTurnoverRate,
        COALESCE(vc.count, 0) AS vendorCount,
        COALESCE(cc.count, 0) AS categoryCount,
        COALESCE(ms.avg_order_value, 0) AS averageOrderValue
      FROM metrics_summary ms
      CROSS JOIN vendor_count vc
      CROSS JOIN category_count cc
    `);
    // Ensure all values are numbers.
    // NOTE: Postgres folds unquoted column aliases (e.g. "profitMargin") to
    // lower case, so the driver exposes them as results.profitmargin etc. —
    // the lowercase property names below are deliberate.
    const stats = {
      profitMargin: Number(results.profitmargin) || 0,
      averageMarkup: Number(results.averagemarkup) || 0,
      stockTurnoverRate: Number(results.stockturnoverrate) || 0,
      vendorCount: Number(results.vendorcount) || 0,
      categoryCount: Number(results.categorycount) || 0,
      averageOrderValue: Number(results.averageordervalue) || 0
    };
    res.json(stats);
  } catch (error) {
    console.error('Error fetching analytics stats:', error);
    res.status(500).json({ error: 'Failed to fetch analytics stats' });
  }
});
// Get profit analysis data
//
// GET /profit — returns { byCategory, overTime, topProducts }:
//   byCategory  : top-10 categories by 30-day revenue with margins and paths
//   overTime    : daily profit margin / revenue / cost for the last 30 days
//   topProducts : top-10 products by 30-day margin with their category path
router.get('/profit', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    // Get profit margins by category with full path
    const { rows: byCategory } = await pool.query(`
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL
        UNION ALL
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      )
      SELECT
        cm.category_name as category,
        COALESCE(cp.path, cm.category_name) as categorypath,
        cm.avg_margin_30d as profitmargin,
        cm.revenue_30d as revenue,
        cm.cogs_30d as cost
      FROM category_metrics cm
      LEFT JOIN category_path cp ON cm.category_id = cp.cat_id
      WHERE cm.revenue_30d > 0
      ORDER BY cm.revenue_30d DESC
      LIMIT 10
    `);
    // Get profit margin over time (daily, last 30 days; days with no
    // snapshot rows are zero-filled via the generated time series)
    const { rows: overTime } = await pool.query(`
      WITH time_series AS (
        SELECT
          date_trunc('day', generate_series(
            CURRENT_DATE - INTERVAL '30 days',
            CURRENT_DATE,
            '1 day'::interval
          ))::date AS date
      ),
      daily_profits AS (
        SELECT
          snapshot_date as date,
          SUM(net_revenue) as revenue,
          SUM(cogs) as cost,
          CASE
            WHEN SUM(net_revenue) > 0
            THEN (SUM(net_revenue - cogs) / SUM(net_revenue)) * 100
            ELSE 0
          END as profit_margin
        FROM daily_product_snapshots
        WHERE snapshot_date >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY snapshot_date
      )
      SELECT
        to_char(ts.date, 'YYYY-MM-DD') as date,
        COALESCE(dp.profit_margin, 0) as profitmargin,
        COALESCE(dp.revenue, 0) as revenue,
        COALESCE(dp.cost, 0) as cost
      FROM time_series ts
      LEFT JOIN daily_profits dp ON ts.date = dp.date
      ORDER BY ts.date
    `);
    // Get top performing products by profit margin.
    //
    // FIX: the lookup CTE was previously named "product_categories", which is
    // also the name of the base table it selects FROM. Inside a WITH RECURSIVE
    // list a CTE's own name shadows the table, so Postgres treated the FROM
    // clause as a recursive self-reference and rejected the query (a recursive
    // CTE must have the non-recursive-term UNION recursive-term shape). The
    // CTE is renamed product_category_paths so the base table resolves.
    const { rows: topProducts } = await pool.query(`
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL
        UNION ALL
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      ),
      product_category_paths AS (
        SELECT
          pc.pid,
          c.name as category,
          COALESCE(cp.path, c.name) as categorypath
        FROM product_categories pc
        JOIN categories c ON pc.cat_id = c.cat_id
        LEFT JOIN category_path cp ON c.cat_id = cp.cat_id
      )
      SELECT
        pm.title as product,
        COALESCE(pc.category, 'Uncategorized') as category,
        COALESCE(pc.categorypath, 'Uncategorized') as categorypath,
        pm.margin_30d as profitmargin,
        pm.revenue_30d as revenue,
        pm.cogs_30d as cost
      FROM product_metrics pm
      LEFT JOIN product_category_paths pc ON pm.pid = pc.pid
      WHERE pm.revenue_30d > 100
        AND pm.margin_30d > 0
      ORDER BY pm.margin_30d DESC
      LIMIT 10
    `);
    // NOTE(review): product_category_paths may yield multiple rows per pid
    // (one per assigned category), which can duplicate products in
    // topProducts — confirm whether a DISTINCT ON (pc.pid) is wanted.
    res.json({ byCategory, overTime, topProducts });
  } catch (error) {
    console.error('Error fetching profit analysis:', error);
    res.status(500).json({ error: 'Failed to fetch profit analysis' });
  }
});
// Get vendor performance data
//
// GET /vendors — returns { performance, comparison, trends } built from the
// pre-computed vendor_metrics table. 'trends' is intentionally empty (shape
// compatibility with the front-end).
router.get('/vendors', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    // Set cache control headers to prevent 304 (force a fresh body every time)
    res.set({
      'Cache-Control': 'no-cache, no-store, must-revalidate',
      'Pragma': 'no-cache',
      'Expires': '0'
    });
    console.log('Fetching vendor performance data...');
    // Get vendor performance metrics from the vendor_metrics table
    // (top 20 vendors by 30-day revenue).
    const { rows: rawPerformance } = await pool.query(`
      SELECT
        vendor_name as vendor,
        revenue_30d as sales_volume,
        avg_margin_30d as profit_margin,
        COALESCE(
          sales_30d / NULLIF(current_stock_units, 0),
          0
        ) as stock_turnover,
        product_count,
        -- Use actual growth metrics from the vendor_metrics table
        sales_growth_30d_vs_prev as growth
      FROM vendor_metrics
      WHERE revenue_30d > 0
      ORDER BY revenue_30d DESC
      LIMIT 20
    `);
    // Format the performance data (pg returns numerics as strings; coerce)
    const performance = rawPerformance.map(vendor => ({
      vendor: vendor.vendor,
      salesVolume: Number(vendor.sales_volume) || 0,
      profitMargin: Number(vendor.profit_margin) || 0,
      stockTurnover: Number(vendor.stock_turnover) || 0,
      productCount: Number(vendor.product_count) || 0,
      growth: Number(vendor.growth) || 0
    }));
    // Get vendor comparison metrics (sales per product vs margin),
    // top 10 vendors by revenue-per-active-product.
    const { rows: rawComparison } = await pool.query(`
      SELECT
        vendor_name as vendor,
        CASE
          WHEN active_product_count > 0
          THEN revenue_30d / active_product_count
          ELSE 0
        END as sales_per_product,
        avg_margin_30d as average_margin,
        product_count as size
      FROM vendor_metrics
      WHERE active_product_count > 0
      ORDER BY sales_per_product DESC
      LIMIT 10
    `);
    // Transform comparison data
    const comparison = rawComparison.map(item => ({
      vendor: item.vendor,
      salesPerProduct: Number(item.sales_per_product) || 0,
      averageMargin: Number(item.average_margin) || 0,
      size: Number(item.size) || 0
    }));
    console.log('Performance data ready. Sending response...');
    // Return complete structure that the front-end expects
    res.json({
      performance,
      comparison,
      // Add empty trends array to complete the structure
      trends: []
    });
  } catch (error) {
    console.error('Error fetching vendor performance:', error);
    res.status(500).json({ error: 'Failed to fetch vendor performance data' });
  }
});
// Get stock analysis data
//
// Responds with three datasets for the stock dashboard:
//  - turnoverByCategory: top categories by 30-day stock turn
//  - stockLevels: daily in/low/out-of-stock counts for the last 30 days
//  - criticalItems: replenishable products at/below 2x their safety stock
//
// NOTE: camelCase column aliases in the SQL are double-quoted because
// PostgreSQL folds unquoted identifiers to lowercase (turnoverRate would
// otherwise come back as "turnoverrate" and break the front-end key mapping).
router.get('/stock', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    console.log('Fetching stock analysis data...');
    // Use the new metrics tables to get data
    // Get turnover by category
    const { rows: turnoverByCategory } = await pool.query(`
      WITH category_metrics_with_path AS (
        WITH RECURSIVE category_path AS (
          SELECT
            c.cat_id,
            c.name,
            c.parent_id,
            c.name::text as path
          FROM categories c
          WHERE c.parent_id IS NULL
          UNION ALL
          SELECT
            c.cat_id,
            c.name,
            c.parent_id,
            (cp.path || ' > ' || c.name)::text
          FROM categories c
          JOIN category_path cp ON c.parent_id = cp.cat_id
        )
        SELECT
          cm.category_id,
          cm.category_name,
          cp.path as category_path,
          cm.current_stock_units,
          cm.sales_30d,
          cm.stock_turn_30d
        FROM category_metrics cm
        LEFT JOIN category_path cp ON cm.category_id = cp.cat_id
        WHERE cm.sales_30d > 0
      )
      SELECT
        category_name as category,
        COALESCE(stock_turn_30d, 0) as "turnoverRate",
        current_stock_units as "averageStock",
        sales_30d as "totalSales"
      FROM category_metrics_with_path
      ORDER BY stock_turn_30d DESC NULLS LAST
      LIMIT 10
    `);
    // Get stock levels over time (last 30 days); the generate_series date
    // spine guarantees one row per day even when a day has no snapshot.
    const { rows: stockLevels } = await pool.query(`
      WITH date_range AS (
        SELECT generate_series(
          CURRENT_DATE - INTERVAL '30 days',
          CURRENT_DATE,
          '1 day'::interval
        )::date AS date
      ),
      daily_stock_counts AS (
        SELECT
          snapshot_date,
          COUNT(DISTINCT pid) as total_products,
          COUNT(DISTINCT CASE WHEN eod_stock_quantity > 5 THEN pid END) as in_stock,
          COUNT(DISTINCT CASE WHEN eod_stock_quantity <= 5 AND eod_stock_quantity > 0 THEN pid END) as low_stock,
          COUNT(DISTINCT CASE WHEN eod_stock_quantity = 0 THEN pid END) as out_of_stock
        FROM daily_product_snapshots
        WHERE snapshot_date >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY snapshot_date
      )
      SELECT
        to_char(dr.date, 'YYYY-MM-DD') as date,
        COALESCE(dsc.in_stock, 0) as "inStock",
        COALESCE(dsc.low_stock, 0) as "lowStock",
        COALESCE(dsc.out_of_stock, 0) as "outOfStock"
      FROM date_range dr
      LEFT JOIN daily_stock_counts dsc ON dr.date = dsc.snapshot_date
      ORDER BY dr.date
    `);
    // Get critical items (products that need reordering); 999 acts as a
    // sentinel "no stockout expected" value when there is no sales velocity.
    const { rows: criticalItems } = await pool.query(`
      SELECT
        pm.title as product,
        pm.sku as sku,
        pm.current_stock as "stockQuantity",
        COALESCE(pm.config_safety_stock, 0) as "reorderPoint",
        COALESCE(pm.stockturn_30d, 0) as "turnoverRate",
        CASE
          WHEN pm.sales_velocity_daily > 0
          THEN ROUND(pm.current_stock / pm.sales_velocity_daily)
          ELSE 999
        END as "daysUntilStockout"
      FROM product_metrics pm
      WHERE pm.is_visible = true
        AND pm.is_replenishable = true
        AND pm.sales_30d > 0
        AND pm.current_stock <= pm.config_safety_stock * 2
      ORDER BY
        CASE
          WHEN pm.sales_velocity_daily > 0
          THEN pm.current_stock / pm.sales_velocity_daily
          ELSE 999
        END ASC,
        pm.revenue_30d DESC
      LIMIT 10
    `);
    res.json({
      turnoverByCategory,
      stockLevels,
      criticalItems
    });
  } catch (error) {
    console.error('Error fetching stock analysis:', error);
    res.status(500).json({ error: 'Failed to fetch stock analysis', details: error.message });
  }
});
// Get price analysis data
//
// Responds with { pricePoints, elasticity, recommendations } built from the
// last 30 days of orders.
//
// NOTE: camelCase column aliases are double-quoted because PostgreSQL folds
// unquoted identifiers to lowercase (salesVolume would otherwise come back
// as "salesvolume" and break the front-end field mapping).
router.get('/pricing', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    // Get price points analysis
    // NOTE(review): the WHERE clause on o.date effectively turns this
    // LEFT JOIN into an inner join — confirm that is intended.
    const { rows: pricePoints } = await pool.query(`
      SELECT
        CAST(p.price AS DECIMAL(15,3)) as price,
        CAST(SUM(o.quantity) AS DECIMAL(15,3)) as "salesVolume",
        CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
        c.name as category
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY p.price, c.name
      HAVING SUM(o.quantity) > 0
      ORDER BY revenue DESC
      LIMIT 50
    `);
    // Get price elasticity data (price changes vs demand)
    const { rows: elasticity } = await pool.query(`
      SELECT
        to_char(o.date, 'YYYY-MM-DD') as date,
        CAST(AVG(o.price) AS DECIMAL(15,3)) as price,
        CAST(SUM(o.quantity) AS DECIMAL(15,3)) as demand
      FROM orders o
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY to_char(o.date, 'YYYY-MM-DD')
      ORDER BY date
    `);
    // Get price optimization recommendations: +10% price for fast movers
    // (avg qty > 10), -10% for slow movers (avg qty < 2), unchanged otherwise.
    const { rows: recommendations } = await pool.query(`
      SELECT
        p.title as product,
        CAST(p.price AS DECIMAL(15,3)) as "currentPrice",
        CAST(
          ROUND(
            CASE
              WHEN AVG(o.quantity) > 10 THEN p.price * 1.1
              WHEN AVG(o.quantity) < 2 THEN p.price * 0.9
              ELSE p.price
            END, 2
          ) AS DECIMAL(15,3)
        ) as "recommendedPrice",
        CAST(
          ROUND(
            SUM(o.price * o.quantity) *
            CASE
              WHEN AVG(o.quantity) > 10 THEN 1.15
              WHEN AVG(o.quantity) < 2 THEN 0.95
              ELSE 1
            END, 2
          ) AS DECIMAL(15,3)
        ) as "potentialRevenue",
        CASE
          WHEN AVG(o.quantity) > 10 THEN 85
          WHEN AVG(o.quantity) < 2 THEN 75
          ELSE 65
        END as confidence
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY p.pid, p.price, p.title
      HAVING ABS(
        CAST(
          ROUND(
            CASE
              WHEN AVG(o.quantity) > 10 THEN p.price * 1.1
              WHEN AVG(o.quantity) < 2 THEN p.price * 0.9
              ELSE p.price
            END, 2
          ) AS DECIMAL(15,3)
        ) - CAST(p.price AS DECIMAL(15,3))
      ) > 0
      ORDER BY
        CAST(
          ROUND(
            SUM(o.price * o.quantity) *
            CASE
              WHEN AVG(o.quantity) > 10 THEN 1.15
              WHEN AVG(o.quantity) < 2 THEN 0.95
              ELSE 1
            END, 2
          ) AS DECIMAL(15,3)
        ) - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) DESC
      LIMIT 10
    `);
    res.json({ pricePoints, elasticity, recommendations });
  } catch (error) {
    console.error('Error fetching price analysis:', error);
    res.status(500).json({ error: 'Failed to fetch price analysis' });
  }
});
// Get category performance data
//
// Responds with { performance, distribution, trends }; each row carries the
// category's full "A > B > C" path built by a shared recursive CTE.
//
// NOTE: camelCase column aliases are double-quoted because PostgreSQL folds
// unquoted identifiers to lowercase (categoryPath would otherwise come back
// as "categorypath" and break the front-end field mapping).
router.get('/categories', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    // Common CTE for category paths
    const categoryPathCTE = `
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL
        UNION ALL
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      )
    `;
    // Get category performance metrics with full path; growth compares the
    // current 30-day window against the previous 30-day window.
    const { rows: performance } = await pool.query(`
      ${categoryPathCTE},
      monthly_sales AS (
        SELECT
          c.name,
          cp.path,
          SUM(CASE
            WHEN o.date >= CURRENT_DATE - INTERVAL '30 days'
            THEN o.price * o.quantity
            ELSE 0
          END) as current_month,
          SUM(CASE
            WHEN o.date >= CURRENT_DATE - INTERVAL '60 days'
            AND o.date < CURRENT_DATE - INTERVAL '30 days'
            THEN o.price * o.quantity
            ELSE 0
          END) as previous_month
        FROM products p
        LEFT JOIN orders o ON p.pid = o.pid
        JOIN product_categories pc ON p.pid = pc.pid
        JOIN categories c ON pc.cat_id = c.cat_id
        JOIN category_path cp ON c.cat_id = cp.cat_id
        WHERE o.date >= CURRENT_DATE - INTERVAL '60 days'
        GROUP BY c.name, cp.path
      )
      SELECT
        c.name as category,
        cp.path as "categoryPath",
        SUM(o.price * o.quantity) as revenue,
        SUM(o.price * o.quantity - p.cost_price * o.quantity) as profit,
        ROUND(
          ((ms.current_month / NULLIF(ms.previous_month, 0)) - 1) * 100,
          1
        ) as growth,
        COUNT(DISTINCT p.pid) as "productCount"
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      LEFT JOIN monthly_sales ms ON c.name = ms.name AND cp.path = ms.path
      WHERE o.date >= CURRENT_DATE - INTERVAL '60 days'
      GROUP BY c.name, cp.path, ms.current_month, ms.previous_month
      HAVING SUM(o.price * o.quantity) > 0
      ORDER BY revenue DESC
      LIMIT 10
    `);
    // Get category revenue distribution with full path
    const { rows: distribution } = await pool.query(`
      ${categoryPathCTE}
      SELECT
        c.name as category,
        cp.path as "categoryPath",
        SUM(o.price * o.quantity) as value
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY c.name, cp.path
      HAVING SUM(o.price * o.quantity) > 0
      ORDER BY value DESC
      LIMIT 6
    `);
    // Get category sales trends with full path; grouped on both 'Mon YYYY'
    // (display label) and 'YYYY-MM' (sortable key).
    const { rows: trends } = await pool.query(`
      ${categoryPathCTE}
      SELECT
        c.name as category,
        cp.path as "categoryPath",
        to_char(o.date, 'Mon YYYY') as month,
        SUM(o.price * o.quantity) as sales
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      WHERE o.date >= CURRENT_DATE - INTERVAL '6 months'
      GROUP BY
        c.name,
        cp.path,
        to_char(o.date, 'Mon YYYY'),
        to_char(o.date, 'YYYY-MM')
      ORDER BY
        c.name,
        to_char(o.date, 'YYYY-MM')
    `);
    res.json({ performance, distribution, trends });
  } catch (error) {
    console.error('Error fetching category performance:', error);
    res.status(500).json({ error: 'Failed to fetch category performance' });
  }
});
// Expose the analytics routes to the host Express app.
module.exports = router;

View File

@@ -1,284 +1 @@
const express = require('express');
const router = express.Router();
const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
// --- Configuration & Helpers ---
// Pagination bounds for the list endpoints on this router.
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200;
// Maps query keys to DB columns in brand_metrics
const COLUMN_MAP = {
  brandName: { dbCol: 'bm.brand_name', type: 'string' },
  productCount: { dbCol: 'bm.product_count', type: 'number' },
  activeProductCount: { dbCol: 'bm.active_product_count', type: 'number' },
  replenishableProductCount: { dbCol: 'bm.replenishable_product_count', type: 'number' },
  currentStockUnits: { dbCol: 'bm.current_stock_units', type: 'number' },
  currentStockCost: { dbCol: 'bm.current_stock_cost', type: 'number' },
  currentStockRetail: { dbCol: 'bm.current_stock_retail', type: 'number' },
  sales7d: { dbCol: 'bm.sales_7d', type: 'number' },
  revenue7d: { dbCol: 'bm.revenue_7d', type: 'number' },
  sales30d: { dbCol: 'bm.sales_30d', type: 'number' },
  revenue30d: { dbCol: 'bm.revenue_30d', type: 'number' },
  profit30d: { dbCol: 'bm.profit_30d', type: 'number' },
  cogs30d: { dbCol: 'bm.cogs_30d', type: 'number' },
  sales365d: { dbCol: 'bm.sales_365d', type: 'number' },
  revenue365d: { dbCol: 'bm.revenue_365d', type: 'number' },
  lifetimeSales: { dbCol: 'bm.lifetime_sales', type: 'number' },
  lifetimeRevenue: { dbCol: 'bm.lifetime_revenue', type: 'number' },
  avgMargin30d: { dbCol: 'bm.avg_margin_30d', type: 'number' },
  // Growth metrics
  salesGrowth30dVsPrev: { dbCol: 'bm.sales_growth_30d_vs_prev', type: 'number' },
  revenueGrowth30dVsPrev: { dbCol: 'bm.revenue_growth_30d_vs_prev', type: 'number' },
  // Add aliases if needed
  name: { dbCol: 'bm.brand_name', type: 'string' },
  // Add status for filtering
  status: { dbCol: 'brand_status', type: 'string' },
};
// Resolve a query-string key to its column mapping, or null when unknown.
// Uses an own-property check so inherited Object.prototype keys (e.g.
// ?toString=... or ?constructor=...) can never match and leak a function
// into the SQL builder downstream.
function getSafeColumnInfo(queryParamKey) {
  return Object.prototype.hasOwnProperty.call(COLUMN_MAP, queryParamKey)
    ? COLUMN_MAP[queryParamKey]
    : null;
}
// --- Route Handlers ---
// GET /brands-aggregate/filter-options (Just brands list for now)
// Returns the distinct brand names plus the derived status values used to
// populate the list page's filter dropdowns.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /brands-aggregate/filter-options');
  try {
    // The two lookups are independent of each other, so run them in parallel.
    const [brandResult, statusResult] = await Promise.all([
      // Get brand names
      pool.query(`
      SELECT DISTINCT brand_name FROM public.brand_metrics ORDER BY brand_name
    `),
      // Get status values - calculate them since they're derived
      pool.query(`
      SELECT DISTINCT
        CASE
          WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
          WHEN active_product_count > 0 THEN 'inactive'
          ELSE 'pending'
        END as status
      FROM public.brand_metrics
      ORDER BY status
    `)
    ]);
    const brands = brandResult.rows.map((row) => row.brand_name);
    const statuses = statusResult.rows.map((row) => row.status);
    res.json({ brands, statuses });
  } catch (error) {
    console.error('Error fetching brand filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
// GET /brands-aggregate/stats (Overall brand stats)
// Aggregates brand_metrics into one summary row for the dashboard header.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /brands-aggregate/stats');
  try {
    const { rows: [stats] } = await pool.query(`
      SELECT
        COUNT(*) AS total_brands,
        COUNT(CASE WHEN active_product_count > 0 THEN 1 END) AS active_brands,
        SUM(active_product_count) AS total_active_products,
        SUM(current_stock_cost) AS total_stock_value,
        -- Weighted Average Margin
        SUM(profit_30d) * 100.0 / NULLIF(SUM(revenue_30d), 0) AS overall_avg_margin_weighted
      FROM public.brand_metrics bm
    `);
    // The || 0 fallbacks guard against NULL aggregates on an empty table;
    // parseInt always gets an explicit base-10 radix.
    res.json({
      totalBrands: parseInt(stats?.total_brands || 0, 10),
      activeBrands: parseInt(stats?.active_brands || 0, 10),
      totalActiveProducts: parseInt(stats?.total_active_products || 0, 10),
      totalValue: parseFloat(stats?.total_stock_value || 0),
      avgMargin: parseFloat(stats?.overall_avg_margin_weighted || 0),
    });
  } catch (error) {
    console.error('Error fetching brand stats:', error);
    res.status(500).json({ error: 'Failed to fetch brand stats.' });
  }
});
// GET /brands-aggregate/ (List brands)
// Supports pagination (?page, ?limit), sorting (?sort, ?order) and column
// filters resolved through COLUMN_MAP. Filter keys may carry an operator
// suffix, e.g. revenue30d_gte=1000, brandName_ilike=acme, status_in=active,pending.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /brands-aggregate received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10) || 1;
    let limit = parseInt(req.query.limit, 10) || DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT);
    const offset = (page - 1) * limit;
    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'brandName'; // Default sort
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
    const sortColumn = sortColumnInfo ? sortColumnInfo.dbCol : 'bm.brand_name';
    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
    const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
    const sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;
    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;
    // Build conditions based on req.query, using COLUMN_MAP and parseValue
    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order'].includes(key)) continue;
      let filterKey = key;
      let operator = '='; // Default operator
      const value = req.query[key];
      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1];
        operator = operatorMatch[2];
      }
      const columnInfo = getSafeColumnInfo(filterKey);
      if (columnInfo) {
        const dbColumn = columnInfo.dbCol;
        const valueType = columnInfo.type;
        // Declared OUTSIDE the try so the catch can reference them safely.
        // (Previously these were block-scoped inside the try, so the catch's
        // rollback referencing needsParam threw a ReferenceError instead of
        // skipping the bad filter.)
        let conditionFragment = '';
        let needsParam = true;
        // Snapshot placeholder state so a failed parse can be rolled back
        // without leaving orphaned $n placeholders or params behind.
        const savedParamCounter = paramCounter;
        const savedParamsLength = params.length;
        try {
          switch (operator.toLowerCase()) { // Normalize operator
            case 'eq': operator = '='; break;
            case 'ne': operator = '<>'; break;
            case 'gt': operator = '>'; break;
            case 'gte': operator = '>='; break;
            case 'lt': operator = '<'; break;
            case 'lte': operator = '<='; break;
            case 'like': operator = 'LIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'ilike': operator = 'ILIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'between': {
              const [val1, val2] = String(value).split(',');
              if (val1 !== undefined && val2 !== undefined) {
                conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
                params.push(parseValue(val1, valueType), parseValue(val2, valueType));
                needsParam = false;
              } else continue;
              break;
            }
            case 'in': {
              const inValues = String(value).split(',');
              if (inValues.length > 0) {
                const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
                conditionFragment = `${dbColumn} IN (${placeholders})`;
                params.push(...inValues.map(v => parseValue(v, valueType)));
                needsParam = false;
              } else continue;
              break;
            }
            default: operator = '='; break;
          }
          if (needsParam) {
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
            params.push(parseValue(value, valueType));
          } else if (!conditionFragment) { // For LIKE/ILIKE (param was already pushed above)
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
          }
          if (conditionFragment) {
            conditions.push(`(${conditionFragment})`);
          }
        } catch (parseError) {
          console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
          // Restore $n numbering and the params array so later filters stay
          // aligned with their placeholders.
          paramCounter = savedParamCounter;
          params.length = savedParamsLength;
        }
      } else {
        console.warn(`Invalid filter key ignored: ${key}`);
      }
    }
    // --- Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
    // Status calculation similar to vendors
    const statusCase = `
      CASE
        WHEN active_product_count > 0 AND sales_30d > 0 THEN 'active'
        WHEN active_product_count > 0 THEN 'inactive'
        ELSE 'pending'
      END as brand_status
    `;
    // Wrap brand_metrics in a derived table so the computed brand_status
    // column is filterable in the WHERE clause.
    const baseSql = `
      FROM (
        SELECT
          bm.*,
          ${statusCase}
        FROM public.brand_metrics bm
      ) bm
      ${whereClause}
    `;
    const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
    const dataSql = `
      WITH brand_data AS (
        SELECT
          bm.*,
          ${statusCase}
        FROM public.brand_metrics bm
      )
      SELECT bm.*
      FROM brand_data bm
      ${whereClause}
      ${sortClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];
    console.log("Count SQL:", countSql, params);
    console.log("Data SQL:", dataSql, dataParams);
    const [countResult, dataResult] = await Promise.all([
      pool.query(countSql, params),
      pool.query(dataSql, dataParams)
    ]);
    const total = parseInt(countResult.rows[0].total, 10);
    const brands = dataResult.rows.map(row => {
      // Create a new object with both snake_case and camelCase keys
      const transformedRow = { ...row }; // Start with original data
      for (const key in row) {
        // Skip null/undefined values
        if (row[key] === null || row[key] === undefined) {
          continue; // Original already has the null value
        }
        // Transform keys to match frontend expectations (add camelCase versions)
        // First handle cases like sales_7d -> sales7d
        let camelKey = key.replace(/_(\d+[a-z])/g, '$1');
        // Then handle regular snake_case -> camelCase
        camelKey = camelKey.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
        if (camelKey !== key) { // Only add if different from original
          transformedRow[camelKey] = row[key];
        }
      }
      return transformedRow;
    });
    // --- Respond ---
    res.json({
      brands,
      pagination: { total, pages: Math.ceil(total / limit), currentPage: page, limit },
    });
  } catch (error) {
    console.error('Error fetching brand metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch brand metrics.' });
  }
});
// GET /brands-aggregate/:name (Get single brand metric)
// Implement if needed, remember to URL-decode the name parameter
// Expose the brand-aggregate routes to the host Express app.
module.exports = router;

View File

@@ -1,363 +0,0 @@
const express = require('express');
const router = express.Router();
const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
// --- Configuration & Helpers ---
// Pagination bounds for the list endpoints on this router.
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 5000; // Increase this to allow retrieving all categories in one request
// Maps query keys to DB columns in category_metrics and categories tables
const COLUMN_MAP = {
  categoryId: { dbCol: 'cm.category_id', type: 'integer' },
  categoryName: { dbCol: 'cm.category_name', type: 'string' }, // From aggregate table
  categoryType: { dbCol: 'cm.category_type', type: 'integer' }, // From aggregate table
  parentId: { dbCol: 'cm.parent_id', type: 'integer' }, // From aggregate table
  parentName: { dbCol: 'p.name', type: 'string' }, // Requires JOIN to categories
  productCount: { dbCol: 'cm.product_count', type: 'number' },
  activeProductCount: { dbCol: 'cm.active_product_count', type: 'number' },
  replenishableProductCount: { dbCol: 'cm.replenishable_product_count', type: 'number' },
  currentStockUnits: { dbCol: 'cm.current_stock_units', type: 'number' },
  currentStockCost: { dbCol: 'cm.current_stock_cost', type: 'number' },
  currentStockRetail: { dbCol: 'cm.current_stock_retail', type: 'number' },
  sales7d: { dbCol: 'cm.sales_7d', type: 'number' },
  revenue7d: { dbCol: 'cm.revenue_7d', type: 'number' },
  sales30d: { dbCol: 'cm.sales_30d', type: 'number' },
  revenue30d: { dbCol: 'cm.revenue_30d', type: 'number' },
  profit30d: { dbCol: 'cm.profit_30d', type: 'number' },
  cogs30d: { dbCol: 'cm.cogs_30d', type: 'number' },
  sales365d: { dbCol: 'cm.sales_365d', type: 'number' },
  revenue365d: { dbCol: 'cm.revenue_365d', type: 'number' },
  lifetimeSales: { dbCol: 'cm.lifetime_sales', type: 'number' },
  lifetimeRevenue: { dbCol: 'cm.lifetime_revenue', type: 'number' },
  avgMargin30d: { dbCol: 'cm.avg_margin_30d', type: 'number' },
  stockTurn30d: { dbCol: 'cm.stock_turn_30d', type: 'number' },
  // Growth metrics
  salesGrowth30dVsPrev: { dbCol: 'cm.sales_growth_30d_vs_prev', type: 'number' },
  revenueGrowth30dVsPrev: { dbCol: 'cm.revenue_growth_30d_vs_prev', type: 'number' },
  // Add status from the categories table for filtering
  status: { dbCol: 'c.status', type: 'string' },
};
// Resolve a query-string key to its column mapping, or null when unknown.
// Uses an own-property check so inherited Object.prototype keys (e.g.
// ?toString=... or ?constructor=...) can never match and leak a function
// into the SQL builder downstream.
function getSafeColumnInfo(queryParamKey) {
  return Object.prototype.hasOwnProperty.call(COLUMN_MAP, queryParamKey)
    ? COLUMN_MAP[queryParamKey]
    : null;
}
// Type Labels (Consider moving to a shared config or fetching from DB)
// Human-readable labels keyed by the numeric category type codes used in
// categories.type / category_metrics.category_type.
const TYPE_LABELS = {
  10: 'Section', 11: 'Category', 12: 'Subcategory', 13: 'Sub-subcategory',
  1: 'Company', 2: 'Line', 3: 'Subline', 40: 'Artist', // From old schema comments
  20: 'Theme', 21: 'Subtheme' // Additional types from categories.js
};
// --- Route Handlers ---
// GET /categories-aggregate/filter-options
// Returns category type options (with labels), distinct status values, and
// per-type counts used to populate the list page's filter dropdowns.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /categories-aggregate/filter-options');
  try {
    // The three lookups are independent of each other, so run them in parallel.
    const [typeResult, statusResult, typeCountResult] = await Promise.all([
      // Fetch distinct types directly from the aggregate table if reliable
      // Or join with categories table if source of truth is needed
      pool.query(`
      SELECT DISTINCT category_type
      FROM public.category_metrics
      ORDER BY category_type
    `),
      // Add status options for filtering (from categories.js)
      pool.query(`
      SELECT DISTINCT status FROM public.categories ORDER BY status
    `),
      // Get type counts (from categories.js)
      pool.query(`
      SELECT
        type,
        COUNT(*)::integer as count
      FROM categories
      GROUP BY type
      ORDER BY type
    `)
    ]);
    const types = typeResult.rows.map((row) => ({
      value: row.category_type,
      label: TYPE_LABELS[row.category_type] || `Type ${row.category_type}` // Add labels
    }));
    const statuses = statusResult.rows.map((row) => row.status);
    const typeCounts = typeCountResult.rows.map((row) => ({
      type: row.type,
      count: row.count
    }));
    res.json({ types, statuses, typeCounts });
  } catch (error) {
    console.error('Error fetching category filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
// GET /categories-aggregate/stats
// Aggregates category_metrics (joined to categories for status) into a single
// summary row for the dashboard header.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /categories-aggregate/stats');
  try {
    // Calculate stats directly from the aggregate table
    const { rows: [stats] } = await pool.query(`
      SELECT
        COUNT(*) AS total_categories,
        -- Count active based on the source categories table status
        COUNT(CASE WHEN c.status = 'active' THEN cm.category_id END) AS active_categories,
        SUM(cm.active_product_count) AS total_active_products, -- Sum from aggregates
        SUM(cm.current_stock_cost) AS total_stock_value, -- Sum from aggregates
        -- Weighted Average Margin (Revenue as weight)
        SUM(cm.profit_30d) * 100.0 / NULLIF(SUM(cm.revenue_30d), 0) AS overall_avg_margin_weighted,
        -- Simple Average Margin (less accurate if categories vary greatly in size)
        AVG(NULLIF(cm.avg_margin_30d, 0)) AS overall_avg_margin_simple
        -- Growth rate can be calculated from 30d vs previous 30d revenue if needed
      FROM public.category_metrics cm
      JOIN public.categories c ON cm.category_id = c.cat_id -- Join to check category status
    `);
    // The || 0 fallbacks guard against NULL aggregates on an empty table;
    // parseInt always gets an explicit base-10 radix.
    res.json({
      totalCategories: parseInt(stats?.total_categories || 0, 10),
      activeCategories: parseInt(stats?.active_categories || 0, 10), // Based on categories.status
      totalActiveProducts: parseInt(stats?.total_active_products || 0, 10),
      totalValue: parseFloat(stats?.total_stock_value || 0),
      // Choose which avg margin calculation to expose
      avgMargin: parseFloat(stats?.overall_avg_margin_weighted || stats?.overall_avg_margin_simple || 0)
      // Growth rate could be added if we implement the calculation
    });
  } catch (error) {
    console.error('Error fetching category stats:', error);
    res.status(500).json({ error: 'Failed to fetch category stats.' });
  }
});
// GET /categories-aggregate/ (List categories)
// Supports pagination (?page, ?limit), sorting (?sort, ?order) and column
// filters resolved through COLUMN_MAP, with optional operator suffixes
// (e.g. categoryType_eq=11, sales30d_gte=5). Defaults to a hierarchical sort.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /categories-aggregate received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10) || 1;
    let limit = parseInt(req.query.limit, 10) || DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT);
    const offset = (page - 1) * limit;
    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'categoryName';
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
    // Hierarchical sorting logic from categories.js
    const hierarchicalSortOrder = `
      ORDER BY
        CASE
          WHEN cm.category_type = 10 THEN 1 -- sections first
          WHEN cm.category_type = 11 THEN 2 -- categories second
          WHEN cm.category_type = 12 THEN 3 -- subcategories third
          WHEN cm.category_type = 13 THEN 4 -- subsubcategories fourth
          WHEN cm.category_type = 20 THEN 5 -- themes fifth
          WHEN cm.category_type = 21 THEN 6 -- subthemes last
          ELSE 7
        END,
        cm.category_name ASC
    `;
    // Use hierarchical sort as default
    let sortClause = hierarchicalSortOrder;
    // Override with custom sort if specified
    if (sortColumnInfo && sortQueryKey !== 'categoryName') {
      const sortColumn = sortColumnInfo.dbCol;
      const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
      const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
      sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;
    }
    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;
    console.log("Starting to process filters from query:", req.query);
    // Add filters based on req.query using COLUMN_MAP and parseValue
    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order'].includes(key)) continue;
      let filterKey = key;
      let operator = '='; // Default operator
      const value = req.query[key];
      console.log(`Processing filter key: "${key}" with value: "${value}"`);
      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1];
        operator = operatorMatch[2];
        console.log(`Parsed filter key: "${filterKey}" with operator: "${operator}"`);
      }
      // parentName filtering works because the parent join below is always included.
      const columnInfo = getSafeColumnInfo(filterKey);
      if (columnInfo) {
        console.log(`Column info for "${filterKey}":`, columnInfo);
        const dbColumn = columnInfo.dbCol;
        const valueType = columnInfo.type;
        // Declared OUTSIDE the try so the catch can reference them safely.
        // (Previously these were block-scoped inside the try, so the catch's
        // rollback referencing needsParam threw a ReferenceError instead of
        // skipping the bad filter.)
        let conditionFragment = '';
        let needsParam = true;
        // Snapshot placeholder state so a failed parse can be rolled back
        // without leaving orphaned $n placeholders or params behind.
        const savedParamCounter = paramCounter;
        const savedParamsLength = params.length;
        try {
          switch (operator.toLowerCase()) {
            case 'eq': operator = '='; break;
            case 'ne': operator = '<>'; break;
            case 'gt': operator = '>'; break;
            case 'gte': operator = '>='; break;
            case 'lt': operator = '<'; break;
            case 'lte': operator = '<='; break;
            case 'like': operator = 'LIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'ilike': operator = 'ILIKE'; needsParam = false; params.push(`%${parseValue(value, valueType)}%`); break;
            case 'between': {
              const [val1, val2] = String(value).split(',');
              if (val1 !== undefined && val2 !== undefined) {
                conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
                params.push(parseValue(val1, valueType), parseValue(val2, valueType));
                needsParam = false;
              } else continue;
              break;
            }
            case 'in': {
              const inValues = String(value).split(',');
              if (inValues.length > 0) {
                const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
                conditionFragment = `${dbColumn} IN (${placeholders})`;
                params.push(...inValues.map(v => parseValue(v, valueType)));
                needsParam = false;
              } else continue;
              break;
            }
            default: operator = '='; break;
          }
          if (needsParam) {
            try {
              // Special handling for categoryType to ensure it works
              if (filterKey === 'categoryType') {
                console.log(`Special handling for categoryType: ${value}`);
                // Force conversion to integer
                const numericValue = parseInt(value, 10);
                if (!isNaN(numericValue)) {
                  console.log(`Successfully converted categoryType to integer: ${numericValue}`);
                  conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
                  params.push(numericValue);
                } else {
                  console.error(`Failed to convert categoryType to integer: "${value}"`);
                  throw new Error(`Invalid categoryType value: "${value}"`);
                }
              } else {
                // Normal handling for other fields
                const parsedValue = parseValue(value, valueType);
                console.log(`Parsed "${value}" as ${valueType}: ${parsedValue}`);
                conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
                params.push(parsedValue);
              }
            } catch (innerError) {
              console.error(`Failed to parse "${value}" as ${valueType}:`, innerError);
              throw innerError;
            }
          } else if (!conditionFragment) { // For LIKE/ILIKE where needsParam is false
            conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`; // param was already pushed above
          }
          if (conditionFragment) {
            console.log(`Adding condition: ${conditionFragment}`);
            conditions.push(`(${conditionFragment})`);
          }
        } catch (parseError) {
          console.error(`Skipping filter for key "${key}" due to parsing error:`, parseError);
          // Restore $n numbering and the params array so later filters stay
          // aligned with their placeholders.
          paramCounter = savedParamCounter;
          params.length = savedParamsLength;
        }
      } else {
        console.warn(`Invalid filter key ignored: "${key}", not found in COLUMN_MAP`);
      }
    }
    // --- Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
    // Always include the category and parent joins for status and parent_name
    const joinSql = `
      JOIN public.categories c ON cm.category_id = c.cat_id
      LEFT JOIN public.categories p ON cm.parent_id = p.cat_id
    `;
    const baseSql = `
      FROM public.category_metrics cm
      ${joinSql}
      ${whereClause}
    `;
    const countSql = `SELECT COUNT(*) AS total ${baseSql}`;
    const dataSql = `
      SELECT
        cm.*,
        c.status,
        c.description,
        p.name as parent_name,
        p.type as parent_type
      ${baseSql}
      ${sortClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];
    console.log("Count SQL:", countSql, params);
    console.log("Data SQL:", dataSql, dataParams);
    const [countResult, dataResult] = await Promise.all([
      pool.query(countSql, params),
      pool.query(dataSql, dataParams)
    ]);
    const total = parseInt(countResult.rows[0].total, 10);
    const categories = dataResult.rows.map(row => {
      // Create a new object with both snake_case and camelCase keys
      const transformedRow = { ...row }; // Start with original data
      for (const key in row) {
        // Skip null/undefined values
        if (row[key] === null || row[key] === undefined) {
          continue; // Original already has the null value
        }
        // Transform keys to match frontend expectations (add camelCase versions)
        // First handle cases like sales_7d -> sales7d
        let camelKey = key.replace(/_(\d+[a-z])/g, '$1');
        // Then handle regular snake_case -> camelCase
        camelKey = camelKey.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
        if (camelKey !== key) { // Only add if different from original
          transformedRow[camelKey] = row[key];
        }
      }
      return transformedRow;
    });
    // --- Respond ---
    res.json({
      categories,
      pagination: { total, pages: Math.ceil(total / limit), currentPage: page, limit },
    });
  } catch (error) {
    console.error('Error fetching category metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch category metrics.' });
  }
});
// Expose the category-aggregate routes to the host Express app.
module.exports = router;

View File

@@ -1,325 +0,0 @@
const express = require('express');
const router = express.Router();
// Debug middleware
// Logs every request that reaches the config router before passing it on.
router.use(function logConfigRequest(req, res, next) {
  console.log(`[Config Route] ${req.method} ${req.path}`);
  next();
});
// ===== GLOBAL SETTINGS =====
// Get all global settings
// Returns every row of settings_global, ordered by setting_key.
router.get('/global', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Fetching global settings...');
    const result = await pool.query('SELECT * FROM settings_global ORDER BY setting_key');
    const settings = result.rows;
    console.log('[Config Route] Sending global settings:', settings);
    res.json(settings);
  } catch (error) {
    console.error('[Config Route] Error fetching global settings:', error);
    res.status(500).json({ error: 'Failed to fetch global settings', details: error.message });
  }
});
// Update global settings
// Expects an array of { setting_key, setting_value } objects and applies all
// updates inside a single transaction (all-or-nothing).
router.put('/global', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Updating global settings:', req.body);
    // Validate request
    if (!Array.isArray(req.body)) {
      return res.status(400).json({ error: 'Request body must be an array of settings' });
    }
    // Begin transaction
    const client = await pool.connect();
    try {
      await client.query('BEGIN');
      for (const setting of req.body) {
        // Only reject missing values: a nullish check (== null matches both
        // null and undefined) is used instead of a truthiness check so that
        // legitimate falsy values such as '', 0 or false are still accepted.
        if (!setting.setting_key || setting.setting_value == null) {
          throw new Error('Each setting must have a key and value');
        }
        // NOTE(review): an unknown setting_key updates zero rows silently —
        // confirm whether that should be reported back as an error.
        await client.query(
          `UPDATE settings_global
           SET setting_value = $1,
               updated_at = CURRENT_TIMESTAMP
           WHERE setting_key = $2`,
          [setting.setting_value, setting.setting_key]
        );
      }
      await client.query('COMMIT');
      res.json({ success: true });
    } catch (error) {
      // Any failure rolls back the whole batch before rethrowing.
      await client.query('ROLLBACK');
      throw error;
    } finally {
      client.release();
    }
  } catch (error) {
    console.error('[Config Route] Error updating global settings:', error);
    res.status(500).json({ error: 'Failed to update global settings', details: error.message });
  }
});
// ===== PRODUCT SETTINGS =====
// Get product settings with pagination and optional search on pid/title
router.get('/products', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Fetching product settings...');
    // Parse paging params with an explicit radix and clamp to >= 1 so a
    // malformed query string can never yield a negative LIMIT/OFFSET.
    const page = Math.max(1, parseInt(req.query.page, 10) || 1);
    const pageSize = Math.max(1, parseInt(req.query.pageSize, 10) || 10);
    const offset = (page - 1) * pageSize;
    const search = req.query.search || '';
    // Get total count for pagination
    const countQuery = search
      ? `SELECT COUNT(*) FROM settings_product sp
         JOIN products p ON sp.pid::text = p.pid::text
         WHERE sp.pid::text ILIKE $1 OR p.title ILIKE $1`
      : 'SELECT COUNT(*) FROM settings_product';
    const countParams = search ? [`%${search}%`] : [];
    const { rows: countResult } = await pool.query(countQuery, countParams);
    const total = parseInt(countResult[0].count, 10);
    // Get paginated settings (joined with products for the display title)
    const query = search
      ? `SELECT sp.*, p.title as product_name
         FROM settings_product sp
         JOIN products p ON sp.pid::text = p.pid::text
         WHERE sp.pid::text ILIKE $1 OR p.title ILIKE $1
         ORDER BY sp.pid
         LIMIT $2 OFFSET $3`
      : `SELECT sp.*, p.title as product_name
         FROM settings_product sp
         JOIN products p ON sp.pid::text = p.pid::text
         ORDER BY sp.pid
         LIMIT $1 OFFSET $2`;
    const queryParams = search
      ? [`%${search}%`, pageSize, offset]
      : [pageSize, offset];
    const { rows } = await pool.query(query, queryParams);
    const response = {
      items: rows,
      total,
      page,
      pageSize
    };
    console.log(`[Config Route] Sending ${rows.length} product settings`);
    res.json(response);
  } catch (error) {
    console.error('[Config Route] Error fetching product settings:', error);
    res.status(500).json({ error: 'Failed to fetch product settings', details: error.message });
  }
});
// Update (upsert) per-product forecast settings for a single pid
router.put('/products/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { pid } = req.params;
    const { lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast } = req.body;
    console.log(`[Config Route] Updating product settings for ${pid}:`, req.body);
    // Check if product exists
    // NOTE(review): check-then-insert is not atomic; concurrent first-time
    // writes for the same pid could race — consider INSERT ... ON CONFLICT
    // once the table's unique constraint is confirmed.
    const { rows: checkProduct } = await pool.query(
      'SELECT 1 FROM settings_product WHERE pid::text = $1',
      [pid]
    );
    if (checkProduct.length === 0) {
      // Insert if it doesn't exist
      await pool.query(
        `INSERT INTO settings_product
         (pid, lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast)
         VALUES ($1, $2, $3, $4, $5, $6)`,
        [pid, lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast]
      );
    } else {
      // Update if it exists
      await pool.query(
        `UPDATE settings_product
         SET lead_time_days = $2,
             days_of_stock = $3,
             safety_stock = $4,
             forecast_method = $5,
             exclude_from_forecast = $6,
             updated_at = CURRENT_TIMESTAMP
         WHERE pid::text = $1`,
        [pid, lead_time_days, days_of_stock, safety_stock, forecast_method, exclude_from_forecast]
      );
    }
    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error updating product settings for ${req.params.pid}:`, error);
    res.status(500).json({ error: 'Failed to update product settings', details: error.message });
  }
});
// Reset product settings to defaults
router.post('/products/:pid/reset', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { pid } = req.params;
    console.log(`[Config Route] Resetting product settings for ${pid}`);
    // Reset by setting everything to null/default
    await pool.query(
      `UPDATE settings_product
       SET lead_time_days = NULL,
           days_of_stock = NULL,
           safety_stock = 0,
           forecast_method = NULL,
           exclude_from_forecast = false,
           updated_at = CURRENT_TIMESTAMP
       WHERE pid::text = $1`,
      [pid]
    );
    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error resetting product settings for ${req.params.pid}:`, error);
    res.status(500).json({ error: 'Failed to reset product settings', details: error.message });
  }
});
// ===== VENDOR SETTINGS =====
// Get vendor settings with pagination and optional search on vendor name
router.get('/vendors', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    console.log('[Config Route] Fetching vendor settings...');
    // Explicit radix plus a floor of 1 guards against a malformed query
    // string producing a negative LIMIT/OFFSET.
    const page = Math.max(1, parseInt(req.query.page, 10) || 1);
    const pageSize = Math.max(1, parseInt(req.query.pageSize, 10) || 10);
    const offset = (page - 1) * pageSize;
    const search = req.query.search || '';
    // Get total count for pagination
    const countQuery = search
      ? 'SELECT COUNT(*) FROM settings_vendor WHERE vendor ILIKE $1'
      : 'SELECT COUNT(*) FROM settings_vendor';
    const countParams = search ? [`%${search}%`] : [];
    const { rows: countResult } = await pool.query(countQuery, countParams);
    const total = parseInt(countResult[0].count, 10);
    // Get paginated settings
    const query = search
      ? `SELECT * FROM settings_vendor
         WHERE vendor ILIKE $1
         ORDER BY vendor
         LIMIT $2 OFFSET $3`
      : `SELECT * FROM settings_vendor
         ORDER BY vendor
         LIMIT $1 OFFSET $2`;
    const queryParams = search
      ? [`%${search}%`, pageSize, offset]
      : [pageSize, offset];
    const { rows } = await pool.query(query, queryParams);
    const response = {
      items: rows,
      total,
      page,
      pageSize
    };
    console.log(`[Config Route] Sending ${rows.length} vendor settings`);
    res.json(response);
  } catch (error) {
    console.error('[Config Route] Error fetching vendor settings:', error);
    res.status(500).json({ error: 'Failed to fetch vendor settings', details: error.message });
  }
});
// Update (upsert) default lead-time/stock settings for a single vendor
router.put('/vendors/:vendor', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const vendor = req.params.vendor;
    const { default_lead_time_days, default_days_of_stock } = req.body;
    console.log(`[Config Route] Updating vendor settings for ${vendor}:`, req.body);
    // Check if vendor exists
    // NOTE(review): check-then-insert is not atomic; consider
    // INSERT ... ON CONFLICT if concurrent writes are possible.
    const { rows: checkVendor } = await pool.query(
      'SELECT 1 FROM settings_vendor WHERE vendor = $1',
      [vendor]
    );
    if (checkVendor.length === 0) {
      // Insert if it doesn't exist
      await pool.query(
        `INSERT INTO settings_vendor
         (vendor, default_lead_time_days, default_days_of_stock)
         VALUES ($1, $2, $3)`,
        [vendor, default_lead_time_days, default_days_of_stock]
      );
    } else {
      // Update if it exists
      await pool.query(
        `UPDATE settings_vendor
         SET default_lead_time_days = $2,
             default_days_of_stock = $3,
             updated_at = CURRENT_TIMESTAMP
         WHERE vendor = $1`,
        [vendor, default_lead_time_days, default_days_of_stock]
      );
    }
    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error updating vendor settings for ${req.params.vendor}:`, error);
    res.status(500).json({ error: 'Failed to update vendor settings', details: error.message });
  }
});
// Reset vendor settings to defaults
router.post('/vendors/:vendor/reset', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const vendor = req.params.vendor;
    console.log(`[Config Route] Resetting vendor settings for ${vendor}`);
    // Reset by setting everything to null
    await pool.query(
      `UPDATE settings_vendor
       SET default_lead_time_days = NULL,
           default_days_of_stock = NULL,
           updated_at = CURRENT_TIMESTAMP
       WHERE vendor = $1`,
      [vendor]
    );
    res.json({ success: true });
  } catch (error) {
    console.error(`[Config Route] Error resetting vendor settings for ${req.params.vendor}:`, error);
    res.status(500).json({ error: 'Failed to reset vendor settings', details: error.message });
  }
});
// Export the router
module.exports = router;

File diff suppressed because it is too large Load Diff

View File

@@ -1,440 +0,0 @@
const express = require('express');
const router = express.Router();
const { spawn } = require('child_process');
const path = require('path');
const db = require('../utils/db'); // NOTE(review): appears unused in this file — confirm before removing
// Debug middleware MUST be first
router.use((req, res, next) => {
  console.log(`[CSV Route Debug] ${req.method} ${req.path}`);
  next();
});
// Store active processes and their progress.
// Only activeFullUpdate/activeFullReset are read by the routes below;
// activeImport/importProgress look unused here — TODO confirm other references.
let activeImport = null;
let importProgress = null;
let activeFullUpdate = null;
let activeFullReset = null;
// SSE clients for progress updates. Each Set holds open `res` streams.
// Only fullUpdateClients/fullResetClients are used in this file; the
// remaining sets are presumably legacy or consumed elsewhere — verify.
const updateClients = new Set();
const importClients = new Set();
const resetClients = new Set();
const resetMetricsClients = new Set();
const calculateMetricsClients = new Set();
const fullUpdateClients = new Set();
const fullResetClients = new Set();
// Broadcast one progress payload to every connected SSE client.
// Strings are forwarded verbatim; anything else is JSON-encoded.
// Clients whose socket write throws are silently dropped from the set.
function sendProgressToClients(clients, data) {
  const payload = typeof data === 'string' ? data : JSON.stringify(data);
  const message = `data: ${payload}\n\n`;
  for (const client of clients) {
    try {
      client.write(message);
      // Push the event out immediately instead of waiting on buffering
      if (typeof client.flush === 'function') {
        client.flush();
      }
    } catch (error) {
      // A failed write means the client is gone — remove it quietly
      clients.delete(client);
    }
  }
}
// Helper to run a maintenance script as a child Node process and stream its
// stdout/stderr lines to the given SSE client set.
// The promise settles based on the JSON status messages the script prints:
// a final 'complete'/'cancelled' resolves, 'error' rejects. The 'close'
// handler only rejects on a non-zero exit code because success is signalled
// by the script's own completion message, not by process exit.
// (Fix: the original assigned an `activeProcess` local that was never read;
// the repeated type switches are collapsed into one `clearActive` helper.)
function runScript(scriptPath, type, clients) {
  return new Promise((resolve, reject) => {
    // Kill any existing process of this type before starting a new one
    if (type === 'update' && activeFullUpdate) {
      try { activeFullUpdate.kill(); } catch (e) { }
    } else if (type === 'reset' && activeFullReset) {
      try { activeFullReset.kill(); } catch (e) { }
    }
    // Clear the module-level handle for this operation type
    const clearActive = () => {
      if (type === 'update') {
        activeFullUpdate = null;
      } else if (type === 'reset') {
        activeFullReset = null;
      }
    };
    const child = spawn('node', [scriptPath], {
      stdio: ['inherit', 'pipe', 'pipe']
    });
    // Track the new child so /status and /cancel can see it
    if (type === 'update') {
      activeFullUpdate = child;
    } else if (type === 'reset') {
      activeFullReset = child;
    }
    let output = '';
    child.stdout.on('data', (data) => {
      const text = data.toString();
      output += text;
      // Scripts emit one JSON object per line; split so each is handled
      const lines = text.split('\n');
      lines.filter(line => line.trim()).forEach(line => {
        try {
          // Try to parse as JSON but don't let it affect the display
          const jsonData = JSON.parse(line);
          // Only settle the promise on a final status message
          if (jsonData.status === 'complete' || jsonData.status === 'error' || jsonData.status === 'cancelled') {
            if (jsonData.status === 'complete' && !jsonData.operation?.includes('complete')) {
              // Intermediate completion: forward but keep the process active
              sendProgressToClients(clients, line);
              return;
            }
            // Close only on final completion/error/cancellation
            clearActive();
            if (jsonData.status === 'error') {
              reject(new Error(jsonData.error || 'Unknown error'));
            } else {
              resolve({ output });
            }
          }
        } catch (e) {
          // Not JSON — plain progress text; it is still forwarded below
        }
        // Always relay the raw line to connected clients
        sendProgressToClients(clients, line);
      });
    });
    child.stderr.on('data', (data) => {
      const text = data.toString();
      console.error(text);
      // Send stderr output directly too
      sendProgressToClients(clients, text);
    });
    child.on('close', (code) => {
      clearActive();
      if (code !== 0) {
        const error = `Script ${scriptPath} exited with code ${code}`;
        sendProgressToClients(clients, error);
        reject(new Error(error));
      }
      // A zero exit does NOT resolve here — the script's final 'complete'
      // JSON message is what triggers resolve above.
    });
    child.on('error', (err) => {
      clearActive();
      sendProgressToClients(clients, err.message);
      reject(err);
    });
  });
}
// Progress endpoints
// GET /:type/progress — open a Server-Sent Events stream that receives
// progress lines for the given operation type ('update' or 'reset').
router.get('/:type/progress', (req, res) => {
  const { type } = req.params;
  if (!['update', 'reset'].includes(type)) {
    return res.status(400).json({ error: 'Invalid operation type' });
  }
  // Standard SSE headers; the request origin is echoed back so the
  // browser accepts the stream with credentials.
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Credentials': 'true'
  });
  // Add this client to the correct set
  const clients = type === 'update' ? fullUpdateClients : fullResetClients;
  clients.add(res);
  // Send initial connection message (to this client only)
  sendProgressToClients(new Set([res]), JSON.stringify({
    status: 'running',
    operation: 'Initializing connection...'
  }));
  // Handle client disconnect
  req.on('close', () => {
    clients.delete(res);
  });
});
// GET /status - Check for active processes
// Reports whichever of the two long-running operations is active, if any.
router.get('/status', (req, res) => {
  try {
    const hasActiveUpdate = activeFullUpdate !== null;
    const hasActiveReset = activeFullReset !== null;
    if (hasActiveUpdate || hasActiveReset) {
      res.json({
        active: true,
        progress: {
          status: 'running',
          operation: hasActiveUpdate ? 'Full update in progress' : 'Full reset in progress',
          type: hasActiveUpdate ? 'update' : 'reset'
        }
      });
    } else {
      res.json({
        active: false,
        progress: null
      });
    }
  } catch (error) {
    console.error('Error checking status:', error);
    res.status(500).json({ error: error.message });
  }
});
// Route to cancel active process
router.post('/cancel', (req, res) => {
  let killed = false;
  // Get the operation type from the request
  // NOTE(review): any type other than 'update' falls through to the reset
  // process — confirm that default is intended.
  const { type } = req.query;
  const clients = type === 'update' ? fullUpdateClients : fullResetClients;
  const activeProcess = type === 'update' ? activeFullUpdate : activeFullReset;
  if (activeProcess) {
    try {
      activeProcess.kill('SIGTERM');
      if (type === 'update') {
        activeFullUpdate = null;
      } else {
        activeFullReset = null;
      }
      killed = true;
      sendProgressToClients(clients, JSON.stringify({
        status: 'cancelled',
        operation: 'Operation cancelled'
      }));
    } catch (err) {
      console.error(`Error killing ${type} process:`, err);
    }
  }
  if (killed) {
    res.json({ success: true });
  } else {
    res.status(404).json({ error: 'No active process to cancel' });
  }
});
// POST /csv/full-update - Run full update script
// Fire-and-forget: responds 202 immediately; progress is streamed to SSE
// clients and failures after that point are only logged.
router.post('/full-update', async (req, res) => {
  try {
    const scriptPath = path.join(__dirname, '../../scripts/full-update.js');
    runScript(scriptPath, 'update', fullUpdateClients)
      .catch(error => {
        console.error('Update failed:', error);
      });
    res.status(202).json({ message: 'Update started' });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
// POST /csv/full-reset - Run full reset script (same fire-and-forget shape)
router.post('/full-reset', async (req, res) => {
  try {
    const scriptPath = path.join(__dirname, '../../scripts/full-reset.js');
    runScript(scriptPath, 'reset', fullResetClients)
      .catch(error => {
        console.error('Reset failed:', error);
      });
    res.status(202).json({ message: 'Reset started' });
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
// GET /history/import - Get recent import history (latest 20 runs)
router.get('/history/import', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    // First check which columns exist, so the query also works against
    // older schemas that predate these three columns.
    const { rows: columns } = await pool.query(`
      SELECT column_name
      FROM information_schema.columns
      WHERE table_name = 'import_history'
      AND column_name IN ('records_deleted', 'records_skipped', 'total_processed')
    `);
    const hasDeletedColumn = columns.some(col => col.column_name === 'records_deleted');
    const hasSkippedColumn = columns.some(col => col.column_name === 'records_skipped');
    const hasTotalProcessedColumn = columns.some(col => col.column_name === 'total_processed');
    // Build query dynamically based on available columns; missing columns
    // are reported as 0 so the response shape stays constant.
    const query = `
      SELECT
        id,
        start_time,
        end_time,
        status,
        error_message,
        records_added::integer,
        records_updated::integer,
        ${hasDeletedColumn ? 'records_deleted::integer,' : '0 as records_deleted,'}
        ${hasSkippedColumn ? 'records_skipped::integer,' : '0 as records_skipped,'}
        ${hasTotalProcessedColumn ? 'total_processed::integer,' : '0 as total_processed,'}
        is_incremental,
        additional_info,
        EXTRACT(EPOCH FROM (COALESCE(end_time, NOW()) - start_time)) / 60 as duration_minutes
      FROM import_history
      ORDER BY start_time DESC
      LIMIT 20
    `;
    const { rows } = await pool.query(query);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching import history:', error);
    res.status(500).json({ error: error.message });
  }
});
// GET /history/calculate - Get recent calculation history (latest 20 runs)
router.get('/history/calculate', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        id,
        start_time,
        end_time,
        EXTRACT(EPOCH FROM (COALESCE(end_time, NOW()) - start_time)) / 60 as duration_minutes,
        duration_seconds,
        status,
        error_message,
        total_products,
        total_orders,
        total_purchase_orders,
        processed_products,
        processed_orders,
        processed_purchase_orders,
        additional_info
      FROM calculate_history
      ORDER BY start_time DESC
      LIMIT 20
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching calculate history:', error);
    res.status(500).json({ error: error.message });
  }
});
// GET /status/modules - Get module calculation status
router.get('/status/modules', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        module_name,
        last_calculation_timestamp::timestamp
      FROM calculate_status
      ORDER BY module_name
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching module status:', error);
    res.status(500).json({ error: error.message });
  }
});
// GET /status/tables - Get table sync status
router.get('/status/tables', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const { rows } = await pool.query(`
      SELECT
        table_name,
        last_sync_timestamp::timestamp
      FROM sync_status
      ORDER BY table_name
    `);
    res.json(rows || []);
  } catch (error) {
    console.error('Error fetching table status:', error);
    res.status(500).json({ error: error.message });
  }
});
// GET /status/table-counts - Get record counts for all tables
router.get('/status/table-counts', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const tables = [
      // Core tables
      'products', 'categories', 'product_categories', 'orders', 'purchase_orders', 'receivings',
      // New metrics tables
      'product_metrics', 'daily_product_snapshots','brand_metrics','category_metrics','vendor_metrics',
      // Config tables
      'settings_global', 'settings_vendor', 'settings_product'
    ];
    // Interpolating the table name into SQL is safe ONLY because `tables`
    // is a hardcoded whitelist — never extend it with user input.
    // Per-table failures are captured as { count: null, error } instead of
    // failing the whole request.
    const counts = await Promise.all(
      tables.map(table =>
        pool.query(`SELECT COUNT(*) as count FROM ${table}`)
          .then(result => ({
            table_name: table,
            count: parseInt(result.rows[0].count)
          }))
          .catch(err => ({
            table_name: table,
            count: null,
            error: err.message
          }))
      )
    );
    // Group tables by type
    const groupedCounts = {
      core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders', 'receivings'].includes(c.table_name)),
      metrics: counts.filter(c => ['product_metrics', 'daily_product_snapshots','brand_metrics','category_metrics','vendor_metrics'].includes(c.table_name)),
      config: counts.filter(c => ['settings_global', 'settings_vendor', 'settings_product'].includes(c.table_name))
    };
    res.json(groupedCounts);
  } catch (error) {
    console.error('Error fetching table counts:', error);
    res.status(500).json({ error: error.message });
  }
});
module.exports = router;

View File

@@ -1,170 +0,0 @@
const express = require('express');
const router = express.Router();
// GET /api/hts-lookup?search=term
// Finds matching products and groups them by harmonized tariff code.
// For each code group, related HTS tariff rows are attached by prefix-
// matching the normalized (alphanumeric-only, lowercased) codes in either
// direction, so a partial product code still finds its tariff chapter.
router.get('/', async (req, res) => {
  const searchTerm = typeof req.query.search === 'string' ? req.query.search.trim() : '';
  if (!searchTerm) {
    return res.status(400).json({ error: 'Search term is required' });
  }
  try {
    const pool = req.app.locals.pool;
    const likeTerm = `%${searchTerm}%`;
    // One query, three CTEs:
    //   matched_products — visible products matching the term on any field
    //   grouped          — those products bucketed by tariff code
    //   hts_lookup       — htsdata rows with a normalized code for matching
    const { rows } = await pool.query(
      `
      WITH matched_products AS (
        SELECT
          pid,
          title,
          sku,
          barcode,
          brand,
          vendor,
          harmonized_tariff_code,
          NULLIF(
            LOWER(
              REGEXP_REPLACE(
                COALESCE(NULLIF(TRIM(harmonized_tariff_code), ''), ''),
                '[^0-9A-Za-z]',
                '',
                'g'
              )
            ),
            ''
          ) AS normalized_code
        FROM products
        WHERE visible = TRUE
          AND (
            title ILIKE $1
            OR sku ILIKE $1
            OR barcode ILIKE $1
            OR vendor ILIKE $1
            OR brand ILIKE $1
            OR vendor_reference ILIKE $1
            OR harmonized_tariff_code ILIKE $1
          )
      ),
      grouped AS (
        SELECT
          COALESCE(NULLIF(TRIM(harmonized_tariff_code), ''), 'Unspecified') AS harmonized_tariff_code,
          normalized_code,
          COUNT(*)::INT AS product_count,
          json_agg(
            json_build_object(
              'pid', pid,
              'title', title,
              'sku', sku,
              'barcode', barcode,
              'brand', brand,
              'vendor', vendor
            )
            ORDER BY title
          ) AS products
        FROM matched_products
        GROUP BY
          COALESCE(NULLIF(TRIM(harmonized_tariff_code), ''), 'Unspecified'),
          normalized_code
      ),
      hts_lookup AS (
        SELECT
          h."HTS Number" AS hts_number,
          h."Indent" AS indent,
          h."Description" AS description,
          h."Unit of Quantity" AS unit_of_quantity,
          h."General Rate of Duty" AS general_rate_of_duty,
          h."Special Rate of Duty" AS special_rate_of_duty,
          h."Column 2 Rate of Duty" AS column2_rate_of_duty,
          h."Quota Quantity" AS quota_quantity,
          h."Additional Duties" AS additional_duties,
          NULLIF(
            LOWER(
              REGEXP_REPLACE(
                COALESCE(h."HTS Number", ''),
                '[^0-9A-Za-z]',
                '',
                'g'
              )
            ),
            ''
          ) AS normalized_hts_number
        FROM htsdata h
      )
      SELECT
        g.harmonized_tariff_code,
        g.product_count,
        g.products,
        hts.hts_details
      FROM grouped g
      LEFT JOIN LATERAL (
        SELECT json_agg(
          json_build_object(
            'hts_number', h.hts_number,
            'indent', h.indent,
            'description', h.description,
            'unit_of_quantity', h.unit_of_quantity,
            'general_rate_of_duty', h.general_rate_of_duty,
            'special_rate_of_duty', h.special_rate_of_duty,
            'column2_rate_of_duty', h.column2_rate_of_duty,
            'quota_quantity', h.quota_quantity,
            'additional_duties', h.additional_duties
          )
          ORDER BY LENGTH(COALESCE(h.normalized_hts_number, '')) ASC NULLS LAST,
                   NULLIF(h.indent, '')::INT NULLS LAST
        ) AS hts_details
        FROM hts_lookup h
        WHERE COALESCE(g.normalized_code, '') <> ''
          AND COALESCE(h.normalized_hts_number, '') <> ''
          AND (
            g.normalized_code LIKE h.normalized_hts_number || '%'
            OR h.normalized_hts_number LIKE g.normalized_code || '%'
          )
      ) hts ON TRUE
      ORDER BY g.product_count DESC, g.harmonized_tariff_code ASC
      `,
      [likeTerm]
    );
    // Response total = sum of per-group product counts
    const totalMatches = rows.reduce((sum, row) => sum + (parseInt(row.product_count, 10) || 0), 0);
    // Map DB rows to a stable response shape; json_agg columns may come
    // back NULL (no match), so default both lists to [].
    res.json({
      search: searchTerm,
      total: totalMatches,
      results: rows.map(row => ({
        harmonized_tariff_code: row.harmonized_tariff_code,
        product_count: parseInt(row.product_count, 10) || 0,
        hts_details: Array.isArray(row.hts_details)
          ? row.hts_details.map(detail => ({
              hts_number: detail.hts_number,
              indent: detail.indent,
              description: detail.description,
              unit_of_quantity: detail.unit_of_quantity,
              general_rate_of_duty: detail.general_rate_of_duty,
              special_rate_of_duty: detail.special_rate_of_duty,
              column2_rate_of_duty: detail.column2_rate_of_duty,
              quota_quantity: detail.quota_quantity,
              additional_duties: detail.additional_duties
            }))
          : [],
        products: Array.isArray(row.products)
          ? row.products.map(product => ({
              pid: product.pid,
              title: product.title,
              sku: product.sku,
              barcode: product.barcode,
              brand: product.brand,
              vendor: product.vendor
            }))
          : []
      }))
    });
  } catch (error) {
    console.error('Error performing HTS lookup:', error);
    res.status(500).json({ error: 'Failed to lookup HTS codes' });
  }
});
module.exports = router;

View File

@@ -1,337 +0,0 @@
const express = require('express');
const router = express.Router();
// Get all import sessions for a user (named + unnamed)
// Returns lightweight summaries (row_count instead of the full data blob).
// Unnamed (autosave) sessions sort before named ones; within each group,
// most recently updated first.
router.get('/', async (req, res) => {
  try {
    const { user_id } = req.query;
    if (!user_id) {
      return res.status(400).json({ error: 'user_id query parameter is required' });
    }
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      SELECT
        id,
        user_id,
        name,
        current_step,
        jsonb_array_length(data) as row_count,
        global_selections,
        created_at,
        updated_at
      FROM import_sessions
      WHERE user_id = $1
      ORDER BY
        CASE WHEN name IS NULL THEN 0 ELSE 1 END,
        updated_at DESC
    `, [user_id]);
    res.json(result.rows);
  } catch (error) {
    console.error('Error fetching import sessions:', error);
    res.status(500).json({
      error: 'Failed to fetch import sessions',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Fetch a single import session (including its full data payload) by ID.
router.get('/:id', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      SELECT * FROM import_sessions
      WHERE id = $1
    `, [req.params.id]);
    const [session] = result.rows;
    if (!session) {
      return res.status(404).json({ error: 'Import session not found' });
    }
    res.json(session);
  } catch (error) {
    console.error('Error fetching import session:', error);
    res.status(500).json({
      error: 'Failed to fetch import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Autosave - upsert unnamed session for user
// IMPORTANT: This must be defined before /:id routes to avoid Express matching "autosave" as an :id
router.put('/autosave', async (req, res) => {
  try {
    const {
      user_id,
      current_step,
      data,
      product_images,
      global_selections,
      validation_state
    } = req.body;
    // Validate required fields
    if (!user_id) {
      return res.status(400).json({ error: 'user_id is required' });
    }
    if (!current_step) {
      return res.status(400).json({ error: 'current_step is required' });
    }
    if (!data || !Array.isArray(data)) {
      return res.status(400).json({ error: 'data must be an array' });
    }
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // Upsert: insert or update the unnamed session for this user.
    // NOTE(review): the partial ON CONFLICT target requires a matching
    // partial unique index on (user_id) WHERE name IS NULL — confirm it
    // exists in the schema migrations.
    const result = await pool.query(`
      INSERT INTO import_sessions (
        user_id,
        name,
        current_step,
        data,
        product_images,
        global_selections,
        validation_state
      ) VALUES ($1, NULL, $2, $3, $4, $5, $6)
      ON CONFLICT (user_id) WHERE name IS NULL
      DO UPDATE SET
        current_step = EXCLUDED.current_step,
        data = EXCLUDED.data,
        product_images = EXCLUDED.product_images,
        global_selections = EXCLUDED.global_selections,
        validation_state = EXCLUDED.validation_state,
        updated_at = CURRENT_TIMESTAMP
      RETURNING id, user_id, name, current_step, created_at, updated_at
    `, [
      user_id,
      current_step,
      JSON.stringify(data),
      product_images ? JSON.stringify(product_images) : null,
      global_selections ? JSON.stringify(global_selections) : null,
      validation_state ? JSON.stringify(validation_state) : null
    ]);
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error autosaving import session:', error);
    res.status(500).json({
      error: 'Failed to autosave import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Delete unnamed session for user (clear autosave)
// IMPORTANT: This must be defined before /:id routes
router.delete('/autosave/:user_id', async (req, res) => {
  try {
    const { user_id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(
      'DELETE FROM import_sessions WHERE user_id = $1 AND name IS NULL RETURNING id, user_id, name, current_step, created_at, updated_at',
      [user_id]
    );
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'No autosave session found for user' });
    }
    res.json({ message: 'Autosave session deleted successfully' });
  } catch (error) {
    console.error('Error deleting autosave session:', error);
    res.status(500).json({
      error: 'Failed to delete autosave session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Create a brand-new *named* session for a user.
// Responds 201 with the session summary (no data payload echoed back).
router.post('/', async (req, res) => {
  try {
    const {
      user_id,
      name,
      current_step,
      data,
      product_images,
      global_selections,
      validation_state
    } = req.body;
    // Validate required fields up front, cheapest checks first
    if (!user_id) {
      return res.status(400).json({ error: 'user_id is required' });
    }
    const trimmedName = typeof name === 'string' ? name.trim() : '';
    if (trimmedName.length === 0) {
      return res.status(400).json({ error: 'name is required for creating a named session' });
    }
    if (!current_step) {
      return res.status(400).json({ error: 'current_step is required' });
    }
    if (!data || !Array.isArray(data)) {
      return res.status(400).json({ error: 'data must be an array' });
    }
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // Optional blobs are stored as JSON text, or NULL when absent
    const toJsonOrNull = (value) => (value ? JSON.stringify(value) : null);
    const result = await pool.query(`
      INSERT INTO import_sessions (
        user_id,
        name,
        current_step,
        data,
        product_images,
        global_selections,
        validation_state
      ) VALUES ($1, $2, $3, $4, $5, $6, $7)
      RETURNING id, user_id, name, current_step, created_at, updated_at
    `, [
      user_id,
      trimmedName,
      current_step,
      JSON.stringify(data),
      toJsonOrNull(product_images),
      toJsonOrNull(global_selections),
      toJsonOrNull(validation_state)
    ]);
    res.status(201).json(result.rows[0]);
  } catch (error) {
    console.error('Error creating import session:', error);
    res.status(500).json({
      error: 'Failed to create import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Update named session by ID.
// `name` is optional: when omitted, the existing name is preserved.
// (Fix: the original duplicated fragile `$${hasName ? n : n-1}` placeholder
// arithmetic across the SQL template and two parallel parameter arrays; the
// clause list and parameter list are now built together so the placeholder
// numbers can never drift out of sync.)
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const {
      name,
      current_step,
      data,
      product_images,
      global_selections,
      validation_state
    } = req.body;
    if (!current_step) {
      return res.status(400).json({ error: 'current_step is required' });
    }
    if (!data || !Array.isArray(data)) {
      return res.status(400).json({ error: 'data must be an array' });
    }
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // Build the SET clause and parameter list in lockstep
    const setClauses = [];
    const params = [];
    const addAssignment = (column, value) => {
      params.push(value);
      setClauses.push(`${column} = $${params.length}`);
    };
    // Optionally include name if provided
    if (name !== undefined) {
      addAssignment('name', typeof name === 'string' ? name.trim() : name);
    }
    addAssignment('current_step', current_step);
    addAssignment('data', JSON.stringify(data));
    addAssignment('product_images', product_images ? JSON.stringify(product_images) : null);
    addAssignment('global_selections', global_selections ? JSON.stringify(global_selections) : null);
    addAssignment('validation_state', validation_state ? JSON.stringify(validation_state) : null);
    params.push(id);
    const result = await pool.query(`
      UPDATE import_sessions
      SET
        ${setClauses.join(',\n        ')},
        updated_at = CURRENT_TIMESTAMP
      WHERE id = $${params.length}
      RETURNING id, user_id, name, current_step, created_at, updated_at
    `, params);
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Import session not found' });
    }
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error updating import session:', error);
    res.status(500).json({
      error: 'Failed to update import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Delete session by ID (named or unnamed)
router.delete('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // RETURNING lets us distinguish "deleted" from "not found"
    const result = await pool.query('DELETE FROM import_sessions WHERE id = $1 RETURNING id, user_id, name, current_step, created_at, updated_at', [id]);
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Import session not found' });
    }
    res.json({ message: 'Import session deleted successfully' });
  } catch (error) {
    console.error('Error deleting import session:', error);
    res.status(500).json({
      error: 'Failed to delete import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Error handling middleware: catch-all for errors passed to next()
router.use((err, req, res, next) => {
  console.error('Import sessions route error:', err);
  res.status(500).json({
    error: 'Internal server error',
    details: err.message
  });
});
module.exports = router;

View File

@@ -634,7 +634,7 @@ router.post('/upload-image', upload.single('image'), async (req, res) => {
req.file.size = processingResult.finalSize;
// Create URL for the uploaded file - using an absolute URL with domain
// This will generate a URL like: https://acot.site/uploads/products/filename.jpg
// This will generate a URL like: https://tools.acherryontop.com/uploads/products/filename.jpg
const baseUrl = 'https://tools.acherryontop.com';
const imageUrl = `${baseUrl}/uploads/products/${req.file.filename}`;
@@ -1246,48 +1246,6 @@ router.get('/search-products', async (req, res) => {
}
});
// Get product images for a given PID from production DB
router.get('/product-images/:pid', async (req, res) => {
  const pid = Number.parseInt(req.params.pid, 10);
  if (!pid || pid <= 0) {
    return res.status(400).json({ error: 'Valid PID is required' });
  }
  try {
    const { connection } = await getDbConnection();
    const [rows] = await connection.query(
      'SELECT iid, type, width, height, `order`, hidden FROM product_images WHERE pid = ? ORDER BY `order` DESC, type',
      [pid]
    );
    // Numeric image-type codes map to the size-name suffix used in the CDN
    // filename, mirroring the URL-building logic of the PHP codebase.
    const typeMap = { 1: 'o', 2: 'l', 3: 't', 4: '100x100', 5: '175x175', 6: '300x300', 7: '600x600', 8: '500x500', 9: '150x150' };
    const padded = String(pid).padStart(10, '0');
    const pathPrefix = `${padded.substring(0, 4)}/${padded.substring(4, 7)}/`;
    // Group size variants by image id (iid).
    const grouped = new Map();
    for (const row of rows) {
      const sizeName = typeMap[row.type];
      if (!sizeName) continue; // unrecognized type codes are skipped
      let entry = grouped.get(row.iid);
      if (!entry) {
        entry = { iid: row.iid, order: row.order, hidden: !!row.hidden, sizes: {} };
        grouped.set(row.iid, entry);
      }
      entry.sizes[sizeName] = {
        width: row.width,
        height: row.height,
        url: `https://sbing.com/i/products/${pathPrefix}${pid}-${sizeName}-${row.iid}.jpg`,
      };
    }
    // Highest display order first.
    const images = [...grouped.values()].sort((a, b) => b.order - a.order);
    res.json(images);
  } catch (error) {
    console.error('Error fetching product images:', error);
    res.status(500).json({ error: 'Failed to fetch product images' });
  }
});
// Leading digit used for supplier UPC prefixes (per naming; generation logic not visible here — confirm downstream)
const UPC_SUPPLIER_PREFIX_LEADING_DIGIT = '4';
// Largest value representable in the UPC's 5-digit sequence segment
const UPC_MAX_SEQUENCE = 99999;
const UPC_RESERVATION_TTL = 5 * 60 * 1000; // 5 minutes

View File

@@ -1,590 +0,0 @@
const express = require('express');
const router = express.Router();
const { Pool } = require('pg'); // Assuming pg driver
// --- Configuration & Helpers ---
const DEFAULT_PAGE_LIMIT = 50; // Rows per page when the client omits ?limit
const MAX_PAGE_LIMIT = 200; // Prevent excessive data requests
// Define direct mapping from frontend column names to database columns
// This simplifies the code by eliminating conversion logic
// Keys are the camelCase names used by the frontend; values are the
// 'pm.'-qualified columns of public.product_metrics used in SQL.
const COLUMN_MAP = {
  // Product Info
  pid: 'pm.pid',
  sku: 'pm.sku',
  title: 'pm.title',
  brand: 'pm.brand',
  vendor: 'pm.vendor',
  imageUrl: 'pm.image_url',
  isVisible: 'pm.is_visible',
  isReplenishable: 'pm.is_replenishable',
  // Additional Product Fields
  barcode: 'pm.barcode',
  harmonizedTariffCode: 'pm.harmonized_tariff_code',
  vendorReference: 'pm.vendor_reference',
  notionsReference: 'pm.notions_reference',
  line: 'pm.line',
  subline: 'pm.subline',
  artist: 'pm.artist',
  moq: 'pm.moq',
  rating: 'pm.rating',
  reviews: 'pm.reviews',
  weight: 'pm.weight',
  length: 'pm.length',
  width: 'pm.width',
  height: 'pm.height',
  countryOfOrigin: 'pm.country_of_origin',
  location: 'pm.location',
  baskets: 'pm.baskets',
  notifies: 'pm.notifies',
  preorderCount: 'pm.preorder_count',
  notionsInvCount: 'pm.notions_inv_count',
  // Current Status
  currentPrice: 'pm.current_price',
  currentRegularPrice: 'pm.current_regular_price',
  currentCostPrice: 'pm.current_cost_price',
  currentLandingCostPrice: 'pm.current_landing_cost_price',
  currentStock: 'pm.current_stock',
  currentStockCost: 'pm.current_stock_cost',
  currentStockRetail: 'pm.current_stock_retail',
  currentStockGross: 'pm.current_stock_gross',
  onOrderQty: 'pm.on_order_qty',
  onOrderCost: 'pm.on_order_cost',
  onOrderRetail: 'pm.on_order_retail',
  earliestExpectedDate: 'pm.earliest_expected_date',
  // Historical Dates
  dateCreated: 'pm.date_created',
  dateFirstReceived: 'pm.date_first_received',
  dateLastReceived: 'pm.date_last_received',
  dateFirstSold: 'pm.date_first_sold',
  dateLastSold: 'pm.date_last_sold',
  ageDays: 'pm.age_days',
  // Rolling Period Metrics
  sales7d: 'pm.sales_7d',
  revenue7d: 'pm.revenue_7d',
  sales14d: 'pm.sales_14d',
  revenue14d: 'pm.revenue_14d',
  sales30d: 'pm.sales_30d',
  revenue30d: 'pm.revenue_30d',
  cogs30d: 'pm.cogs_30d',
  profit30d: 'pm.profit_30d',
  returnsUnits30d: 'pm.returns_units_30d',
  returnsRevenue30d: 'pm.returns_revenue_30d',
  discounts30d: 'pm.discounts_30d',
  grossRevenue30d: 'pm.gross_revenue_30d',
  grossRegularRevenue30d: 'pm.gross_regular_revenue_30d',
  stockoutDays30d: 'pm.stockout_days_30d',
  sales365d: 'pm.sales_365d',
  revenue365d: 'pm.revenue_365d',
  avgStockUnits30d: 'pm.avg_stock_units_30d',
  avgStockCost30d: 'pm.avg_stock_cost_30d',
  avgStockRetail30d: 'pm.avg_stock_retail_30d',
  avgStockGross30d: 'pm.avg_stock_gross_30d',
  receivedQty30d: 'pm.received_qty_30d',
  receivedCost30d: 'pm.received_cost_30d',
  // Lifetime Metrics
  lifetimeSales: 'pm.lifetime_sales',
  lifetimeRevenue: 'pm.lifetime_revenue',
  // First Period Metrics
  first7DaysSales: 'pm.first_7_days_sales',
  first7DaysRevenue: 'pm.first_7_days_revenue',
  first30DaysSales: 'pm.first_30_days_sales',
  first30DaysRevenue: 'pm.first_30_days_revenue',
  first60DaysSales: 'pm.first_60_days_sales',
  first60DaysRevenue: 'pm.first_60_days_revenue',
  first90DaysSales: 'pm.first_90_days_sales',
  first90DaysRevenue: 'pm.first_90_days_revenue',
  // Calculated KPIs
  asp30d: 'pm.asp_30d',
  acp30d: 'pm.acp_30d',
  avgRos30d: 'pm.avg_ros_30d',
  avgSalesPerDay30d: 'pm.avg_sales_per_day_30d',
  avgSalesPerMonth30d: 'pm.avg_sales_per_month_30d',
  margin30d: 'pm.margin_30d',
  markup30d: 'pm.markup_30d',
  gmroi30d: 'pm.gmroi_30d',
  stockturn30d: 'pm.stockturn_30d',
  returnRate30d: 'pm.return_rate_30d',
  discountRate30d: 'pm.discount_rate_30d',
  stockoutRate30d: 'pm.stockout_rate_30d',
  markdown30d: 'pm.markdown_30d',
  markdownRate30d: 'pm.markdown_rate_30d',
  sellThrough30d: 'pm.sell_through_30d',
  avgLeadTimeDays: 'pm.avg_lead_time_days',
  // Forecasting & Replenishment
  abcClass: 'pm.abc_class',
  salesVelocityDaily: 'pm.sales_velocity_daily',
  configLeadTime: 'pm.config_lead_time',
  configDaysOfStock: 'pm.config_days_of_stock',
  configSafetyStock: 'pm.config_safety_stock',
  planningPeriodDays: 'pm.planning_period_days',
  leadTimeForecastUnits: 'pm.lead_time_forecast_units',
  daysOfStockForecastUnits: 'pm.days_of_stock_forecast_units',
  planningPeriodForecastUnits: 'pm.planning_period_forecast_units',
  leadTimeClosingStock: 'pm.lead_time_closing_stock',
  daysOfStockClosingStock: 'pm.days_of_stock_closing_stock',
  replenishmentNeededRaw: 'pm.replenishment_needed_raw',
  replenishmentUnits: 'pm.replenishment_units',
  replenishmentCost: 'pm.replenishment_cost',
  replenishmentRetail: 'pm.replenishment_retail',
  replenishmentProfit: 'pm.replenishment_profit',
  toOrderUnits: 'pm.to_order_units',
  forecastLostSalesUnits: 'pm.forecast_lost_sales_units',
  forecastLostRevenue: 'pm.forecast_lost_revenue',
  stockCoverInDays: 'pm.stock_cover_in_days',
  poCoverInDays: 'pm.po_cover_in_days',
  sellsOutInDays: 'pm.sells_out_in_days',
  replenishDate: 'pm.replenish_date',
  overstockedUnits: 'pm.overstocked_units',
  overstockedCost: 'pm.overstocked_cost',
  overstockedRetail: 'pm.overstocked_retail',
  isOldStock: 'pm.is_old_stock',
  // Yesterday
  yesterdaySales: 'pm.yesterday_sales',
  // Map status column - directly mapped now instead of calculated on frontend
  status: 'pm.status',
  // Growth Metrics (P3)
  salesGrowth30dVsPrev: 'pm.sales_growth_30d_vs_prev',
  revenueGrowth30dVsPrev: 'pm.revenue_growth_30d_vs_prev',
  salesGrowthYoy: 'pm.sales_growth_yoy',
  revenueGrowthYoy: 'pm.revenue_growth_yoy',
  // Demand Variability Metrics (P3)
  salesVariance30d: 'pm.sales_variance_30d',
  salesStdDev30d: 'pm.sales_std_dev_30d',
  salesCv30d: 'pm.sales_cv_30d',
  demandPattern: 'pm.demand_pattern',
  // Service Level Metrics (P5)
  fillRate30d: 'pm.fill_rate_30d',
  stockoutIncidents30d: 'pm.stockout_incidents_30d',
  serviceLevel30d: 'pm.service_level_30d',
  lostSalesIncidents30d: 'pm.lost_sales_incidents_30d',
  // Seasonality Metrics (P5)
  seasonalityIndex: 'pm.seasonality_index',
  seasonalPattern: 'pm.seasonal_pattern',
  peakSeason: 'pm.peak_season',
  // Lifetime Revenue Quality
  lifetimeRevenueQuality: 'pm.lifetime_revenue_quality'
};
// Define column types for use in sorting/filtering
// This helps apply correct comparison operators and sorting logic
// NOTE: this object is keyed by CATEGORY ('numeric'/'date'/'string'/'boolean'),
// with arrays of frontend column names as values — classifying a single column
// therefore requires searching these arrays, not indexing by column name.
const COLUMN_TYPES = {
  // Numeric columns (use numeric operators and sorting)
  numeric: [
    'pid', 'currentPrice', 'currentRegularPrice', 'currentCostPrice', 'currentLandingCostPrice',
    'currentStock', 'currentStockCost', 'currentStockRetail', 'currentStockGross',
    'onOrderQty', 'onOrderCost', 'onOrderRetail', 'ageDays',
    'sales7d', 'revenue7d', 'sales14d', 'revenue14d', 'sales30d', 'revenue30d',
    'cogs30d', 'profit30d', 'returnsUnits30d', 'returnsRevenue30d', 'discounts30d',
    'grossRevenue30d', 'grossRegularRevenue30d', 'stockoutDays30d', 'sales365d', 'revenue365d',
    'avgStockUnits30d', 'avgStockCost30d', 'avgStockRetail30d', 'avgStockGross30d',
    'receivedQty30d', 'receivedCost30d', 'lifetimeSales', 'lifetimeRevenue',
    'first7DaysSales', 'first7DaysRevenue', 'first30DaysSales', 'first30DaysRevenue',
    'first60DaysSales', 'first60DaysRevenue', 'first90DaysSales', 'first90DaysRevenue',
    'asp30d', 'acp30d', 'avgRos30d', 'avgSalesPerDay30d', 'avgSalesPerMonth30d',
    'margin30d', 'markup30d', 'gmroi30d', 'stockturn30d', 'returnRate30d', 'discountRate30d',
    'stockoutRate30d', 'markdown30d', 'markdownRate30d', 'sellThrough30d', 'avgLeadTimeDays',
    'salesVelocityDaily', 'configLeadTime', 'configDaysOfStock', 'configSafetyStock',
    'planningPeriodDays', 'leadTimeForecastUnits', 'daysOfStockForecastUnits',
    'planningPeriodForecastUnits', 'leadTimeClosingStock', 'daysOfStockClosingStock',
    'replenishmentNeededRaw', 'replenishmentUnits', 'replenishmentCost', 'replenishmentRetail',
    'replenishmentProfit', 'toOrderUnits', 'forecastLostSalesUnits', 'forecastLostRevenue',
    'stockCoverInDays', 'poCoverInDays', 'sellsOutInDays', 'overstockedUnits',
    'overstockedCost', 'overstockedRetail', 'yesterdaySales',
    // New numeric columns
    'moq', 'rating', 'reviews', 'weight', 'length', 'width', 'height',
    'baskets', 'notifies', 'preorderCount', 'notionsInvCount',
    // Growth metrics
    'salesGrowth30dVsPrev', 'revenueGrowth30dVsPrev', 'salesGrowthYoy', 'revenueGrowthYoy',
    // Demand variability metrics
    'salesVariance30d', 'salesStdDev30d', 'salesCv30d',
    // Service level metrics
    'fillRate30d', 'stockoutIncidents30d', 'serviceLevel30d', 'lostSalesIncidents30d',
    // Seasonality metrics
    'seasonalityIndex'
  ],
  // Date columns (use date operators and sorting)
  date: [
    'dateCreated', 'dateFirstReceived', 'dateLastReceived', 'dateFirstSold', 'dateLastSold',
    'earliestExpectedDate', 'replenishDate', 'forecastedOutOfStockDate'
  ],
  // String columns (use string operators and sorting)
  string: [
    'sku', 'title', 'brand', 'vendor', 'imageUrl', 'abcClass', 'status',
    // New string columns
    'barcode', 'harmonizedTariffCode', 'vendorReference', 'notionsReference',
    'line', 'subline', 'artist', 'countryOfOrigin', 'location',
    // New string columns for patterns
    'demandPattern', 'seasonalPattern', 'peakSeason', 'lifetimeRevenueQuality'
  ],
  // Boolean columns (use boolean operators and sorting)
  boolean: ['isVisible', 'isReplenishable', 'isOldStock']
};
// Special sort handling for certain columns
// Values: true = cast to numeric when sorting; 'abs' = sort by absolute value;
// 'priority' = use the hand-written status CASE ordering in the list route.
const SPECIAL_SORT_COLUMNS = {
  // Percentage columns where we want to sort by the numeric value
  margin30d: true,
  markup30d: true,
  sellThrough30d: true,
  discountRate30d: true,
  stockoutRate30d: true,
  returnRate30d: true,
  markdownRate30d: true,
  // Columns where we may want to sort by absolute value
  profit30d: 'abs',
  // Velocity columns
  salesVelocityDaily: true,
  // Growth rate columns
  salesGrowth30dVsPrev: 'abs',
  revenueGrowth30dVsPrev: 'abs',
  salesGrowthYoy: 'abs',
  revenueGrowthYoy: 'abs',
  // Status column needs special ordering
  status: 'priority'
};
// Status priority for sorting (lower number = higher priority)
// NOTE(review): not referenced by the visible sorting code, which hardcodes the
// same priorities in a CASE expression — confirm before removing.
const STATUS_PRIORITY = {
  'Critical': 1,
  'At Risk': 2,
  'Reorder': 3,
  'Overstocked': 4,
  'Healthy': 5,
  'New': 6
  // Any other status will be sorted alphabetically after these
};
// Resolve a frontend column key to its database column expression.
// Unknown keys fall back to the product title so sorting never breaks.
function getDbColumn(frontendColumn) {
  return COLUMN_MAP[frontendColumn] ?? 'pm.title';
}
// Get column type for proper sorting/filtering.
// BUG FIX: COLUMN_TYPES is keyed by category name ('numeric', 'date', 'string',
// 'boolean') with ARRAYS of column names as values, so the old implementation
// (`COLUMN_TYPES[frontendColumn]`) always returned undefined and fell back to
// 'string', silently disabling numeric and date-aware sorting/parsing.
// Search the category lists instead. Returns 'number' (not 'numeric') because
// the downstream consumers — the ORDER BY builder and parseValue() — test for
// the 'number' spelling.
function getColumnType(frontendColumn) {
  if (COLUMN_TYPES.numeric.includes(frontendColumn)) return 'number';
  if (COLUMN_TYPES.date.includes(frontendColumn)) return 'date';
  if (COLUMN_TYPES.boolean.includes(frontendColumn)) return 'boolean';
  return 'string';
}
// --- Route Handlers ---
// GET /metrics/filter-options - Provide distinct values for filter dropdowns
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /metrics/filter-options');
  try {
    const vendorSql = `SELECT DISTINCT vendor FROM public.product_metrics WHERE vendor IS NOT NULL AND vendor <> '' ORDER BY vendor`;
    const brandSql = `SELECT DISTINCT COALESCE(brand, 'Unbranded') as brand FROM public.product_metrics WHERE brand IS NOT NULL AND brand <> '' ORDER BY brand`;
    const abcSql = `SELECT DISTINCT abc_class FROM public.product_metrics WHERE abc_class IS NOT NULL ORDER BY abc_class`;
    // Run the three DISTINCT queries concurrently; each feeds one dropdown.
    const [vendorRes, brandRes, abcClassRes] = await Promise.all([
      pool.query(vendorSql),
      pool.query(brandSql),
      pool.query(abcSql)
    ]);
    res.json({
      vendors: vendorRes.rows.map((row) => row.vendor),
      brands: brandRes.rows.map((row) => row.brand),
      abcClasses: abcClassRes.rows.map((row) => row.abc_class)
    });
  } catch (error) {
    console.error('Error fetching filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
// GET /metrics/ - List all product metrics with filtering, sorting, pagination.
// Query params: page/limit (pagination), sort/order (sorting), stock_status,
// showInvisible, showNonReplenishable, plus per-column filters with optional
// operator suffixes (e.g. sales30d_gt=5, title_ilike=foo, pid_in=1,2,3).
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /metrics received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10);
    let limit = parseInt(req.query.limit, 10);
    if (isNaN(page) || page < 1) page = 1;
    if (isNaN(limit) || limit < 1) limit = DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT); // Cap the limit
    const offset = (page - 1) * limit;
    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'title'; // Default sort field key
    const dbColumn = getDbColumn(sortQueryKey);
    const columnType = getColumnType(sortQueryKey);
    console.log(`Sorting request: ${sortQueryKey} -> ${dbColumn} (${columnType})`);
    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
    // Always put nulls last regardless of sort direction or column type
    const nullsOrder = 'NULLS LAST';
    // Build the ORDER BY clause based on column type and special handling
    let orderByClause;
    if (SPECIAL_SORT_COLUMNS[sortQueryKey] === 'abs') {
      // Sort by absolute value for columns where negative values matter
      orderByClause = `ABS(${dbColumn}::numeric) ${sortDirection} ${nullsOrder}`;
    } else if (columnType === 'number' || columnType === 'numeric' || SPECIAL_SORT_COLUMNS[sortQueryKey] === true) {
      // For numeric columns, cast to numeric to ensure proper sorting
      // (accept both 'number' and 'numeric' spellings of the type)
      orderByClause = `${dbColumn}::numeric ${sortDirection} ${nullsOrder}`;
    } else if (columnType === 'date') {
      // For date columns, cast to timestamp to ensure proper sorting
      orderByClause = `CASE WHEN ${dbColumn} IS NULL THEN 1 ELSE 0 END, ${dbColumn}::timestamp ${sortDirection}`;
    } else if (columnType === 'status' || SPECIAL_SORT_COLUMNS[sortQueryKey] === 'priority') {
      // Special handling for status column, using priority for known statuses
      orderByClause = `
        CASE WHEN ${dbColumn} IS NULL THEN 999
             WHEN ${dbColumn} = 'Critical' THEN 1
             WHEN ${dbColumn} = 'At Risk' THEN 2
             WHEN ${dbColumn} = 'Reorder' THEN 3
             WHEN ${dbColumn} = 'Overstocked' THEN 4
             WHEN ${dbColumn} = 'Healthy' THEN 5
             WHEN ${dbColumn} = 'New' THEN 6
             ELSE 100
        END ${sortDirection} ${nullsOrder},
        ${dbColumn} ${sortDirection}`;
    } else {
      // For string and boolean columns, no special casting needed
      orderByClause = `CASE WHEN ${dbColumn} IS NULL THEN 1 ELSE 0 END, ${dbColumn} ${sortDirection}`;
    }
    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;
    // Add default visibility/replenishable filters unless overridden
    if (req.query.showInvisible !== 'true') conditions.push(`pm.is_visible = true`);
    if (req.query.showNonReplenishable !== 'true') conditions.push(`pm.is_replenishable = true`);
    // Special handling for stock_status
    if (req.query.stock_status) {
      const status = req.query.stock_status;
      // Handle special case for "at-risk" which is stored as "At Risk" in the database
      if (status.toLowerCase() === 'at-risk') {
        conditions.push(`pm.status = $${paramCounter++}`);
        params.push('At Risk');
      } else {
        // Capitalize first letter to match database values
        conditions.push(`pm.status = $${paramCounter++}`);
        params.push(status.charAt(0).toUpperCase() + status.slice(1));
      }
    }
    // Process other filters from query parameters
    for (const key in req.query) {
      // Skip control params
      if (['page', 'limit', 'sort', 'order', 'showInvisible', 'showNonReplenishable', 'stock_status'].includes(key)) continue;
      let filterKey = key;
      let operator = '='; // Default operator
      let value = req.query[key];
      // Check for operator suffixes (e.g., sales30d_gt, title_like)
      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1]; // e.g., "sales30d"
        operator = operatorMatch[2]; // e.g., "gt"
      }
      // BUG FIX: getDbColumn() falls back to 'pm.title' for unknown keys, so the
      // previous `if (!dbColumn)` guard could never fire and unrecognized query
      // params silently became filters on the title column. Check the map directly.
      const dbColumn = COLUMN_MAP[filterKey];
      if (!dbColumn) {
        console.warn(`Invalid filter key ignored: ${key}`);
        continue; // Skip if the key doesn't map to a known column
      }
      const valueType = getColumnType(filterKey);
      // BUG FIX: `needsParam` was declared inside the try block but read in the
      // catch block, throwing a ReferenceError on any parse failure — and the
      // single `paramCounter--` rollback was wrong for multi-param operators
      // such as `between`. Snapshot state here and restore it atomically instead.
      const savedParamCounter = paramCounter;
      const savedParamsLength = params.length;
      // --- Build WHERE clause fragment ---
      try {
        let conditionFragment = '';
        let needsParam = true; // Most operators need a parameter
        switch (operator.toLowerCase()) {
          case 'eq': operator = '='; break;
          case 'ne': operator = '<>'; break;
          case 'gt': operator = '>'; break;
          case 'gte': operator = '>='; break;
          case 'lt': operator = '<'; break;
          case 'lte': operator = '<='; break;
          case 'like': operator = 'LIKE'; value = `%${value}%`; break; // Add wildcards for LIKE
          case 'ilike': operator = 'ILIKE'; value = `%${value}%`; break; // Add wildcards for ILIKE
          case 'between': {
            const [val1, val2] = String(value).split(',');
            if (val1 !== undefined && val2 !== undefined) {
              conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
              params.push(parseValue(val1, valueType), parseValue(val2, valueType));
              needsParam = false; // Params added manually
            } else {
              console.warn(`Invalid 'between' value for ${key}: ${value}`);
              continue; // Skip this filter
            }
            break;
          }
          case 'in': {
            const inValues = String(value).split(',');
            if (inValues.length > 0) {
              const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
              conditionFragment = `${dbColumn} IN (${placeholders})`;
              params.push(...inValues.map(v => parseValue(v, valueType))); // Add all parsed values
              needsParam = false; // Params added manually
            } else {
              console.warn(`Invalid 'in' value for ${key}: ${value}`);
              continue; // Skip this filter
            }
            break;
          }
          // Add other operators as needed (IS NULL, IS NOT NULL, etc.)
          case '=': // Keep default '='
          default: operator = '='; break; // Ensure default is handled
        }
        if (needsParam) {
          conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
          params.push(parseValue(value, valueType));
        }
        if (conditionFragment) {
          conditions.push(`(${conditionFragment})`); // Wrap condition in parentheses
        }
      } catch (parseError) {
        console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
        // Roll back any placeholders/params reserved before the failure so the
        // $n numbering stays aligned with the params array.
        paramCounter = savedParamCounter;
        params.length = savedParamsLength;
      }
    }
    // --- Construct and Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
    // Debug log of conditions and parameters
    console.log('Constructed WHERE conditions:', conditions);
    console.log('Parameters:', params);
    // Count Query
    const countSql = `SELECT COUNT(*) AS total FROM public.product_metrics pm ${whereClause}`;
    console.log('Executing Count Query:', countSql, params);
    const countPromise = pool.query(countSql, params);
    // Data Query (Select all columns from metrics table for now)
    const dataSql = `
      SELECT pm.*
      FROM public.product_metrics pm
      ${whereClause}
      ORDER BY ${orderByClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];
    // Log detailed query information for debugging
    console.log('Executing Data Query:');
    console.log(' - Sort Column:', dbColumn);
    console.log(' - Column Type:', columnType);
    console.log(' - Sort Direction:', sortDirection);
    console.log(' - Order By Clause:', orderByClause);
    console.log(' - Full SQL:', dataSql);
    console.log(' - Parameters:', dataParams);
    const dataPromise = pool.query(dataSql, dataParams);
    // Execute queries in parallel
    const [countResult, dataResult] = await Promise.all([countPromise, dataPromise]);
    const total = parseInt(countResult.rows[0].total, 10);
    const metrics = dataResult.rows;
    console.log(`Total: ${total}, Fetched: ${metrics.length} for page ${page}`);
    // --- Respond ---
    res.json({
      metrics,
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit,
      },
      // Optionally include applied filters/sort for frontend confirmation
      appliedQuery: {
        filters: req.query, // Send back raw query filters
        sort: sortQueryKey,
        order: sortDirection.toLowerCase()
      }
    });
  } catch (error) {
    console.error('Error fetching metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch product metrics list.' });
  }
});
// GET /metrics/:pid - Get metrics for a single product
router.get('/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  const pid = Number.parseInt(req.params.pid, 10);
  if (Number.isNaN(pid)) {
    return res.status(400).json({ error: 'Invalid Product ID.' });
  }
  console.log(`GET /metrics/${pid}`);
  try {
    const result = await pool.query(
      `SELECT * FROM public.product_metrics WHERE pid = $1`,
      [pid]
    );
    if (result.rows.length === 0) {
      console.log(`Metrics not found for PID: ${pid}`);
      return res.status(404).json({ error: 'Metrics not found for this product.' });
    }
    console.log(`Metrics found for PID: ${pid}`);
    // Metrics are pre-computed upstream; return the single matching row as-is.
    res.json(result.rows[0]);
  } catch (error) {
    console.error(`Error fetching metrics for PID ${pid}:`, error);
    res.status(500).json({ error: 'Failed to fetch product metrics.' });
  }
});
/**
 * Parses a raw query-string value into the JS type expected by the database driver.
 *
 * @param {*} value - Raw value from req.query (usually a string).
 * @param {string} type - Logical column type: 'number' (alias 'numeric'),
 *   'boolean', 'date', or 'string'.
 * @returns {number|boolean|string|null} Parsed value, or null for empty/missing input.
 * @throws {Error} If the value cannot be parsed as the requested type.
 */
function parseValue(value, type) {
  // Treat missing/empty input as SQL NULL rather than guessing a default.
  if (value === null || value === undefined || value === '') return null;
  switch (type) {
    // CONSISTENCY FIX: COLUMN_TYPES categorizes these columns under the key
    // 'numeric'; accept that spelling alongside 'number' so callers using
    // either name parse correctly.
    case 'number':
    case 'numeric': {
      const num = parseFloat(value);
      if (isNaN(num)) throw new Error(`Invalid number format: "${value}"`);
      return num;
    }
    case 'boolean':
      if (String(value).toLowerCase() === 'true') return true;
      if (String(value).toLowerCase() === 'false') return false;
      throw new Error(`Invalid boolean format: "${value}"`);
    case 'date':
      // Basic shape check only; the database performs the real date conversion.
      if (!String(value).match(/^\d{4}-\d{2}-\d{2}$/)) {
        // Full timestamps are tolerated; tighten this if stricter input is required.
      }
      return String(value); // Send as string, let DB handle it
    case 'string':
    default:
      return String(value);
  }
}
module.exports = router; // Expose the metrics router for mounting by the app

View File

@@ -1,261 +0,0 @@
const express = require('express');
const router = express.Router();
// Get all orders with pagination, filtering, and sorting
// NOTE(review): the min/max amount filters reference the aggregate alias
// `total_amount` directly in WHERE; the count query exposes it via the
// `totals` subquery join, but the grouped data query below has no such
// alias available in WHERE — confirm this filter path against a live DB.
// NOTE(review): `sortColumn` from the query string is interpolated into
// ORDER BY unvalidated — whitelist it to rule out SQL injection.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Pagination, search, and filter inputs (all optional).
    const page = parseInt(req.query.page) || 1;
    const limit = parseInt(req.query.limit) || 50;
    const offset = (page - 1) * limit;
    const search = req.query.search || '';
    const status = req.query.status || 'all';
    const fromDate = req.query.fromDate ? new Date(req.query.fromDate) : null;
    const toDate = req.query.toDate ? new Date(req.query.toDate) : null;
    const minAmount = parseFloat(req.query.minAmount) || 0;
    const maxAmount = req.query.maxAmount ? parseFloat(req.query.maxAmount) : null;
    const sortColumn = req.query.sortColumn || 'date';
    const sortDirection = req.query.sortDirection === 'desc' ? 'DESC' : 'ASC';
    // Build the WHERE clause
    const conditions = ['o1.canceled = false'];
    const params = [];
    let paramCounter = 1;
    if (search) {
      conditions.push(`(o1.order_number ILIKE $${paramCounter} OR o1.customer ILIKE $${paramCounter})`);
      params.push(`%${search}%`);
      paramCounter++;
    }
    if (status !== 'all') {
      conditions.push(`o1.status = $${paramCounter}`);
      params.push(status);
      paramCounter++;
    }
    if (fromDate) {
      conditions.push(`DATE(o1.date) >= DATE($${paramCounter})`);
      params.push(fromDate.toISOString());
      paramCounter++;
    }
    if (toDate) {
      conditions.push(`DATE(o1.date) <= DATE($${paramCounter})`);
      params.push(toDate.toISOString());
      paramCounter++;
    }
    if (minAmount > 0) {
      conditions.push(`total_amount >= $${paramCounter}`);
      params.push(minAmount);
      paramCounter++;
    }
    if (maxAmount) {
      conditions.push(`total_amount <= $${paramCounter}`);
      params.push(maxAmount);
      paramCounter++;
    }
    // Get total count for pagination
    // The totals subquery computes per-order amounts so the amount filters can apply.
    const { rows: [countResult] } = await pool.query(`
      SELECT COUNT(DISTINCT o1.order_number) as total
      FROM orders o1
      LEFT JOIN (
        SELECT order_number, SUM(price * quantity) as total_amount
        FROM orders
        GROUP BY order_number
      ) totals ON o1.order_number = totals.order_number
      WHERE ${conditions.join(' AND ')}
    `, params);
    const total = countResult.total;
    // Get paginated results
    // Self-join so each header row (o1) aggregates its line items (o2).
    const query = `
      SELECT
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        COUNT(o2.pid) as items_count,
        ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount
      FROM orders o1
      JOIN orders o2 ON o1.order_number = o2.order_number
      WHERE ${conditions.join(' AND ')}
      GROUP BY
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method
      ORDER BY ${
        sortColumn === 'items_count' || sortColumn === 'total_amount'
          ? `${sortColumn} ${sortDirection}`
          : `o1.${sortColumn} ${sortDirection}`
      }
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    params.push(limit, offset);
    const { rows } = await pool.query(query, params);
    // Get order statistics
    // 30-day window vs. the previous 30-day window, for growth percentages.
    const { rows: [orderStats] } = await pool.query(`
      WITH CurrentStats AS (
        SELECT
          COUNT(DISTINCT order_number) as total_orders,
          ROUND(SUM(price * quantity)::numeric, 3) as total_revenue
        FROM orders
        WHERE canceled = false
        AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days'
      ),
      PreviousStats AS (
        SELECT
          COUNT(DISTINCT order_number) as prev_orders,
          ROUND(SUM(price * quantity)::numeric, 3) as prev_revenue
        FROM orders
        WHERE canceled = false
        AND DATE(date) BETWEEN CURRENT_DATE - INTERVAL '60 days' AND CURRENT_DATE - INTERVAL '30 days'
      ),
      OrderValues AS (
        SELECT
          order_number,
          ROUND(SUM(price * quantity)::numeric, 3) as order_value
        FROM orders
        WHERE canceled = false
        AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY order_number
      )
      SELECT
        cs.total_orders,
        cs.total_revenue,
        CASE
          WHEN ps.prev_orders > 0
          THEN ROUND(((cs.total_orders - ps.prev_orders)::numeric / ps.prev_orders * 100), 1)
          ELSE 0
        END as order_growth,
        CASE
          WHEN ps.prev_revenue > 0
          THEN ROUND(((cs.total_revenue - ps.prev_revenue)::numeric / ps.prev_revenue * 100), 1)
          ELSE 0
        END as revenue_growth,
        CASE
          WHEN cs.total_orders > 0
          THEN ROUND((cs.total_revenue::numeric / cs.total_orders), 3)
          ELSE 0
        END as average_order_value,
        CASE
          WHEN ps.prev_orders > 0
          THEN ROUND((ps.prev_revenue::numeric / ps.prev_orders), 3)
          ELSE 0
        END as prev_average_order_value
      FROM CurrentStats cs
      CROSS JOIN PreviousStats ps
    `);
    // Coerce the driver's string numerics into JS numbers for the frontend.
    res.json({
      orders: rows.map(row => ({
        ...row,
        total_amount: parseFloat(row.total_amount) || 0,
        items_count: parseInt(row.items_count) || 0,
        date: row.date
      })),
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit
      },
      stats: {
        totalOrders: parseInt(orderStats.total_orders) || 0,
        totalRevenue: parseFloat(orderStats.total_revenue) || 0,
        orderGrowth: parseFloat(orderStats.order_growth) || 0,
        revenueGrowth: parseFloat(orderStats.revenue_growth) || 0,
        averageOrderValue: parseFloat(orderStats.average_order_value) || 0,
        aovGrowth: orderStats.prev_average_order_value > 0
          ? ((orderStats.average_order_value - orderStats.prev_average_order_value) / orderStats.prev_average_order_value * 100)
          : 0,
        conversionRate: 2.5, // Placeholder - would need actual visitor data
        conversionGrowth: 0.5 // Placeholder - would need actual visitor data
      }
    });
  } catch (error) {
    console.error('Error fetching orders:', error);
    res.status(500).json({ error: 'Failed to fetch orders' });
  }
});
// Get a single order with its items
// Responds with the order header (aggregated totals) plus a line-item array;
// 404 when the order number is unknown or the order is canceled.
router.get('/:orderNumber', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Get order details
    // Self-join aggregates the order's line items into items_count/total_amount.
    const { rows: orderRows } = await pool.query(`
      SELECT DISTINCT
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        o1.shipping_address,
        o1.billing_address,
        COUNT(o2.pid) as items_count,
        ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount
      FROM orders o1
      JOIN orders o2 ON o1.order_number = o2.order_number
      WHERE o1.order_number = $1 AND o1.canceled = false
      GROUP BY
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        o1.shipping_address,
        o1.billing_address
    `, [req.params.orderNumber]);
    if (orderRows.length === 0) {
      return res.status(404).json({ error: 'Order not found' });
    }
    // Get order items
    const { rows: itemRows } = await pool.query(`
      SELECT
        o.pid,
        p.title,
        p.SKU,
        o.quantity,
        o.price,
        ROUND((o.price * o.quantity)::numeric, 3) as total
      FROM orders o
      JOIN products p ON o.pid = p.pid
      WHERE o.order_number = $1 AND o.canceled = false
    `, [req.params.orderNumber]);
    // Coerce the driver's string numerics into JS numbers for the frontend.
    const order = {
      ...orderRows[0],
      total_amount: parseFloat(orderRows[0].total_amount) || 0,
      items_count: parseInt(orderRows[0].items_count) || 0,
      items: itemRows.map(item => ({
        ...item,
        price: parseFloat(item.price) || 0,
        total: parseFloat(item.total) || 0,
        quantity: parseInt(item.quantity) || 0
      }))
    };
    res.json(order);
  } catch (error) {
    console.error('Error fetching order:', error);
    res.status(500).json({ error: 'Failed to fetch order' });
  }
});
module.exports = router;

View File

@@ -1,747 +0,0 @@
const express = require('express');
const router = express.Router();
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const { importProductsFromCSV } = require('../utils/csvImporter');
const { PurchaseOrderStatus, ReceivingStatus } = require('../types/status-codes');
// Configure multer for file uploads without silent fallbacks
const configuredUploadsDir = process.env.UPLOADS_DIR;
// Resolve UPLOADS_DIR relative to the project root unless it is absolute;
// default to <project root>/uploads when the variable is unset.
const uploadsDir = configuredUploadsDir
  ? (path.isAbsolute(configuredUploadsDir)
    ? configuredUploadsDir
    : path.resolve(__dirname, '../../', configuredUploadsDir))
  : path.resolve(__dirname, '../../uploads');
try {
  fs.mkdirSync(uploadsDir, { recursive: true });
} catch (error) {
  // Fail fast at module load: a missing uploads dir would break every upload route.
  console.error(`Failed to initialize uploads directory at ${uploadsDir}:`, error);
  throw error;
}
const upload = multer({ dest: uploadsDir });
// Get unique brands
router.get('/brands', async (req, res) => {
  const { url, method, headers, path } = req;
  console.log('Brands endpoint hit:', { url, method, headers, path });
  try {
    const pool = req.app.locals.pool;
    console.log('Fetching brands from database...');
    // NULL brands are surfaced as the literal 'Unbranded' bucket.
    const result = await pool.query(`
      SELECT DISTINCT COALESCE(p.brand, 'Unbranded') as brand
      FROM products p
      WHERE p.visible = true
      ORDER BY COALESCE(p.brand, 'Unbranded')
    `);
    console.log(`Found ${result.rows.length} brands:`, result.rows.slice(0, 3));
    const brandNames = result.rows.map((row) => row.brand);
    res.json(brandNames);
  } catch (error) {
    console.error('Error fetching brands:', error);
    res.status(500).json({ error: 'Failed to fetch brands' });
  }
});
// GET / -- paginated, filterable, sortable product listing.
// Query params: page, limit, sort, order, plus optional filters:
//   - free-text `search` (title/SKU/barcode), per-field ILIKE text filters,
//   - numeric filters with a `<field>_operator` companion param,
//   - date-prefix filters, and exact-match select filters.
// Responds with { products, pagination, filters }.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const page = parseInt(req.query.page, 10) || 1;
    const limit = parseInt(req.query.limit, 10) || 50;
    const offset = (page - 1) * limit;

    // SECURITY FIX: `sort` and the per-field comparison operators are
    // interpolated directly into the SQL text below, so they must be
    // validated first. Previously the raw query-string values were spliced
    // in verbatim, allowing SQL injection via ?sort= or ?<field>_operator=.
    const requestedSort = req.query.sort || 'title';
    const sortColumn = /^[A-Za-z_][A-Za-z0-9_.]*$/.test(requestedSort)
      ? requestedSort
      : 'title';
    const sortDirection = req.query.order === 'desc' ? 'DESC' : 'ASC';
    const allowedOperators = new Set(['=', '!=', '<>', '<', '<=', '>', '>=']);

    const conditions = ['p.visible = true'];
    const params = [];
    let paramCounter = 1;

    // Default to replenishable products unless explicitly showing non-replenishable.
    if (req.query.showNonReplenishable !== 'true') {
      conditions.push('p.replenishable = true');
    }

    // Free-text search across title, SKU and barcode (single shared param).
    if (req.query.search) {
      conditions.push(`(p.title ILIKE $${paramCounter} OR p.SKU ILIKE $${paramCounter} OR p.barcode ILIKE $${paramCounter})`);
      params.push(`%${req.query.search}%`);
      paramCounter++;
    }

    // Per-field substring (ILIKE) text filters, table-driven.
    const textFilters = {
      barcode: 'p.barcode',
      vendor_reference: 'p.vendor_reference',
      description: 'p.description',
      harmonized_tariff_code: 'p.harmonized_tariff_code',
      notions_reference: 'p.notions_reference',
      line: 'p.line',
      subline: 'p.subline',
      artist: 'p.artist',
      country_of_origin: 'p.country_of_origin',
      location: 'p.location'
    };
    for (const [key, column] of Object.entries(textFilters)) {
      if (req.query[key]) {
        conditions.push(`${column} ILIKE $${paramCounter}`);
        params.push(`%${req.query[key]}%`);
        paramCounter++;
      }
    }

    // Numeric filters: query key -> DB column. The companion
    // `<key>_operator` param selects the comparison ('between' takes a
    // JSON [min, max] pair).
    const numericFields = {
      stock: 'p.stock_quantity',
      price: 'p.price',
      costPrice: 'p.cost_price',
      landingCost: 'p.landing_cost_price',
      dailySalesAvg: 'pm.daily_sales_avg',
      weeklySalesAvg: 'pm.weekly_sales_avg',
      monthlySalesAvg: 'pm.monthly_sales_avg',
      avgQuantityPerOrder: 'pm.avg_quantity_per_order',
      numberOfOrders: 'pm.number_of_orders',
      margin: 'pm.avg_margin_percent',
      gmroi: 'pm.gmroi',
      inventoryValue: 'pm.inventory_value',
      costOfGoodsSold: 'pm.cost_of_goods_sold',
      grossProfit: 'pm.gross_profit',
      turnoverRate: 'pm.turnover_rate',
      leadTime: 'pm.current_lead_time',
      currentLeadTime: 'pm.current_lead_time',
      targetLeadTime: 'pm.target_lead_time',
      stockCoverage: 'pm.days_of_inventory',
      daysOfStock: 'pm.days_of_inventory',
      weeksOfStock: 'pm.weeks_of_inventory',
      reorderPoint: 'pm.reorder_point',
      safetyStock: 'pm.safety_stock',
      preorderCount: 'p.preorder_count',
      notionsInvCount: 'p.notions_inv_count',
      rating: 'p.rating',
      reviews: 'p.reviews',
      weight: 'p.weight',
      totalSold: 'p.total_sold',
      baskets: 'p.baskets',
      notifies: 'p.notifies'
    };
    Object.entries(req.query).forEach(([key, value]) => {
      const field = numericFields[key];
      if (!field) return;
      const operator = req.query[`${key}_operator`] || '=';
      if (operator === 'between') {
        try {
          const [min, max] = JSON.parse(value);
          conditions.push(`${field} BETWEEN $${paramCounter} AND $${paramCounter + 1}`);
          params.push(min, max);
          paramCounter += 2;
        } catch (e) {
          console.error(`Invalid between value for ${key}:`, value);
        }
      } else if (allowedOperators.has(operator)) {
        const numericValue = parseFloat(value);
        if (Number.isNaN(numericValue)) {
          // Skip unparsable values instead of sending NaN to Postgres.
          console.error(`Invalid numeric value for ${key}:`, value);
          return;
        }
        conditions.push(`${field} ${operator} $${paramCounter}`);
        params.push(numericValue);
        paramCounter++;
      } else {
        // Unknown operator: skip rather than splice untrusted text into SQL.
        console.error(`Invalid operator for ${key}:`, operator);
      }
    });

    // Date filters match by text prefix, e.g. '2023-01' (month) or
    // '2023-01-01' (exact day).
    const dateFields = {
      firstSaleDate: 'pm.first_sale_date',
      lastSaleDate: 'pm.last_sale_date',
      lastPurchaseDate: 'pm.last_purchase_date',
      firstReceivedDate: 'pm.first_received_date',
      lastReceivedDate: 'pm.last_received_date'
    };
    Object.entries(req.query).forEach(([key, value]) => {
      const field = dateFields[key];
      if (field) {
        conditions.push(`${field}::TEXT LIKE $${paramCounter}`);
        params.push(`${value}%`);
        paramCounter++;
      }
    });

    // Exact-match select filters.
    if (req.query.vendor) {
      conditions.push(`p.vendor = $${paramCounter}`);
      params.push(req.query.vendor);
      paramCounter++;
    }
    if (req.query.brand) {
      conditions.push(`p.brand = $${paramCounter}`);
      params.push(req.query.brand);
      paramCounter++;
    }
    if (req.query.category) {
      conditions.push(`p.categories ILIKE $${paramCounter}`);
      params.push(`%${req.query.category}%`);
      paramCounter++;
    }
    if (req.query.stockStatus && req.query.stockStatus !== 'all') {
      conditions.push(`pm.stock_status = $${paramCounter}`);
      params.push(req.query.stockStatus);
      paramCounter++;
    }
    if (req.query.abcClass) {
      conditions.push(`pm.abc_class = $${paramCounter}`);
      params.push(req.query.abcClass);
      paramCounter++;
    }
    if (req.query.leadTimeStatus) {
      conditions.push(`pm.lead_time_status = $${paramCounter}`);
      params.push(req.query.leadTimeStatus);
      paramCounter++;
    }
    if (req.query.replenishable !== undefined) {
      conditions.push(`p.replenishable = $${paramCounter}`);
      params.push(req.query.replenishable === 'true');
      paramCounter++;
    }
    if (req.query.managingStock !== undefined) {
      conditions.push(`p.managing_stock = $${paramCounter}`);
      params.push(req.query.managingStock === 'true');
      paramCounter++;
    }

    const whereClause = conditions.length > 0 ? 'WHERE ' + conditions.join(' AND ') : '';

    const countQuery = `
      SELECT COUNT(DISTINCT p.pid) as total
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      ${whereClause}
    `;

    // Count + filter-option lookups are independent; run them in parallel.
    const [
      { rows: [countResult] },
      { rows: categories },
      { rows: vendors },
      { rows: brands }
    ] = await Promise.all([
      pool.query(countQuery, params),
      pool.query('SELECT name FROM categories ORDER BY name'),
      pool.query('SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != \'\' ORDER BY vendor'),
      pool.query('SELECT DISTINCT COALESCE(brand, \'Unbranded\') as brand FROM products WHERE visible = true ORDER BY brand')
    ]);
    const total = countResult.total;

    // Main query with all fields. sortColumn/sortDirection are validated above.
    const query = `
      WITH RECURSIVE
      category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL
        UNION ALL
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      ),
      product_thresholds AS (
        SELECT
          p.pid,
          COALESCE(
            (SELECT overstock_days FROM stock_thresholds st
             WHERE st.category_id IN (
               SELECT pc.cat_id
               FROM product_categories pc
               WHERE pc.pid = p.pid
             )
             AND (st.vendor = p.vendor OR st.vendor IS NULL)
             ORDER BY st.vendor IS NULL
             LIMIT 1),
            (SELECT overstock_days FROM stock_thresholds st
             WHERE st.category_id IS NULL
             AND (st.vendor = p.vendor OR st.vendor IS NULL)
             ORDER BY st.vendor IS NULL
             LIMIT 1),
            90
          ) as target_days
        FROM products p
      ),
      product_leaf_categories AS (
        SELECT DISTINCT pc.cat_id
        FROM product_categories pc
        WHERE NOT EXISTS (
          SELECT 1
          FROM categories child
          JOIN product_categories child_pc ON child.cat_id = child_pc.cat_id
          WHERE child.parent_id = pc.cat_id
          AND child_pc.pid = pc.pid
        )
      )
      SELECT
        p.*,
        COALESCE(p.brand, 'Unbranded') as brand,
        string_agg(DISTINCT (c.cat_id || ':' || c.name), ',') as categories,
        pm.daily_sales_avg,
        pm.weekly_sales_avg,
        pm.monthly_sales_avg,
        pm.avg_quantity_per_order,
        pm.number_of_orders,
        pm.first_sale_date,
        pm.last_sale_date,
        pm.days_of_inventory,
        pm.weeks_of_inventory,
        pm.reorder_point,
        pm.safety_stock,
        pm.avg_margin_percent,
        CAST(pm.total_revenue AS DECIMAL(15,3)) as total_revenue,
        CAST(pm.inventory_value AS DECIMAL(15,3)) as inventory_value,
        CAST(pm.cost_of_goods_sold AS DECIMAL(15,3)) as cost_of_goods_sold,
        CAST(pm.gross_profit AS DECIMAL(15,3)) as gross_profit,
        pm.gmroi,
        pm.avg_lead_time_days,
        pm.last_purchase_date,
        pm.last_received_date,
        pm.abc_class,
        pm.stock_status,
        pm.turnover_rate,
        p.date_last_sold
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      LEFT JOIN product_categories pc ON p.pid = pc.pid
      LEFT JOIN categories c ON pc.cat_id = c.cat_id
      ${whereClause}
      GROUP BY p.pid, pm.pid
      ORDER BY ${sortColumn} ${sortDirection}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    params.push(limit, offset);
    const { rows: products } = await pool.query(query, params);

    res.json({
      products,
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit
      },
      filters: {
        categories: categories.map(c => c.name),
        vendors: vendors.map(v => v.vendor),
        brands: brands.map(b => b.brand)
      }
    });
  } catch (error) {
    console.error('Error fetching products:', error);
    res.status(500).json({ error: 'Failed to fetch products' });
  }
});
// GET /trending -- products ranked by short-term growth rate
// (daily average vs weekly average sales). Returns [] when no product
// has recorded metrics yet.
router.get('/trending', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Quick existence check before running the ranking query.
    const statsResult = await pool.query(`
      SELECT COUNT(*) as count,
        MAX(total_revenue) as max_revenue,
        MAX(daily_sales_avg) as max_daily_sales,
        COUNT(DISTINCT pid) as products_with_metrics
      FROM product_metrics
      WHERE total_revenue > 0 OR daily_sales_avg > 0
    `);
    const stats = statsResult.rows[0];
    console.log('Product metrics stats:', stats);
    if (parseInt(stats.count) === 0) {
      console.log('No products with metrics found');
      return res.json([]);
    }
    const trendingResult = await pool.query(`
      SELECT
        p.pid,
        p.sku,
        p.title,
        COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
        COALESCE(pm.weekly_sales_avg, 0) as weekly_sales_avg,
        CASE
          WHEN pm.weekly_sales_avg > 0 AND pm.daily_sales_avg > 0
          THEN ((pm.daily_sales_avg - pm.weekly_sales_avg) / pm.weekly_sales_avg) * 100
          ELSE 0
        END as growth_rate,
        COALESCE(pm.total_revenue, 0) as total_revenue
      FROM products p
      INNER JOIN product_metrics pm ON p.pid = pm.pid
      WHERE (pm.total_revenue > 0 OR pm.daily_sales_avg > 0)
      AND p.visible = true
      ORDER BY growth_rate DESC
      LIMIT 50
    `);
    console.log('Trending products:', trendingResult.rows);
    res.json(trendingResult.rows);
  } catch (error) {
    console.error('Error fetching trending products:', error);
    res.status(500).json({ error: 'Failed to fetch trending products' });
  }
});
// GET /:id -- full detail for one product: base record, metrics, leaf
// categories and their full "A > B > C" paths, with numeric fields
// coerced to JS numbers for the client.
router.get('/:id', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const id = parseInt(req.params.id, 10);
    // FIX: reject non-numeric ids up front (previously NaN reached
    // Postgres and surfaced as a 500).
    if (Number.isNaN(id)) {
      return res.status(400).json({ error: 'Invalid product id' });
    }
    // Product details joined with its metrics row (if any).
    const { rows: productRows } = await pool.query(`
      SELECT
        p.*,
        pm.daily_sales_avg,
        pm.weekly_sales_avg,
        pm.monthly_sales_avg,
        pm.days_of_inventory,
        pm.reorder_point,
        pm.safety_stock,
        pm.stock_status,
        pm.abc_class,
        pm.avg_margin_percent,
        pm.total_revenue,
        pm.inventory_value,
        pm.turnover_rate,
        pm.gmroi,
        pm.cost_of_goods_sold,
        pm.gross_profit,
        pm.avg_lead_time_days,
        pm.current_lead_time,
        pm.target_lead_time,
        pm.lead_time_status,
        pm.reorder_qty,
        pm.overstocked_amt
      FROM products p
      LEFT JOIN product_metrics pm ON p.pid = pm.pid
      WHERE p.pid = $1
    `, [id]);
    if (!productRows.length) {
      return res.status(404).json({ error: 'Product not found' });
    }
    // Categories and their paths are fetched separately to avoid GROUP BY
    // issues. Only "leaf" categories are returned: categories assigned to
    // the product that have no assigned child category.
    const { rows: categoryRows } = await pool.query(`
      WITH RECURSIVE
      category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL
        UNION ALL
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          (cp.path || ' > ' || c.name)::text
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      ),
      product_leaf_categories AS (
        -- Find categories assigned to this product that aren't parents
        -- of other categories assigned to this product
        SELECT pc.cat_id
        FROM product_categories pc
        WHERE pc.pid = $1
        AND NOT EXISTS (
          -- Check if there are any child categories also assigned to this product
          SELECT 1
          FROM categories child
          JOIN product_categories child_pc ON child.cat_id = child_pc.cat_id
          WHERE child.parent_id = pc.cat_id
          AND child_pc.pid = pc.pid
        )
      )
      SELECT
        c.cat_id,
        c.name as category_name,
        cp.path as full_path
      FROM product_categories pc
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      JOIN product_leaf_categories plc ON c.cat_id = plc.cat_id
      WHERE pc.pid = $2
      ORDER BY cp.path
    `, [id, id]);
    // Map "catId:name" -> full path; cat_id in the key disambiguates
    // categories that share a name.
    const categoryPathMap = categoryRows.reduce((acc, row) => {
      acc[`${row.cat_id}:${row.category_name}`] = row.full_path;
      return acc;
    }, {});
    const product = {
      ...productRows[0],
      // Include cat_id in categories array to match the keys in categoryPathMap
      categories: categoryRows.map(row => `${row.cat_id}:${row.category_name}`),
      category_paths: categoryPathMap,
      price: parseFloat(productRows[0].price),
      regular_price: parseFloat(productRows[0].regular_price),
      cost_price: parseFloat(productRows[0].cost_price),
      landing_cost_price: parseFloat(productRows[0].landing_cost_price),
      stock_quantity: parseInt(productRows[0].stock_quantity, 10),
      moq: parseInt(productRows[0].moq, 10),
      uom: parseInt(productRows[0].uom, 10),
      managing_stock: Boolean(productRows[0].managing_stock),
      replenishable: Boolean(productRows[0].replenishable),
      // Catalog metadata fields (defaulted when missing)
      preorder_count: parseInt(productRows[0].preorder_count || 0, 10),
      notions_inv_count: parseInt(productRows[0].notions_inv_count || 0, 10),
      harmonized_tariff_code: productRows[0].harmonized_tariff_code || '',
      notions_reference: productRows[0].notions_reference || '',
      line: productRows[0].line || '',
      subline: productRows[0].subline || '',
      artist: productRows[0].artist || '',
      rating: parseFloat(productRows[0].rating || 0),
      reviews: parseInt(productRows[0].reviews || 0, 10),
      weight: parseFloat(productRows[0].weight || 0),
      dimensions: {
        length: parseFloat(productRows[0].length || 0),
        width: parseFloat(productRows[0].width || 0),
        height: parseFloat(productRows[0].height || 0),
      },
      country_of_origin: productRows[0].country_of_origin || '',
      location: productRows[0].location || '',
      total_sold: parseInt(productRows[0].total_sold || 0, 10),
      baskets: parseInt(productRows[0].baskets || 0, 10),
      notifies: parseInt(productRows[0].notifies || 0, 10),
      date_last_sold: productRows[0].date_last_sold || null,
      // Analytics fields (numeric strings from Postgres -> numbers)
      daily_sales_avg: parseFloat(productRows[0].daily_sales_avg) || 0,
      weekly_sales_avg: parseFloat(productRows[0].weekly_sales_avg) || 0,
      monthly_sales_avg: parseFloat(productRows[0].monthly_sales_avg) || 0,
      avg_quantity_per_order: parseFloat(productRows[0].avg_quantity_per_order) || 0,
      number_of_orders: parseInt(productRows[0].number_of_orders, 10) || 0,
      first_sale_date: productRows[0].first_sale_date || null,
      last_sale_date: productRows[0].last_sale_date || null,
      days_of_inventory: parseFloat(productRows[0].days_of_inventory) || 0,
      weeks_of_inventory: parseFloat(productRows[0].weeks_of_inventory) || 0,
      reorder_point: parseFloat(productRows[0].reorder_point) || 0,
      safety_stock: parseFloat(productRows[0].safety_stock) || 0,
      avg_margin_percent: parseFloat(productRows[0].avg_margin_percent) || 0,
      total_revenue: parseFloat(productRows[0].total_revenue) || 0,
      inventory_value: parseFloat(productRows[0].inventory_value) || 0,
      cost_of_goods_sold: parseFloat(productRows[0].cost_of_goods_sold) || 0,
      gross_profit: parseFloat(productRows[0].gross_profit) || 0,
      gmroi: parseFloat(productRows[0].gmroi) || 0,
      avg_lead_time_days: parseFloat(productRows[0].avg_lead_time_days) || 0,
      current_lead_time: parseFloat(productRows[0].current_lead_time) || 0,
      target_lead_time: parseFloat(productRows[0].target_lead_time) || 0,
      lead_time_status: productRows[0].lead_time_status || null,
      reorder_qty: parseInt(productRows[0].reorder_qty, 10) || 0,
      overstocked_amt: parseInt(productRows[0].overstocked_amt, 10) || 0
    };
    res.json(product);
  } catch (error) {
    console.error('Error fetching product:', error);
    res.status(500).json({ error: 'Failed to fetch product' });
  }
});
// GET /:id/time-series -- sales history for one product: last 12 months
// of aggregated sales, the 10 most recent orders, and the 10 most recent
// purchase orders with computed lead times.
router.get('/:id/time-series', async (req, res) => {
  const { id } = req.params;
  try {
    const pool = req.app.locals.pool;
    // Monthly sales aggregates (non-canceled orders only).
    const { rows: monthlySales } = await pool.query(`
      SELECT
        TO_CHAR(date, 'YYYY-MM') as month,
        COUNT(DISTINCT order_number) as order_count,
        SUM(quantity) as units_sold,
        ROUND(SUM(price * quantity)::numeric, 3) as revenue
      FROM orders
      WHERE pid = $1
      AND canceled = false
      GROUP BY TO_CHAR(date, 'YYYY-MM')
      ORDER BY month DESC
      LIMIT 12
    `, [id]);
    const formattedMonthlySales = monthlySales.map(month => ({
      month: month.month,
      order_count: parseInt(month.order_count, 10),
      units_sold: parseInt(month.units_sold, 10),
      revenue: parseFloat(month.revenue),
      profit: 0 // Set to 0 since we don't have cost data in orders table
    }));
    // Most recent orders.
    const { rows: recentOrders } = await pool.query(`
      SELECT
        TO_CHAR(date, 'YYYY-MM-DD') as date,
        order_number,
        quantity,
        price,
        discount,
        tax,
        shipping,
        customer_name as customer,
        status
      FROM orders
      WHERE pid = $1
      AND canceled = false
      ORDER BY date DESC
      LIMIT 10
    `, [id]);
    // Most recent purchase orders; lead_time_days is the actual receive
    // delay, or (for overdue open POs) days past the expected date.
    const { rows: recentPurchases } = await pool.query(`
      SELECT
        TO_CHAR(date, 'YYYY-MM-DD') as date,
        TO_CHAR(expected_date, 'YYYY-MM-DD') as expected_date,
        TO_CHAR(received_date, 'YYYY-MM-DD') as received_date,
        po_id,
        ordered,
        received,
        status,
        receiving_status,
        cost_price,
        notes,
        CASE
          WHEN received_date IS NOT NULL THEN
            (received_date - date)
          WHEN expected_date < CURRENT_DATE AND status < $2 THEN
            (CURRENT_DATE - expected_date)
          ELSE NULL
        END as lead_time_days
      FROM purchase_orders
      WHERE pid = $1
      AND status != $3
      ORDER BY date DESC
      LIMIT 10
    `, [id, PurchaseOrderStatus.ReceivingStarted, PurchaseOrderStatus.Canceled]);
    res.json({
      monthly_sales: formattedMonthlySales,
      recent_orders: recentOrders.map(order => ({
        ...order,
        price: parseFloat(order.price),
        discount: parseFloat(order.discount),
        tax: parseFloat(order.tax),
        shipping: parseFloat(order.shipping),
        quantity: parseInt(order.quantity, 10)
      })),
      recent_purchases: recentPurchases.map(po => ({
        ...po,
        ordered: parseInt(po.ordered, 10),
        received: parseInt(po.received, 10),
        status: parseInt(po.status, 10),
        receiving_status: parseInt(po.receiving_status, 10),
        cost_price: parseFloat(po.cost_price),
        // FIX: use an explicit null check -- the previous truthiness test
        // (`po.lead_time_days ? ... : null`) mapped a legitimate 0-day
        // lead time (received same day) to null.
        lead_time_days: po.lead_time_days != null ? parseInt(po.lead_time_days, 10) : null
      }))
    });
  } catch (error) {
    console.error('Error fetching product time series:', error);
    res.status(500).json({ error: 'Failed to fetch product time series' });
  }
});
module.exports = router;

File diff suppressed because it is too large Load Diff

View File

@@ -1,396 +0,0 @@
const express = require('express');
const router = express.Router();
const multer = require('multer');
const path = require('path');
const fs = require('fs');
// On-disk location for shared ("reusable") image uploads; created eagerly
// (recursive, idempotent) so multer's diskStorage can write immediately.
// NOTE(review): path is hard-coded to this deployment -- consider an env var.
const uploadsDir = path.join('/var/www/html/inventory/uploads/reusable');
fs.mkdirSync(uploadsDir, { recursive: true });
// Multer disk-storage strategy for reusable images: every file lands in
// uploadsDir under a unique name that preserves (or infers) its extension.
const storage = multer.diskStorage({
  destination(req, file, cb) {
    console.log(`Saving reusable image to: ${uploadsDir}`);
    cb(null, uploadsDir);
  },
  filename(req, file, cb) {
    // Timestamp + random suffix keeps concurrent uploads from colliding.
    const uniqueSuffix = `${Date.now()}-${Math.round(Math.random() * 1E9)}`;
    let fileExt = path.extname(file.originalname).toLowerCase();
    // No extension on the original name: infer one from the mimetype.
    if (!fileExt) {
      const extByMime = {
        'image/jpeg': '.jpg',
        'image/png': '.png',
        'image/gif': '.gif',
        'image/webp': '.webp'
      };
      fileExt = extByMime[file.mimetype] || '.jpg'; // default to jpg
    }
    const fileName = `reusable-${uniqueSuffix}${fileExt}`;
    console.log(`Generated filename: ${fileName} with mimetype: ${file.mimetype}`);
    cb(null, fileName);
  }
});
// Upload middleware: 5MB cap, image files only (checked by both mimetype
// and file extension).
const upload = multer({
  storage,
  limits: {
    fileSize: 5 * 1024 * 1024, // 5MB max file size
  },
  fileFilter(req, file, cb) {
    const imagePattern = /jpeg|jpg|png|gif|webp/;
    const mimeOk = imagePattern.test(file.mimetype);
    const extOk = imagePattern.test(path.extname(file.originalname).toLowerCase());
    if (mimeOk && extOk) {
      return cb(null, true);
    }
    cb(new Error('Only image files are allowed'));
  }
});
// GET / -- list every reusable image, newest first.
router.get('/', async (req, res) => {
  try {
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      ORDER BY created_at DESC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching reusable images:', error);
    res.status(500).json({
      error: 'Failed to fetch reusable images',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// GET /by-company/:companyId -- images visible to one company: its own
// plus any marked global. Newest first.
router.get('/by-company/:companyId', async (req, res) => {
  try {
    const { companyId } = req.params;
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      WHERE is_global = true OR company = $1
      ORDER BY created_at DESC
    `, [companyId]);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching reusable images by company:', error);
    res.status(500).json({
      error: 'Failed to fetch reusable images by company',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// GET /global -- only images shared across all companies, newest first.
router.get('/global', async (req, res) => {
  try {
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      WHERE is_global = true
      ORDER BY created_at DESC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching global reusable images:', error);
    res.status(500).json({
      error: 'Failed to fetch global reusable images',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// GET /:id -- one reusable image record, or 404 if unknown.
router.get('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM reusable_images
      WHERE id = $1
    `, [id]);
    if (!rows.length) {
      return res.status(404).json({ error: 'Reusable image not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Error fetching reusable image:', error);
    res.status(500).json({
      error: 'Failed to fetch reusable image',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// POST /upload -- persist a new reusable image.
// Expects multipart/form-data: `image` (file) plus `name`, `is_global`,
// and -- for non-global images -- `company`. Writes the file via multer,
// verifies it landed on disk, then records it in the database.
router.post('/upload', upload.single('image'), async (req, res) => {
  try {
    if (!req.file) {
      return res.status(400).json({ error: 'No image file provided' });
    }
    const { name, is_global, company } = req.body;
    if (!name) {
      return res.status(400).json({ error: 'Image name is required' });
    }
    // Multipart form values arrive as strings; normalize to boolean.
    const isGlobal = is_global === 'true' || is_global === true;
    if (!isGlobal && !company) {
      return res.status(400).json({ error: 'Company is required for non-global images' });
    }
    console.log('Reusable image uploaded:', {
      filename: req.file.filename,
      originalname: req.file.originalname,
      mimetype: req.file.mimetype,
      size: req.file.size,
      path: req.file.path
    });
    // Sanity-check that multer actually wrote the file to disk.
    const filePath = path.join(uploadsDir, req.file.filename);
    if (!fs.existsSync(filePath)) {
      return res.status(500).json({ error: 'File was not saved correctly' });
    }
    // Public URL the frontend uses to display the image.
    const baseUrl = 'https://tools.acherryontop.com';
    const imageUrl = `${baseUrl}/uploads/reusable/${req.file.filename}`;
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const insertResult = await pool.query(`
      INSERT INTO reusable_images (
        name,
        filename,
        file_path,
        image_url,
        is_global,
        company,
        mime_type,
        file_size
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
      RETURNING *
    `, [
      name,
      req.file.filename,
      filePath,
      imageUrl,
      isGlobal,
      isGlobal ? null : company,
      req.file.mimetype,
      req.file.size
    ]);
    res.status(201).json({
      success: true,
      image: insertResult.rows[0],
      message: 'Image uploaded successfully'
    });
  } catch (error) {
    console.error('Error uploading reusable image:', error);
    res.status(500).json({ error: error.message || 'Failed to upload image' });
  }
});
// PUT /:id -- update an image's metadata (name, is_global, company).
// The file itself is untouched. Company is cleared for global images.
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const { name, is_global, company } = req.body;
    if (!name) {
      return res.status(400).json({ error: 'Image name is required' });
    }
    // Accept boolean or string form of is_global.
    const isGlobal = typeof is_global === 'string' ? is_global === 'true' : !!is_global;
    if (!isGlobal && !company) {
      return res.status(400).json({ error: 'Company is required for non-global images' });
    }
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // Verify the record exists before updating.
    const existing = await pool.query('SELECT * FROM reusable_images WHERE id = $1', [id]);
    if (!existing.rows.length) {
      return res.status(404).json({ error: 'Reusable image not found' });
    }
    const updated = await pool.query(`
      UPDATE reusable_images
      SET
        name = $1,
        is_global = $2,
        company = $3
      WHERE id = $4
      RETURNING *
    `, [
      name,
      isGlobal,
      isGlobal ? null : company,
      id
    ]);
    res.json(updated.rows[0]);
  } catch (error) {
    console.error('Error updating reusable image:', error);
    res.status(500).json({
      error: 'Failed to update reusable image',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// DELETE /:id -- remove an image record and its on-disk file.
// The database row is deleted first; a missing file is tolerated.
router.delete('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    // Fetch first so we know the filename to unlink afterwards.
    const { rows } = await pool.query('SELECT * FROM reusable_images WHERE id = $1', [id]);
    if (!rows.length) {
      return res.status(404).json({ error: 'Reusable image not found' });
    }
    const image = rows[0];
    await pool.query('DELETE FROM reusable_images WHERE id = $1', [id]);
    const filePath = path.join(uploadsDir, image.filename);
    if (fs.existsSync(filePath)) {
      fs.unlinkSync(filePath);
    }
    res.json({
      message: 'Reusable image deleted successfully',
      image
    });
  } catch (error) {
    console.error('Error deleting reusable image:', error);
    res.status(500).json({
      error: 'Failed to delete reusable image',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// GET /check-file/:filename -- diagnostics: report existence, readability,
// size, timestamps and permissions of an uploaded file.
router.get('/check-file/:filename', (req, res) => {
  const { filename } = req.params;
  // SECURITY FIX: also reject backslashes -- on Windows-style paths
  // `..\\` would slip past a check that only looks for `/`.
  if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
    return res.status(400).json({ error: 'Invalid filename' });
  }
  const filePath = path.join(uploadsDir, filename);
  try {
    if (!fs.existsSync(filePath)) {
      return res.status(404).json({
        error: 'File not found',
        path: filePath,
        exists: false,
        readable: false
      });
    }
    // Throws if the process lacks read permission.
    fs.accessSync(filePath, fs.constants.R_OK);
    const stats = fs.statSync(filePath);
    return res.json({
      filename,
      path: filePath,
      exists: true,
      readable: true,
      isFile: stats.isFile(),
      isDirectory: stats.isDirectory(),
      size: stats.size,
      created: stats.birthtime,
      modified: stats.mtime,
      permissions: stats.mode.toString(8) // octal, e.g. '100644'
    });
  } catch (error) {
    return res.status(500).json({
      error: error.message,
      path: filePath,
      exists: fs.existsSync(filePath),
      readable: false
    });
  }
});
// Catch-all error handler for this router: log and return a 500 with the
// error detail. (Four-argument signature is required by Express.)
router.use((err, req, res, next) => {
  console.error('Reusable images route error:', err);
  res.status(500).json({
    error: 'Internal server error',
    details: err.message
  });
});

module.exports = router;

View File

@@ -1,283 +0,0 @@
// Templates router: CRUD for per-company/per-product-type product templates.
const express = require('express');
const { getPool } = require('../utils/db');
const dotenv = require('dotenv');
const path = require('path');
// Load env from the project root (two levels above this file) so DB
// settings are available when this module is required standalone.
dotenv.config({ path: path.join(__dirname, "../../.env") });
const router = express.Router();
// GET / -- all templates, ordered by company then product type.
router.get('/', async (req, res) => {
  try {
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM templates
      ORDER BY company ASC, product_type ASC
    `);
    res.json(rows);
  } catch (error) {
    console.error('Error fetching templates:', error);
    res.status(500).json({
      error: 'Failed to fetch templates',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// GET /:company/:productType -- look up the single template for a
// (company, product type) pair; 404 when none exists.
router.get('/:company/:productType', async (req, res) => {
  try {
    const { company, productType } = req.params;
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(`
      SELECT * FROM templates
      WHERE company = $1 AND product_type = $2
    `, [company, productType]);
    if (!rows.length) {
      return res.status(404).json({ error: 'Template not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Error fetching template:', error);
    res.status(500).json({
      error: 'Failed to fetch template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// POST / -- create a template. `company` and `product_type` are required
// and must be unique as a pair (enforced by a DB constraint -> 409).
router.post('/', async (req, res) => {
  try {
    const {
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions
    } = req.body;
    if (!company || !product_type) {
      return res.status(400).json({ error: 'Company and Product Type are required' });
    }
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      INSERT INTO templates (
        company,
        product_type,
        supplier,
        msrp,
        cost_each,
        qty_per_unit,
        case_qty,
        hts_code,
        description,
        weight,
        length,
        width,
        height,
        tax_cat,
        size_cat,
        categories,
        ship_restrictions
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
      RETURNING *
    `, [
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions
    ]);
    res.status(201).json(result.rows[0]);
  } catch (error) {
    console.error('Error creating template:', error);
    // FIX: detect unique-constraint violations by the stable Postgres
    // error code 23505 rather than only by message substring (message
    // text varies with locale/driver); the substring check is kept as a
    // fallback.
    if (error && (error.code === '23505'
        || (error instanceof Error && error.message.includes('unique constraint')))) {
      return res.status(409).json({
        error: 'Template already exists for this company and product type',
        details: error.message
      });
    }
    res.status(500).json({
      error: 'Failed to create template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// PUT /:id -- full update of a template. `company` and `product_type`
// are required; a duplicate (company, product_type) pair yields 409.
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const {
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions
    } = req.body;
    if (!company || !product_type) {
      return res.status(400).json({ error: 'Company and Product Type are required' });
    }
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const result = await pool.query(`
      UPDATE templates
      SET
        company = $1,
        product_type = $2,
        supplier = $3,
        msrp = $4,
        cost_each = $5,
        qty_per_unit = $6,
        case_qty = $7,
        hts_code = $8,
        description = $9,
        weight = $10,
        length = $11,
        width = $12,
        height = $13,
        tax_cat = $14,
        size_cat = $15,
        categories = $16,
        ship_restrictions = $17
      WHERE id = $18
      RETURNING *
    `, [
      company,
      product_type,
      supplier,
      msrp,
      cost_each,
      qty_per_unit,
      case_qty,
      hts_code,
      description,
      weight,
      length,
      width,
      height,
      tax_cat,
      size_cat,
      categories,
      ship_restrictions,
      id
    ]);
    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Template not found' });
    }
    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error updating template:', error);
    // FIX: detect unique-constraint violations by the stable Postgres
    // error code 23505 rather than only by message substring; the
    // substring check is kept as a fallback.
    if (error && (error.code === '23505'
        || (error instanceof Error && error.message.includes('unique constraint')))) {
      return res.status(409).json({
        error: 'Template already exists for this company and product type',
        details: error.message
      });
    }
    res.status(500).json({
      error: 'Failed to update template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Delete template
// DELETE /:id - removes a single template row; responds 404 when the id is
// unknown, otherwise confirms the deletion.
router.delete('/:id', async (req, res) => {
  try {
    const pool = getPool();
    if (!pool) {
      throw new Error('Database pool not initialized');
    }
    const { rows } = await pool.query(
      'DELETE FROM templates WHERE id = $1 RETURNING *',
      [req.params.id]
    );
    if (rows.length === 0) {
      return res.status(404).json({ error: 'Template not found' });
    }
    res.json({ message: 'Template deleted successfully' });
  } catch (error) {
    console.error('Error deleting template:', error);
    res.status(500).json({
      error: 'Failed to delete template',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
// Error handling middleware
// Last-resort Express error handler for anything thrown inside the template
// routes above (identified by its four-argument signature).
router.use((error, req, res, next) => {
  console.error('Template route error:', error);
  res.status(500).json({
    error: 'Internal server error',
    details: error.message
  });
});

module.exports = router;

View File

@@ -1,323 +1 @@
const express = require('express');
const router = express.Router();
const { parseValue } = require('../utils/apiHelpers'); // Adjust path if needed
// --- Configuration & Helpers ---

// Pagination bounds for the list endpoint.
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200;

// Maps query keys to DB columns in vendor_metrics. Only keys present here
// may be used for sorting/filtering — this is the SQL-injection whitelist.
const COLUMN_MAP = {
  vendorName: { dbCol: 'vm.vendor_name', type: 'string' },
  productCount: { dbCol: 'vm.product_count', type: 'number' },
  activeProductCount: { dbCol: 'vm.active_product_count', type: 'number' },
  replenishableProductCount: { dbCol: 'vm.replenishable_product_count', type: 'number' },
  currentStockUnits: { dbCol: 'vm.current_stock_units', type: 'number' },
  currentStockCost: { dbCol: 'vm.current_stock_cost', type: 'number' },
  currentStockRetail: { dbCol: 'vm.current_stock_retail', type: 'number' },
  onOrderUnits: { dbCol: 'vm.on_order_units', type: 'number' },
  onOrderCost: { dbCol: 'vm.on_order_cost', type: 'number' },
  poCount365d: { dbCol: 'vm.po_count_365d', type: 'number' },
  avgLeadTimeDays: { dbCol: 'vm.avg_lead_time_days', type: 'number' },
  sales7d: { dbCol: 'vm.sales_7d', type: 'number' },
  revenue7d: { dbCol: 'vm.revenue_7d', type: 'number' },
  sales30d: { dbCol: 'vm.sales_30d', type: 'number' },
  revenue30d: { dbCol: 'vm.revenue_30d', type: 'number' },
  profit30d: { dbCol: 'vm.profit_30d', type: 'number' },
  cogs30d: { dbCol: 'vm.cogs_30d', type: 'number' },
  sales365d: { dbCol: 'vm.sales_365d', type: 'number' },
  revenue365d: { dbCol: 'vm.revenue_365d', type: 'number' },
  lifetimeSales: { dbCol: 'vm.lifetime_sales', type: 'number' },
  lifetimeRevenue: { dbCol: 'vm.lifetime_revenue', type: 'number' },
  avgMargin30d: { dbCol: 'vm.avg_margin_30d', type: 'number' },
  // Growth metrics
  salesGrowth30dVsPrev: { dbCol: 'vm.sales_growth_30d_vs_prev', type: 'number' },
  revenueGrowth30dVsPrev: { dbCol: 'vm.revenue_growth_30d_vs_prev', type: 'number' },
  // Aliases kept for frontend compatibility
  name: { dbCol: 'vm.vendor_name', type: 'string' },
  leadTime: { dbCol: 'vm.avg_lead_time_days', type: 'number' },
  // Derived status column (computed in the route's CASE expression)
  status: { dbCol: 'vendor_status', type: 'string' },
};

/**
 * Resolve a query-string key to its whitelisted column descriptor.
 * @param {string} queryParamKey - key from req.query (e.g. "sales30d")
 * @returns {{dbCol: string, type: string}|null} descriptor, or null when the
 *   key is not whitelisted
 */
function getSafeColumnInfo(queryParamKey) {
  return COLUMN_MAP[queryParamKey] ?? null;
}
// --- Route Handlers ---

// GET /vendors-aggregate/filter-options (Just vendors list for now)
// Returns the distinct vendor names plus the derived status values that the
// UI uses to populate its filter dropdowns.
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /vendors-aggregate/filter-options');
  try {
    // Distinct vendor names straight from the aggregate table.
    const vendorResult = await pool.query(`
      SELECT DISTINCT vendor_name FROM public.vendor_metrics ORDER BY vendor_name
    `);
    // Status is derived, not stored, so compute the distinct values here.
    const statusResult = await pool.query(`
      SELECT DISTINCT
        CASE
          WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
          WHEN po_count_365d > 0 THEN 'inactive'
          ELSE 'pending'
        END as status
      FROM public.vendor_metrics
      ORDER BY status
    `);
    res.json({
      vendors: vendorResult.rows.map((row) => row.vendor_name),
      statuses: statusResult.rows.map((row) => row.status)
    });
  } catch (error) {
    console.error('Error fetching vendor filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});
// GET /vendors-aggregate/stats (Overall vendor stats)
// Combines three queries: rollups from vendor_metrics, an "active vendor"
// count, and spend/avg-cost figures computed from purchase_orders.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /vendors-aggregate/stats');
  try {
    // Get basic vendor stats from aggregate table
    const { rows: [stats] } = await pool.query(`
      SELECT
        COUNT(*) AS total_vendors,
        SUM(active_product_count) AS total_active_products,
        SUM(current_stock_cost) AS total_stock_value,
        SUM(on_order_cost) AS total_on_order_value,
        AVG(NULLIF(avg_lead_time_days, 0)) AS overall_avg_lead_time
      FROM public.vendor_metrics vm
    `);
    // Count active vendors based on criteria (from old vendors.js):
    // a vendor counts as active when it had at least one PO in 365 days.
    const { rows: [activeStats] } = await pool.query(`
      SELECT
        COUNT(DISTINCT CASE
          WHEN po_count_365d > 0
          THEN vendor_name
        END) as active_vendors
      FROM public.vendor_metrics
    `);
    // Get overall cost metrics from purchase orders
    // (weighted average unit cost and total historical spend).
    const { rows: [overallCostMetrics] } = await pool.query(`
      SELECT
        ROUND((SUM(ordered * po_cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
        ROUND(SUM(ordered * po_cost_price)::numeric, 3) as total_spend
      FROM purchase_orders
      WHERE po_cost_price IS NOT NULL
        AND ordered > 0
        AND vendor IS NOT NULL AND vendor != ''
    `);
    // pg returns aggregates as strings; coerce to numbers with 0 fallbacks.
    res.json({
      totalVendors: parseInt(stats?.total_vendors || 0),
      activeVendors: parseInt(activeStats?.active_vendors || 0),
      totalActiveProducts: parseInt(stats?.total_active_products || 0),
      totalValue: parseFloat(stats?.total_stock_value || 0),
      totalOnOrderValue: parseFloat(stats?.total_on_order_value || 0),
      avgLeadTime: parseFloat(stats?.overall_avg_lead_time || 0),
      avgUnitCost: parseFloat(overallCostMetrics?.avg_unit_cost || 0),
      totalSpend: parseFloat(overallCostMetrics?.total_spend || 0)
    });
  } catch (error) {
    console.error('Error fetching vendor stats:', error);
    res.status(500).json({ error: 'Failed to fetch vendor stats.' });
  }
});
// GET /vendors-aggregate/ (List vendors)
// Supports pagination (page/limit), sorting (sort/order), and filtering via
// "<column>_<op>" query keys (eq, ne, gt, gte, lt, lte, like, ilike, between,
// in). Only keys whitelisted in COLUMN_MAP are honored; everything else is
// logged and ignored.
//
// BUG FIX: the previous version declared `needsParam` inside the `try` block
// but referenced it in the `catch`, so any parseValue() failure raised a
// ReferenceError instead of skipping the filter; the BETWEEN rollback also
// decremented paramCounter once after two increments. Values are now parsed
// BEFORE any $n placeholder is reserved, so a parse failure can never leave
// paramCounter/params out of sync and no rollback is needed.
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /vendors-aggregate received query:', req.query);
  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10) || 1;
    let limit = parseInt(req.query.limit, 10) || DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT);
    const offset = (page - 1) * limit;

    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'vendorName'; // Default sort
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
    const sortColumn = sortColumnInfo ? sortColumnInfo.dbCol : 'vm.vendor_name';
    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
    const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST');
    const sortClause = `ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}`;

    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;

    // Translation table for comparison-operator suffixes.
    const COMPARISON_OPS = { eq: '=', ne: '<>', gt: '>', gte: '>=', lt: '<', lte: '<=' };

    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order'].includes(key)) continue;

      let filterKey = key;
      let operator = '='; // Default operator when no _op suffix is present
      const value = req.query[key];

      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1];
        operator = operatorMatch[2];
      }

      const columnInfo = getSafeColumnInfo(filterKey);
      if (!columnInfo) {
        console.warn(`Invalid filter key ignored: ${key}`);
        continue;
      }

      const dbColumn = columnInfo.dbCol;
      const valueType = columnInfo.type;

      try {
        let conditionFragment = '';
        const op = operator.toLowerCase();

        if (COMPARISON_OPS[op]) {
          const parsed = parseValue(value, valueType);
          conditionFragment = `${dbColumn} ${COMPARISON_OPS[op]} $${paramCounter++}`;
          params.push(parsed);
        } else if (op === 'like' || op === 'ilike') {
          // Wrap in % wildcards for substring matching.
          const parsed = `%${parseValue(value, valueType)}%`;
          conditionFragment = `${dbColumn} ${op.toUpperCase()} $${paramCounter++}`;
          params.push(parsed);
        } else if (op === 'between') {
          const [val1, val2] = String(value).split(',');
          if (val1 === undefined || val2 === undefined) continue;
          const parsedLow = parseValue(val1, valueType);
          const parsedHigh = parseValue(val2, valueType);
          conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
          params.push(parsedLow, parsedHigh);
        } else if (op === 'in') {
          const inValues = String(value).split(',');
          if (inValues.length === 0) continue;
          const parsedValues = inValues.map(v => parseValue(v, valueType));
          const placeholders = parsedValues.map(() => `$${paramCounter++}`).join(', ');
          conditionFragment = `${dbColumn} IN (${placeholders})`;
          params.push(...parsedValues);
        } else {
          // Fall back to equality for anything unrecognized (matches the
          // original behavior for the bare "=" operator).
          const parsed = parseValue(value, valueType);
          conditionFragment = `${dbColumn} = $${paramCounter++}`;
          params.push(parsed);
        }

        if (conditionFragment) {
          conditions.push(`(${conditionFragment})`);
        }
      } catch (parseError) {
        // parseValue threw before any placeholder was reserved, so state is
        // still consistent — just skip this filter.
        console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
      }
    }

    // --- Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Status calculation from vendors.js
    const statusCase = `
      CASE
        WHEN po_count_365d > 0 AND sales_30d > 0 THEN 'active'
        WHEN po_count_365d > 0 THEN 'inactive'
        ELSE 'pending'
      END as vendor_status
    `;

    const baseSql = `
      FROM (
        SELECT
          vm.*,
          ${statusCase}
        FROM public.vendor_metrics vm
      ) vm
      ${whereClause}
    `;
    const countSql = `SELECT COUNT(*) AS total ${baseSql}`;

    // Data query joins per-vendor PO cost rollups onto the metrics rows.
    const dataSql = `
      WITH vendor_data AS (
        SELECT
          vm.*,
          ${statusCase}
        FROM public.vendor_metrics vm
      )
      SELECT
        vm.*,
        COALESCE(po.avg_unit_cost, 0) as avg_unit_cost,
        COALESCE(po.total_spend, 0) as total_spend
      FROM vendor_data vm
      LEFT JOIN (
        SELECT
          vendor,
          ROUND((SUM(ordered * po_cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
          ROUND(SUM(ordered * po_cost_price)::numeric, 3) as total_spend
        FROM purchase_orders
        WHERE po_cost_price IS NOT NULL AND ordered > 0
        GROUP BY vendor
      ) po ON vm.vendor_name = po.vendor
      ${whereClause}
      ${sortClause}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];
    console.log("Count SQL:", countSql, params);
    console.log("Data SQL:", dataSql, dataParams);

    const [countResult, dataResult] = await Promise.all([
      pool.query(countSql, params),
      pool.query(dataSql, dataParams)
    ]);
    const total = parseInt(countResult.rows[0].total, 10);

    // Expose both snake_case and camelCase keys so older and newer frontend
    // code can read the same payload.
    const vendors = dataResult.rows.map(row => {
      const transformedRow = { ...row }; // Start with original data
      for (const key in row) {
        // Skip null/undefined values — the original key already carries them.
        if (row[key] === null || row[key] === undefined) {
          continue;
        }
        // First collapse numeric-suffix separators (sales_7d -> sales7d),
        // then regular snake_case -> camelCase.
        let camelKey = key.replace(/_(\d+[a-z])/g, '$1');
        camelKey = camelKey.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
        if (camelKey !== key) { // Only add if different from original
          transformedRow[camelKey] = row[key];
        }
      }
      return transformedRow;
    });

    // --- Respond ---
    res.json({
      vendors,
      pagination: { total, pages: Math.ceil(total / limit), currentPage: page, limit },
    });
  } catch (error) {
    console.error('Error fetching vendor metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch vendor metrics.' });
  }
});
// GET /vendors-aggregate/:name (Get single vendor metric)
// Implement if needed, remember to URL-decode the name parameter
module.exports = router;

View File

@@ -1,231 +0,0 @@
const express = require('express');
const cors = require('cors');
const { spawn } = require('child_process');
const path = require('path');
const fs = require('fs');
const { corsMiddleware, corsErrorHandler } = require('./middleware/cors');
const { initPool } = require('./utils/db');
const productsRouter = require('./routes/products');
const dashboardRouter = require('./routes/dashboard');
const ordersRouter = require('./routes/orders');
const csvRouter = require('./routes/data-management');
const analyticsRouter = require('./routes/analytics');
const purchaseOrdersRouter = require('./routes/purchase-orders');
const configRouter = require('./routes/config');
const metricsRouter = require('./routes/metrics');
const importRouter = require('./routes/import');
const aiValidationRouter = require('./routes/ai-validation');
const aiRouter = require('./routes/ai');
const templatesRouter = require('./routes/templates');
const aiPromptsRouter = require('./routes/ai-prompts');
const reusableImagesRouter = require('./routes/reusable-images');
const categoriesAggregateRouter = require('./routes/categoriesAggregate');
const vendorsAggregateRouter = require('./routes/vendorsAggregate');
const brandsAggregateRouter = require('./routes/brandsAggregate');
const htsLookupRouter = require('./routes/hts-lookup');
const importSessionsRouter = require('./routes/import-sessions');
// Get the absolute path to the .env file
// NOTE(review): hard-coded to the production deployment path — confirm this
// is intended when running outside that environment (dotenv will simply find
// no file and leave process.env untouched).
const envPath = '/var/www/html/inventory/.env';
console.log('Looking for .env file at:', envPath);
console.log('.env file exists:', fs.existsSync(envPath));
try {
  require('dotenv').config({ path: envPath });
  console.log('.env file loaded successfully');
  // Log which settings were picked up, masking the password.
  console.log('Environment check:', {
    NODE_ENV: process.env.NODE_ENV || 'not set',
    PORT: process.env.PORT || 'not set',
    DB_HOST: process.env.DB_HOST || 'not set',
    DB_USER: process.env.DB_USER || 'not set',
    DB_NAME: process.env.DB_NAME || 'not set',
    DB_PASSWORD: process.env.DB_PASSWORD ? '[password set]' : 'not set',
    DB_PORT: process.env.DB_PORT || 'not set',
    DB_SSL: process.env.DB_SSL || 'not set'
  });
} catch (error) {
  console.error('Error loading .env file:', error);
}
// Resolve important directories relative to the project root
const serverRoot = path.resolve(__dirname, '..');
const configuredUploadsDir = process.env.UPLOADS_DIR;
// UPLOADS_DIR may be absolute or relative to the server root; default to
// <serverRoot>/uploads when unset.
const uploadsDir = configuredUploadsDir
  ? (path.isAbsolute(configuredUploadsDir)
    ? configuredUploadsDir
    : path.resolve(serverRoot, configuredUploadsDir))
  : path.resolve(serverRoot, 'uploads');
// Persist the resolved uploads directory so downstream modules share the same path
process.env.UPLOADS_DIR = uploadsDir;
// Ensure the logs and uploads directories exist before anything writes there.
const requiredDirs = [path.resolve(serverRoot, 'logs'), uploadsDir];
requiredDirs.forEach(dir => {
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
});
const app = express();
// Debug middleware to log request details
// NOTE(review): logs full headers on every request — may expose auth tokens
// in logs; consider gating on NODE_ENV.
app.use((req, res, next) => {
  console.log('Request details:', {
    method: req.method,
    url: req.url,
    origin: req.get('Origin'),
    headers: req.headers
  });
  next();
});
// Apply CORS middleware first, before any other middleware
app.use(corsMiddleware);
// Body parser middleware (10mb cap to allow large import payloads)
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
// Initialize database pool and start server
// Creates the PG pool, mounts all routers (which expect app.locals.pool to
// exist), installs error handlers last, then begins listening. Exits the
// process if the pool cannot be created.
async function startServer() {
  try {
    // Initialize database pool
    const pool = await initPool({
      host: process.env.DB_HOST,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME,
      port: process.env.DB_PORT || 5432,
      max: process.env.NODE_ENV === 'production' ? 20 : 10,
      idleTimeoutMillis: 30000,
      connectionTimeoutMillis: 2000,
      ssl: process.env.DB_SSL === 'true' ? {
        rejectUnauthorized: false
      } : false
    });
    // Make pool available to routes
    app.locals.pool = pool;
    // Set up routes after pool is initialized
    app.use('/api/products', productsRouter);
    app.use('/api/dashboard', dashboardRouter);
    app.use('/api/orders', ordersRouter);
    app.use('/api/csv', csvRouter);
    app.use('/api/analytics', analyticsRouter);
    app.use('/api/purchase-orders', purchaseOrdersRouter);
    app.use('/api/config', configRouter);
    app.use('/api/metrics', metricsRouter);
    // Use only the aggregate routes for vendors and categories
    app.use('/api/vendors', vendorsAggregateRouter);
    app.use('/api/categories', categoriesAggregateRouter);
    // Keep the aggregate-specific endpoints for backward compatibility
    app.use('/api/categories-aggregate', categoriesAggregateRouter);
    app.use('/api/vendors-aggregate', vendorsAggregateRouter);
    app.use('/api/brands-aggregate', brandsAggregateRouter);
    app.use('/api/import', importRouter);
    app.use('/api/ai-validation', aiValidationRouter);
    app.use('/api/ai', aiRouter);
    app.use('/api/templates', templatesRouter);
    app.use('/api/ai-prompts', aiPromptsRouter);
    app.use('/api/reusable-images', reusableImagesRouter);
    app.use('/api/hts-lookup', htsLookupRouter);
    app.use('/api/import-sessions', importSessionsRouter);
    // Basic health check route
    app.get('/health', (req, res) => {
      res.json({
        status: 'ok',
        timestamp: new Date().toISOString(),
        environment: process.env.NODE_ENV
      });
    });
    // CORS error handler - must be before other error handlers
    app.use(corsErrorHandler);
    // Error handling middleware - MUST be after routes and CORS error handler
    app.use((err, req, res, next) => {
      console.error(`[${new Date().toISOString()}] Error:`, err);
      // Send detailed error in development, generic in production
      const error = process.env.NODE_ENV === 'production'
        ? 'An internal server error occurred'
        : err.message || err;
      res.status(err.status || 500).json({ error });
    });
    const PORT = process.env.PORT || 3000;
    app.listen(PORT, () => {
      console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
    });
  } catch (error) {
    console.error('Failed to start server:', error);
    process.exit(1);
  }
}
// Handle uncaught exceptions — log with a timestamp and terminate, since
// process state is undefined after an uncaught throw.
process.on('uncaughtException', (err) => {
  console.error(`[${new Date().toISOString()}] Uncaught Exception:`, err);
  process.exit(1);
});
// Unhandled promise rejections are logged but do not kill the process.
process.on('unhandledRejection', (reason, promise) => {
  console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
});
// Initialize client sets for SSE
// One subscriber set per progress channel; entries are Express response
// objects added/removed by setupSSE below.
const importClients = new Set();
const updateClients = new Set();
const resetClients = new Set();
const resetMetricsClients = new Set();
// Helper function to send progress to SSE clients
// Broadcasts one SSE "data:" frame (JSON-serialized `data`) to every client
// in the set; a client whose socket write fails is logged and skipped so the
// remaining clients still receive the update.
const sendProgressToClients = (clients, data) => {
  const frame = `data: ${JSON.stringify(data)}\n\n`;
  for (const client of clients) {
    try {
      client.write(frame);
    } catch (error) {
      console.error('Error sending SSE update:', error);
    }
  }
};
// Setup SSE connection
// Registers the response as a long-lived event-stream subscriber for the
// progress channel named by req.params.type ('import', 'update', 'reset',
// 'reset-metrics'); unknown types get the headers and hello frame but are
// never registered. Subscribers are deregistered when the socket closes.
const setupSSE = (req, res) => {
  const { type } = req.params;
  // Set headers for SSE
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Credentials': 'true'
  });
  // Send initial message
  res.write('data: {"status":"connected"}\n\n');
  // Channel name -> subscriber set lookup (hasOwn guard avoids resolving
  // prototype properties for hostile type strings).
  const channelSets = {
    'import': importClients,
    'update': updateClients,
    'reset': resetClients,
    'reset-metrics': resetMetricsClients
  };
  const clientSet = Object.hasOwn(channelSets, type) ? channelSets[type] : null;
  if (clientSet) {
    clientSet.add(res);
    // Remove client when connection closes
    req.on('close', () => {
      clientSet.delete(res);
    });
  }
};
// Start the server
startServer();

View File

@@ -1,82 +0,0 @@
/**
* Vector similarity utilities
*/
/**
 * Compute cosine similarity between two vectors.
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number} Similarity score between -1 and 1; 0 when either vector
 *   is missing, lengths differ, or either vector has zero magnitude.
 */
function cosineSimilarity(a, b) {
  if (!a || !b || a.length !== b.length) {
    return 0;
  }
  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    sumSqA += a[i] * a[i];
    sumSqB += b[i] * b[i];
  }
  const magnitude = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
  return magnitude === 0 ? 0 : dot / magnitude;
}

/**
 * Find top K most similar items from a collection.
 * @param {number[]} queryEmbedding - The embedding to search for
 * @param {Array<{id: any, embedding: number[]}>} items - Items with embeddings
 * @param {number} topK - Number of results to return
 * @returns {Array<{id: any, similarity: number}>} sorted by similarity, descending
 */
function findTopMatches(queryEmbedding, items, topK = 10) {
  if (!queryEmbedding || !items || items.length === 0) {
    return [];
  }
  return items
    .map((item) => ({
      id: item.id,
      similarity: cosineSimilarity(queryEmbedding, item.embedding)
    }))
    .sort((x, y) => y.similarity - x.similarity)
    .slice(0, topK);
}

/**
 * Find matches above a similarity threshold.
 * @param {number[]} queryEmbedding
 * @param {Array<{id: any, embedding: number[]}>} items
 * @param {number} threshold - Minimum similarity (0-1), inclusive
 * @returns {Array<{id: any, similarity: number}>} sorted by similarity, descending
 */
function findMatchesAboveThreshold(queryEmbedding, items, threshold = 0.5) {
  if (!queryEmbedding || !items || items.length === 0) {
    return [];
  }
  return items
    .map((item) => ({
      id: item.id,
      similarity: cosineSimilarity(queryEmbedding, item.embedding)
    }))
    .filter((match) => match.similarity >= threshold)
    .sort((x, y) => y.similarity - x.similarity);
}
module.exports = {
cosineSimilarity,
findTopMatches,
findMatchesAboveThreshold
};

View File

@@ -1,323 +0,0 @@
/**
* Taxonomy Embedding Service
*
* Generates and caches embeddings for categories, themes, and colors.
* Excludes "Black Friday", "Gifts", "Deals" categories and their children.
*/
const { findTopMatches } = require('./similarity');
// Categories to exclude (and all their children)
const EXCLUDED_CATEGORY_NAMES = ['black friday', 'gifts', 'deals'];
class TaxonomyEmbeddings {
  /**
   * @param {Object} opts
   * @param {Object} opts.provider - embedding provider exposing embedBatchChunked()
   * @param {Object} [opts.logger] - logger with info/error; defaults to console
   */
  constructor({ provider, logger }) {
    this.provider = provider;
    this.logger = logger || console;
    // Cached taxonomy with embeddings
    this.categories = [];
    this.themes = [];
    this.colors = [];
    // Raw data without embeddings (for lookup)
    this.categoryMap = new Map();
    this.themeMap = new Map();
    this.colorMap = new Map();
    this.initialized = false;
    this.initializing = false;
  }
  /**
   * Initialize embeddings - fetch taxonomy and generate embeddings.
   * Idempotent: concurrent callers poll until the first caller finishes.
   * @param {Object} connection - MySQL connection used to read the taxonomy
   * @returns {Promise<{categories: number, themes: number, colors: number}>} counts
   */
  async initialize(connection) {
    if (this.initialized) {
      return { categories: this.categories.length, themes: this.themes.length, colors: this.colors.length };
    }
    if (this.initializing) {
      // Wait for existing initialization (100ms poll loop; single-process only)
      while (this.initializing) {
        await new Promise(resolve => setTimeout(resolve, 100));
      }
      return { categories: this.categories.length, themes: this.themes.length, colors: this.colors.length };
    }
    this.initializing = true;
    try {
      this.logger.info('[TaxonomyEmbeddings] Starting initialization...');
      // Fetch raw taxonomy data
      const [categories, themes, colors] = await Promise.all([
        this._fetchCategories(connection),
        this._fetchThemes(connection),
        this._fetchColors(connection)
      ]);
      this.logger.info(`[TaxonomyEmbeddings] Fetched ${categories.length} categories, ${themes.length} themes, ${colors.length} colors`);
      // Generate embeddings in parallel
      const [catEmbeddings, themeEmbeddings, colorEmbeddings] = await Promise.all([
        this._generateEmbeddings(categories, 'categories'),
        this._generateEmbeddings(themes, 'themes'),
        this._generateEmbeddings(colors, 'colors')
      ]);
      // Store with embeddings
      this.categories = catEmbeddings;
      this.themes = themeEmbeddings;
      this.colors = colorEmbeddings;
      // Build lookup maps (id -> item) for fast match resolution
      this.categoryMap = new Map(this.categories.map(c => [c.id, c]));
      this.themeMap = new Map(this.themes.map(t => [t.id, t]));
      this.colorMap = new Map(this.colors.map(c => [c.id, c]));
      this.initialized = true;
      this.logger.info('[TaxonomyEmbeddings] Initialization complete');
      return {
        categories: this.categories.length,
        themes: this.themes.length,
        colors: this.colors.length
      };
    } catch (error) {
      this.logger.error('[TaxonomyEmbeddings] Initialization failed:', error);
      throw error;
    } finally {
      this.initializing = false;
    }
  }
  /**
   * Find similar categories for a product embedding.
   * @returns {Array<{id, name, fullPath, similarity}>} empty when uninitialized
   */
  findSimilarCategories(productEmbedding, topK = 10) {
    if (!this.initialized || !productEmbedding) {
      return [];
    }
    const matches = findTopMatches(productEmbedding, this.categories, topK);
    return matches.map(match => {
      const cat = this.categoryMap.get(match.id);
      return {
        id: match.id,
        name: cat?.name || '',
        fullPath: cat?.fullPath || '',
        similarity: match.similarity
      };
    });
  }
  /**
   * Find similar themes for a product embedding.
   * @returns {Array<{id, name, fullPath, similarity}>} empty when uninitialized
   */
  findSimilarThemes(productEmbedding, topK = 5) {
    if (!this.initialized || !productEmbedding) {
      return [];
    }
    const matches = findTopMatches(productEmbedding, this.themes, topK);
    return matches.map(match => {
      const theme = this.themeMap.get(match.id);
      return {
        id: match.id,
        name: theme?.name || '',
        fullPath: theme?.fullPath || '',
        similarity: match.similarity
      };
    });
  }
  /**
   * Find similar colors for a product embedding.
   * @returns {Array<{id, name, similarity}>} empty when uninitialized
   */
  findSimilarColors(productEmbedding, topK = 5) {
    if (!this.initialized || !productEmbedding) {
      return [];
    }
    const matches = findTopMatches(productEmbedding, this.colors, topK);
    return matches.map(match => {
      const color = this.colorMap.get(match.id);
      return {
        id: match.id,
        name: color?.name || '',
        similarity: match.similarity
      };
    });
  }
  /**
   * Get all taxonomy data (without embeddings) for frontend
   */
  getTaxonomyData() {
    return {
      categories: this.categories.map(({ id, name, fullPath, parentId }) => ({ id, name, fullPath, parentId })),
      themes: this.themes.map(({ id, name, fullPath, parentId }) => ({ id, name, fullPath, parentId })),
      colors: this.colors.map(({ id, name }) => ({ id, name }))
    };
  }
  /**
   * Check if service is ready
   */
  isReady() {
    return this.initialized;
  }
  // ============================================================================
  // Private Methods
  // ============================================================================
  /**
   * Fetch hierarchical categories (types 10-13), excluding the
   * EXCLUDED_CATEGORY_NAMES roots and all of their descendants.
   */
  async _fetchCategories(connection) {
    // Fetch hierarchical categories (types 10-13)
    const [rows] = await connection.query(`
      SELECT cat_id, name, master_cat_id, type
      FROM product_categories
      WHERE type IN (10, 11, 12, 13)
      ORDER BY type, name
    `);
    // Build lookup for hierarchy
    const byId = new Map(rows.map(r => [r.cat_id, r]));
    // Find IDs of excluded top-level categories and all their descendants
    const excludedIds = new Set();
    // First pass: find excluded top-level categories
    for (const row of rows) {
      if (row.type === 10 && EXCLUDED_CATEGORY_NAMES.includes(row.name.toLowerCase())) {
        excludedIds.add(row.cat_id);
      }
    }
    // Multiple passes to find all descendants (fixed-point iteration; no
    // assumption about row ordering relative to tree depth)
    let foundNew = true;
    while (foundNew) {
      foundNew = false;
      for (const row of rows) {
        if (!excludedIds.has(row.cat_id) && excludedIds.has(row.master_cat_id)) {
          excludedIds.add(row.cat_id);
          foundNew = true;
        }
      }
    }
    this.logger.info(`[TaxonomyEmbeddings] Excluding ${excludedIds.size} categories (Black Friday, Gifts, Deals and children)`);
    // Build category objects with full paths, excluding filtered ones
    const categories = [];
    for (const row of rows) {
      if (excludedIds.has(row.cat_id)) {
        continue;
      }
      const path = [];
      let current = row;
      // Walk up the tree to build full path (root first)
      while (current) {
        path.unshift(current.name);
        current = current.master_cat_id ? byId.get(current.master_cat_id) : null;
      }
      categories.push({
        id: row.cat_id,
        name: row.name,
        parentId: row.master_cat_id,
        type: row.type,
        fullPath: path.join(' > '),
        embeddingText: path.join(' ')
      });
    }
    return categories;
  }
  /**
   * Fetch themes (types 20-21) with full ancestor paths; no exclusions.
   */
  async _fetchThemes(connection) {
    // Fetch themes (types 20-21)
    const [rows] = await connection.query(`
      SELECT cat_id, name, master_cat_id, type
      FROM product_categories
      WHERE type IN (20, 21)
      ORDER BY type, name
    `);
    const byId = new Map(rows.map(r => [r.cat_id, r]));
    const themes = [];
    for (const row of rows) {
      const path = [];
      let current = row;
      while (current) {
        path.unshift(current.name);
        current = current.master_cat_id ? byId.get(current.master_cat_id) : null;
      }
      themes.push({
        id: row.cat_id,
        name: row.name,
        parentId: row.master_cat_id,
        type: row.type,
        fullPath: path.join(' > '),
        embeddingText: path.join(' ')
      });
    }
    return themes;
  }
  /**
   * Fetch the flat color list; the color name alone is the embedding text.
   */
  async _fetchColors(connection) {
    const [rows] = await connection.query(`
      SELECT color, name, hex_color
      FROM product_color_list
      ORDER BY \`order\`
    `);
    return rows.map(row => ({
      id: row.color,
      name: row.name,
      hexColor: row.hex_color,
      embeddingText: row.name
    }));
  }
  /**
   * Generate embeddings for each item's embeddingText via the provider's
   * chunked batch API, attaching an `embedding` field to a copy of each item.
   * @param {Array<{embeddingText: string}>} items
   * @param {string} label - used only for log messages
   */
  async _generateEmbeddings(items, label) {
    if (items.length === 0) {
      return items;
    }
    const startTime = Date.now();
    const texts = items.map(item => item.embeddingText);
    const results = [...items];
    // Process in batches; chunk.startIndex maps batch-local indices back to
    // global item positions.
    let batchNum = 0;
    for await (const chunk of this.provider.embedBatchChunked(texts, { batchSize: 100 })) {
      batchNum++;
      for (let i = 0; i < chunk.embeddings.length; i++) {
        const globalIndex = chunk.startIndex + i;
        results[globalIndex] = {
          ...results[globalIndex],
          embedding: chunk.embeddings[i]
        };
      }
    }
    const elapsed = Date.now() - startTime;
    this.logger.info(`[TaxonomyEmbeddings] Generated ${items.length} ${label} embeddings in ${elapsed}ms`);
    return results;
  }
}
module.exports = { TaxonomyEmbeddings };

View File

@@ -1,386 +0,0 @@
/**
* AI Service
*
* Main entry point for AI functionality including:
* - Embeddings for taxonomy suggestions (OpenAI)
* - Chat completions for validation tasks (Groq)
* - Task registry for AI operations
*/
const { OpenAIProvider } = require('./providers/openaiProvider');
const { GroqProvider, MODELS: GROQ_MODELS } = require('./providers/groqProvider');
const { TaxonomyEmbeddings } = require('./embeddings/taxonomyEmbeddings');
const { cosineSimilarity, findTopMatches } = require('./embeddings/similarity');
const { getRegistry, TASK_IDS, registerAllTasks } = require('./tasks');
// Module-level singleton state for the AI service; mutated only by
// initialize() below.
let initialized = false;
let initializing = false;
let openaiProvider = null;
let groqProvider = null;
let taxonomyEmbeddings = null;
let logger = console;
// Store pool reference for task access
let appPool = null;
/**
 * Initialize the AI service.
 * Idempotent: repeat calls return immediately; concurrent calls poll until
 * the first caller finishes. On failure this resolves with
 * {success: false} rather than rejecting.
 * @param {Object} options
 * @param {string} options.openaiApiKey - OpenAI API key (for embeddings; required)
 * @param {string} [options.groqApiKey] - Groq API key (for chat completions; optional)
 * @param {Object} options.mysqlConnection - MySQL connection for taxonomy data
 * @param {Object} [options.pool] - PostgreSQL pool for prompt loading
 * @param {Object} [options.logger] - Logger instance (defaults to console)
 * @returns {Promise<{success: boolean, message: string, stats?: Object, groqEnabled?: boolean}>}
 */
async function initialize({ openaiApiKey, groqApiKey, mysqlConnection, pool, logger: customLogger }) {
  if (initialized) {
    return { success: true, message: 'Already initialized' };
  }
  if (initializing) {
    // Wait for existing initialization (100ms poll; single-process only)
    while (initializing) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
    return { success: initialized, message: initialized ? 'Initialized' : 'Initialization failed' };
  }
  initializing = true;
  try {
    if (customLogger) {
      logger = customLogger;
    }
    if (!openaiApiKey) {
      throw new Error('OpenAI API key is required');
    }
    logger.info('[AI] Initializing AI service...');
    // Store pool reference for tasks
    if (pool) {
      appPool = pool;
    }
    // Create OpenAI provider (for embeddings)
    openaiProvider = new OpenAIProvider({ apiKey: openaiApiKey });
    // Create Groq provider (for chat completions) if API key provided
    if (groqApiKey) {
      groqProvider = new GroqProvider({ apiKey: groqApiKey });
      logger.info('[AI] Groq provider initialized for chat completions');
    } else {
      logger.warn('[AI] No Groq API key provided - chat completion tasks will not be available');
    }
    // Create and initialize taxonomy embeddings (fetches taxonomy rows and
    // generates embeddings up front — this is the slow part of startup)
    taxonomyEmbeddings = new TaxonomyEmbeddings({
      provider: openaiProvider,
      logger
    });
    const stats = await taxonomyEmbeddings.initialize(mysqlConnection);
    // Register validation tasks if Groq is available
    if (groqProvider) {
      registerValidationTasks();
    }
    initialized = true;
    logger.info('[AI] AI service initialized', {
      ...stats,
      groqEnabled: !!groqProvider,
      tasksRegistered: getRegistry().list()
    });
    return {
      success: true,
      message: 'Initialized',
      stats,
      groqEnabled: !!groqProvider
    };
  } catch (error) {
    logger.error('[AI] Initialization failed:', error);
    return { success: false, message: error.message };
  } finally {
    initializing = false;
  }
}
/**
 * Register validation tasks with the task registry.
 * Called during initialization only when the Groq provider is available,
 * since the validation tasks rely on chat completions.
 */
function registerValidationTasks() {
  registerAllTasks(logger);
  logger.info('[AI] Validation tasks registered');
}
/**
 * Whether the service is fully initialized and taxonomy embeddings are ready.
 * Mirrors the original truthiness contract: the result may be `undefined`
 * (falsy) when the taxonomy embeddings instance is absent.
 */
function isReady() {
  if (!initialized) {
    return false;
  }
  return taxonomyEmbeddings?.isReady();
}
/**
 * Build weighted product text for embedding.
 * The product name is repeated to weight it heavily, and long descriptions
 * are truncated so verbose marketing copy does not drown out the signal.
 *
 * @param {Object} product - Product with name, description, company, line
 * @returns {string} Combined text for embedding ('' when no fields present)
 */
function buildProductText(product) {
  const name = product.name?.trim();
  const company = (product.company_name || product.company)?.trim();
  const line = (product.line_name || product.line)?.trim();
  const description = product.description?.trim();

  const pieces = [];
  // Repeat the name 3x so it dominates the embedding.
  if (name) {
    pieces.push(name, name, name);
  }
  // Company and line provide context.
  if (company) {
    pieces.push(company);
  }
  if (line) {
    pieces.push(line);
  }
  // Cap the description at 500 chars to keep it from overwhelming the signal.
  if (description) {
    pieces.push(description.length > 500 ? `${description.substring(0, 500)}...` : description);
  }
  return pieces.join(' ').trim();
}
/**
 * Generate an embedding vector for a single product.
 * @param {Object} product - Product with name, description, company, line
 * @returns {Promise<{embedding: number[]|null, latencyMs: number}>}
 *   `embedding` is null when the product has no usable text.
 * @throws {Error} If the service has not been initialized.
 */
async function getProductEmbedding(product) {
  if (!initialized || !openaiProvider) {
    throw new Error('AI service not initialized');
  }
  const text = buildProductText(product);
  // Nothing to embed (no name/company/line/description).
  if (!text) {
    return { embedding: null, latencyMs: 0 };
  }
  const { embeddings, latencyMs } = await openaiProvider.embed(text);
  return { embedding: embeddings[0], latencyMs };
}
/**
 * Generate embeddings for multiple products in a single provider call.
 * Products that yield no text are skipped; each returned entry carries the
 * product's original index so callers can map results back.
 *
 * @param {Object[]} products - Array of products
 * @returns {Promise<{embeddings: Array<{index: number, embedding: number[]}>, latencyMs: number}>}
 * @throws {Error} If the service has not been initialized.
 */
async function getProductEmbeddings(products) {
  if (!initialized || !openaiProvider) {
    throw new Error('AI service not initialized');
  }
  // Pair each product's text with its original position, then drop empties.
  const indexed = products
    .map((p, index) => ({ index, text: buildProductText(p) }))
    .filter(entry => entry.text);
  if (indexed.length === 0) {
    return { embeddings: [], latencyMs: 0 };
  }
  const result = await openaiProvider.embed(indexed.map(entry => entry.text));
  // Result order matches request order, so position i maps to indexed[i].
  return {
    embeddings: indexed.map((entry, i) => ({
      index: entry.index,
      embedding: result.embeddings[i]
    })),
    latencyMs: result.latencyMs
  };
}
/**
 * Find similar taxonomy items for a product embedding.
 * @param {number[]} productEmbedding
 * @param {Object} options - Optional result-count overrides
 *   (topCategories, topThemes, topColors); nullish values fall back to defaults.
 * @returns {{categories: Array, themes: Array, colors: Array}}
 * @throws {Error} If the service has not been initialized.
 */
function findSimilarTaxonomy(productEmbedding, options = {}) {
  if (!initialized || !taxonomyEmbeddings) {
    throw new Error('AI service not initialized');
  }
  // Nullish coalescing (not ||) so an explicit 0 is respected.
  const limits = {
    categories: options.topCategories ?? 10,
    themes: options.topThemes ?? 5,
    colors: options.topColors ?? 5
  };
  return {
    categories: taxonomyEmbeddings.findSimilarCategories(productEmbedding, limits.categories),
    themes: taxonomyEmbeddings.findSimilarThemes(productEmbedding, limits.themes),
    colors: taxonomyEmbeddings.findSimilarColors(productEmbedding, limits.colors)
  };
}
/**
 * Get a product embedding and find similar taxonomy items in one call.
 * @param {Object} product
 * @param {Object} options - Passed through to findSimilarTaxonomy
 * @returns {Promise<Object>} Taxonomy suggestions plus latency breakdown
 *   (embedding, search, and combined milliseconds).
 */
async function getSuggestionsForProduct(product, options = {}) {
  const { embedding, latencyMs: embeddingLatencyMs } = await getProductEmbedding(product);
  // No embeddable text: return empty suggestions rather than erroring.
  if (!embedding) {
    return {
      categories: [],
      themes: [],
      colors: [],
      latencyMs: embeddingLatencyMs
    };
  }
  const searchStarted = Date.now();
  const matches = findSimilarTaxonomy(embedding, options);
  const searchLatencyMs = Date.now() - searchStarted;
  return {
    ...matches,
    latencyMs: embeddingLatencyMs + searchLatencyMs,
    embeddingLatencyMs,
    searchLatencyMs
  };
}
/**
 * Get all taxonomy data (without embeddings) for the frontend.
 * @returns {Object} Whatever TaxonomyEmbeddings.getTaxonomyData() provides.
 * @throws {Error} If the service has not been initialized.
 */
function getTaxonomyData() {
  if (!initialized || !taxonomyEmbeddings) {
    throw new Error('AI service not initialized');
  }
  return taxonomyEmbeddings.getTaxonomyData();
}
/**
 * Get service status: initialization flags, provider availability,
 * taxonomy counts, and the list of registered tasks.
 * Safe to call before initialize().
 */
function getStatus() {
  const registry = getRegistry();
  // Counts default to 0 when a taxonomy list is missing.
  const taxonomyStats = taxonomyEmbeddings
    ? {
        categories: taxonomyEmbeddings.categories?.length || 0,
        themes: taxonomyEmbeddings.themes?.length || 0,
        colors: taxonomyEmbeddings.colors?.length || 0
      }
    : null;
  return {
    initialized,
    ready: isReady(),
    hasOpenAI: !!openaiProvider,
    hasGroq: !!groqProvider,
    hasTaxonomy: !!taxonomyEmbeddings,
    taxonomyStats,
    tasks: {
      registered: registry.list(),
      count: registry.size()
    }
  };
}
/**
 * Run an AI task by ID.
 * @param {string} taskId - Task identifier from TASK_IDS
 * @param {Object} payload - Task-specific input; may carry its own `pool`
 * @returns {Promise<Object>} Task result
 * @throws {Error} If the service is uninitialized or Groq is unavailable.
 */
async function runTask(taskId, payload = {}) {
  if (!initialized) {
    throw new Error('AI service not initialized');
  }
  if (!groqProvider) {
    throw new Error('Groq provider not available - chat completion tasks require GROQ_API_KEY');
  }
  // Inject dependencies tasks may need; a pool supplied by the route wins
  // over the pool stored at initialization.
  const context = {
    ...payload,
    provider: groqProvider,
    pool: payload.pool || appPool,
    logger
  };
  return getRegistry().runTask(taskId, context);
}
/**
 * Get the Groq provider instance (for direct use if needed).
 * @returns {GroqProvider|null} Null when no Groq API key was supplied at init.
 */
function getGroqProvider() {
  return groqProvider;
}
/**
 * Get the PostgreSQL pool (for tasks that need DB access).
 * @returns {Object|null} Null until initialize() is called with a pool.
 */
function getPool() {
  return appPool;
}
/**
 * Check if chat completion tasks are available.
 * @returns {boolean} True when a Groq provider was configured.
 */
function hasChatCompletion() {
  return !!groqProvider;
}
// Public API of the AI service module.
module.exports = {
  // Initialization
  initialize,
  isReady,
  getStatus,
  // Embeddings (OpenAI)
  getProductEmbedding,
  getProductEmbeddings,
  findSimilarTaxonomy,
  getSuggestionsForProduct,
  getTaxonomyData,
  // Chat completions (Groq)
  runTask,
  hasChatCompletion,
  getGroqProvider,
  getPool,
  // Constants
  TASK_IDS,
  GROQ_MODELS,
  // Re-export utilities
  cosineSimilarity,
  findTopMatches
};

View File

@@ -1,176 +0,0 @@
/**
* Description Validation Prompts
*
* Functions for building and parsing description validation prompts.
* System and general prompts are loaded from the database.
*/
/**
 * Sanitize an issue string returned by the AI.
 * Models occasionally emit stray escape sequences or trailing punctuation;
 * strip those so issues render cleanly.
 *
 * @param {string} issue - Raw issue string from the model response
 * @returns {string} Cleaned issue string ('' for non-string input)
 */
function sanitizeIssue(issue) {
  if (!issue || typeof issue !== 'string') return '';
  // Order matters: strip trailing escapes first, then unescape quotes,
  // then drop leftover trailing punctuation.
  const cleanupSteps = [
    [/\\+$/, ''],        // trailing backslashes (incomplete escapes)
    [/\\",?\)?$/, ''],   // malformed escaped quote at end of string
    [/\\\\"/g, '"'],     // double-escaped quotes
    [/\\"/g, '"'],       // unnecessary escaped quotes
    [/[,\s]+$/, '']      // trailing punctuation artifacts
  ];
  return cleanupSteps
    .reduce((text, [pattern, replacement]) => text.replace(pattern, replacement), issue)
    .trim();
}
/**
 * Build the user prompt for description validation.
 * Combines database-loaded prompts with product data and appends the
 * JSON response-format instructions.
 *
 * @param {Object} product - Product data
 * @param {string} product.name - Product name
 * @param {string} product.description - Current description
 * @param {string} [product.company_name] - Company name
 * @param {string} [product.categories] - Product categories
 * @param {Object} prompts - Prompts loaded from database
 * @param {string} prompts.general - General description guidelines
 * @param {string} [prompts.companySpecific] - Company-specific rules
 * @returns {string} Complete user prompt
 */
function buildDescriptionUserPrompt(product, prompts) {
  const lines = [];
  // General guidelines first, separated by a blank line.
  if (prompts.general) {
    lines.push(prompts.general, '');
  }
  // Then any company-specific rules.
  if (prompts.companySpecific) {
    lines.push(
      `COMPANY-SPECIFIC RULES FOR ${product.company_name || 'THIS COMPANY'}:`,
      prompts.companySpecific,
      ''
    );
  }
  // The product under review.
  lines.push('PRODUCT TO VALIDATE:');
  lines.push(`NAME: "${product.name || ''}"`);
  lines.push(`COMPANY: ${product.company_name || 'Unknown'}`);
  if (product.categories) {
    lines.push(`CATEGORIES: ${product.categories}`);
  }
  lines.push('', 'CURRENT DESCRIPTION:', `"${product.description || '(empty)'}"`);
  // Response-format instructions.
  lines.push(
    '',
    'CRITICAL RULES:',
    '- If isValid is false, you MUST provide a suggestion with the improved description',
    '- If there are ANY issues, isValid MUST be false and suggestion MUST contain the corrected text',
    '- Only set isValid to true if there are ZERO issues and the description needs no changes',
    '',
    'RESPOND WITH JSON:'
  );
  lines.push(JSON.stringify({
    isValid: 'true if perfect, false if ANY changes needed',
    suggestion: 'REQUIRED when isValid is false - the complete improved description',
    issues: ['list each problem found (empty array only if isValid is true)']
  }, null, 2));
  return lines.join('\n');
}
/**
 * Parse the AI response for description validation.
 *
 * Defensive against common LLM quirks: isValid returned as the string
 * "true"/"false" instead of a boolean, contradictory output (isValid true
 * alongside issues or a suggestion), and malformed JSON that requires regex
 * extraction from the raw content.
 *
 * @param {Object|null} parsed - Parsed JSON from AI (null if parsing failed)
 * @param {string} content - Raw response content
 * @returns {{isValid: boolean, suggestion: string|null, issues: string[]}}
 */
function parseDescriptionResponse(parsed, content) {
  // Structured path: accept isValid as a real boolean or a "true"/"false"
  // string. (Previously two near-identical branches; merged for consistency.)
  if (parsed && (typeof parsed.isValid === 'boolean' || typeof parsed.isValid === 'string')) {
    const rawIssues = Array.isArray(parsed.issues) ? parsed.issues : [];
    const issues = rawIssues
      .map(sanitizeIssue)
      .filter(issue => issue.length > 0);
    const suggestion = parsed.suggestion || null;
    const rawIsValid = typeof parsed.isValid === 'string'
      ? parsed.isValid.toLowerCase() !== 'false'
      : parsed.isValid;
    // IMPORTANT: LLMs sometimes return contradictory data (isValid: true with
    // issues, or with a suggestion). If anything needed changing, treat the
    // description as invalid regardless of what the AI said.
    const isValid = rawIsValid && issues.length === 0 && !suggestion;
    return { isValid, suggestion, issues };
  }
  // Fallback: JSON parsing failed — extract fields from raw content via regex.
  try {
    const isValidMatch = content.match(/"isValid"\s*:\s*(true|false)/i);
    const isValid = isValidMatch ? isValidMatch[1].toLowerCase() === 'true' : true;
    // Suggestion may be multiline; capture escaped characters too.
    const suggestionMatch = content.match(/"suggestion"\s*:\s*"((?:[^"\\]|\\.)*)"/s);
    let suggestion = suggestionMatch ? suggestionMatch[1] : null;
    if (suggestion) {
      // Unescape common escapes
      suggestion = suggestion.replace(/\\n/g, '\n').replace(/\\"/g, '"');
    }
    const issuesMatch = content.match(/"issues"\s*:\s*\[([\s\S]*?)\]/);
    let issues = [];
    if (issuesMatch) {
      const issueStrings = issuesMatch[1].match(/"([^"]+)"/g);
      if (issueStrings) {
        issues = issueStrings
          .map(s => sanitizeIssue(s.replace(/"/g, '')))
          .filter(issue => issue.length > 0);
      }
    }
    // Same rule: any issue or suggestion means the description is not valid.
    const finalIsValid = isValid && issues.length === 0 && !suggestion;
    return { isValid: finalIsValid, suggestion, issues };
  } catch {
    // Default to valid if we can't parse anything
    return { isValid: true, suggestion: null, issues: [] };
  }
}
// Exported prompt builder/parser pair for the description validation task.
module.exports = {
  buildDescriptionUserPrompt,
  parseDescriptionResponse
};

View File

@@ -1,187 +0,0 @@
/**
* Name Validation Prompts
*
* Functions for building and parsing name validation prompts.
* System and general prompts are loaded from the database.
*/
/**
 * Sanitize an issue string returned by the AI.
 * Models occasionally emit stray escape sequences or trailing punctuation;
 * strip those so issues render cleanly.
 *
 * @param {string} issue - Raw issue string from the model response
 * @returns {string} Cleaned issue string ('' for non-string input)
 */
function sanitizeIssue(issue) {
  if (!issue || typeof issue !== 'string') return '';
  // Order matters: trailing escapes, end-of-string quote artifacts,
  // unescaping, then trailing punctuation.
  let text = issue;
  text = text.replace(/\\+$/, '');      // trailing backslashes (incomplete escapes)
  text = text.replace(/\\",?\)?$/, ''); // malformed escaped quote at end of string
  text = text.replace(/\\\\"/g, '"');   // double-escaped quotes
  text = text.replace(/\\"/g, '"');     // unnecessary escaped quotes
  text = text.replace(/[,\s]+$/, '');   // trailing punctuation artifacts
  return text.trim();
}
/**
 * Build the user prompt for name validation.
 * Combines database-loaded prompts with product data, sibling-name context,
 * and the JSON response-format instructions.
 *
 * @param {Object} product - Product data
 * @param {string} product.name - Current product name
 * @param {string} [product.company_name] - Company name
 * @param {string} [product.line_name] - Product line name
 * @param {string} [product.subline_name] - Product subline name
 * @param {string[]} [product.siblingNames] - Names of other products in the same line
 * @param {Object} prompts - Prompts loaded from database
 * @param {string} prompts.general - General naming conventions
 * @param {string} [prompts.companySpecific] - Company-specific rules
 * @returns {string} Complete user prompt
 */
function buildNameUserPrompt(product, prompts) {
  const lines = [];
  // General conventions first, separated by a blank line.
  if (prompts.general) {
    lines.push(prompts.general, '');
  }
  // Then any company-specific rules.
  if (prompts.companySpecific) {
    lines.push(
      `COMPANY-SPECIFIC RULES FOR ${product.company_name || 'THIS COMPANY'}:`,
      prompts.companySpecific,
      ''
    );
  }
  // The product under review.
  lines.push(
    'PRODUCT TO VALIDATE:',
    `NAME: "${product.name || ''}"`,
    `COMPANY: ${product.company_name || 'Unknown'}`,
    `LINE: ${product.line_name || 'None'}`
  );
  if (product.subline_name) {
    lines.push(`SUBLINE: ${product.subline_name}`);
  }
  // Sibling names give the model context for line-consistent naming.
  const siblings = product.siblingNames;
  if (siblings && siblings.length > 0) {
    lines.push('', `OTHER PRODUCTS IN THIS LINE (${siblings.length + 1} total including this one):`);
    for (const siblingName of siblings) {
      lines.push(`- ${siblingName}`);
    }
  }
  // Response-format instructions.
  lines.push(
    '',
    'RESPOND WITH JSON:',
    JSON.stringify({
      isValid: 'true/false',
      suggestion: 'corrected name if changes needed, or null if valid',
      issues: ['issue 1', 'issue 2 (empty array if valid)']
    }, null, 2)
  );
  return lines.join('\n');
}
/**
 * Parse the AI response for name validation.
 *
 * Defensive against common LLM quirks: isValid returned as the string
 * "true"/"false" instead of a boolean, contradictory output (isValid true
 * alongside issues or a suggestion), and malformed JSON that requires regex
 * extraction from the raw content.
 *
 * Leftover per-call debug console.log statements (which dumped up to 3000
 * chars of model content on every parse) have been removed.
 *
 * @param {Object|null} parsed - Parsed JSON from AI (null if parsing failed)
 * @param {string} content - Raw response content
 * @returns {{isValid: boolean, suggestion: string|null, issues: string[]}}
 */
function parseNameResponse(parsed, content) {
  // Structured path: accept isValid as a real boolean or a "true"/"false"
  // string. (Previously two near-identical branches; merged for consistency.)
  if (parsed && (typeof parsed.isValid === 'boolean' || typeof parsed.isValid === 'string')) {
    const rawIssues = Array.isArray(parsed.issues) ? parsed.issues : [];
    const issues = rawIssues
      .map(sanitizeIssue)
      .filter(issue => issue.length > 0);
    const suggestion = parsed.suggestion || null;
    const rawIsValid = typeof parsed.isValid === 'string'
      ? parsed.isValid.toLowerCase() !== 'false'
      : parsed.isValid;
    // IMPORTANT: LLMs sometimes return contradictory data (isValid: true with
    // issues, or with a suggestion). If anything needed changing, the name is
    // not valid regardless of what the AI said.
    const isValid = rawIsValid && issues.length === 0 && !suggestion;
    return { isValid, suggestion, issues };
  }
  // Fallback: JSON parsing failed — extract fields from raw content via regex.
  try {
    // Handle both boolean and quoted forms:
    // "isValid": true / false / "true" / "false"
    const isValidMatch = content.match(/"isValid"\s*:\s*"?(true|false)"?/i);
    const isValid = isValidMatch ? isValidMatch[1].toLowerCase() === 'true' : true;
    // Suggestion may contain escaped quotes, or be the literal null.
    const suggestionMatch = content.match(/"suggestion"\s*:\s*(?:"([^"\\]*(?:\\.[^"\\]*)*)"|null)/);
    let suggestion = suggestionMatch ? (suggestionMatch[1] || null) : null;
    if (suggestion) {
      // Unescape common escapes (consistent with parseDescriptionResponse).
      suggestion = suggestion.replace(/\\n/g, '\n').replace(/\\"/g, '"');
    }
    const issuesMatch = content.match(/"issues"\s*:\s*\[([\s\S]*?)\]/);
    let issues = [];
    if (issuesMatch) {
      const issueStrings = issuesMatch[1].match(/"([^"]+)"/g);
      if (issueStrings) {
        issues = issueStrings
          .map(s => sanitizeIssue(s.replace(/"/g, '')))
          .filter(issue => issue.length > 0);
      }
    }
    // Same rule: any issue or suggestion means the name is not valid.
    const finalIsValid = isValid && issues.length === 0 && !suggestion;
    return { isValid: finalIsValid, suggestion, issues };
  } catch {
    // Default to valid if we can't parse anything
    return { isValid: true, suggestion: null, issues: [] };
  }
}
// Exported prompt builder/parser pair for the name validation task.
module.exports = {
  buildNameUserPrompt,
  parseNameResponse
};

View File

@@ -1,194 +0,0 @@
/**
* Prompt Loader
*
* Utilities to load AI prompts from the ai_prompts PostgreSQL table.
* Supports loading prompts by base type (e.g., 'name_validation' loads
* name_validation_system, name_validation_general, and optionally
* name_validation_company_specific).
*/
/**
 * Load a single prompt by exact type.
 * With a company, matches that company's row; without one, matches the row
 * whose company column is NULL. Query errors are logged and yield null.
 *
 * @param {Object} pool - PostgreSQL pool
 * @param {string} promptType - Exact prompt type (e.g., 'name_validation_system')
 * @param {string} [company] - Company identifier (for company_specific types)
 * @returns {Promise<string|null>} Prompt text or null if not found
 */
async function loadPromptByType(pool, promptType, company = null) {
  try {
    const query = company
      ? {
          text: 'SELECT prompt_text FROM ai_prompts WHERE prompt_type = $1 AND company = $2',
          values: [promptType, company]
        }
      : {
          text: 'SELECT prompt_text FROM ai_prompts WHERE prompt_type = $1 AND company IS NULL',
          values: [promptType]
        };
    const result = await pool.query(query.text, query.values);
    return result.rows[0]?.prompt_text || null;
  } catch (error) {
    // Missing prompts are handled by callers; log and degrade to null.
    console.error(`[PromptLoader] Error loading ${promptType} prompt:`, error.message);
    return null;
  }
}
/**
 * Load all prompts for a task type (system, general, and optionally
 * company-specific).
 *
 * @param {Object} pool - PostgreSQL pool
 * @param {string} baseType - Base type name (e.g., 'name_validation')
 * @param {string|null} [company] - Optional company ID for company-specific prompts
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadPromptsByType(pool, baseType, company = null) {
  // System and general prompts load in parallel; both use the NULL-company row.
  const [system, general] = await Promise.all([
    loadPromptByType(pool, `${baseType}_system`),
    loadPromptByType(pool, `${baseType}_general`)
  ]);
  // The company-specific prompt is only looked up when a company is given.
  const companySpecific = company
    ? await loadPromptByType(pool, `${baseType}_company_specific`, company)
    : null;
  return { system, general, companySpecific };
}
/**
 * Load name validation prompts (system, general, optional company-specific).
 * @param {Object} pool - PostgreSQL pool
 * @param {string|null} [company] - Optional company ID
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadNameValidationPrompts(pool, company = null) {
  return loadPromptsByType(pool, 'name_validation', company);
}
/**
 * Load description validation prompts (system, general, optional company-specific).
 * @param {Object} pool - PostgreSQL pool
 * @param {string|null} [company] - Optional company ID
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadDescriptionValidationPrompts(pool, company = null) {
  return loadPromptsByType(pool, 'description_validation', company);
}
/**
 * Load sanity check prompts (this task has no company-specific variant).
 * @param {Object} pool - PostgreSQL pool
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: null}>}
 */
async function loadSanityCheckPrompts(pool) {
  return loadPromptsByType(pool, 'sanity_check', null);
}
/**
 * Load bulk validation prompts (GPT-5 validation).
 * @param {Object} pool - PostgreSQL pool
 * @param {string|null} [company] - Optional company ID
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadBulkValidationPrompts(pool, company = null) {
  return loadPromptsByType(pool, 'bulk_validation', company);
}
/**
 * Load bulk validation prompts for multiple companies at once.
 * System/general prompts load via the single-prompt loader; all requested
 * company-specific prompts are fetched in one query.
 *
 * @param {Object} pool - PostgreSQL pool
 * @param {string[]} companyIds - Array of company IDs
 * @returns {Promise<{system: string|null, general: string|null, companyPrompts: Map<string, string>}>}
 */
async function loadBulkValidationPromptsForCompanies(pool, companyIds = []) {
  const [system, general] = await Promise.all([
    loadPromptByType(pool, 'bulk_validation_system'),
    loadPromptByType(pool, 'bulk_validation_general')
  ]);
  const companyPrompts = new Map();
  if (companyIds.length > 0) {
    try {
      const result = await pool.query(
        `SELECT company, prompt_text FROM ai_prompts
WHERE prompt_type = 'bulk_validation_company_specific'
AND company = ANY($1)`,
        [companyIds]
      );
      for (const row of result.rows) {
        companyPrompts.set(row.company, row.prompt_text);
      }
    } catch (error) {
      // Degrade gracefully: missing company prompts just mean an empty map.
      console.error('[PromptLoader] Error loading company-specific prompts:', error.message);
    }
  }
  return { system, general, companyPrompts };
}
/**
 * Validate that required prompts exist; throw if any are missing.
 *
 * @param {Object} prompts - Prompts object from loadPromptsByType
 * @param {string} baseType - Base type for error messages
 * @param {Object} options - Validation options
 * @param {boolean} [options.requireSystem=true] - Require system prompt
 * @param {boolean} [options.requireGeneral=true] - Require general prompt
 * @throws {Error} If required prompts are missing
 */
function validateRequiredPrompts(prompts, baseType, options = {}) {
  const { requireSystem = true, requireGeneral = true } = options;
  // Collect the prompt-type names that are required but absent.
  const missing = [
    requireSystem && !prompts.system ? `${baseType}_system` : null,
    requireGeneral && !prompts.general ? `${baseType}_general` : null
  ].filter(Boolean);
  if (missing.length > 0) {
    throw new Error(
      `Missing required AI prompts: ${missing.join(', ')}. ` +
      `Please add these prompts in Settings > AI Validation Prompts.`
    );
  }
}
// Prompt-loading API: generic loaders, task-specific convenience wrappers,
// and a guard that fails fast when required prompts are missing.
module.exports = {
  // Core loader
  loadPromptByType,
  loadPromptsByType,
  // Task-specific loaders
  loadNameValidationPrompts,
  loadDescriptionValidationPrompts,
  loadSanityCheckPrompts,
  loadBulkValidationPrompts,
  loadBulkValidationPromptsForCompanies,
  // Validation
  validateRequiredPrompts
};

View File

@@ -1,128 +0,0 @@
/**
* Sanity Check Prompts
*
* Functions for building and parsing batch product consistency validation prompts.
* System and general prompts are loaded from the database.
*/
/**
 * Build the user prompt for the batch sanity check.
 * Projects each product down to the reviewable fields, then combines the
 * database-loaded rules with the product list and response-format spec.
 *
 * @param {Object[]} products - Array of product data (limited fields for context)
 * @param {Object} prompts - Prompts loaded from database
 * @param {string} prompts.general - General sanity check rules
 * @returns {string} Complete user prompt
 */
function buildSanityCheckUserPrompt(products, prompts) {
  // Simplified per-product summary; *_name variants win over raw IDs.
  const productSummaries = products.map((p, index) => ({
    index,
    name: p.name,
    supplier: p.supplier_name || p.supplier,
    company: p.company_name || p.company,
    supplier_no: p.supplier_no,
    msrp: p.msrp,
    cost_each: p.cost_each,
    qty_per_unit: p.qty_per_unit,
    case_qty: p.case_qty,
    tax_cat: p.tax_cat_name || p.tax_cat,
    size_cat: p.size_cat_name || p.size_cat,
    themes: p.theme_names || p.themes,
    categories: p.category_names || p.categories,
    weight: p.weight,
    length: p.length,
    width: p.width,
    height: p.height
  }));
  const lines = [];
  // General rules first, separated by a blank line.
  if (prompts.general) {
    lines.push(prompts.general, '');
  }
  lines.push(
    `PRODUCTS TO REVIEW (${products.length} items):`,
    JSON.stringify(productSummaries, null, 2),
    '',
    'RESPOND WITH JSON:',
    JSON.stringify({
      issues: [
        {
          productIndex: 0,
          field: 'msrp',
          issue: 'Description of the issue found',
          suggestion: 'Suggested fix or verification (optional)'
        }
      ],
      summary: '2-3 sentences summarizing the overall product quality'
    }, null, 2),
    '',
    'If no issues are found, return empty issues array with positive summary.'
  );
  return lines.join('\n');
}
/**
 * Parse the AI response for the batch sanity check.
 * Normalizes each issue to {productIndex, field, issue, suggestion}; falls
 * back to regex extraction when the response wasn't valid JSON.
 *
 * @param {Object|null} parsed - Parsed JSON from AI (null if parsing failed)
 * @param {string} content - Raw response content
 * @returns {{issues: Array<Object>, summary: string}}
 */
function parseSanityCheckResponse(parsed, content) {
  // Tolerate alternate key names the model sometimes uses (index, message).
  const normalizeIssue = (issue) => ({
    productIndex: issue.productIndex ?? issue.index ?? 0,
    field: issue.field || 'unknown',
    issue: issue.issue || issue.message || '',
    suggestion: issue.suggestion || null
  });
  if (parsed && Array.isArray(parsed.issues)) {
    return {
      issues: parsed.issues.map(normalizeIssue),
      summary: parsed.summary || 'Review complete'
    };
  }
  // Fallback: pull the issues array and summary out of the raw content.
  try {
    let issues = [];
    const issuesMatch = content.match(/"issues"\s*:\s*\[([\s\S]*?)\]/);
    if (issuesMatch) {
      try {
        issues = JSON.parse(`[${issuesMatch[1]}]`).map(normalizeIssue);
      } catch {
        // Array content wasn't valid JSON - leave issues empty.
      }
    }
    const summaryMatch = content.match(/"summary"\s*:\s*"([^"]+)"/);
    return {
      issues,
      summary: summaryMatch ? summaryMatch[1] : 'Review complete'
    };
  } catch {
    return { issues: [], summary: 'Could not parse review results' };
  }
}
// Exported prompt builder/parser pair for the batch sanity check task.
module.exports = {
  buildSanityCheckUserPrompt,
  parseSanityCheckResponse
};

View File

@@ -1,203 +0,0 @@
/**
* Groq Provider - Handles chat completions via Groq's OpenAI-compatible API
*
* Uses Groq's fast inference for real-time AI validation tasks.
* Supports models like openai/gpt-oss-120b (complex) and openai/gpt-oss-20b (simple).
*/
const GROQ_BASE_URL = 'https://api.groq.com/openai/v1';
// Default models
const MODELS = {
  LARGE: 'openai/gpt-oss-120b', // For complex tasks (descriptions, sanity checks)
  SMALL: 'openai/gpt-oss-20b'   // For simple tasks (name validation)
};
class GroqProvider {
  /**
   * @param {Object} options
   * @param {string} options.apiKey - Groq API key
   * @param {string} [options.baseUrl] - Override base URL
   * @param {number} [options.timeoutMs=30000] - Default timeout
   */
  constructor({ apiKey, baseUrl = GROQ_BASE_URL, timeoutMs = 30000 }) {
    if (!apiKey) {
      throw new Error('Groq API key is required');
    }
    this.apiKey = apiKey;
    this.baseUrl = baseUrl;
    this.timeoutMs = timeoutMs;
  }
  /**
   * Send a chat completion request.
   *
   * Note: the per-request debug console.log blocks that dumped request
   * parameters and response content previews on every call were removed —
   * they were leftover instrumentation and leaked prompt data into logs.
   *
   * @param {Object} params
   * @param {Array<{role: string, content: string}>} params.messages - Conversation messages
   * @param {string} [params.model] - Model to use (defaults to LARGE)
   * @param {number} [params.temperature=0.3] - Response randomness (0-2)
   * @param {number} [params.maxTokens=500] - Max tokens in response
   * @param {Object} [params.responseFormat] - For JSON mode: { type: 'json_object' }
   * @param {number} [params.timeoutMs] - Request timeout override
   * @returns {Promise<{content: string, parsed: Object|null, usage: Object, latencyMs: number, model: string}>}
   */
  async chatCompletion({
    messages,
    model = MODELS.LARGE,
    temperature = 0.3,
    maxTokens = 500,
    responseFormat = null,
    timeoutMs = this.timeoutMs
  }) {
    const started = Date.now();
    const body = {
      model,
      messages,
      temperature,
      max_completion_tokens: maxTokens
    };
    // Enable JSON mode if requested
    if (responseFormat?.type === 'json_object') {
      body.response_format = { type: 'json_object' };
    }
    const response = await this._makeRequest('chat/completions', body, timeoutMs);
    const content = response.choices?.[0]?.message?.content || '';
    const usage = response.usage || {};
    // Attempt to parse JSON if a response format was requested; fall back to
    // extracting JSON from markdown fences if direct parsing fails.
    let parsed = null;
    if (responseFormat && content) {
      try {
        parsed = JSON.parse(content);
      } catch {
        parsed = this._extractJson(content);
      }
    }
    return {
      content,
      parsed,
      usage: {
        promptTokens: usage.prompt_tokens || 0,
        completionTokens: usage.completion_tokens || 0,
        totalTokens: usage.total_tokens || 0
      },
      latencyMs: Date.now() - started,
      model: response.model || model
    };
  }
  /**
   * Extract JSON from content that might be wrapped in markdown code blocks.
   * Returns the parsed object/array, or null if nothing parseable is found.
   * @private
   */
  _extractJson(content) {
    // Try to find JSON in code blocks
    const codeBlockMatch = content.match(/```(?:json)?\s*([\s\S]*?)```/);
    if (codeBlockMatch) {
      try {
        return JSON.parse(codeBlockMatch[1].trim());
      } catch {
        // Fall through
      }
    }
    // Try to find JSON object/array directly
    const jsonMatch = content.match(/(\{[\s\S]*\}|\[[\s\S]*\])/);
    if (jsonMatch) {
      try {
        return JSON.parse(jsonMatch[1]);
      } catch {
        // Fall through
      }
    }
    return null;
  }
  /**
   * Make an HTTP request to the Groq API with an abort-based timeout.
   * @private
   * @throws {Error} With .status/.code from the API, or .code === 'TIMEOUT'.
   */
  async _makeRequest(endpoint, body, timeoutMs) {
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const response = await fetch(`${this.baseUrl}/${endpoint}`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body),
        signal: controller.signal
      });
      if (!response.ok) {
        const error = await response.json().catch(() => ({}));
        const message = error.error?.message || `Groq API error: ${response.status}`;
        const err = new Error(message);
        err.status = response.status;
        err.code = error.error?.code;
        // Include failed_generation if available (for JSON mode failures)
        if (error.error?.failed_generation) {
          err.failedGeneration = error.error.failed_generation;
          console.error('[Groq] JSON validation failed. Model output:', error.error.failed_generation);
        }
        throw err;
      }
      return response.json();
    } catch (error) {
      if (error.name === 'AbortError') {
        const err = new Error(`Groq request timed out after ${timeoutMs}ms`);
        err.code = 'TIMEOUT';
        throw err;
      }
      throw error;
    } finally {
      // Always clear the timer so the process isn't held open.
      clearTimeout(timeout);
    }
  }
  /**
   * Check if the provider is properly configured.
   * @returns {boolean}
   */
  isConfigured() {
    return !!this.apiKey;
  }
}
module.exports = { GroqProvider, MODELS, GROQ_BASE_URL };

View File

@@ -1,117 +0,0 @@
/**
 * OpenAI Provider - Handles embedding generation
 */
const EMBEDDING_MODEL = 'text-embedding-3-small';
const EMBEDDING_DIMENSIONS = 1536;
// Hard limit on inputs per embeddings request imposed by the API.
const MAX_BATCH_SIZE = 2048;

class OpenAIProvider {
  /**
   * @param {Object} [options]
   * @param {string} options.apiKey - OpenAI API key (required)
   * @param {string} [options.baseUrl='https://api.openai.com/v1'] - API base URL
   * @param {number} [options.timeoutMs=60000] - Default per-request timeout
   * @throws {Error} If no API key is supplied
   */
  constructor({ apiKey, baseUrl = 'https://api.openai.com/v1', timeoutMs = 60000 } = {}) {
    if (!apiKey) {
      throw new Error('OpenAI API key is required');
    }
    this.apiKey = apiKey;
    this.baseUrl = baseUrl;
    this.timeoutMs = timeoutMs;
  }

  /**
   * Generate embeddings for one or more texts
   * @param {string|string[]} input - Text or array of texts
   * @param {Object} [options]
   * @param {string} [options.model] - Embedding model name
   * @param {number} [options.dimensions] - Output dimensions (embedding-3 models only)
   * @param {number} [options.timeoutMs] - Per-request timeout override
   * @returns {Promise<{embeddings: number[][], usage: Object, model: string, latencyMs: number}>}
   * @throws {Error} If the batch exceeds MAX_BATCH_SIZE, on API errors, or on timeout
   */
  async embed(input, options = {}) {
    const texts = Array.isArray(input) ? input : [input];
    const model = options.model || EMBEDDING_MODEL;
    const dimensions = options.dimensions || EMBEDDING_DIMENSIONS;
    const timeoutMs = options.timeoutMs || this.timeoutMs;
    if (texts.length > MAX_BATCH_SIZE) {
      throw new Error(`Batch size ${texts.length} exceeds max of ${MAX_BATCH_SIZE}`);
    }
    const started = Date.now();
    // Clean and truncate input texts: collapse newlines, trim, and cap length
    // so a single oversized text cannot blow the request.
    const cleanedTexts = texts.map(t =>
      (t || '').replace(/\n+/g, ' ').trim().substring(0, 8000)
    );
    const body = {
      input: cleanedTexts,
      model,
      encoding_format: 'float'
    };
    // Only embedding-3 models support dimensions parameter
    if (model.includes('embedding-3')) {
      body.dimensions = dimensions;
    }
    const response = await this._makeRequest('embeddings', body, timeoutMs);
    // Sort a COPY by index so output order matches input order without
    // mutating the response payload (Array#sort is in-place).
    const sortedData = [...response.data].sort((a, b) => a.index - b.index);
    return {
      embeddings: sortedData.map(item => item.embedding),
      usage: {
        promptTokens: response.usage?.prompt_tokens || 0,
        totalTokens: response.usage?.total_tokens || 0
      },
      model: response.model || model,
      latencyMs: Date.now() - started
    };
  }

  /**
   * Generator for processing large batches in chunks
   * @param {string[]} texts - All texts to embed
   * @param {Object} [options] - Passed through to embed(); options.batchSize caps chunk size
   * @yields {{embeddings: number[][], startIndex: number, endIndex: number, usage: Object, model: string, latencyMs: number}}
   */
  async *embedBatchChunked(texts, options = {}) {
    const batchSize = Math.min(options.batchSize || 100, MAX_BATCH_SIZE);
    for (let i = 0; i < texts.length; i += batchSize) {
      const chunk = texts.slice(i, i + batchSize);
      const result = await this.embed(chunk, options);
      yield {
        embeddings: result.embeddings,
        startIndex: i,
        endIndex: i + chunk.length,
        usage: result.usage,
        model: result.model,
        latencyMs: result.latencyMs
      };
    }
  }

  /**
   * Make an HTTP request to the OpenAI API
   * @private
   * @param {string} endpoint - Path appended to the base URL
   * @param {Object} body - JSON-serializable request payload
   * @param {number} timeoutMs - Abort the request after this many milliseconds
   * @returns {Promise<Object>} Parsed JSON response body
   * @throws {Error} With `status` on HTTP errors, or code 'TIMEOUT' on abort
   *   (mirrors GroqProvider so callers can handle both providers uniformly)
   */
  async _makeRequest(endpoint, body, timeoutMs) {
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const response = await fetch(`${this.baseUrl}/${endpoint}`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body),
        signal: controller.signal
      });
      if (!response.ok) {
        const error = await response.json().catch(() => ({}));
        const err = new Error(error.error?.message || `OpenAI API error: ${response.status}`);
        err.status = response.status;
        throw err;
      }
      return response.json();
    } catch (error) {
      // Fix: previously a timeout surfaced as a bare AbortError; translate it
      // into the same TIMEOUT shape GroqProvider throws.
      if (error.name === 'AbortError') {
        const err = new Error(`OpenAI request timed out after ${timeoutMs}ms`);
        err.code = 'TIMEOUT';
        throw err;
      }
      throw error;
    } finally {
      clearTimeout(timeout);
    }
  }

  /**
   * Check if the provider is properly configured (parity with GroqProvider).
   * @returns {boolean}
   */
  isConfigured() {
    return !!this.apiKey;
  }
}
module.exports = { OpenAIProvider, EMBEDDING_MODEL, EMBEDDING_DIMENSIONS };

View File

@@ -1,158 +0,0 @@
/**
* Description Validation Task
*
* Validates a product description for quality, accuracy, and guideline compliance.
* Uses Groq with the larger model for better reasoning about content quality.
* Loads all prompts from the database (no hardcoded prompts).
*/
const { MODELS } = require('../providers/groqProvider');
const {
loadDescriptionValidationPrompts,
validateRequiredPrompts
} = require('../prompts/promptLoader');
const {
buildDescriptionUserPrompt,
parseDescriptionResponse
} = require('../prompts/descriptionPrompts');
const TASK_ID = 'validate.description';
/**
 * Create the description validation task
 *
 * @returns {Object} Task definition
 */
function createDescriptionValidationTask() {
  return {
    id: TASK_ID,
    description: 'Validate product description for quality and guideline compliance',
    /**
     * Run the description validation
     *
     * @param {Object} payload
     * @param {Object} payload.product - Product data
     * @param {string} payload.product.name - Product name (for context)
     * @param {string} payload.product.description - Description to validate
     * @param {string} [payload.product.company_name] - Company name
     * @param {string} [payload.product.company_id] - Company ID for loading specific rules
     * @param {string} [payload.product.categories] - Product categories
     * @param {Object} payload.provider - Groq provider instance
     * @param {Object} payload.pool - PostgreSQL pool
     * @param {Object} [payload.logger] - Logger instance
     * @returns {Promise<Object>}
     */
    async run(payload) {
      const { product, provider, pool, logger } = payload;
      const log = logger || console;
      // Validate required input: with neither name nor description there is
      // nothing to validate, so report success and mark the run as skipped.
      if (!product?.name && !product?.description) {
        return {
          isValid: true,
          suggestion: null,
          issues: [],
          skipped: true,
          reason: 'No name or description provided'
        };
      }
      if (!provider) {
        throw new Error('Groq provider not available');
      }
      if (!pool) {
        throw new Error('Database pool not available');
      }
      try {
        // Load prompts from database. company_id is preferred; the name
        // fields are fallbacks for callers that only have a display name.
        const companyKey = product.company_id || product.company_name || product.company;
        const prompts = await loadDescriptionValidationPrompts(pool, companyKey);
        // Validate required prompts exist
        validateRequiredPrompts(prompts, 'description_validation');
        // Build the user prompt with database-loaded prompts
        const userPrompt = buildDescriptionUserPrompt(product, prompts);
        let response;
        let result;
        try {
          // Try with JSON mode first
          response = await provider.chatCompletion({
            messages: [
              { role: 'system', content: prompts.system },
              { role: 'user', content: userPrompt }
            ],
            model: MODELS.LARGE, // openai/gpt-oss-120b - better for content analysis
            temperature: 0.3, // Slightly higher for creative suggestions
            maxTokens: 2000, // Reasoning models need extra tokens for thinking
            responseFormat: { type: 'json_object' }
          });
          // Log full raw response for debugging
          log.info('[DescriptionValidation] Raw AI response:', {
            parsed: response.parsed,
            content: response.content,
            contentLength: response.content?.length
          });
          // Parse the response
          result = parseDescriptionResponse(response.parsed, response.content);
        } catch (jsonError) {
          // If JSON mode failed, check if we have failedGeneration to parse
          // (the provider attaches the model's raw output on JSON-mode errors).
          if (jsonError.failedGeneration) {
            log.warn('[DescriptionValidation] JSON mode failed, attempting to parse failed_generation:', {
              failedGeneration: jsonError.failedGeneration
            });
            result = parseDescriptionResponse(null, jsonError.failedGeneration);
            // Synthesized metadata: no second request was made on this path.
            response = { latencyMs: 0, usage: {}, model: MODELS.LARGE };
          } else {
            // Retry without JSON mode
            log.warn('[DescriptionValidation] JSON mode failed, retrying without JSON mode');
            response = await provider.chatCompletion({
              messages: [
                { role: 'system', content: prompts.system },
                { role: 'user', content: userPrompt }
              ],
              model: MODELS.LARGE,
              temperature: 0.3,
              maxTokens: 2000 // Reasoning models need extra tokens for thinking
              // No responseFormat - let the model respond freely
            });
            log.info('[DescriptionValidation] Raw AI response (no JSON mode):', {
              parsed: response.parsed,
              content: response.content,
              contentLength: response.content?.length
            });
            result = parseDescriptionResponse(response.parsed, response.content);
          }
        }
        log.info(`[DescriptionValidation] Validated description for "${product.name}" in ${response.latencyMs}ms`, {
          isValid: result.isValid,
          hasSuggestion: !!result.suggestion,
          issueCount: result.issues.length
        });
        return {
          ...result,
          latencyMs: response.latencyMs,
          usage: response.usage,
          model: response.model
        };
      } catch (error) {
        log.error('[DescriptionValidation] Error:', error.message);
        throw error;
      }
    }
  };
}
// Public API: task id constant plus the factory used for registry registration.
module.exports = {
  TASK_ID,
  createDescriptionValidationTask
};

View File

@@ -1,186 +0,0 @@
/**
* AI Task Registry
*
* Simple registry pattern for AI tasks. Each task has:
* - id: Unique identifier
* - run: Async function that executes the task
*
* This allows adding new AI capabilities without modifying core code.
*/
const { createNameValidationTask, TASK_ID: NAME_TASK_ID } = require('./nameValidationTask');
const { createDescriptionValidationTask, TASK_ID: DESC_TASK_ID } = require('./descriptionValidationTask');
const { createSanityCheckTask, TASK_ID: SANITY_TASK_ID } = require('./sanityCheckTask');
/**
 * Task IDs - frozen constants for type safety
 * Values are re-exported from each task module so they stay in sync.
 */
const TASK_IDS = Object.freeze({
  // Inline validation (triggered on field blur)
  VALIDATE_NAME: NAME_TASK_ID,
  VALIDATE_DESCRIPTION: DESC_TASK_ID,
  // Batch operations (triggered on user action)
  SANITY_CHECK: SANITY_TASK_ID
});
/**
 * Task Registry
 *
 * Maps task ids to task definitions and provides a uniform runTask wrapper
 * that converts task results/errors into {success, taskId, ...} envelopes.
 */
class TaskRegistry {
  constructor() {
    this.tasks = new Map();
  }

  /**
   * Register a task
   * @param {Object} task
   * @param {string} task.id - Unique task identifier
   * @param {Function} task.run - Async function: (payload) => result
   * @param {string} [task.description] - Human-readable description
   * @returns {TaskRegistry} this, for chaining
   */
  register(task) {
    if (!task?.id) {
      throw new Error('Task must have an id');
    }
    if (typeof task.run !== 'function') {
      throw new Error(`Task ${task.id} must have a run function`);
    }
    if (this.tasks.has(task.id)) {
      throw new Error(`Task ${task.id} is already registered`);
    }
    this.tasks.set(task.id, task);
    return this;
  }

  /**
   * Get a task by ID
   * @param {string} taskId
   * @returns {Object|null}
   */
  get(taskId) {
    return this.tasks.get(taskId) ?? null;
  }

  /**
   * Check if a task exists
   * @param {string} taskId
   * @returns {boolean}
   */
  has(taskId) {
    return this.tasks.has(taskId);
  }

  /**
   * Run a task by ID. Task errors are captured into a failure envelope;
   * an unknown taskId throws.
   * @param {string} taskId
   * @param {Object} payload - Task-specific input
   * @returns {Promise<Object>} Task result
   */
  async runTask(taskId, payload = {}) {
    const task = this.get(taskId);
    if (!task) {
      throw new Error(`Unknown task: ${taskId}`);
    }
    try {
      return { success: true, taskId, ...(await task.run(payload)) };
    } catch (error) {
      return { success: false, taskId, error: error.message, code: error.code };
    }
  }

  /**
   * List all registered task IDs
   * @returns {string[]}
   */
  list() {
    return [...this.tasks.keys()];
  }

  /**
   * Get count of registered tasks
   * @returns {number}
   */
  size() {
    return this.tasks.size;
  }
}
// Singleton instance (lazily created on first access)
let registry = null;

/**
 * Get or create the task registry
 * @returns {TaskRegistry}
 */
function getRegistry() {
  if (registry === null) {
    registry = new TaskRegistry();
  }
  return registry;
}

/**
 * Reset the registry (mainly for testing)
 */
function resetRegistry() {
  registry = null;
}
/**
 * Register all validation tasks with the registry.
 * Call this during initialization after the registry is created.
 * Idempotent: tasks already present are left untouched.
 *
 * @param {Object} [logger] - Optional logger
 * @returns {TaskRegistry} The populated registry
 */
function registerAllTasks(logger = console) {
  const reg = getRegistry();
  const factories = [
    [TASK_IDS.VALIDATE_NAME, createNameValidationTask],
    [TASK_IDS.VALIDATE_DESCRIPTION, createDescriptionValidationTask],
    [TASK_IDS.SANITY_CHECK, createSanityCheckTask]
  ];
  for (const [taskId, factory] of factories) {
    if (reg.has(taskId)) continue;
    reg.register(factory());
    logger.info(`[Tasks] Registered: ${taskId}`);
  }
  return reg;
}
// Public API for the task registry module.
module.exports = {
  // Constants
  TASK_IDS,
  // Registry
  TaskRegistry,
  getRegistry,
  resetRegistry,
  registerAllTasks,
  // Task factories (for custom registration)
  createNameValidationTask,
  createDescriptionValidationTask,
  createSanityCheckTask
};

View File

@@ -1,172 +0,0 @@
/**
* Name Validation Task
*
* Validates a product name for spelling, grammar, and naming conventions.
* Uses Groq with the smaller model for fast response times.
* Loads all prompts from the database (no hardcoded prompts).
*/
const { MODELS } = require('../providers/groqProvider');
const {
loadNameValidationPrompts,
validateRequiredPrompts
} = require('../prompts/promptLoader');
const {
buildNameUserPrompt,
parseNameResponse
} = require('../prompts/namePrompts');
const TASK_ID = 'validate.name';
/**
 * Create the name validation task
 *
 * @returns {Object} Task definition
 */
function createNameValidationTask() {
  return {
    id: TASK_ID,
    description: 'Validate product name for spelling, grammar, and conventions',
    /**
     * Run the name validation
     *
     * @param {Object} payload
     * @param {Object} payload.product - Product data
     * @param {string} payload.product.name - Product name to validate
     * @param {string} [payload.product.company_name] - Company name
     * @param {string} [payload.product.company_id] - Company ID for loading specific rules
     * @param {string} [payload.product.line_name] - Product line
     * @param {string} [payload.product.description] - Description for context
     * @param {Object} payload.provider - Groq provider instance
     * @param {Object} payload.pool - PostgreSQL pool
     * @param {Object} [payload.logger] - Logger instance
     * @returns {Promise<Object>}
     */
    async run(payload) {
      const { product, provider, pool, logger } = payload;
      const log = logger || console;
      // Validate required input: nothing to validate without a name.
      if (!product?.name) {
        return {
          isValid: true,
          suggestion: null,
          issues: [],
          skipped: true,
          reason: 'No name provided'
        };
      }
      if (!provider) {
        throw new Error('Groq provider not available');
      }
      if (!pool) {
        throw new Error('Database pool not available');
      }
      try {
        // Load prompts from database (company_id preferred; name fields are fallbacks)
        const companyKey = product.company_id || product.company_name || product.company;
        const prompts = await loadNameValidationPrompts(pool, companyKey);
        // Debug: Log loaded prompts
        log.info('[NameValidation] Loaded prompts:', {
          hasSystem: !!prompts.system,
          systemLength: prompts.system?.length || 0,
          hasGeneral: !!prompts.general,
          generalLength: prompts.general?.length || 0,
          generalPreview: prompts.general?.substring(0, 100) || '(empty)',
          hasCompanySpecific: !!prompts.companySpecific,
          companyKey
        });
        // Validate required prompts exist
        validateRequiredPrompts(prompts, 'name_validation');
        // Build the user prompt with database-loaded prompts
        const userPrompt = buildNameUserPrompt(product, prompts);
        // Debug: Log the full user prompt being sent
        log.info('[NameValidation] User prompt:', userPrompt.substring(0, 500));
        let response;
        let result;
        try {
          // Try with JSON mode first
          response = await provider.chatCompletion({
            messages: [
              { role: 'system', content: prompts.system },
              { role: 'user', content: userPrompt }
            ],
            model: MODELS.LARGE, // openai/gpt-oss-120b - reasoning model
            temperature: 0.2, // Low temperature for consistent results
            maxTokens: 3000, // Reasoning models need extra tokens for thinking
            responseFormat: { type: 'json_object' }
          });
          // Log full raw response for debugging
          log.info('[NameValidation] Raw AI response:', {
            parsed: response.parsed,
            content: response.content,
            contentLength: response.content?.length
          });
          // Parse the response
          result = parseNameResponse(response.parsed, response.content);
        } catch (jsonError) {
          // If JSON mode failed, check if we have failedGeneration to parse
          if (jsonError.failedGeneration) {
            log.warn('[NameValidation] JSON mode failed, attempting to parse failed_generation:', {
              failedGeneration: jsonError.failedGeneration
            });
            result = parseNameResponse(null, jsonError.failedGeneration);
            // Fix: report the model that actually produced the failed generation -
            // the primary call above uses MODELS.LARGE (was MODELS.SMALL).
            response = { latencyMs: 0, usage: {}, model: MODELS.LARGE };
          } else {
            // Retry without JSON mode
            // NOTE(review): this retry drops to MODELS.SMALL with a smaller token
            // budget, unlike descriptionValidationTask which retries with the same
            // model - confirm the downgrade is intentional.
            log.warn('[NameValidation] JSON mode failed, retrying without JSON mode');
            response = await provider.chatCompletion({
              messages: [
                { role: 'system', content: prompts.system },
                { role: 'user', content: userPrompt }
              ],
              model: MODELS.SMALL,
              temperature: 0.2,
              maxTokens: 1500 // Reasoning models need extra tokens for thinking
              // No responseFormat - let the model respond freely
            });
            log.info('[NameValidation] Raw AI response (no JSON mode):', {
              parsed: response.parsed,
              content: response.content,
              contentLength: response.content?.length
            });
            result = parseNameResponse(response.parsed, response.content);
          }
        }
        // Fix: log key was misspelled "hassuggestion"; aligned with
        // descriptionValidationTask's "hasSuggestion" for consistent log queries.
        log.info(`[NameValidation] Validated "${product.name}" in ${response.latencyMs}ms`, {
          isValid: result.isValid,
          hasSuggestion: !!result.suggestion,
          issueCount: result.issues.length
        });
        return {
          ...result,
          latencyMs: response.latencyMs,
          usage: response.usage,
          model: response.model
        };
      } catch (error) {
        log.error('[NameValidation] Error:', error.message);
        throw error;
      }
    }
  };
}
// Public API: task id constant plus the factory used for registry registration.
module.exports = {
  TASK_ID,
  createNameValidationTask
};

View File

@@ -1,182 +0,0 @@
/**
* Sanity Check Task
*
* Reviews a batch of products for consistency and appropriateness.
* Uses Groq with the larger model for complex batch analysis.
* Loads all prompts from the database (no hardcoded prompts).
*/
const { MODELS } = require('../providers/groqProvider');
const {
loadSanityCheckPrompts,
validateRequiredPrompts
} = require('../prompts/promptLoader');
const {
buildSanityCheckUserPrompt,
parseSanityCheckResponse
} = require('../prompts/sanityCheckPrompts');
// Registry identifier for this task.
const TASK_ID = 'sanity.check';
// Maximum products to send in a single request (to avoid token limits)
const MAX_PRODUCTS_PER_REQUEST = 50;
/**
 * Create the sanity check task
 *
 * @returns {Object} Task definition
 */
function createSanityCheckTask() {
  return {
    id: TASK_ID,
    description: 'Review batch of products for consistency and appropriateness',
    /**
     * Run the sanity check
     *
     * @param {Object} payload
     * @param {Object[]} payload.products - Array of products to check
     * @param {Object} payload.provider - Groq provider instance
     * @param {Object} payload.pool - PostgreSQL pool
     * @param {Object} [payload.logger] - Logger instance
     * @returns {Promise<Object>}
     */
    async run(payload) {
      const { products, provider, pool, logger } = payload;
      const log = logger || console;
      // Validate required input: empty/non-array input is a successful no-op.
      if (!Array.isArray(products) || products.length === 0) {
        return {
          issues: [],
          summary: 'No products to check',
          skipped: true
        };
      }
      if (!provider) {
        throw new Error('Groq provider not available');
      }
      if (!pool) {
        throw new Error('Database pool not available');
      }
      try {
        // Load prompts from database
        const prompts = await loadSanityCheckPrompts(pool);
        // Validate required prompts exist
        validateRequiredPrompts(prompts, 'sanity_check');
        // If batch is small enough, process in one request.
        // NOTE(review): this path returns checkBatch's shape (latencyMs/usage/
        // model, no totalProducts/issueCount) while the chunked path below
        // returns totalProducts/issueCount instead - confirm callers tolerate
        // both result shapes.
        if (products.length <= MAX_PRODUCTS_PER_REQUEST) {
          return await checkBatch(products, prompts, provider, log);
        }
        // Otherwise, process in chunks and combine results
        log.info(`[SanityCheck] Processing ${products.length} products in chunks`);
        const allIssues = [];
        const summaries = [];
        for (let i = 0; i < products.length; i += MAX_PRODUCTS_PER_REQUEST) {
          const chunk = products.slice(i, i + MAX_PRODUCTS_PER_REQUEST);
          const chunkOffset = i; // To adjust product indices in results
          const result = await checkBatch(chunk, prompts, provider, log);
          // Adjust product indices to match original array
          const adjustedIssues = result.issues.map(issue => ({
            ...issue,
            productIndex: issue.productIndex + chunkOffset
          }));
          allIssues.push(...adjustedIssues);
          summaries.push(result.summary);
        }
        return {
          issues: allIssues,
          summary: summaries.length > 1
            ? `Reviewed ${products.length} products in ${summaries.length} batches. ${allIssues.length} issues found.`
            : summaries[0],
          totalProducts: products.length,
          issueCount: allIssues.length
        };
      } catch (error) {
        log.error('[SanityCheck] Error:', error.message);
        throw error;
      }
    }
  };
}
/**
 * Check a single batch of products.
 * Tries JSON mode first; on JSON-mode failure either salvages the provider's
 * failed_generation output or retries once without a response format.
 *
 * @param {Object[]} products - Products to check
 * @param {Object} prompts - Loaded prompts from database
 * @param {Object} provider - Groq provider
 * @param {Object} log - Logger
 * @returns {Promise<Object>} Parsed result plus latency/usage/model metadata
 */
async function checkBatch(products, prompts, provider, log) {
  const userPrompt = buildSanityCheckUserPrompt(products, prompts);
  // Shared request settings; JSON mode is layered on for the first attempt.
  const baseRequest = {
    messages: [
      { role: 'system', content: prompts.system },
      { role: 'user', content: userPrompt }
    ],
    model: MODELS.LARGE, // openai/gpt-oss-120b - needed for complex batch analysis
    temperature: 0.2, // Low temperature for consistent analysis
    maxTokens: 2000 // More tokens for batch results
  };

  let response;
  let result;
  try {
    response = await provider.chatCompletion({
      ...baseRequest,
      responseFormat: { type: 'json_object' }
    });
    result = parseSanityCheckResponse(response.parsed, response.content);
  } catch (jsonError) {
    if (jsonError.failedGeneration) {
      // Salvage the raw model output attached by the provider.
      log.warn('[SanityCheck] JSON mode failed, attempting to parse failed_generation');
      result = parseSanityCheckResponse(null, jsonError.failedGeneration);
      response = { latencyMs: 0, usage: {}, model: MODELS.LARGE };
    } else {
      // Retry once with no responseFormat - let the model respond freely.
      log.warn('[SanityCheck] JSON mode failed, retrying without JSON mode');
      response = await provider.chatCompletion(baseRequest);
      result = parseSanityCheckResponse(response.parsed, response.content);
    }
  }

  log.info(`[SanityCheck] Checked ${products.length} products in ${response.latencyMs}ms`, {
    issueCount: result.issues.length
  });

  return {
    ...result,
    latencyMs: response.latencyMs,
    usage: response.usage,
    model: response.model
  };
}
// Public API: task id, factory, and the chunking limit (exposed for tests/callers).
module.exports = {
  TASK_ID,
  createSanityCheckTask,
  MAX_PRODUCTS_PER_REQUEST
};

View File

@@ -1,79 +0,0 @@
// Purchase Order Status Codes
// Frozen: these are shared module-level lookup tables, so freezing guards
// against accidental mutation by consumers.
const PurchaseOrderStatus = Object.freeze({
  Canceled: 0,
  Created: 1,
  ElectronicallyReadySend: 10,
  Ordered: 11,
  Preordered: 12,
  ElectronicallySent: 13,
  ReceivingStarted: 15,
  Done: 50
});

// Receiving Status Codes
const ReceivingStatus = Object.freeze({
  Canceled: 0,
  Created: 1,
  PartialReceived: 30,
  FullReceived: 40,
  Paid: 50
});

// Status Code Display Names
const PurchaseOrderStatusLabels = Object.freeze({
  [PurchaseOrderStatus.Canceled]: 'Canceled',
  [PurchaseOrderStatus.Created]: 'Created',
  [PurchaseOrderStatus.ElectronicallyReadySend]: 'Ready to Send',
  [PurchaseOrderStatus.Ordered]: 'Ordered',
  [PurchaseOrderStatus.Preordered]: 'Preordered',
  [PurchaseOrderStatus.ElectronicallySent]: 'Sent',
  [PurchaseOrderStatus.ReceivingStarted]: 'Receiving Started',
  [PurchaseOrderStatus.Done]: 'Done'
});

const ReceivingStatusLabels = Object.freeze({
  [ReceivingStatus.Canceled]: 'Canceled',
  [ReceivingStatus.Created]: 'Created',
  [ReceivingStatus.PartialReceived]: 'Partially Received',
  [ReceivingStatus.FullReceived]: 'Fully Received',
  [ReceivingStatus.Paid]: 'Paid'
});

// Helper functions

/**
 * Human-readable label for a purchase order status code.
 * @param {number} status
 * @returns {string} Label, or 'Unknown' for unrecognized codes
 */
function getPurchaseOrderStatusLabel(status) {
  return PurchaseOrderStatusLabels[status] || 'Unknown';
}

/**
 * Human-readable label for a receiving status code.
 * @param {number} status
 * @returns {string} Label, or 'Unknown' for unrecognized codes
 */
function getReceivingStatusLabel(status) {
  return ReceivingStatusLabels[status] || 'Unknown';
}

// Status checks

// NOTE(review): treats PartialReceived (30) and above as "complete", i.e. any
// receiving activity qualifies - confirm partial receipts are meant to count.
function isReceivingComplete(status) {
  return status >= ReceivingStatus.PartialReceived;
}

function isPurchaseOrderComplete(status) {
  return status === PurchaseOrderStatus.Done;
}

function isPurchaseOrderCanceled(status) {
  return status === PurchaseOrderStatus.Canceled;
}

function isReceivingCanceled(status) {
  return status === ReceivingStatus.Canceled;
}
// Public API: status enums, display-label maps, and status predicate helpers.
module.exports = {
  PurchaseOrderStatus,
  ReceivingStatus,
  PurchaseOrderStatusLabels,
  ReceivingStatusLabels,
  getPurchaseOrderStatusLabel,
  getReceivingStatusLabel,
  isReceivingComplete,
  isPurchaseOrderComplete,
  isPurchaseOrderCanceled,
  isReceivingCanceled
};

View File

@@ -1,45 +0,0 @@
/**
 * Parses a query parameter value based on its expected type.
 * Throws error for invalid formats. Adjust date handling as needed.
 *
 * @param {*} value - Raw query-string value; null/undefined/'' yield null
 * @param {string} type - One of 'number' | 'integer' | 'boolean' | 'date' | 'string'
 * @returns {number|boolean|string|null} Parsed value, or null for empty input
 * @throws {Error} When the value cannot be parsed as the requested type
 */
function parseValue(value, type) {
  // Treat missing/empty values uniformly as "no value" (note: 0 and false
  // are NOT empty and fall through to parsing).
  if (value === null || value === undefined || value === '') return null;

  switch (type) {
    case 'number': {
      const num = Number.parseFloat(value);
      if (Number.isNaN(num)) {
        throw new Error(`Invalid number format: "${value}"`);
      }
      return num;
    }
    case 'integer': { // Specific type for integer IDs etc.
      const int = Number.parseInt(value, 10);
      if (Number.isNaN(int)) {
        throw new Error(`Invalid integer format: "${value}"`);
      }
      return int;
    }
    case 'boolean': {
      const normalized = String(value).toLowerCase();
      if (normalized === 'true') return true;
      if (normalized === 'false') return false;
      throw new Error(`Invalid boolean format: "${value}"`);
    }
    case 'date':
      // Basic ISO date format validation (YYYY-MM-DD). Deliberately a warning,
      // not an error, to preserve the existing lenient behavior.
      if (!/^\d{4}-\d{2}-\d{2}$/.test(String(value))) {
        console.warn(`Potentially invalid date format passed: "${value}"`);
        // Optionally throw an error or return null depending on strictness
        // throw new Error(`Invalid date format (YYYY-MM-DD expected): "${value}"`);
      }
      return String(value); // Send as string, let DB handle casting/comparison
    case 'string':
    default:
      return String(value);
  }
}
module.exports = { parseValue };

View File

@@ -1,63 +0,0 @@
const fs = require('fs');
const { parse } = require('csv-parse');
const { v4: uuidv4 } = require('uuid');
/**
 * Import products from a CSV file, inserting each product (and a zeroed
 * inventory-level row) inside a single database transaction. The whole
 * import rolls back if any insert fails.
 *
 * @param {string} filePath - Path to a CSV with sku/name/description/category columns
 * @param {Object} pool - mysql2-style pool exposing getConnection()
 * @returns {Promise<{imported: number}>} Count of products inserted
 */
async function importProductsFromCSV(filePath, pool) {
  return new Promise((resolve, reject) => {
    const parsedRows = [];

    // Runs after the stream ends: writes all parsed rows transactionally.
    const persistRows = async () => {
      const connection = await pool.getConnection();
      try {
        await connection.beginTransaction();
        for (const row of parsedRows) {
          await connection.query(
            'INSERT INTO products (id, sku, name, description, category) VALUES (?, ?, ?, ?, ?)',
            [row.id, row.sku, row.name, row.description, row.category]
          );
          // Initialize inventory level for the product
          await connection.query(
            'INSERT INTO inventory_levels (id, product_id, quantity) VALUES (?, ?, 0)',
            [uuidv4(), row.id]
          );
        }
        await connection.commit();
        return { imported: parsedRows.length };
      } catch (error) {
        await connection.rollback();
        throw error;
      } finally {
        connection.release();
      }
    };

    fs.createReadStream(filePath)
      .pipe(parse({
        columns: true,
        skip_empty_lines: true
      }))
      .on('data', (row) => {
        parsedRows.push({
          id: uuidv4(),
          sku: row.sku,
          name: row.name,
          description: row.description || null,
          category: row.category || null
        });
      })
      .on('end', () => {
        persistRows().then(resolve, reject);
      })
      .on('error', reject);
  });
}
// Public API: CSV product importer.
module.exports = {
  importProductsFromCSV
};

View File

@@ -1,21 +0,0 @@
const { Pool } = require('pg');
// Module-level singleton pg pool; undefined until initPool() is called.
let pool;

/**
 * Create (or replace) the shared pg connection pool.
 * @param {Object} config - pg Pool configuration
 * @returns {Pool} The newly created pool
 */
function initPool(config) {
  pool = new Pool(config);
  return pool;
}

/**
 * Check out a client from the shared pool.
 * @returns {Promise<Object>} A pg client (caller must release it)
 * @throws {Error} If initPool() was never called
 */
async function getConnection() {
  if (pool) {
    return pool.connect();
  }
  throw new Error('Database pool not initialized');
}
module.exports = {
initPool,
getConnection,
getPool: () => pool
};

View File

@@ -1,239 +0,0 @@
const { Client } = require('ssh2');
const mysql = require('mysql2/promise');
const fs = require('fs');
// Connection pooling and cache configuration
// Module-level singleton: one SSH tunnel + one MySQL connection shared by all
// callers, refreshed after inactivity, plus a TTL cache for query results.
const connectionCache = {
  ssh: null,               // active SSH client, or null when disconnected
  dbConnection: null,      // active MySQL connection, or null
  lastUsed: 0,             // epoch ms of last use; drives expiration below
  isConnecting: false,     // true while a connection attempt is in flight
  connectionPromise: null, // in-flight attempt, shared by concurrent callers
  // Cache expiration time in milliseconds (5 minutes)
  expirationTime: 5 * 60 * 1000,
  // Cache for query results (key: query string, value: {data, timestamp})
  queryCache: new Map(),
  // Cache duration for different query types in milliseconds
  cacheDuration: {
    'field-options': 30 * 60 * 1000, // 30 minutes for field options
    'product-lines': 10 * 60 * 1000, // 10 minutes for product lines
    'sublines': 10 * 60 * 1000, // 10 minutes for sublines
    'taxonomy': 30 * 60 * 1000, // 30 minutes for taxonomy data
    'default': 60 * 1000 // 1 minute default
  }
};
/**
 * Get a database connection with connection pooling
 *
 * Reuses the cached SSH tunnel + MySQL connection when it was used within
 * `expirationTime`; otherwise tears the old pair down and dials a new one.
 * Concurrent callers during a dial await the same in-flight promise.
 *
 * @returns {Promise<{ssh: object, connection: object}>} The SSH and database connection
 */
async function getDbConnection() {
  const now = Date.now();
  // Check if we need to refresh the connection due to inactivity
  const needsRefresh = !connectionCache.ssh ||
    !connectionCache.dbConnection ||
    (now - connectionCache.lastUsed > connectionCache.expirationTime);
  // If connection is still valid, update last used time and return existing connection
  if (!needsRefresh) {
    connectionCache.lastUsed = now;
    return {
      ssh: connectionCache.ssh,
      connection: connectionCache.dbConnection
    };
  }
  // If another request is already establishing a connection, wait for that promise
  if (connectionCache.isConnecting && connectionCache.connectionPromise) {
    try {
      await connectionCache.connectionPromise;
      return {
        ssh: connectionCache.ssh,
        connection: connectionCache.dbConnection
      };
    } catch (error) {
      // If that connection attempt failed, we'll try again below
      console.error('Error waiting for existing connection:', error);
    }
  }
  // Close existing connections if they exist
  // (best-effort: a failure to close must not block reconnecting)
  if (connectionCache.dbConnection) {
    try {
      await connectionCache.dbConnection.end();
    } catch (error) {
      console.error('Error closing existing database connection:', error);
    }
  }
  if (connectionCache.ssh) {
    try {
      connectionCache.ssh.end();
    } catch (error) {
      console.error('Error closing existing SSH connection:', error);
    }
  }
  // Mark that we're establishing a new connection
  connectionCache.isConnecting = true;
  // Create a new promise for this connection attempt; it is stored on the
  // cache so concurrent callers can share the same dial instead of racing.
  connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
    const { ssh, stream, dbConfig } = tunnel;
    // Route MySQL traffic through the forwarded SSH stream.
    return mysql.createConnection({
      ...dbConfig,
      stream
    }).then(connection => {
      // Store the new connections
      connectionCache.ssh = ssh;
      connectionCache.dbConnection = connection;
      connectionCache.lastUsed = Date.now();
      connectionCache.isConnecting = false;
      return {
        ssh,
        connection
      };
    });
  }).catch(error => {
    connectionCache.isConnecting = false;
    throw error;
  });
  // Wait for the connection to be established
  return connectionCache.connectionPromise;
}
/**
 * Get cached query results or execute query if not cached.
 * Each query type has its own TTL (see connectionCache.cacheDuration).
 *
 * @param {string} cacheKey - Unique key to identify the query
 * @param {string} queryType - Type of query (field-options, product-lines, etc.)
 * @param {Function} queryFn - Function to execute if cache miss
 * @returns {Promise<any>} The query result
 */
async function getCachedQuery(cacheKey, queryType, queryFn) {
  // TTL is selected per query type, falling back to the default.
  const ttl = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;
  const entry = connectionCache.queryCache.get(cacheKey);
  const now = Date.now();
  const isFresh = Boolean(entry) && (now - entry.timestamp < ttl);

  if (isFresh) {
    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
    return entry.data;
  }

  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
  const data = await queryFn();
  connectionCache.queryCache.set(cacheKey, { data, timestamp: now });
  return data;
}
/**
 * Setup SSH tunnel to production database
 * @private - Should only be used by getDbConnection
 * @returns {Promise<{ssh: object, stream: object, dbConfig: object}>} The SSH
 *   client, the forwarded stream for MySQL traffic, and the DB config to use
 * @throws Rejects on SSH connection or port-forwarding failure (also throws
 *   synchronously-into-rejection if the private key file cannot be read)
 */
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    // Key auth is optional; absent path means other auth methods apply.
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };
  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z'
  };
  return new Promise((resolve, reject) => {
    const ssh = new Client();
    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });
    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          // Fix: previously fell through to resolve() after reject(err),
          // attempting to settle the promise twice (with an undefined stream).
          if (err) {
            reject(err);
            return;
          }
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
/**
 * Clear cached query results
 * @param {string} [cacheKey] - Specific cache key to clear (clears all if not provided)
 */
function clearQueryCache(cacheKey) {
  if (!cacheKey) {
    connectionCache.queryCache.clear();
    console.log('Cleared all query cache');
    return;
  }
  connectionCache.queryCache.delete(cacheKey);
  console.log(`Cleared cache for key: ${cacheKey}`);
}
/**
 * Force close all active connections.
 * Useful for server shutdown or manual connection reset. Close failures are
 * logged but do not prevent the cache state from being reset.
 */
async function closeAllConnections() {
  const { dbConnection, ssh } = connectionCache;

  if (dbConnection) {
    try {
      await dbConnection.end();
      console.log('Closed database connection');
    } catch (error) {
      console.error('Error closing database connection:', error);
    }
    connectionCache.dbConnection = null;
  }

  if (ssh) {
    try {
      ssh.end();
      console.log('Closed SSH connection');
    } catch (error) {
      console.error('Error closing SSH connection:', error);
    }
    connectionCache.ssh = null;
  }

  // Reset bookkeeping so the next getDbConnection() dials fresh.
  connectionCache.lastUsed = 0;
  connectionCache.isConnecting = false;
  connectionCache.connectionPromise = null;
}
// Public API: pooled tunnel access, query-result caching, and teardown helpers.
module.exports = {
  getDbConnection,
  getCachedQuery,
  clearQueryCache,
  closeAllConnections
};