Update for project move on server, add ability to update existing POs, add spec lookup page, enhance copy down functionality.
This commit is contained in:
@@ -25,7 +25,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
// Load klaviyo .env for API key
|
||||
dotenv.config({ path: path.resolve(__dirname, '../.env') });
|
||||
// Also load the main inventory-server .env for DB credentials
|
||||
const mainEnvPath = '/var/www/html/inventory/.env';
|
||||
const mainEnvPath = '/var/www/inventory/.env';
|
||||
if (fs.existsSync(mainEnvPath)) {
|
||||
dotenv.config({ path: mainEnvPath });
|
||||
}
|
||||
|
||||
@@ -32,7 +32,7 @@ const envPaths = [
|
||||
path.resolve(__dirname, '../..', '.env'), // Two levels up (inventory/.env)
|
||||
path.resolve(__dirname, '..', '.env'), // One level up (inventory-server/.env)
|
||||
path.resolve(__dirname, '.env'), // Same directory
|
||||
'/var/www/html/inventory/.env' // Server absolute path
|
||||
'/var/www/inventory/.env' // Server absolute path
|
||||
];
|
||||
|
||||
let envLoaded = false;
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
*
|
||||
* Environment:
|
||||
* Reads DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_PORT from
|
||||
* /var/www/html/inventory/.env (or current process env).
|
||||
* /var/www/inventory/.env (or current process env).
|
||||
*/
|
||||
|
||||
const { spawn } = require('child_process');
|
||||
@@ -20,7 +20,7 @@ const fs = require('fs');
|
||||
|
||||
// Load .env file if it exists (production path)
|
||||
const envPaths = [
|
||||
'/var/www/html/inventory/.env',
|
||||
'/var/www/inventory/.env',
|
||||
path.join(__dirname, '../../.env'),
|
||||
];
|
||||
|
||||
|
||||
@@ -11,8 +11,8 @@ const axios = require('axios');
|
||||
const net = require('net');
|
||||
|
||||
// Create uploads directory if it doesn't exist
|
||||
const uploadsDir = path.join('/var/www/html/inventory/uploads/products');
|
||||
const reusableUploadsDir = path.join('/var/www/html/inventory/uploads/reusable');
|
||||
const uploadsDir = path.join('/var/www/inventory/uploads/products');
|
||||
const reusableUploadsDir = path.join('/var/www/inventory/uploads/reusable');
|
||||
fs.mkdirSync(uploadsDir, { recursive: true });
|
||||
fs.mkdirSync(reusableUploadsDir, { recursive: true });
|
||||
|
||||
@@ -513,10 +513,12 @@ const storage = multer.diskStorage({
|
||||
}
|
||||
});
|
||||
|
||||
const upload = multer({
|
||||
const MAX_UPLOAD_BYTES = 25 * 1024 * 1024;
|
||||
|
||||
const upload = multer({
|
||||
storage: storage,
|
||||
limits: {
|
||||
fileSize: 15 * 1024 * 1024, // Allow bigger uploads; processing will reduce to 5MB
|
||||
fileSize: MAX_UPLOAD_BYTES,
|
||||
},
|
||||
fileFilter: function (req, file, cb) {
|
||||
// Accept only image files
|
||||
|
||||
@@ -5,7 +5,7 @@ const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
// Create reusable uploads directory if it doesn't exist
|
||||
const uploadsDir = path.join('/var/www/html/inventory/uploads/reusable');
|
||||
const uploadsDir = path.join('/var/www/inventory/uploads/reusable');
|
||||
fs.mkdirSync(uploadsDir, { recursive: true });
|
||||
|
||||
// Configure multer for file uploads
|
||||
|
||||
@@ -0,0 +1,270 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
const MAX_MATCHES = 500;
|
||||
const DESCRIPTION_SAMPLE_LIMIT = 8;
|
||||
|
||||
// GET /api/spec-lookup?company=...&term=...
// Looks up every product whose brand matches `company` and/or whose title
// matches `term` (case-insensitive substring matches), then returns the
// matching rows plus numeric/categorical/category/description aggregates
// computed over them.
router.get('/', async (req, res) => {
  // Only string query values are honored; anything else is treated as unset.
  const asTrimmed = (value) => (typeof value === 'string' ? value.trim() : '');
  const company = asTrimmed(req.query.company);
  const term = asTrimmed(req.query.term);

  // Require at least one filter so we never scan the whole table.
  if (!company && !term) {
    return res.status(400).json({ error: 'company or term is required' });
  }

  try {
    const pool = req.app.locals.pool;

    // Assemble the WHERE clause incrementally; placeholder indices track
    // sqlParams.length so conditions and parameters stay in sync.
    const whereClauses = [];
    const sqlParams = [];
    if (company) {
      sqlParams.push(`%${company}%`);
      whereClauses.push(`brand ILIKE $${sqlParams.length}`);
    }
    if (term) {
      sqlParams.push(`%${term}%`);
      whereClauses.push(`title ILIKE $${sqlParams.length}`);
    }

    sqlParams.push(MAX_MATCHES);
    const limitParam = `$${sqlParams.length}`;

    const sql = `
      SELECT
        pid::TEXT AS pid,
        title, sku, brand, vendor, artist,
        country_of_origin, harmonized_tariff_code,
        description, categories,
        cost_price, regular_price,
        moq, weight, length, width, height,
        created_at
      FROM products
      WHERE ${whereClauses.join(' AND ')}
      ORDER BY created_at DESC NULLS LAST
      LIMIT ${limitParam}
    `;

    const { rows } = await pool.query(sql, sqlParams);

    // products.categories is a comma-separated cat_id string; collect every
    // numeric token so all names can be resolved with a single query below.
    const catIds = new Set();
    for (const row of rows) {
      if (!row.categories) continue;
      for (const token of String(row.categories).split(',')) {
        const candidate = token.trim();
        if (candidate && /^\d+$/.test(candidate)) catIds.add(candidate);
      }
    }

    // Map cat_id → { name, type }. Types 10-13 are Section/Category/
    // Subcategory/Sub-Subcategory; 20-21 are Theme/Subtheme.
    const catIdToInfo = new Map();
    if (catIds.size > 0) {
      const { rows: catRows } = await pool.query(
        `SELECT cat_id::TEXT AS cat_id, name, type FROM categories WHERE cat_id = ANY($1::bigint[])`,
        [Array.from(catIds)],
      );
      for (const cat of catRows) {
        catIdToInfo.set(cat.cat_id, { name: cat.name, type: Number(cat.type) });
      }
    }

    // Normalize each row for the response: pid back to a number, numeric
    // columns coerced via toNumberOrNull (invalid values become null).
    const products = rows.map((row) => ({
      pid: Number(row.pid),
      title: row.title,
      sku: row.sku,
      brand: row.brand,
      vendor: row.vendor,
      artist: row.artist,
      country_of_origin: row.country_of_origin,
      harmonized_tariff_code: row.harmonized_tariff_code,
      description: row.description,
      categories: row.categories,
      cost_price: toNumberOrNull(row.cost_price),
      regular_price: toNumberOrNull(row.regular_price),
      moq: toNumberOrNull(row.moq),
      weight: toNumberOrNull(row.weight),
      length: toNumberOrNull(row.length),
      width: toNumberOrNull(row.width),
      height: toNumberOrNull(row.height),
      created_at: row.created_at,
    }));

    res.json({
      company,
      term,
      total: products.length,
      // A full page means the LIMIT probably cut off further matches.
      truncated: products.length === MAX_MATCHES,
      products,
      aggregates: {
        numeric: {
          cost_price: numericAggregate(products, 'cost_price'),
          regular_price: numericAggregate(products, 'regular_price'),
          moq: numericAggregate(products, 'moq'),
          weight: numericAggregate(products, 'weight'),
          length: numericAggregate(products, 'length'),
          width: numericAggregate(products, 'width'),
          height: numericAggregate(products, 'height'),
        },
        categorical: {
          artist: categoricalAggregate(products, 'artist'),
          country_of_origin: categoricalAggregate(products, 'country_of_origin'),
          harmonized_tariff_code: categoricalAggregate(products, 'harmonized_tariff_code'),
        },
        categories: groupedAggregate(products, catIdToInfo, new Set([10, 11, 12, 13])),
        themes: groupedAggregate(products, catIdToInfo, new Set([20, 21])),
        description: descriptionAggregate(products),
      },
    });
  } catch (error) {
    console.error('Error in spec-lookup:', error);
    res.status(500).json({ error: 'Failed to compute spec lookup' });
  }
});
|
||||
|
||||
// Coerce a DB value (string numeric, number, etc.) to a finite number.
// Returns null when the value is absent or does not parse to a finite number.
function toNumberOrNull(v) {
  if (v == null) return null; // matches both null and undefined
  const parsed = Number(v);
  if (!Number.isFinite(parsed)) return null;
  return parsed;
}
|
||||
|
||||
// Aggregate a numeric field. Null/0 are treated as "unset" (0 is the
// codebase's no-value sentinel), so only finite positive numbers count.
// `products` is assumed to arrive most-recent-first (created_at DESC); the
// head of the array doubles as the recency window for trend detection.
function numericAggregate(products, field) {
  // Collect usable values in product order so the head of the array still
  // corresponds to the most recently created rows.
  const values = products
    .map((p) => p[field])
    .filter((v) => typeof v === 'number' && Number.isFinite(v) && v > 0);

  if (values.length === 0) {
    return { count: 0, sample_size: products.length, distribution: [] };
  }

  // Order statistics are computed on a sorted copy; `values` keeps recency order.
  const ascending = [...values].sort((a, b) => a - b);
  const total = values.reduce((acc, v) => acc + v, 0);
  const avg = total / values.length;

  const half = Math.floor(ascending.length / 2);
  let median;
  if (ascending.length % 2 === 0) {
    median = (ascending[half - 1] + ascending[half]) / 2;
  } else {
    median = ascending[half];
  }

  // Population variance/stddev (divide by N, not N-1).
  const squaredError = values.reduce((acc, v) => acc + (v - avg) ** 2, 0);
  const stddev = Math.sqrt(squaredError / values.length);

  // Histogram over rounded keys, most frequent first (ties: smaller value first).
  const distribution = [...tallyRounded(values).entries()]
    .map(([value, count]) => ({ value, count }))
    .sort((a, b) => b.count - a.count || a.value - b.value);

  const mode = distribution[0]?.value ?? null;
  const mode_count = distribution[0]?.count ?? 0;

  // Trend detection: scan only the most-recent N values. N adapts to sample
  // size so it can never cover more than ~20% of the data on small samples.
  const recentN = Math.min(20, Math.max(5, Math.floor(values.length / 4)));
  const recentValues = values.slice(0, recentN);
  let recent_mode = null;
  let recent_mode_count = 0;
  let trending = false;
  if (recentValues.length >= 3) {
    const ranked = [...tallyRounded(recentValues).entries()]
      .sort((a, b) => b[1] - a[1] || a[0] - b[0]);
    recent_mode = ranked[0][0];
    recent_mode_count = ranked[0][1];
    // Trend = recent mode differs from the overall mode AND dominates the
    // window (>= 60%) AND has minimum absolute support (>= 3 hits).
    const dominatesWindow = recent_mode_count >= Math.ceil(recentValues.length * 0.6);
    const hasMinSupport = recent_mode_count >= 3;
    trending = dominatesWindow && hasMinSupport && recent_mode !== mode;
  }

  return {
    count: values.length,
    sample_size: products.length,
    avg,
    median,
    min: ascending[0],
    max: ascending[ascending.length - 1],
    stddev,
    mode,
    mode_count,
    recent_mode,
    recent_mode_count,
    recent_window: recentValues.length,
    trending,
    distribution,
  };
}

// Count occurrences of each value after rounding, so FP noise doesn't
// fragment the histogram into near-identical buckets.
function tallyRounded(values) {
  const counts = new Map();
  for (const v of values) {
    const key = roundForKey(v);
    counts.set(key, (counts.get(key) || 0) + 1);
  }
  return counts;
}

// Round to 4 decimal places to absorb JS floating-point noise.
function roundForKey(v) {
  return Math.round(v * 10000) / 10000;
}
|
||||
|
||||
// Count non-empty string occurrences of `field` across products, returning
// [{ value, count }] ordered most-common-first (ties broken alphabetically
// via localeCompare). Null/undefined and whitespace-only values are skipped.
function categoricalAggregate(products, field) {
  const tally = new Map();
  for (const product of products) {
    const raw = product[field];
    if (raw == null) continue;
    const normalized = String(raw).trim();
    if (normalized === '') continue;
    tally.set(normalized, (tally.get(normalized) || 0) + 1);
  }
  const entries = [...tally.entries()].map(([value, count]) => ({ value, count }));
  entries.sort((a, b) => b.count - a.count || a.value.localeCompare(b.value));
  return entries;
}
|
||||
|
||||
// Aggregate cat_id token counts from products.categories (a comma-separated
// cat_id string), keeping only entries whose resolved category type is in
// `acceptedTypes`. Returns [{ value: name, count }] ordered most-common-first
// (ties broken alphabetically).
function groupedAggregate(products, catIdToInfo, acceptedTypes) {
  const tally = new Map();
  for (const product of products) {
    if (!product.categories) continue;
    String(product.categories)
      .split(',')
      .map((token) => token.trim())
      .filter(Boolean)
      .forEach((token) => {
        const info = catIdToInfo.get(token);
        // Unknown ids and non-accepted types are silently skipped.
        if (info && acceptedTypes.has(info.type)) {
          tally.set(info.name, (tally.get(info.name) || 0) + 1);
        }
      });
  }
  return [...tally.entries()]
    .map(([value, count]) => ({ value, count }))
    .sort((a, b) => b.count - a.count || a.value.localeCompare(b.value));
}
|
||||
|
||||
// Summarize product descriptions two ways: exact-duplicate counts (to spot
// copy-pasted boilerplate) and a handful of recent unique samples (products
// are already ordered created_at DESC, so iteration order is recency order).
function descriptionAggregate(products) {
  const tally = new Map();
  for (const product of products) {
    if (!product.description) continue;
    const text = String(product.description).trim();
    if (text === '') continue;
    tally.set(text, (tally.get(text) || 0) + 1);
  }

  // Only descriptions appearing more than once are reported as duplicates.
  const duplicates = [...tally.entries()]
    .filter(([, count]) => count > 1)
    .map(([value, count]) => ({ value, count }))
    .sort((a, b) => b.count - a.count);

  // Most-recent unique descriptions, capped at DESCRIPTION_SAMPLE_LIMIT.
  const seen = new Set();
  const samples = [];
  for (const product of products) {
    const text = (product.description || '').trim();
    if (!text || seen.has(text)) continue;
    seen.add(text);
    samples.push({ value: text, title: product.title, pid: product.pid, sku: product.sku });
    if (samples.length >= DESCRIPTION_SAMPLE_LIMIT) break;
  }

  return { duplicates, samples };
}
|
||||
|
||||
module.exports = router;
|
||||
@@ -23,6 +23,7 @@ const categoriesAggregateRouter = require('./routes/categoriesAggregate');
|
||||
const vendorsAggregateRouter = require('./routes/vendorsAggregate');
|
||||
const brandsAggregateRouter = require('./routes/brandsAggregate');
|
||||
const htsLookupRouter = require('./routes/hts-lookup');
|
||||
const specLookupRouter = require('./routes/spec-lookup');
|
||||
const importSessionsRouter = require('./routes/import-sessions');
|
||||
const importAuditLogRouter = require('./routes/import-audit-log');
|
||||
const productEditorAuditLogRouter = require('./routes/product-editor-audit-log');
|
||||
@@ -31,7 +32,7 @@ const linesAggregateRouter = require('./routes/linesAggregate');
|
||||
const repeatOrdersRouter = require('./routes/repeat-orders');
|
||||
|
||||
// Get the absolute path to the .env file
|
||||
const envPath = '/var/www/html/inventory/.env';
|
||||
const envPath = '/var/www/inventory/.env';
|
||||
console.log('Looking for .env file at:', envPath);
|
||||
console.log('.env file exists:', fs.existsSync(envPath));
|
||||
|
||||
@@ -136,6 +137,7 @@ async function startServer() {
|
||||
app.use('/api/ai-prompts', aiPromptsRouter);
|
||||
app.use('/api/reusable-images', reusableImagesRouter);
|
||||
app.use('/api/hts-lookup', htsLookupRouter);
|
||||
app.use('/api/spec-lookup', specLookupRouter);
|
||||
app.use('/api/import-sessions', importSessionsRouter);
|
||||
app.use('/api/import-audit-log', importAuditLogRouter);
|
||||
app.use('/api/product-editor-audit-log', productEditorAuditLogRouter);
|
||||
|
||||
Reference in New Issue
Block a user