Add audit log for product import, add tiff image support, add new/preorder filters on product editor, fix sorting in product editor
This commit is contained in:
53
inventory-server/migrations/003_create_import_audit_log.sql
Normal file
53
inventory-server/migrations/003_create_import_audit_log.sql
Normal file
@@ -0,0 +1,53 @@
|
||||
-- Migration: Create import_audit_log table
-- Permanent audit trail of all product import submissions sent to the API
-- Run this against your PostgreSQL database

CREATE TABLE IF NOT EXISTS import_audit_log (
    id SERIAL PRIMARY KEY,

    -- Who initiated the import
    user_id INTEGER NOT NULL,
    username VARCHAR(255),

    -- What was submitted
    product_count INTEGER NOT NULL,
    request_payload JSONB NOT NULL, -- The exact JSON array of products sent to the API
    -- Constrained to the two documented values so a typo'd environment
    -- cannot silently enter the audit trail.
    environment VARCHAR(10) NOT NULL CHECK (environment IN ('dev', 'prod')),
    target_endpoint VARCHAR(255), -- The API URL that was called
    use_test_data_source BOOLEAN DEFAULT FALSE,

    -- What came back
    success BOOLEAN NOT NULL,
    response_payload JSONB, -- Full API response
    error_message TEXT, -- Extracted error message on failure
    created_count INTEGER DEFAULT 0, -- Number of products successfully created
    errored_count INTEGER DEFAULT 0, -- Number of products that errored

    -- Metadata
    session_id INTEGER, -- Optional link to the import_session used (if any)
    duration_ms INTEGER, -- How long the API call took
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Index for filtering by success/failure
CREATE INDEX IF NOT EXISTS idx_import_audit_log_success
ON import_audit_log (success);

-- Index for time-based queries
CREATE INDEX IF NOT EXISTS idx_import_audit_log_created_at
ON import_audit_log (created_at DESC);

-- Composite index for user + time queries. Its leading column also
-- serves plain user_id lookups, so no separate single-column
-- idx_import_audit_log_user_id index is needed (it would only add
-- write overhead).
CREATE INDEX IF NOT EXISTS idx_import_audit_log_user_created
ON import_audit_log (user_id, created_at DESC);

COMMENT ON TABLE import_audit_log IS 'Permanent audit log of all product import API submissions';
COMMENT ON COLUMN import_audit_log.request_payload IS 'Exact JSON products array sent to the external API';
COMMENT ON COLUMN import_audit_log.response_payload IS 'Full response received from the external API';
COMMENT ON COLUMN import_audit_log.environment IS 'dev or prod - which API endpoint was targeted';
COMMENT ON COLUMN import_audit_log.session_id IS 'Optional reference to import_sessions.id if session was active';
COMMENT ON COLUMN import_audit_log.duration_ms IS 'Round-trip time of the API call in milliseconds';
|
||||
193
inventory-server/src/routes/import-audit-log.js
Normal file
193
inventory-server/src/routes/import-audit-log.js
Normal file
@@ -0,0 +1,193 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// Create a new audit log entry.
// POST / — body mirrors the import_audit_log columns. user_id,
// request_payload, and a boolean success flag are mandatory; every
// other field falls back to NULL (or 0 / false / 'prod').
router.post('/', async (req, res) => {
  try {
    const {
      user_id,
      username,
      product_count,
      request_payload,
      environment,
      target_endpoint,
      use_test_data_source,
      success,
      response_payload,
      error_message,
      created_count,
      errored_count,
      session_id,
      duration_ms,
    } = req.body;

    // Required-field guards, checked in a fixed order so the first
    // missing field determines which error message is returned.
    if (!user_id) {
      return res.status(400).json({ error: 'user_id is required' });
    }
    if (!request_payload) {
      return res.status(400).json({ error: 'request_payload is required' });
    }
    if (typeof success !== 'boolean') {
      return res.status(400).json({ error: 'success (boolean) is required' });
    }

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Parameter values in column order. JSON bodies are stringified so
    // the driver sends them as text for the JSONB columns.
    const values = [
      user_id,
      username || null,
      product_count || 0,
      JSON.stringify(request_payload),
      environment || 'prod',
      target_endpoint || null,
      use_test_data_source || false,
      success,
      response_payload ? JSON.stringify(response_payload) : null,
      error_message || null,
      created_count || 0,
      errored_count || 0,
      session_id || null,
      duration_ms || null,
    ];

    const insertResult = await pool.query(`
      INSERT INTO import_audit_log (
        user_id,
        username,
        product_count,
        request_payload,
        environment,
        target_endpoint,
        use_test_data_source,
        success,
        response_payload,
        error_message,
        created_count,
        errored_count,
        session_id,
        duration_ms
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
      RETURNING id, created_at
    `, values);

    res.status(201).json(insertResult.rows[0]);
  } catch (error) {
    console.error('Error creating import audit log:', error);
    res.status(500).json({
      error: 'Failed to create import audit log',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// List audit log entries (with pagination).
// GET /?user_id=&success=true|false&limit=&offset=
// Returns { total, entries }; the large JSONB payload columns are
// deliberately excluded from the list view — fetch a single entry by
// id for the full payloads.
router.get('/', async (req, res) => {
  try {
    const { user_id, limit = 50, offset = 0, success: successFilter } = req.query;

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Sanitize pagination inputs. Previously a non-numeric limit/offset
    // became NaN and a negative one reached Postgres unchanged — both
    // surfaced as a 500. Fall back to the defaults on bad input and
    // clamp limit to a sane ceiling.
    const parsedLimit = Number.parseInt(limit, 10);
    const parsedOffset = Number.parseInt(offset, 10);
    const safeLimit = Number.isFinite(parsedLimit)
      ? Math.min(Math.max(parsedLimit, 0), 500)
      : 50;
    const safeOffset = Number.isFinite(parsedOffset)
      ? Math.max(parsedOffset, 0)
      : 0;

    const conditions = [];
    const params = [];
    let paramIndex = 1;

    if (user_id) {
      conditions.push(`user_id = $${paramIndex++}`);
      params.push(user_id);
    }

    if (successFilter !== undefined) {
      conditions.push(`success = $${paramIndex++}`);
      params.push(successFilter === 'true');
    }

    const whereClause = conditions.length > 0
      ? `WHERE ${conditions.join(' AND ')}`
      : '';

    // Get total count (filters only — no pagination params)
    const countResult = await pool.query(
      `SELECT COUNT(*) FROM import_audit_log ${whereClause}`,
      params
    );

    // Get paginated results (exclude large payload columns in list view)
    const dataParams = [...params, safeLimit, safeOffset];
    const result = await pool.query(`
      SELECT
        id,
        user_id,
        username,
        product_count,
        environment,
        target_endpoint,
        use_test_data_source,
        success,
        error_message,
        created_count,
        errored_count,
        session_id,
        duration_ms,
        created_at
      FROM import_audit_log
      ${whereClause}
      ORDER BY created_at DESC
      LIMIT $${paramIndex++} OFFSET $${paramIndex++}
    `, dataParams);

    res.json({
      total: parseInt(countResult.rows[0].count, 10),
      entries: result.rows,
    });
  } catch (error) {
    console.error('Error fetching import audit log:', error);
    res.status(500).json({
      error: 'Failed to fetch import audit log',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Get a single audit log entry (with full payloads).
// GET /:id — id must be a positive integer; returns the full row
// including request_payload and response_payload.
router.get('/:id', async (req, res) => {
  try {
    const { id } = req.params;

    // Reject non-numeric ids up front: passing e.g. "abc" to the
    // integer $1 parameter previously surfaced as a 500 from Postgres
    // instead of a client error.
    if (!/^\d+$/.test(id)) {
      return res.status(400).json({ error: 'id must be a positive integer' });
    }

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const result = await pool.query(
      'SELECT * FROM import_audit_log WHERE id = $1',
      [id]
    );

    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Audit log entry not found' });
    }

    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error fetching import audit log entry:', error);
    res.status(500).json({
      error: 'Failed to fetch audit log entry',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Catch-all error handler for this router. Express recognizes an
// error-handling middleware by its four-parameter signature, so the
// final parameter must stay in the list even though it is unused.
router.use((err, _req, res, _next) => {
  console.error('Import audit log route error:', err);
  const payload = {
    error: 'Internal server error',
    details: err.message
  };
  res.status(500).json(payload);
});

module.exports = router;
||||
@@ -194,7 +194,7 @@ const processUploadedImage = async (filePath, mimetype) => {
|
||||
legacyWarnings.push(message);
|
||||
}
|
||||
|
||||
const format = (baseMetadata.format || '').toLowerCase();
|
||||
let format = (baseMetadata.format || '').toLowerCase();
|
||||
if (format === 'gif') {
|
||||
if (metadata.size > MAX_IMAGE_SIZE_BYTES) {
|
||||
const message = `GIF optimization is limited; resulting size is ${bytesToMegabytes(metadata.size)}MB (target 5MB).`;
|
||||
@@ -211,6 +211,16 @@ const processUploadedImage = async (filePath, mimetype) => {
|
||||
return { notices, warnings: legacyWarnings, metadata, finalSize: metadata.size };
|
||||
}
|
||||
|
||||
// TIFF: convert to JPEG (don't store TIFF files)
|
||||
let convertedFromTiff = false;
|
||||
if (format === 'tiff') {
|
||||
convertedFromTiff = true;
|
||||
format = 'jpeg';
|
||||
const message = 'Converted from TIFF to JPEG.';
|
||||
notices.push({ message, level: 'info', code: 'converted_from_tiff', source: 'server' });
|
||||
legacyWarnings.push(message);
|
||||
}
|
||||
|
||||
const supportsQuality = ['jpeg', 'jpg', 'webp'].includes(format);
|
||||
let targetQuality = supportsQuality ? 90 : undefined;
|
||||
let finalQuality = undefined;
|
||||
@@ -343,8 +353,8 @@ const processUploadedImage = async (filePath, mimetype) => {
|
||||
});
|
||||
legacyWarnings.push(message);
|
||||
}
|
||||
} else if (shouldConvertToRgb) {
|
||||
const { data, info } = await encode({ width: currentWidth, height: currentHeight });
|
||||
} else if (shouldConvertToRgb || convertedFromTiff) {
|
||||
const { data, info } = await encode({ width: currentWidth, height: currentHeight, quality: targetQuality });
|
||||
mutated = true;
|
||||
finalBuffer = data;
|
||||
finalInfo = info;
|
||||
@@ -363,6 +373,15 @@ const processUploadedImage = async (filePath, mimetype) => {
|
||||
metadata.optimizedSize = metadata.size;
|
||||
}
|
||||
|
||||
// Rename TIFF files to .jpg after conversion
|
||||
let newFilePath = null;
|
||||
if (convertedFromTiff) {
|
||||
newFilePath = filePath.replace(/\.tiff?$/i, '.jpg');
|
||||
if (newFilePath !== filePath) {
|
||||
await fsp.rename(filePath, newFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
metadata.convertedToRgb = shouldConvertToRgb && mutated;
|
||||
metadata.resized = resized;
|
||||
if (finalQuality) {
|
||||
@@ -396,6 +415,7 @@ const processUploadedImage = async (filePath, mimetype) => {
|
||||
warnings: legacyWarnings,
|
||||
metadata,
|
||||
finalSize: finalBuffer.length,
|
||||
newFilePath,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -419,10 +439,11 @@ const storage = multer.diskStorage({
|
||||
case 'image/png': fileExt = '.png'; break;
|
||||
case 'image/gif': fileExt = '.gif'; break;
|
||||
case 'image/webp': fileExt = '.webp'; break;
|
||||
case 'image/tiff': fileExt = '.tif'; break;
|
||||
default: fileExt = '.jpg'; // Default to jpg
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const fileName = `${req.body.upc || 'product'}-${uniqueSuffix}${fileExt}`;
|
||||
console.log(`Generated filename: ${fileName} with mimetype: ${file.mimetype}`);
|
||||
cb(null, fileName);
|
||||
@@ -436,10 +457,10 @@ const upload = multer({
|
||||
},
|
||||
fileFilter: function (req, file, cb) {
|
||||
// Accept only image files
|
||||
const filetypes = /jpeg|jpg|png|gif|webp/;
|
||||
const filetypes = /jpeg|jpg|png|gif|webp|tiff?/;
|
||||
const mimetype = filetypes.test(file.mimetype);
|
||||
const extname = filetypes.test(path.extname(file.originalname).toLowerCase());
|
||||
|
||||
|
||||
if (mimetype && extname) {
|
||||
return cb(null, true);
|
||||
}
|
||||
@@ -633,13 +654,19 @@ router.post('/upload-image', upload.single('image'), async (req, res) => {
|
||||
const processingResult = await processUploadedImage(filePath, req.file.mimetype);
|
||||
req.file.size = processingResult.finalSize;
|
||||
|
||||
// If TIFF was converted to JPG, update filename to match the renamed file
|
||||
const effectivePath = processingResult.newFilePath || filePath;
|
||||
if (processingResult.newFilePath) {
|
||||
req.file.filename = path.basename(processingResult.newFilePath);
|
||||
}
|
||||
|
||||
// Create URL for the uploaded file - using an absolute URL with domain
|
||||
// This will generate a URL like: https://acot.site/uploads/products/filename.jpg
|
||||
const baseUrl = 'https://tools.acherryontop.com';
|
||||
const imageUrl = `${baseUrl}/uploads/products/${req.file.filename}`;
|
||||
|
||||
|
||||
// Schedule this image for deletion in 24 hours
|
||||
scheduleImageDeletion(req.file.filename, filePath);
|
||||
scheduleImageDeletion(req.file.filename, effectivePath);
|
||||
|
||||
// Return success response with image URL
|
||||
res.status(200).json({
|
||||
@@ -1308,8 +1335,11 @@ const PRODUCT_SELECT = `
|
||||
pls.date_sold AS date_last_sold,
|
||||
IF(p.tax_code IS NULL, '', CAST(p.tax_code AS CHAR)) AS tax_code,
|
||||
CAST(p.size_cat AS CHAR) AS size_cat,
|
||||
CAST(p.shipping_restrictions AS CHAR) AS shipping_restrictions
|
||||
CAST(p.shipping_restrictions AS CHAR) AS shipping_restrictions,
|
||||
IF(DATEDIFF(NOW(), p.date_ol) <= 45 AND p.notnew = 0 AND (si_feed.all IS NULL OR si_feed.all != 2), 1, 0) AS is_new,
|
||||
IF(si_feed.all = 2, 1, 0) AS is_preorder
|
||||
FROM products p
|
||||
LEFT JOIN shop_inventory si_feed ON p.pid = si_feed.pid AND si_feed.store = 0
|
||||
LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
|
||||
LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
|
||||
LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
|
||||
@@ -1334,7 +1364,7 @@ router.get('/line-products', async (req, res) => {
|
||||
where += ' AND p.subline = ?';
|
||||
params.push(Number(subline));
|
||||
}
|
||||
const query = `${PRODUCT_SELECT} ${where} GROUP BY p.pid ORDER BY p.description`;
|
||||
const query = `${PRODUCT_SELECT} ${where} GROUP BY p.pid ORDER BY IF(p.date_ol != '0000-00-00 00:00:00', p.date_ol, p.date_created) DESC, p.description`;
|
||||
const [results] = await connection.query(query, params);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
@@ -1501,7 +1531,7 @@ router.get('/path-products', async (req, res) => {
|
||||
return res.status(400).json({ error: 'No valid filters found in path' });
|
||||
}
|
||||
|
||||
const query = `${PRODUCT_SELECT} WHERE ${whereParts.join(' AND ')} GROUP BY p.pid ORDER BY p.description`;
|
||||
const query = `${PRODUCT_SELECT} WHERE ${whereParts.join(' AND ')} GROUP BY p.pid ORDER BY IF(p.date_ol != '0000-00-00 00:00:00', p.date_ol, p.date_created) DESC, p.description`;
|
||||
const [results] = await connection.query(query, params);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
@@ -1552,6 +1582,60 @@ router.get('/product-images/:pid', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Batch fetch product images for multiple PIDs.
// GET /product-images-batch?pids=1,2,3 — responds with a map of
// pid -> array of image records (sorted by `order` desc), each record
// carrying per-size CDN URLs.
router.get('/product-images-batch', async (req, res) => {
  const { pids } = req.query;
  if (!pids) {
    return res.status(400).json({ error: 'pids query parameter is required' });
  }

  // Parse the comma-separated list, keeping only positive numeric ids
  // (NaN and non-positive entries are dropped).
  const ids = String(pids)
    .split(',')
    .map(Number)
    .filter((n) => n > 0);
  if (ids.length === 0) {
    return res.json({});
  }

  try {
    const { connection } = await getDbConnection();
    const placeholders = ids.map(() => '?').join(',');
    const [rows] = await connection.query(
      `SELECT pid, iid, type, width, height, \`order\`, hidden FROM product_images WHERE pid IN (${placeholders}) ORDER BY \`order\` DESC, type`,
      ids
    );

    // Numeric image-type codes -> size/variant names used in CDN URLs.
    const typeNames = { 1: 'o', 2: 'l', 3: 't', 4: '100x100', 5: '175x175', 6: '300x300', 7: '600x600', 8: '500x500', 9: '150x150' };

    // Group rows as pid -> iid -> image record. Seed every requested
    // pid so missing products still appear as empty arrays in the reply.
    const grouped = {};
    for (const id of ids) {
      grouped[id] = {};
    }

    for (const row of rows) {
      const sizeName = typeNames[row.type];
      if (!sizeName) continue; // unknown type code — skip

      const bucket = (grouped[row.pid] ??= {});
      if (!bucket[row.iid]) {
        bucket[row.iid] = { iid: row.iid, order: row.order, hidden: !!row.hidden, sizes: {} };
      }

      // Image paths shard by zero-padded pid: 0000/000/…
      const padded = String(row.pid).padStart(10, '0');
      const pathPrefix = `${padded.substring(0, 4)}/${padded.substring(4, 7)}/`;
      bucket[row.iid].sizes[sizeName] = {
        width: row.width,
        height: row.height,
        url: `https://sbing.com/i/products/${pathPrefix}${row.pid}-${sizeName}-${row.iid}.jpg`,
      };
    }

    // Flatten each pid's iid map into an array sorted by order (desc).
    const output = {};
    for (const id of ids) {
      output[id] = Object.values(grouped[id] || {}).sort((a, b) => b.order - a.order);
    }
    res.json(output);
  } catch (error) {
    console.error('Error fetching batch product images:', error);
    res.status(500).json({ error: 'Failed to fetch product images' });
  }
});
|
||||
|
||||
const UPC_SUPPLIER_PREFIX_LEADING_DIGIT = '4';
|
||||
const UPC_MAX_SEQUENCE = 99999;
|
||||
const UPC_RESERVATION_TTL = 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
@@ -24,6 +24,7 @@ const vendorsAggregateRouter = require('./routes/vendorsAggregate');
|
||||
const brandsAggregateRouter = require('./routes/brandsAggregate');
|
||||
const htsLookupRouter = require('./routes/hts-lookup');
|
||||
const importSessionsRouter = require('./routes/import-sessions');
|
||||
const importAuditLogRouter = require('./routes/import-audit-log');
|
||||
const newsletterRouter = require('./routes/newsletter');
|
||||
|
||||
// Get the absolute path to the .env file
|
||||
@@ -133,6 +134,7 @@ async function startServer() {
|
||||
app.use('/api/reusable-images', reusableImagesRouter);
|
||||
app.use('/api/hts-lookup', htsLookupRouter);
|
||||
app.use('/api/import-sessions', importSessionsRouter);
|
||||
app.use('/api/import-audit-log', importAuditLogRouter);
|
||||
app.use('/api/newsletter', newsletterRouter);
|
||||
|
||||
// Basic health check route
|
||||
|
||||
Reference in New Issue
Block a user