// Inventory product import router: image upload/retention, SSH-tunneled
// production DB access with connection + query caching, and product lookups.
const express = require('express');
|
|
const router = express.Router();
|
|
const { Client } = require('ssh2');
|
|
const mysql = require('mysql2/promise');
|
|
const multer = require('multer');
|
|
const path = require('path');
|
|
const fs = require('fs');
|
|
|
|
// Create uploads directory if it doesn't exist.
// NOTE(review): absolute path is deployment-specific — assumes the web root
// lives at /var/www/html/inventory; confirm per environment.
const uploadsDir = path.join('/var/www/html/inventory/uploads/products');
fs.mkdirSync(uploadsDir, { recursive: true });

// Create a Map to track image upload times and their scheduled deletion.
// Key: filename; value: { uploadTime: Date, timeoutId, filePath }. Entries are
// added by scheduleImageDeletion()/cleanupImagesOnStartup() and removed when
// the file is deleted (manually via /delete-image or by the 24-hour timer).
const imageUploadMap = new Map();
|
|
|
|
// Connection pooling and cache configuration.
// Single shared cache for (a) the SSH-tunneled MySQL connection and
// (b) query results, both managed by getDbConnection()/getCachedQuery().
const connectionCache = {
  // Live ssh2 Client for the tunnel (null until first connect).
  ssh: null,
  // Live mysql2 connection riding on the tunnel stream (null until first connect).
  dbConnection: null,
  // Timestamp (ms) of the last successful use; drives idle expiration.
  lastUsed: 0,
  // True while a (re)connect is in flight, so concurrent callers can wait
  // on connectionPromise instead of opening duplicate tunnels.
  isConnecting: false,
  connectionPromise: null,
  // Cache expiration time in milliseconds (5 minutes)
  expirationTime: 5 * 60 * 1000,
  // Cache for query results (key: query string, value: {data, timestamp})
  queryCache: new Map(),
  // Cache duration for different query types in milliseconds
  cacheDuration: {
    'field-options': 30 * 60 * 1000, // 30 minutes for field options
    'product-lines': 10 * 60 * 1000, // 10 minutes for product lines
    'sublines': 10 * 60 * 1000, // 10 minutes for sublines
    'default': 60 * 1000 // 1 minute default
  }
};
|
|
|
|
// Function to schedule image deletion after 24 hours.
/**
 * Schedule an uploaded image for automatic deletion 24 hours from now.
 * Any previously scheduled deletion for the same filename is cancelled first,
 * so the timer always reflects the most recent upload of that name.
 *
 * @param {string} filename - Basename of the uploaded file (key in imageUploadMap).
 * @param {string} filePath - Absolute path of the file on disk.
 */
const scheduleImageDeletion = (filename, filePath) => {
  // Cancel any existing timeout for this file so we never double-delete.
  if (imageUploadMap.has(filename)) {
    clearTimeout(imageUploadMap.get(filename).timeoutId);
  }

  // Schedule deletion after 24 hours (24 * 60 * 60 * 1000 ms).
  // FIX: log messages previously contained a broken "$(unknown)" placeholder
  // instead of interpolating the filename.
  const timeoutId = setTimeout(() => {
    console.log(`Auto-deleting image after 24 hours: ${filename}`);

    // The file may already have been removed via the /delete-image endpoint.
    if (fs.existsSync(filePath)) {
      try {
        fs.unlinkSync(filePath);
        console.log(`Successfully auto-deleted image: ${filename}`);
      } catch (error) {
        console.error(`Error auto-deleting image ${filename}:`, error);
      }
    } else {
      console.log(`File already deleted: ${filename}`);
    }

    // Remove from tracking map regardless of the outcome above.
    imageUploadMap.delete(filename);
  }, 24 * 60 * 60 * 1000); // 24 hours

  // Store upload time and timeout ID so the timer can be cancelled later.
  imageUploadMap.set(filename, {
    uploadTime: new Date(),
    timeoutId: timeoutId,
    filePath: filePath
  });
};
|
|
|
|
// Function to clean up scheduled deletions on server restart.
/**
 * On startup, reconcile the uploads directory with the 24-hour retention
 * policy: delete any file already older than 24 hours, and schedule deletion
 * of the rest based on each file's creation time. Errors on individual files
 * are logged and skipped so one bad entry cannot abort the sweep.
 *
 * FIX: log messages previously contained broken "$(unknown)" placeholders
 * instead of interpolating the filename.
 */
const cleanupImagesOnStartup = () => {
  console.log('Checking for images to clean up...');

  // Check if uploads directory exists.
  if (!fs.existsSync(uploadsDir)) {
    console.log('Uploads directory does not exist');
    return;
  }

  // Read all files in the directory (async; the sweep runs in the callback).
  fs.readdir(uploadsDir, (err, files) => {
    if (err) {
      console.error('Error reading uploads directory:', err);
      return;
    }

    const now = new Date();
    let countDeleted = 0;

    files.forEach(filename => {
      const filePath = path.join(uploadsDir, filename);

      try {
        const stats = fs.statSync(filePath);
        // birthtime might not be available on all systems; fall back to ctime.
        const fileCreationTime = stats.birthtime || stats.ctime;
        const ageMs = now.getTime() - fileCreationTime.getTime();

        if (ageMs > 24 * 60 * 60 * 1000) {
          // Already past the 24-hour window: delete immediately.
          fs.unlinkSync(filePath);
          countDeleted++;
          console.log(`Deleted old image on startup: ${filename} (age: ${Math.round(ageMs / (60 * 60 * 1000))} hours)`);
        } else {
          // Schedule deletion for the remaining portion of the 24-hour window.
          const remainingMs = (24 * 60 * 60 * 1000) - ageMs;
          console.log(`Scheduling deletion for ${filename} in ${Math.round(remainingMs / (60 * 60 * 1000))} hours`);

          const timeoutId = setTimeout(() => {
            if (fs.existsSync(filePath)) {
              try {
                fs.unlinkSync(filePath);
                console.log(`Successfully auto-deleted scheduled image: ${filename}`);
              } catch (error) {
                console.error(`Error auto-deleting scheduled image ${filename}:`, error);
              }
            }
            imageUploadMap.delete(filename);
          }, remainingMs);

          imageUploadMap.set(filename, {
            uploadTime: fileCreationTime,
            timeoutId: timeoutId,
            filePath: filePath
          });
        }
      } catch (error) {
        console.error(`Error processing file ${filename}:`, error);
      }
    });

    console.log(`Cleanup completed: ${countDeleted} old images deleted, ${imageUploadMap.size} images scheduled for deletion`);
  });
};

// Run cleanup on server start.
cleanupImagesOnStartup();
|
|
|
|
// Multer disk storage: files land in uploadsDir under a unique,
// extension-preserving name of the form `<upc|product>-<timestamp>-<rand><ext>`.
const storage = multer.diskStorage({
  destination(req, file, cb) {
    console.log(`Saving to: ${uploadsDir}`);
    cb(null, uploadsDir);
  },
  filename(req, file, cb) {
    // Unique suffix: millisecond timestamp plus a random integer.
    const uniqueSuffix = `${Date.now()}-${Math.round(Math.random() * 1E9)}`;

    // Preserve the original extension; when the upload has none, derive one
    // from the mimetype (defaulting to .jpg).
    let fileExt = path.extname(file.originalname).toLowerCase();
    if (!fileExt) {
      const extByMime = {
        'image/jpeg': '.jpg',
        'image/png': '.png',
        'image/gif': '.gif',
        'image/webp': '.webp'
      };
      fileExt = extByMime[file.mimetype] || '.jpg';
    }

    // NOTE(review): req.body.upc is only populated here if the `upc` field
    // arrives before the file part in the multipart stream — confirm the
    // client sends fields in that order.
    const fileName = `${req.body.upc || 'product'}-${uniqueSuffix}${fileExt}`;
    console.log(`Generated filename: ${fileName} with mimetype: ${file.mimetype}`);
    cb(null, fileName);
  }
});
|
|
|
|
// Multer instance: 5 MB cap, images only — the file must match the allowed
// set by BOTH mimetype and file extension.
const upload = multer({
  storage: storage,
  limits: {
    fileSize: 5 * 1024 * 1024, // 5MB max file size
  },
  fileFilter(req, file, cb) {
    const allowed = /jpeg|jpg|png|gif|webp/;
    const mimetypeOk = allowed.test(file.mimetype);
    const extensionOk = allowed.test(path.extname(file.originalname).toLowerCase());

    // Reject unless both checks agree the upload is an image.
    if (!(mimetypeOk && extensionOk)) {
      return cb(new Error('Only image files are allowed'));
    }
    cb(null, true);
  }
});
|
|
|
|
// Modified function to get a database connection with connection pooling.
/**
 * Return the shared SSH-tunneled MySQL connection, (re)establishing it when
 * missing or idle longer than connectionCache.expirationTime. Concurrent
 * callers during a reconnect share the same in-flight connectionPromise.
 *
 * @returns {Promise<{ssh: object, connection: object}>} The live ssh2 client
 *   and mysql2 connection.
 * @throws Propagates SSH/MySQL connection errors from setupSshTunnel /
 *   mysql.createConnection.
 */
async function getDbConnection() {
  const now = Date.now();

  // Check if we need to refresh the connection due to inactivity.
  const needsRefresh = !connectionCache.ssh ||
    !connectionCache.dbConnection ||
    (now - connectionCache.lastUsed > connectionCache.expirationTime);

  // If connection is still valid, update last used time and return existing connection.
  if (!needsRefresh) {
    connectionCache.lastUsed = now;
    return {
      ssh: connectionCache.ssh,
      connection: connectionCache.dbConnection
    };
  }

  // If another request is already establishing a connection, wait for that promise.
  if (connectionCache.isConnecting && connectionCache.connectionPromise) {
    try {
      await connectionCache.connectionPromise;
      return {
        ssh: connectionCache.ssh,
        connection: connectionCache.dbConnection
      };
    } catch (error) {
      // If that connection attempt failed, we'll try again below.
      console.error('Error waiting for existing connection:', error);
    }
  }

  // Close existing connections if they exist.
  // NOTE(review): an expired-but-still-in-use connection is closed here,
  // which could interrupt another request's in-flight query — confirm
  // request concurrency is low enough for this to be acceptable.
  if (connectionCache.dbConnection) {
    try {
      await connectionCache.dbConnection.end();
    } catch (error) {
      console.error('Error closing existing database connection:', error);
    }
  }

  if (connectionCache.ssh) {
    try {
      connectionCache.ssh.end();
    } catch (error) {
      console.error('Error closing existing SSH connection:', error);
    }
  }

  // Mark that we're establishing a new connection.
  connectionCache.isConnecting = true;

  // Create a new promise for this connection attempt; concurrent callers
  // above await this same promise instead of opening their own tunnel.
  connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
    const { ssh, stream, dbConfig } = tunnel;

    // mysql2 connects over the forwarded SSH stream rather than TCP.
    return mysql.createConnection({
      ...dbConfig,
      stream
    }).then(connection => {
      // Store the new connections.
      connectionCache.ssh = ssh;
      connectionCache.dbConnection = connection;
      connectionCache.lastUsed = Date.now();
      connectionCache.isConnecting = false;

      return {
        ssh,
        connection
      };
    });
  }).catch(error => {
    // Reset the flag so the next caller can retry, then propagate.
    connectionCache.isConnecting = false;
    throw error;
  });

  // Wait for the connection to be established.
  return connectionCache.connectionPromise;
}
|
|
|
|
// Helper: serve a query result from the in-memory cache when fresh,
// otherwise execute and cache it.
/**
 * @param {string} cacheKey - Unique key identifying this query's result.
 * @param {string} queryType - TTL bucket looked up in connectionCache.cacheDuration.
 * @param {Function} queryFn - Async function producing the result on a cache miss.
 * @returns {Promise<*>} The cached or freshly computed result.
 */
async function getCachedQuery(cacheKey, queryType, queryFn) {
  // Resolve the TTL for this query type (falls back to the default bucket).
  const ttl = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;

  const entry = connectionCache.queryCache.get(cacheKey);
  const now = Date.now();
  const isFresh = entry && (now - entry.timestamp < ttl);

  if (isFresh) {
    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
    return entry.data;
  }

  // No valid cache found — run the query and remember the result.
  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
  const data = await queryFn();
  connectionCache.queryCache.set(cacheKey, { data, timestamp: now });
  return data;
}
|
|
|
|
// Helper function to setup SSH tunnel - ONLY USED BY getDbConnection NOW.
/**
 * Open an SSH connection to the production host and forward a local stream
 * to the MySQL server so mysql2 can connect over the tunnel. All settings
 * come from PROD_* environment variables.
 *
 * @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
 *   The live ssh2 client, the forwarded stream for mysql2's `stream` option,
 *   and the database config to spread into mysql.createConnection.
 * @throws Rejects on SSH connection or port-forwarding errors.
 */
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    // Key-based auth only when a key path is configured (uses the top-level
    // fs import instead of an inline require).
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };

  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z'
  };

  return new Promise((resolve, reject) => {
    const ssh = new Client();

    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });

    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          // BUG FIX: previously fell through to resolve() after reject(err),
          // settling the promise with an undefined stream on failure.
          if (err) return reject(err);
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
|
|
|
|
// Image upload endpoint.
// POST /upload-image — accepts a single multipart `image` field, stores it in
// uploadsDir, schedules auto-deletion in 24 hours, and returns the public URL.
router.post('/upload-image', upload.single('image'), (req, res) => {
  try {
    if (!req.file) {
      return res.status(400).json({ error: 'No image file provided' });
    }

    const { filename, originalname, mimetype, size } = req.file;

    // Log file information for debugging.
    console.log('File uploaded:', {
      filename,
      originalname,
      mimetype,
      size,
      path: req.file.path
    });

    // Ensure multer actually wrote the file to disk.
    const filePath = path.join(uploadsDir, filename);
    if (!fs.existsSync(filePath)) {
      return res.status(500).json({ error: 'File was not saved correctly' });
    }

    // Log file access permissions (asynchronous, purely informational).
    fs.access(filePath, fs.constants.R_OK, (err) => {
      if (err) {
        console.error('File permission issue:', err);
      } else {
        console.log('File is readable');
      }
    });

    // Build an absolute public URL for the uploaded file, e.g.
    // https://inventory.acot.site/uploads/products/<filename>
    const baseUrl = 'https://inventory.acot.site';
    const imageUrl = `${baseUrl}/uploads/products/${filename}`;

    // Schedule this image for deletion in 24 hours.
    scheduleImageDeletion(filename, filePath);

    // Return success response with image URL.
    res.status(200).json({
      success: true,
      imageUrl,
      fileName: filename,
      mimetype,
      fullPath: filePath,
      message: 'Image uploaded successfully (will auto-delete after 24 hours)'
    });
  } catch (error) {
    console.error('Error uploading image:', error);
    res.status(500).json({ error: error.message || 'Failed to upload image' });
  }
});
|
|
|
|
// Image deletion endpoint.
// DELETE /delete-image — body: { filename }. Removes the file from uploadsDir
// and cancels its scheduled auto-deletion timer.
// FIX: the filename is now validated before path.join, matching /check-file —
// previously a crafted filename like "../../etc/x" could escape uploadsDir.
router.delete('/delete-image', (req, res) => {
  try {
    const { filename } = req.body;

    if (!filename) {
      return res.status(400).json({ error: 'Filename is required' });
    }

    // Prevent directory traversal: only bare filenames are accepted.
    if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
      return res.status(400).json({ error: 'Invalid filename' });
    }

    const filePath = path.join(uploadsDir, filename);

    // Check if file exists.
    if (!fs.existsSync(filePath)) {
      return res.status(404).json({ error: 'File not found' });
    }

    // Delete the file.
    fs.unlinkSync(filePath);

    // Clear any scheduled deletion for this file.
    if (imageUploadMap.has(filename)) {
      clearTimeout(imageUploadMap.get(filename).timeoutId);
      imageUploadMap.delete(filename);
    }

    // Return success response.
    res.status(200).json({
      success: true,
      message: 'Image deleted successfully'
    });
  } catch (error) {
    console.error('Error deleting image:', error);
    res.status(500).json({ error: error.message || 'Failed to delete image' });
  }
});
|
|
|
|
// Get all options for import fields.
// GET /field-options — returns every dropdown option set the import UI needs
// (companies, artists, sizes, themes, categories, colors, suppliers, tax
// categories, shipping restrictions). Results are cached for 30 minutes via
// getCachedQuery. Category `type` codes observed in the queries below:
// 1=company, 40=artist, 50=size, 20/21=theme/subtheme, 10-13=section/
// category/subcategory/sub-subcategory.
router.get('/field-options', async (req, res) => {
  try {
    // Use cached connection.
    // NOTE(review): the connection is acquired even on a cache hit, when it
    // is not needed — harmless but could be moved inside the callback.
    const { connection } = await getDbConnection();

    const cacheKey = 'field-options';
    const result = await getCachedQuery(cacheKey, 'field-options', async () => {
      // Fetch companies (type 1)
      const [companies] = await connection.query(`
        SELECT cat_id, name
        FROM product_categories
        WHERE type = 1
        ORDER BY name
      `);

      // Fetch artists (type 40)
      const [artists] = await connection.query(`
        SELECT cat_id, name
        FROM product_categories
        WHERE type = 40
        ORDER BY name
      `);

      // Fetch sizes (type 50)
      const [sizes] = await connection.query(`
        SELECT cat_id, name
        FROM product_categories
        WHERE type = 50
        ORDER BY name
      `);

      // Fetch themes with subthemes: top-level themes (type 20) unioned with
      // "Theme - Subtheme" rows (type 21), sorted so subthemes follow their theme.
      const [themes] = await connection.query(`
        SELECT t.cat_id, t.name AS display_name, t.type, t.name AS sort_theme,
               '' AS sort_subtheme, 1 AS level_order
        FROM product_categories t
        WHERE t.type = 20
        UNION ALL
        SELECT ts.cat_id, CONCAT(t.name,' - ',ts.name) AS display_name, ts.type,
               t.name AS sort_theme, ts.name AS sort_subtheme, 2 AS level_order
        FROM product_categories ts
        JOIN product_categories t ON ts.master_cat_id = t.cat_id
        WHERE ts.type = 21 AND t.type = 20
        ORDER BY sort_theme, sort_subtheme
      `);

      // Fetch categories with all levels: section (10) → category (11) →
      // subcategory (12) → sub-subcategory (13), each row's display_name
      // carrying the full " - "-joined path.
      const [categories] = await connection.query(`
        SELECT s.cat_id, s.name AS display_name, s.type, s.name AS sort_section,
               '' AS sort_category, '' AS sort_subcategory, '' AS sort_subsubcategory,
               1 AS level_order
        FROM product_categories s
        WHERE s.type = 10
        UNION ALL
        SELECT c.cat_id, CONCAT(s.name,' - ',c.name) AS display_name, c.type,
               s.name AS sort_section, c.name AS sort_category, '' AS sort_subcategory,
               '' AS sort_subsubcategory, 2 AS level_order
        FROM product_categories c
        JOIN product_categories s ON c.master_cat_id = s.cat_id
        WHERE c.type = 11 AND s.type = 10
        UNION ALL
        SELECT sc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name) AS display_name,
               sc.type, s.name AS sort_section, c.name AS sort_category,
               sc.name AS sort_subcategory, '' AS sort_subsubcategory, 3 AS level_order
        FROM product_categories sc
        JOIN product_categories c ON sc.master_cat_id = c.cat_id
        JOIN product_categories s ON c.master_cat_id = s.cat_id
        WHERE sc.type = 12 AND c.type = 11 AND s.type = 10
        UNION ALL
        SELECT ssc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name,' - ',ssc.name) AS display_name,
               ssc.type, s.name AS sort_section, c.name AS sort_category,
               sc.name AS sort_subcategory, ssc.name AS sort_subsubcategory, 4 AS level_order
        FROM product_categories ssc
        JOIN product_categories sc ON ssc.master_cat_id = sc.cat_id
        JOIN product_categories c ON sc.master_cat_id = c.cat_id
        JOIN product_categories s ON c.master_cat_id = s.cat_id
        WHERE ssc.type = 13 AND sc.type = 12 AND c.type = 11 AND s.type = 10
        ORDER BY sort_section, sort_category, sort_subcategory, sort_subsubcategory
      `);

      // Fetch colors (ordered by the table's explicit `order` column).
      const [colors] = await connection.query(`
        SELECT color, name, hex_color
        FROM product_color_list
        ORDER BY \`order\`
      `);

      // Fetch suppliers (blank company names excluded).
      const [suppliers] = await connection.query(`
        SELECT supplierid as value, companyname as label
        FROM suppliers
        WHERE companyname <> ''
        ORDER BY companyname
      `);

      // Fetch tax categories; tax_code_id 0 is sorted first deliberately.
      const [taxCategories] = await connection.query(`
        SELECT CAST(tax_code_id AS CHAR) as value, name as label
        FROM product_tax_codes
        ORDER BY tax_code_id = 0 DESC, name
      `);

      // Format and return all options as {label, value} pairs (values are
      // stringified for the frontend selects).
      return {
        companies: companies.map(c => ({ label: c.name, value: c.cat_id.toString() })),
        artists: artists.map(a => ({ label: a.name, value: a.cat_id.toString() })),
        sizes: sizes.map(s => ({ label: s.name, value: s.cat_id.toString() })),
        themes: themes.map(t => ({
          label: t.display_name,
          value: t.cat_id.toString(),
          type: t.type,
          level: t.level_order
        })),
        categories: categories.map(c => ({
          label: c.display_name,
          value: c.cat_id.toString(),
          type: c.type,
          level: c.level_order
        })),
        colors: colors.map(c => ({
          label: c.name,
          value: c.color,
          hexColor: c.hex_color
        })),
        suppliers: suppliers,
        taxCategories: taxCategories,
        // Static option set (not stored in the database).
        shippingRestrictions: [
          { label: "None", value: "0" },
          { label: "US Only", value: "1" },
          { label: "Limited Quantity", value: "2" },
          { label: "US/CA Only", value: "3" },
          { label: "No FedEx 2 Day", value: "4" },
          { label: "North America Only", value: "5" }
        ]
      };
    });

    // Add debugging to verify category types.
    console.log(`Returning ${result.categories.length} categories with types: ${Array.from(new Set(result.categories.map(c => c.type))).join(', ')}`);

    res.json(result);
  } catch (error) {
    console.error('Error fetching import field options:', error);
    res.status(500).json({ error: 'Failed to fetch import field options' });
  }
});
|
|
|
|
// Get product lines for a specific company.
// GET /product-lines/:companyId — returns [{label, value}] for type-2
// categories under the given company; results cached for 10 minutes.
router.get('/product-lines/:companyId', async (req, res) => {
  try {
    // Use cached connection.
    const { connection } = await getDbConnection();

    const { companyId } = req.params;
    const cacheKey = `product-lines-${companyId}`;

    const lines = await getCachedQuery(cacheKey, 'product-lines', async () => {
      const [rows] = await connection.query(`
        SELECT cat_id as value, name as label
        FROM product_categories
        WHERE type = 2
        AND master_cat_id = ?
        ORDER BY name
      `, [companyId]);

      // Stringify values for the frontend select component.
      return rows.map(({ label, value }) => ({ label, value: value.toString() }));
    });

    res.json(lines);
  } catch (error) {
    console.error('Error fetching product lines:', error);
    res.status(500).json({ error: 'Failed to fetch product lines' });
  }
});
|
|
|
|
// Get sublines for a specific product line.
// GET /sublines/:lineId — returns [{label, value}] for type-3 categories
// under the given line; results cached for 10 minutes.
router.get('/sublines/:lineId', async (req, res) => {
  try {
    // Use cached connection.
    const { connection } = await getDbConnection();

    const { lineId } = req.params;
    const cacheKey = `sublines-${lineId}`;

    const sublines = await getCachedQuery(cacheKey, 'sublines', async () => {
      const [rows] = await connection.query(`
        SELECT cat_id as value, name as label
        FROM product_categories
        WHERE type = 3
        AND master_cat_id = ?
        ORDER BY name
      `, [lineId]);

      // Stringify values for the frontend select component.
      return rows.map(({ label, value }) => ({ label, value: value.toString() }));
    });

    res.json(sublines);
  } catch (error) {
    console.error('Error fetching sublines:', error);
    res.status(500).json({ error: 'Failed to fetch sublines' });
  }
});
|
|
|
|
// Add a simple endpoint to check file existence and permissions.
// GET /check-file/:filename — debugging endpoint reporting existence,
// readability, and stat info for one file inside uploadsDir.
router.get('/check-file/:filename', (req, res) => {
  const { filename } = req.params;

  // Prevent directory traversal.
  // FIX: also reject backslashes so the guard holds for Windows-style
  // separators, which path.join would otherwise accept.
  if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
    return res.status(400).json({ error: 'Invalid filename' });
  }

  const filePath = path.join(uploadsDir, filename);

  try {
    // Check if file exists.
    if (!fs.existsSync(filePath)) {
      return res.status(404).json({
        error: 'File not found',
        path: filePath,
        exists: false,
        readable: false
      });
    }

    // Check if file is readable (throws into the catch below if not).
    fs.accessSync(filePath, fs.constants.R_OK);

    // Get file stats.
    const stats = fs.statSync(filePath);

    return res.json({
      filename,
      path: filePath,
      exists: true,
      readable: true,
      isFile: stats.isFile(),
      isDirectory: stats.isDirectory(),
      size: stats.size,
      created: stats.birthtime,
      modified: stats.mtime,
      permissions: stats.mode.toString(8)
    });
  } catch (error) {
    return res.status(500).json({
      error: error.message,
      path: filePath,
      exists: fs.existsSync(filePath),
      readable: false
    });
  }
});
|
|
|
|
// List all files in uploads directory.
// GET /list-uploads — debugging endpoint: returns stat details for every
// entry in uploadsDir, or 404 if the directory is missing.
router.get('/list-uploads', (req, res) => {
  try {
    if (!fs.existsSync(uploadsDir)) {
      return res.status(404).json({ error: 'Uploads directory not found', path: uploadsDir });
    }

    const files = fs.readdirSync(uploadsDir);

    // Stat each entry individually; a failure on one entry is reported
    // inline rather than failing the whole listing.
    const fileDetails = files.map((filename) => {
      const entryPath = path.join(uploadsDir, filename);
      try {
        const stats = fs.statSync(entryPath);
        return {
          filename,
          isFile: stats.isFile(),
          isDirectory: stats.isDirectory(),
          size: stats.size,
          created: stats.birthtime,
          modified: stats.mtime,
          permissions: stats.mode.toString(8)
        };
      } catch (error) {
        return { filename, error: error.message };
      }
    });

    return res.json({
      directory: uploadsDir,
      count: files.length,
      files: fileDetails
    });
  } catch (error) {
    return res.status(500).json({ error: error.message, path: uploadsDir });
  }
});
|
|
|
|
// Search products from production database.
// GET /search-products?q=<term>&company=<id>&dateRange=<bucket>
// `q` matches description/itemnumber/upc/brand/supplier ("*" = match all);
// `company` filters by brand id; `dateRange` restricts by first-received date.
// Returns rows shaped for the import UI (see SELECT aliases below).
// FIX: the six copy-pasted dateRange switch cases are collapsed into a
// days-lookup table with one shared cutoff computation — behavior unchanged
// (unrecognized values still add no date condition).
router.get('/search-products', async (req, res) => {
  const { q, company, dateRange } = req.query;

  if (!q) {
    return res.status(400).json({ error: 'Search term is required' });
  }

  try {
    const { connection } = await getDbConnection();

    // Build WHERE clause with additional filters.
    let whereClause = `
      WHERE (
        p.description LIKE ? OR
        p.itemnumber LIKE ? OR
        p.upc LIKE ? OR
        pc1.name LIKE ? OR
        s.companyname LIKE ?
      )`;

    // Add company filter if provided (value escaped before interpolation).
    if (company) {
      whereClause += ` AND p.company = ${connection.escape(company)}`;
    }

    // Add date range filter if provided. Each supported bucket maps to a
    // look-back window in days; unrecognized values add no condition.
    const dateRangeDays = {
      '1week': 7,
      '1month': 30,
      '2months': 60,
      '3months': 90,
      '6months': 180,
      '1year': 365
    };
    if (dateRange && dateRangeDays[dateRange]) {
      const cutoff = new Date();
      cutoff.setDate(cutoff.getDate() - dateRangeDays[dateRange]);
      whereClause += ` AND p.datein >= ${connection.escape(cutoff.toISOString().slice(0, 10))}`;
    }

    // Special case for wildcard search: "*" matches everything and sorts by
    // receive date instead of relevance.
    const isWildcardSearch = q === '*';
    const searchPattern = isWildcardSearch ? '%' : `%${q}%`;
    const exactPattern = isWildcardSearch ? '%' : q;

    // Search for products based on various fields.
    const query = `
      SELECT
        p.pid,
        p.description AS title,
        p.notes AS description,
        p.itemnumber AS sku,
        p.upc AS barcode,
        p.harmonized_tariff_code,
        pcp.price_each AS price,
        p.sellingprice AS regular_price,
        CASE
          WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
          THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0)
          ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
        END AS cost_price,
        s.companyname AS vendor,
        sid.supplier_itemnumber AS vendor_reference,
        sid.notions_itemnumber AS notions_reference,
        sid.supplier_id AS supplier,
        sid.notions_case_pack AS case_qty,
        pc1.name AS brand,
        p.company AS brand_id,
        pc2.name AS line,
        p.line AS line_id,
        pc3.name AS subline,
        p.subline AS subline_id,
        pc4.name AS artist,
        p.artist AS artist_id,
        COALESCE(CASE
          WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
          ELSE sid.supplier_qty_per_unit
        END, sid.notions_qty_per_unit) AS moq,
        p.weight,
        p.length,
        p.width,
        p.height,
        p.country_of_origin,
        ci.totalsold AS total_sold,
        p.datein AS first_received,
        pls.date_sold AS date_last_sold,
        IF(p.tax_code IS NULL, '', CAST(p.tax_code AS CHAR)) AS tax_code,
        CAST(p.size_cat AS CHAR) AS size_cat,
        CAST(p.shipping_restrictions AS CHAR) AS shipping_restrictions
      FROM products p
      LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
      LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
      LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
      LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id
      LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id
      LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
      LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
      LEFT JOIN product_last_sold pls ON p.pid = pls.pid
      LEFT JOIN current_inventory ci ON p.pid = ci.pid
      ${whereClause}
      GROUP BY p.pid
      ${isWildcardSearch ? 'ORDER BY p.datein DESC' : `
      ORDER BY
        CASE
          WHEN p.description LIKE ? THEN 1
          WHEN p.itemnumber = ? THEN 2
          WHEN p.upc = ? THEN 3
          WHEN pc1.name LIKE ? THEN 4
          WHEN s.companyname LIKE ? THEN 5
          ELSE 6
        END
      `}
    `;

    // Five LIKE placeholders for the WHERE clause; the relevance ORDER BY
    // (non-wildcard only) takes five more (exact match on itemnumber/upc).
    const queryParams = [
      searchPattern, // LIKE for description
      searchPattern, // LIKE for itemnumber
      searchPattern, // LIKE for upc
      searchPattern, // LIKE for brand name
      searchPattern  // LIKE for company name
    ];
    if (!isWildcardSearch) {
      queryParams.push(
        searchPattern, // LIKE for description
        exactPattern,  // Exact match for itemnumber
        exactPattern,  // Exact match for upc
        searchPattern, // LIKE for brand name
        searchPattern  // LIKE for company name
      );
    }

    const [results] = await connection.query(query, queryParams);

    // Debug log to check values.
    if (results.length > 0) {
      console.log('Product search result sample fields:', {
        pid: results[0].pid,
        tax_code: results[0].tax_code,
        tax_code_type: typeof results[0].tax_code,
        tax_code_value: `Value: '${results[0].tax_code}'`,
        size_cat: results[0].size_cat,
        shipping_restrictions: results[0].shipping_restrictions,
        supplier: results[0].supplier,
        case_qty: results[0].case_qty,
        moq: results[0].moq
      });
    }

    res.json(results);
  } catch (error) {
    console.error('Error searching products:', error);
    res.status(500).json({ error: 'Failed to search products' });
  }
});
|
|
|
|
// Endpoint to check UPC and generate item number.
// GET /check-upc-and-generate-sku?upc=<upc>&supplierId=<id>
// Verifies the UPC is not already in use (409 if it is), then generates a
// unique item number of the form `<supplierId>-<digits>` derived from the
// UPC, falling back to alternative digit slices and finally a timestamp
// suffix when collisions are found.
router.get('/check-upc-and-generate-sku', async (req, res) => {
  const { upc, supplierId } = req.query;

  if (!upc || !supplierId) {
    return res.status(400).json({ error: 'UPC and supplier ID are required' });
  }

  try {
    const { connection } = await getDbConnection();

    // Step 1: Check if the UPC already exists.
    const [upcCheck] = await connection.query(
      'SELECT pid, itemnumber FROM products WHERE upc = ? LIMIT 1',
      [upc]
    );

    if (upcCheck.length > 0) {
      // Conflict: report the existing product so the client can link to it.
      return res.status(409).json({
        error: 'UPC already exists',
        existingProductId: upcCheck[0].pid,
        existingItemNumber: upcCheck[0].itemnumber
      });
    }

    // Step 2: Generate item number - supplierId-last5DigitsOfUPC minus last digit.
    let itemNumber = '';
    const upcStr = String(upc);

    // Extract the last 5 digits of the UPC, removing the last digit (checksum).
    // So we get 5 digits from positions: length-6 to length-2.
    if (upcStr.length >= 6) {
      const lastFiveMinusOne = upcStr.substring(upcStr.length - 6, upcStr.length - 1);
      itemNumber = `${supplierId}-${lastFiveMinusOne}`;
    } else if (upcStr.length >= 5) {
      // If UPC is shorter, use as many digits as possible (all but the checksum).
      const digitsToUse = upcStr.substring(0, upcStr.length - 1);
      itemNumber = `${supplierId}-${digitsToUse}`;
    } else {
      // Very short UPC, just use the whole thing.
      itemNumber = `${supplierId}-${upcStr}`;
    }

    // Step 3: Check if the generated item number exists.
    const [itemNumberCheck] = await connection.query(
      'SELECT pid FROM products WHERE itemnumber = ? LIMIT 1',
      [itemNumber]
    );

    // Step 4: If the item number exists, modify it to use the last 5 digits of the UPC.
    if (itemNumberCheck.length > 0) {
      console.log(`Item number ${itemNumber} already exists, using alternative format`);

      if (upcStr.length >= 5) {
        // Use the last 5 digits (including the checksum).
        const lastFive = upcStr.substring(upcStr.length - 5);
        itemNumber = `${supplierId}-${lastFive}`;

        // Check again if this new item number also exists.
        const [altItemNumberCheck] = await connection.query(
          'SELECT pid FROM products WHERE itemnumber = ? LIMIT 1',
          [itemNumber]
        );

        if (altItemNumberCheck.length > 0) {
          // If even the alternative format exists, add a timestamp suffix for uniqueness.
          // NOTE(review): the timestamp-based number is not re-checked against
          // the database — collision is unlikely but not impossible; confirm
          // whether a uniqueness constraint backs this up.
          const timestamp = Date.now().toString().substring(8, 13); // Get last 5 digits of timestamp
          itemNumber = `${supplierId}-${timestamp}`;
          console.log(`Alternative item number also exists, using timestamp: ${itemNumber}`);
        }
      } else {
        // For very short UPCs, add a timestamp.
        const timestamp = Date.now().toString().substring(8, 13); // Get last 5 digits of timestamp
        itemNumber = `${supplierId}-${timestamp}`;
      }
    }

    // Return the generated item number.
    res.json({
      success: true,
      itemNumber,
      upc,
      supplierId
    });
  } catch (error) {
    console.error('Error checking UPC and generating item number:', error);
    res.status(500).json({
      error: 'Failed to check UPC and generate item number',
      details: error.message
    });
  }
});
|
|
|
|
// Get product categories for a specific product.
//
// GET /product-categories/:pid
// Responds with an array of { value, label, type, combined_name } objects for
// every category linked to the product, excluding any category that descends
// from a "deal" / "black friday" section. Category types as used below:
// 10 = top-level section, 11 = category, 12 = subcategory,
// 13 = subsubcategory (inferred from the hierarchy walk — TODO confirm).
// Errors: 400 for a non-numeric pid, 500 with { error, details } on failure.
router.get('/product-categories/:pid', async (req, res) => {
  try {
    const { pid } = req.params;

    // Basic input validation: reject missing or non-numeric product IDs.
    // Radix 10 is passed explicitly; Number.isNaN avoids the coercing global.
    if (!pid || Number.isNaN(Number.parseInt(pid, 10))) {
      return res.status(400).json({ error: 'Valid product ID is required' });
    }

    // Use the getDbConnection function instead of getPool
    const { connection } = await getDbConnection();

    // Fetch every category linked to this product via the index table.
    const query = `
      SELECT pc.cat_id, pc.name, pc.type, pc.combined_name, pc.master_cat_id
      FROM product_category_index pci
      JOIN product_categories pc ON pci.cat_id = pc.cat_id
      WHERE pci.pid = ?
      ORDER BY pc.type, pc.name
    `;

    const [rows] = await connection.query(query, [pid]);

    // Debug logging: summarize the category types attached to this product.
    const categoryTypes = rows.map((row) => row.type);
    const uniqueTypes = [...new Set(categoryTypes)];
    console.log(`Product ${pid} has ${rows.length} categories with types: ${uniqueTypes.join(', ')}`);
    console.log('Categories:', rows.map((row) => ({ id: row.cat_id, name: row.name, type: row.type })));

    // Find top-level (type 10) sections whose name marks them as deal or
    // black-friday sections, so their descendants can be filtered out.
    const sectionQuery = `
      SELECT pc.cat_id, pc.name
      FROM product_categories pc
      WHERE pc.type = 10 AND (LOWER(pc.name) LIKE '%deal%' OR LOWER(pc.name) LIKE '%black friday%')
    `;

    const [dealSections] = await connection.query(sectionQuery);
    // Set gives O(1) membership checks (the original ran Array#some per row).
    const dealSectionIds = new Set(dealSections.map((section) => section.cat_id));

    console.log('Filtering out categories from deal sections:', [...dealSectionIds]);

    // Index this product's categories by id so parent lookups are O(1)
    // (the original did a linear rows.find per row — O(n^2) overall).
    const rowsById = new Map(rows.map((row) => [row.cat_id, row]));

    // Drop categories that belong to a deal/black-friday section at any level.
    const filteredCategories = rows.filter((category) => {
      // Top-level sections: drop when the section itself is a deal section.
      if (category.type === 10) {
        return !dealSectionIds.has(category.cat_id);
      }

      // Categories (type 11): drop when their parent section is a deal section.
      if (category.type === 11) {
        return !dealSectionIds.has(category.master_cat_id);
      }

      // Subcategories (type 12): walk up one level to the parent category.
      if (category.type === 12) {
        const parentCategory = rowsById.get(category.master_cat_id);
        // If the parent isn't attached to this product we can't tell — keep it.
        return !parentCategory || !dealSectionIds.has(parentCategory.master_cat_id);
      }

      // Subsubcategories (type 13): walk up two levels to the grandparent.
      if (category.type === 13) {
        const parentSubcategory = rowsById.get(category.master_cat_id);
        if (!parentSubcategory) return true;

        const grandparentCategory = rowsById.get(parentSubcategory.master_cat_id);
        // Keep when the hierarchy can't be resolved or isn't under a deal section.
        return !grandparentCategory || !dealSectionIds.has(grandparentCategory.master_cat_id);
      }

      // Any other category type passes through untouched.
      return true;
    });

    console.log(`Filtered out ${rows.length - filteredCategories.length} deal/black friday categories`);

    // Shape the response the way the frontend expects.
    const categories = filteredCategories.map((category) => ({
      value: category.cat_id.toString(),
      label: category.name,
      type: category.type,
      combined_name: category.combined_name,
    }));

    res.json(categories);
  } catch (error) {
    console.error('Error fetching product categories:', error);
    res.status(500).json({
      error: 'Failed to fetch product categories',
      details: error.message,
    });
  }
});
|
|
|
|
// Export the router so it can be mounted by the main Express app.
module.exports = router;