Fix csv update/import on settings page + lots of cors work

This commit is contained in:
2025-01-10 14:17:07 -05:00
parent dbdf77331c
commit a1f4e57394
9 changed files with 957 additions and 329 deletions

View File

@@ -4,10 +4,10 @@ const csv = require('csv-parse');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
// For testing purposes, limit the number of rows to import (0 = no limit)
const PRODUCTS_TEST_LIMIT = 0;
const ORDERS_TEST_LIMIT = 10000;
const PURCHASE_ORDERS_TEST_LIMIT = 10000;
// Get test limits from environment variables
const PRODUCTS_TEST_LIMIT = parseInt(process.env.PRODUCTS_TEST_LIMIT || '0');
const ORDERS_TEST_LIMIT = parseInt(process.env.ORDERS_TEST_LIMIT || '10000');
const PURCHASE_ORDERS_TEST_LIMIT = parseInt(process.env.PURCHASE_ORDERS_TEST_LIMIT || '10000');
dotenv.config({ path: path.join(__dirname, '../.env') });
@@ -19,6 +19,17 @@ const dbConfig = {
multipleStatements: true
};
// Helper function to output progress in JSON format
// Emit a progress record as a single JSON line on stdout.
// Records that carry no explicit status are tagged as 'running'
// before being serialized.
function outputProgress(data) {
  const record = data.status ? data : { status: 'running', ...data };
  console.log(JSON.stringify(record));
}
// Helper function to count total rows in a CSV file
async function countRows(filePath) {
return new Promise((resolve, reject) => {
@@ -41,24 +52,33 @@ function formatDuration(seconds) {
// Helper function to update progress with time estimate
// Emit a JSON progress record with throughput and time estimates.
// (Diff residue removed: the legacy process.stdout.write carriage-return
// progress line and the unused top-level `percentage` variable duplicated
// the JSON output below.)
function updateProgress(current, total, operation, startTime) {
  const elapsed = (Date.now() - startTime) / 1000;
  const rate = current / elapsed; // rows per second
  const remaining = (total - current) / rate;
  outputProgress({
    status: 'running',
    operation,
    current,
    total,
    rate,
    elapsed: formatDuration(elapsed),
    remaining: formatDuration(remaining),
    percentage: ((current / total) * 100).toFixed(1)
  });
}
async function importProducts(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting products import (${totalRows.toLocaleString()} total rows${PRODUCTS_TEST_LIMIT > 0 ? ` - limited to ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
outputProgress({
operation: 'Starting products import',
current: 0,
total: totalRows,
testLimit: PRODUCTS_TEST_LIMIT,
percentage: '0'
});
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -73,7 +93,12 @@ async function importProducts(connection, filePath) {
for await (const record of parser) {
if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`);
outputProgress({
operation: 'Products import',
message: `Reached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
@@ -81,7 +106,7 @@ async function importProducts(connection, filePath) {
// Update progress every 100ms to avoid console flooding
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Products', startTime);
updateProgress(rowCount, totalRows, 'Products import', startTime);
lastUpdate = now;
}
@@ -121,15 +146,29 @@ async function importProducts(connection, filePath) {
}
}
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nProducts import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated (processed ${rowCount.toLocaleString()} rows)`);
outputProgress({
status: 'running',
operation: 'Products import completed',
current: rowCount,
total: totalRows,
added,
updated,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
}
async function importOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting orders import (${totalRows.toLocaleString()} total rows${ORDERS_TEST_LIMIT > 0 ? ` - limited to ${ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
outputProgress({
operation: 'Starting orders import',
current: 0,
total: totalRows,
testLimit: ORDERS_TEST_LIMIT,
percentage: '0'
});
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -149,7 +188,12 @@ async function importOrders(connection, filePath) {
for await (const record of parser) {
if (ORDERS_TEST_LIMIT > 0 && rowCount >= ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`);
outputProgress({
operation: 'Orders import',
message: `Reached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
@@ -157,7 +201,7 @@ async function importOrders(connection, filePath) {
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Orders', startTime);
updateProgress(rowCount, totalRows, 'Orders import', startTime);
lastUpdate = now;
}
@@ -194,15 +238,30 @@ async function importOrders(connection, filePath) {
}
}
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nOrders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
outputProgress({
status: 'running',
operation: 'Orders import completed',
current: rowCount,
total: totalRows,
added,
updated,
skipped,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
}
async function importPurchaseOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PURCHASE_ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PURCHASE_ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting purchase orders import (${totalRows.toLocaleString()} total rows${PURCHASE_ORDERS_TEST_LIMIT > 0 ? ` - limited to ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
outputProgress({
operation: 'Starting purchase orders import',
current: 0,
total: totalRows,
testLimit: PURCHASE_ORDERS_TEST_LIMIT,
percentage: '0'
});
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -222,7 +281,12 @@ async function importPurchaseOrders(connection, filePath) {
for await (const record of parser) {
if (PURCHASE_ORDERS_TEST_LIMIT > 0 && rowCount >= PURCHASE_ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`);
outputProgress({
operation: 'Purchase orders import',
message: `Reached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
@@ -230,7 +294,7 @@ async function importPurchaseOrders(connection, filePath) {
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Purchase Orders', startTime);
updateProgress(rowCount, totalRows, 'Purchase orders import', startTime);
lastUpdate = now;
}
@@ -267,18 +331,35 @@ async function importPurchaseOrders(connection, filePath) {
}
}
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nPurchase orders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
outputProgress({
status: 'running',
operation: 'Purchase orders import completed',
current: rowCount,
total: totalRows,
added,
updated,
skipped,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
}
async function main() {
console.log('Starting import process...');
outputProgress({
operation: 'Starting import process',
message: 'Connecting to database...'
});
const startTime = Date.now();
const connection = await mysql.createConnection(dbConfig);
try {
// Check if tables exist, if not create them
console.log('Checking database schema...');
outputProgress({
operation: 'Checking database schema',
message: 'Creating tables if needed...'
});
const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
await connection.query(schemaSQL);
@@ -287,14 +368,21 @@ async function main() {
await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));
await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nAll imports completed successfully in ${duration} seconds`);
outputProgress({
status: 'complete',
operation: 'Import process completed',
duration: formatDuration((Date.now() - startTime) / 1000)
});
} catch (error) {
console.error('\nError during import:', error);
outputProgress({
status: 'error',
error: error.message
});
process.exit(1);
} finally {
await connection.end();
}
}
// Run the import
main();

View File

@@ -38,17 +38,42 @@ function downloadFile(url, filePath) {
const totalSize = parseInt(response.headers['content-length'], 10);
let downloadedSize = 0;
let lastProgressUpdate = Date.now();
const startTime = Date.now();
response.on('data', chunk => {
downloadedSize += chunk.length;
const progress = (downloadedSize / totalSize * 100).toFixed(2);
process.stdout.write(`\rDownloading ${path.basename(filePath)}: ${progress}%`);
const now = Date.now();
// Update progress at most every 100ms to avoid console flooding
if (now - lastProgressUpdate > 100) {
const elapsed = (now - startTime) / 1000;
const rate = downloadedSize / elapsed;
const remaining = (totalSize - downloadedSize) / rate;
console.log(JSON.stringify({
status: 'running',
operation: `Downloading ${path.basename(filePath)}`,
current: downloadedSize,
total: totalSize,
rate: (rate / 1024 / 1024).toFixed(2), // MB/s
elapsed: formatDuration(elapsed),
remaining: formatDuration(remaining),
percentage: ((downloadedSize / totalSize) * 100).toFixed(1)
}));
lastProgressUpdate = now;
}
});
response.pipe(file);
file.on('finish', () => {
process.stdout.write('\n');
console.log(JSON.stringify({
status: 'running',
operation: `Completed ${path.basename(filePath)}`,
current: totalSize,
total: totalSize,
percentage: '100'
}));
file.close();
resolve();
});
@@ -64,34 +89,79 @@ function downloadFile(url, filePath) {
});
}
// Helper function to format duration
function formatDuration(seconds) {
if (seconds < 60) return `${Math.round(seconds)}s`;
const minutes = Math.floor(seconds / 60);
seconds = Math.round(seconds % 60);
return `${minutes}m ${seconds}s`;
}
// Main function to update all files
// Download every configured CSV file, emitting JSON progress records
// on stdout so a parent process can stream them to clients.
// (Diff residue removed: the duplicate `for (const file of FILES)` loop
// header and the legacy plain-text console.log lines that shadowed the
// JSON output.)
async function updateFiles() {
  console.log(JSON.stringify({
    status: 'running',
    operation: 'Starting CSV file updates',
    total: FILES.length,
    current: 0
  }));
  for (let i = 0; i < FILES.length; i++) {
    const file = FILES[i];
    const filePath = path.join(CSV_DIR, file.name);
    try {
      // Delete existing file if it exists
      if (fs.existsSync(filePath)) {
        console.log(JSON.stringify({
          status: 'running',
          operation: `Removing existing file: ${file.name}`,
          current: i,
          total: FILES.length,
          percentage: ((i / FILES.length) * 100).toFixed(1)
        }));
        fs.unlinkSync(filePath);
      }
      // Download new file
      console.log(JSON.stringify({
        status: 'running',
        operation: `Starting download: ${file.name}`,
        current: i,
        total: FILES.length,
        percentage: ((i / FILES.length) * 100).toFixed(1)
      }));
      await downloadFile(file.url, filePath);
      console.log(JSON.stringify({
        status: 'running',
        operation: `Successfully updated ${file.name}`,
        current: i + 1,
        total: FILES.length,
        percentage: (((i + 1) / FILES.length) * 100).toFixed(1)
      }));
    } catch (error) {
      console.error(JSON.stringify({
        status: 'error',
        operation: `Error updating ${file.name}`,
        error: error.message
      }));
      throw error;
    }
  }
  console.log(JSON.stringify({
    status: 'complete',
    operation: 'CSV file update complete',
    current: FILES.length,
    total: FILES.length,
    percentage: '100'
  }));
}
// Run the update. Failures are reported as a JSON error record
// (the legacy plain-text console.error line was diff residue and
// duplicated this output), then the process exits non-zero so the
// spawning route can detect the failure.
updateFiles().catch(error => {
  console.error(JSON.stringify({
    error: `Update failed: ${error.message}`
  }));
  process.exit(1);
});

View File

@@ -1,115 +0,0 @@
const express = require('express');
const cors = require('cors');
const mysql = require('mysql2/promise');
const productsRouter = require('./routes/products');
const dashboardRouter = require('./routes/dashboard');
const ordersRouter = require('./routes/orders');
const csvRoutes = require('./routes/csv');
// Legacy Express entry point: wires CORS, the MySQL pool, debug and
// health endpoints, and the API routers onto a single app.
const app = express();
// Debug middleware to log all requests
app.use((req, res, next) => {
  console.log(`[App Debug] ${new Date().toISOString()} - ${req.method} ${req.path}`);
  next();
});
// Configure CORS with specific options
app.use(cors({
  origin: [
    'http://localhost:5173', // Local development
    'https://inventory.kent.pw', // Production frontend
    /\.kent\.pw$/ // Any subdomain of kent.pw
  ],
  methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization'],
  credentials: true,
  optionsSuccessStatus: 200
}));
app.use(express.json());
// Database connection
// NOTE(review): empty-string password fallback — presumably overridden
// by environment variables in deployment; confirm.
const pool = mysql.createPool({
  host: process.env.DB_HOST || 'localhost',
  user: process.env.DB_USER || 'root',
  password: process.env.DB_PASSWORD || '',
  database: process.env.DB_NAME || 'inventory',
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0
});
// Make db pool available in routes
app.locals.pool = pool;
// Debug endpoint to list all registered routes
app.get('/api/debug/routes', (req, res) => {
  console.log('Debug routes endpoint hit');
  const routes = [];
  app._router.stack.forEach(middleware => {
    if (middleware.route) {
      // Route registered directly on the app
      routes.push({
        path: middleware.route.path,
        methods: Object.keys(middleware.route.methods)
      });
    } else if (middleware.name === 'router') {
      // Mounted router: reconstruct the mount prefix from its regexp
      middleware.handle.stack.forEach(handler => {
        if (handler.route) {
          const fullPath = (middleware.regexp.source === '^\\/?(?=\\/|$)' ? '' : middleware.regexp.source.replace(/\\\//g, '/').replace(/\^|\$/g, '')) + handler.route.path;
          routes.push({
            path: fullPath,
            methods: Object.keys(handler.route.methods)
          });
        }
      });
    }
  });
  res.json(routes);
});
// Test endpoint to verify server is running
app.get('/api/health', (req, res) => {
  res.json({ status: 'ok' });
});
// Mount all routes under /api
console.log('Mounting routes...');
console.log('Mounting products routes...');
app.use('/api/products', productsRouter);
console.log('Mounting dashboard routes...');
app.use('/api/dashboard', dashboardRouter);
console.log('Mounting orders routes...');
app.use('/api/orders', ordersRouter);
console.log('Mounting CSV routes...');
app.use('/api/csv', csvRoutes);
console.log('CSV routes mounted');
console.log('All routes mounted');
// Error handling middleware
// (4-arg signature marks this as Express error middleware; it only
// runs when a handler calls next(err), so registering it before the
// 404 catch-all below is still correct.)
app.use((err, req, res, next) => {
  console.error('Error:', err);
  res.status(500).json({ error: err.message });
});
// 404 handler
app.use((req, res) => {
  console.log('404 Not Found:', req.method, req.path);
  res.status(404).json({ error: 'Not Found' });
});
const PORT = process.env.PORT || 3010;
app.listen(PORT, () => {
  console.log(`Server is running on port ${PORT}`);
  console.log('Available routes:');
  console.log('- GET /api/health');
  console.log('- GET /api/debug/routes');
  console.log('- GET /api/csv/status');
  console.log('- GET /api/csv/test');
  console.log('- POST /api/csv/update');
});

View File

@@ -0,0 +1,39 @@
const cors = require('cors');
// Single CORS middleware for all endpoints.
// Allows the production frontend, the local Vite dev server, and any
// private-network (192.168.x.x / 10.x.x.x) origin, with credentials.
const corsMiddleware = cors({
  origin: [
    'https://inventory.kent.pw',
    'http://localhost:5173',
    // LAN origins, with an optional :port suffix
    /^http:\/\/192\.168\.\d+\.\d+(:\d+)?$/,
    /^http:\/\/10\.\d+\.\d+\.\d+(:\d+)?$/
  ],
  methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization'],
  exposedHeaders: ['Content-Type'],
  credentials: true
});
// Express error middleware: convert CORS rejections into a 403 JSON
// response (logging the offending request), and pass every other
// error further down the chain.
// NOTE(review): the `cors` package only produces this error when the
// origin callback rejects with message 'CORS not allowed' — confirm
// the middleware configuration actually raises it.
const corsErrorHandler = (err, req, res, next) => {
  if (err.message !== 'CORS not allowed') {
    next(err);
    return;
  }
  console.error('CORS Error:', {
    origin: req.get('Origin'),
    method: req.method,
    path: req.path,
    headers: req.headers
  });
  res.status(403).json({
    error: 'CORS not allowed',
    origin: req.get('Origin'),
    message: 'Origin not in allowed list: https://inventory.kent.pw, localhost:5173, 192.168.x.x, or 10.x.x.x'
  });
};
// Expose the middleware and its error handler as a pair so the app
// can register them together.
module.exports = {
  corsMiddleware,
  corsErrorHandler
};

View File

@@ -14,15 +14,77 @@ let activeImport = null;
let importProgress = null;
// SSE clients for progress updates
const clients = new Set();
const updateClients = new Set();
const importClients = new Set();
// Helper to send a progress payload to a specific set of SSE clients.
// Plain strings are wrapped as { progress }, a default status of
// 'running' is applied when the payload has none, and clients whose
// write throws are dropped from the set.
// (Diff residue removed: the legacy single-set signature and its
// stray `client.write(...)` line were overlapping the new version.)
function sendProgressToClients(clients, progress) {
  const data = typeof progress === 'string' ? { progress } : progress;
  // Ensure we have a status field
  if (!data.status) {
    data.status = 'running';
  }
  const message = `data: ${JSON.stringify(data)}\n\n`;
  clients.forEach(client => {
    try {
      client.write(message);
      // Immediately flush the response when the transport supports it
      if (typeof client.flush === 'function') {
        client.flush();
      }
    } catch (error) {
      // Silently remove failed client
      clients.delete(client);
    }
  });
}
// Progress endpoints
// SSE stream for CSV-update progress. The CORS headers echo the
// request origin because credentialed requests cannot use '*'.
router.get('/update/progress', (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Credentials': 'true'
  });
  // Send an initial message to test the connection
  res.write('data: {"status":"running","operation":"Initializing connection..."}\n\n');
  // Add this client to the update set
  updateClients.add(res);
  // Remove client when connection closes
  req.on('close', () => {
    updateClients.delete(res);
  });
});
// SSE stream for CSV-import progress; mirrors /update/progress but
// registers the response in the import client set instead.
router.get('/import/progress', (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Credentials': 'true'
  });
  // Send an initial message to test the connection
  res.write('data: {"status":"running","operation":"Initializing connection..."}\n\n');
  // Add this client to the import set
  importClients.add(res);
  // Remove client when connection closes
  req.on('close', () => {
    importClients.delete(res);
  });
});
// Debug endpoint to verify route registration
router.get('/test', (req, res) => {
console.log('CSV test endpoint hit');
@@ -39,45 +101,72 @@ router.get('/status', (req, res) => {
});
// Route to update CSV files
router.post('/update', async (req, res) => {
console.log('CSV update endpoint hit');
router.post('/update', async (req, res, next) => {
if (activeImport) {
console.log('Import already in progress');
return res.status(409).json({ error: 'Import already in progress' });
}
try {
const scriptPath = path.join(__dirname, '..', '..', 'scripts', 'update-csv.js');
console.log('Running script:', scriptPath);
if (!require('fs').existsSync(scriptPath)) {
console.error('Script not found:', scriptPath);
return res.status(500).json({ error: 'Update script not found' });
}
activeImport = spawn('node', [scriptPath]);
activeImport.stdout.on('data', (data) => {
console.log(`CSV Update: ${data}`);
importProgress = data.toString();
sendProgressToClients({ status: 'running', progress: importProgress });
const output = data.toString().trim();
try {
// Try to parse as JSON
const jsonData = JSON.parse(output);
sendProgressToClients(updateClients, {
status: 'running',
...jsonData
});
} catch (e) {
// If not JSON, send as plain progress
sendProgressToClients(updateClients, {
status: 'running',
progress: output
});
}
});
activeImport.stderr.on('data', (data) => {
console.error(`CSV Update Error: ${data}`);
sendProgressToClients({ status: 'error', error: data.toString() });
const error = data.toString().trim();
try {
// Try to parse as JSON
const jsonData = JSON.parse(error);
sendProgressToClients(updateClients, {
status: 'error',
...jsonData
});
} catch {
sendProgressToClients(updateClients, {
status: 'error',
error
});
}
});
await new Promise((resolve, reject) => {
activeImport.on('close', (code) => {
console.log(`CSV update process exited with code ${code}`);
if (code === 0) {
sendProgressToClients({ status: 'complete' });
// Don't treat cancellation (code 143/SIGTERM) as an error
if (code === 0 || code === 143) {
sendProgressToClients(updateClients, {
status: 'complete',
operation: code === 143 ? 'Operation cancelled' : 'Update complete'
});
resolve();
} else {
sendProgressToClients({ status: 'error', error: `Process exited with code ${code}` });
reject(new Error(`Update process exited with code ${code}`));
const errorMsg = `Update process exited with code ${code}`;
sendProgressToClients(updateClients, {
status: 'error',
error: errorMsg
});
reject(new Error(errorMsg));
}
activeImport = null;
importProgress = null;
@@ -89,7 +178,138 @@ router.post('/update', async (req, res) => {
console.error('Error updating CSV files:', error);
activeImport = null;
importProgress = null;
res.status(500).json({ error: 'Failed to update CSV files', details: error.message });
sendProgressToClients(updateClients, {
status: 'error',
error: error.message
});
next(error);
}
});
// Route to import CSV files
// Spawns scripts/import-csv.js as a child process, relays its
// JSON-line stdout/stderr to the SSE import clients, and responds
// only after the child exits. Exit code 143 (SIGTERM from /cancel)
// is treated as a successful cancellation, not an error.
router.post('/import', async (req, res) => {
  if (activeImport) {
    // Only one update/import child may run at a time
    return res.status(409).json({ error: 'Import already in progress' });
  }
  try {
    const scriptPath = path.join(__dirname, '..', '..', 'scripts', 'import-csv.js');
    if (!require('fs').existsSync(scriptPath)) {
      return res.status(500).json({ error: 'Import script not found' });
    }
    // Get test limits from request body
    // NOTE(review): assumes req.body is always a parsed object
    // (express.json() mounted upstream) — confirm.
    const { products = 0, orders = 10000, purchaseOrders = 10000 } = req.body;
    // Create environment variables for the script
    const env = {
      ...process.env,
      PRODUCTS_TEST_LIMIT: products.toString(),
      ORDERS_TEST_LIMIT: orders.toString(),
      PURCHASE_ORDERS_TEST_LIMIT: purchaseOrders.toString()
    };
    activeImport = spawn('node', [scriptPath], { env });
    activeImport.stdout.on('data', (data) => {
      const output = data.toString().trim();
      try {
        // Try to parse as JSON
        const jsonData = JSON.parse(output);
        sendProgressToClients(importClients, {
          status: 'running',
          ...jsonData
        });
      } catch {
        // If not JSON, send as plain progress
        sendProgressToClients(importClients, {
          status: 'running',
          progress: output
        });
      }
    });
    activeImport.stderr.on('data', (data) => {
      const error = data.toString().trim();
      try {
        // Try to parse as JSON
        const jsonData = JSON.parse(error);
        sendProgressToClients(importClients, {
          status: 'error',
          ...jsonData
        });
      } catch {
        sendProgressToClients(importClients, {
          status: 'error',
          error
        });
      }
    });
    // Block the HTTP response until the child process finishes so the
    // caller gets a definitive success/failure.
    await new Promise((resolve, reject) => {
      activeImport.on('close', (code) => {
        // Don't treat cancellation (code 143/SIGTERM) as an error
        if (code === 0 || code === 143) {
          sendProgressToClients(importClients, {
            status: 'complete',
            operation: code === 143 ? 'Operation cancelled' : 'Import complete'
          });
          resolve();
        } else {
          sendProgressToClients(importClients, {
            status: 'error',
            error: `Process exited with code ${code}`
          });
          reject(new Error(`Import process exited with code ${code}`));
        }
        activeImport = null;
        importProgress = null;
      });
    });
    res.json({ success: true });
  } catch (error) {
    console.error('Error importing CSV files:', error);
    // Reset module state so a subsequent run can start
    activeImport = null;
    importProgress = null;
    sendProgressToClients(importClients, {
      status: 'error',
      error: error.message
    });
    res.status(500).json({ error: 'Failed to import CSV files', details: error.message });
  }
});
// Route to cancel the currently active update/import child process.
// Kills the process, clears the module-level state, and notifies
// both SSE client sets that the operation was cancelled.
router.post('/cancel', (req, res) => {
  if (!activeImport) {
    res.status(404).json({ error: 'No active process to cancel' });
    return;
  }
  try {
    activeImport.kill();
    activeImport = null;
    importProgress = null;
    const notice = {
      status: 'complete',
      operation: 'Operation cancelled'
    };
    for (const group of [updateClients, importClients]) {
      sendProgressToClients(group, notice);
    }
    res.json({ success: true });
  } catch (error) {
    // Even on failure, reset the state so a new run can start
    activeImport = null;
    importProgress = null;
    res.status(500).json({ error: 'Failed to cancel process' });
  }
});

View File

@@ -1,5 +1,12 @@
const path = require('path');
const fs = require('fs');
const express = require('express');
const mysql = require('mysql2/promise');
const { corsMiddleware, corsErrorHandler } = require('./middleware/cors');
const productsRouter = require('./routes/products');
const dashboardRouter = require('./routes/dashboard');
const ordersRouter = require('./routes/orders');
const csvRouter = require('./routes/csv');
// Get the absolute path to the .env file
const envPath = path.resolve(process.cwd(), '.env');
@@ -21,23 +28,6 @@ try {
console.error('Error loading .env file:', error);
}
// Log environment variables (excluding sensitive data)
console.log('Environment variables loaded:', {
NODE_ENV: process.env.NODE_ENV,
PORT: process.env.PORT,
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_NAME: process.env.DB_NAME,
// Not logging DB_PASSWORD for security
});
const express = require('express');
const cors = require('cors');
const mysql = require('mysql2/promise');
const productsRouter = require('./routes/products');
const dashboardRouter = require('./routes/dashboard');
const ordersRouter = require('./routes/orders');
// Ensure required directories exist
['logs', 'uploads'].forEach(dir => {
if (!fs.existsSync(dir)) {
@@ -47,39 +37,62 @@ const ordersRouter = require('./routes/orders');
const app = express();
// CORS configuration - move before route handlers
app.use(cors({
origin: ['https://inventory.kent.pw', 'https://www.inventory.kent.pw'],
methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
allowedHeaders: ['Content-Type', 'Authorization'],
credentials: true,
optionsSuccessStatus: 200 // Some legacy browsers (IE11) choke on 204
}));
// Debug middleware to log request details
app.use((req, res, next) => {
console.log('Request details:', {
method: req.method,
url: req.url,
origin: req.get('Origin'),
headers: req.headers
});
next();
});
// Apply CORS middleware first, before any other middleware
app.use(corsMiddleware);
// Body parser middleware
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Request logging middleware
app.use((req, res, next) => {
const start = Date.now();
res.on('finish', () => {
const duration = Date.now() - start;
console.log(
`[${new Date().toISOString()}] ${req.method} ${req.url} ${res.statusCode} ${duration}ms`
);
// Routes
app.use('/api/products', productsRouter);
app.use('/api/dashboard', dashboardRouter);
app.use('/api/orders', ordersRouter);
app.use('/api/csv', csvRouter);
// Basic health check route
app.get('/health', (req, res) => {
res.json({
status: 'ok',
timestamp: new Date().toISOString(),
environment: process.env.NODE_ENV
});
next();
});
// Error handling middleware - move before route handlers
// CORS error handler - must be before other error handlers
app.use(corsErrorHandler);
// Error handling middleware - MUST be after routes and CORS error handler
app.use((err, req, res, next) => {
console.error(`[${new Date().toISOString()}] Error:`, err);
res.status(500).json({
error: process.env.NODE_ENV === 'production'
? 'An internal server error occurred'
: err.message
});
// Send detailed error in development, generic in production
const error = process.env.NODE_ENV === 'production'
? 'An internal server error occurred'
: err.message || err;
res.status(err.status || 500).json({ error });
});
// Handle uncaught exceptions
process.on('uncaughtException', (err) => {
console.error(`[${new Date().toISOString()}] Uncaught Exception:`, err);
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
});
// Database connection pool
@@ -109,30 +122,6 @@ pool.getConnection()
process.exit(1);
});
// Routes
app.use('/api/products', productsRouter);
app.use('/api/dashboard', dashboardRouter);
app.use('/api/orders', ordersRouter);
// Basic health check route
app.get('/health', (req, res) => {
res.json({
status: 'ok',
timestamp: new Date().toISOString(),
environment: process.env.NODE_ENV
});
});
// Handle uncaught exceptions
process.on('uncaughtException', (err) => {
console.error(`[${new Date().toISOString()}] Uncaught Exception:`, err);
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
});
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);