Connect to the database for dropdown options; more fixes for the Validate Data step
This commit is contained in:
270
inventory-server/src/routes/import.js
Normal file
270
inventory-server/src/routes/import.js
Normal file
@@ -0,0 +1,270 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { Client } = require('ssh2');
|
||||
const mysql = require('mysql2/promise');
|
||||
|
||||
// Helper: open an SSH connection to the production host and forward a
// local stream to the production MySQL port.
//
// Resolves with { ssh, stream, dbConfig }:
//   ssh      - the ssh2 Client; the caller MUST call ssh.end() when done
//   stream   - duplex stream forwarded to dbConfig.host:dbConfig.port
//   dbConfig - MySQL connection options built from PROD_DB_* env vars
// Rejects if the SSH connection or the port-forward fails.
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    // Key auth only when a key path is configured; otherwise ssh2 falls
    // back to whatever other auth the config allows.
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? require('fs').readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };

  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z' // treat DB datetimes as UTC
  };

  return new Promise((resolve, reject) => {
    const ssh = new Client();

    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });

    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          if (err) {
            // BUGFIX: the original fell through and still called resolve();
            // also close the SSH client so a failed forward doesn't leak
            // an open connection nobody owns.
            ssh.end();
            reject(err);
            return;
          }
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
|
||||
|
||||
// GET /field-options
// Returns every dropdown option list needed by the import form:
// companies, artists, sizes, themes (with subthemes), categories (four
// levels), colors, suppliers, tax categories, and a static list of
// shipping restrictions. All data is read from the production database
// over an SSH tunnel; the tunnel and connection are torn down in finally.
router.get('/field-options', async (req, res) => {
  let ssh;
  let connection;

  // Shape a product_categories row into a { label, value } option
  // (ids are stringified for the client-side select components).
  const toOption = (row) => ({ label: row.name, value: row.cat_id.toString() });

  try {
    // Setup SSH tunnel and get database connection
    const tunnel = await setupSshTunnel();
    ssh = tunnel.ssh;

    // Create MySQL connection over SSH tunnel
    connection = await mysql.createConnection({
      ...tunnel.dbConfig,
      stream: tunnel.stream
    });

    // The eight queries are independent. mysql2 queues commands on a
    // single connection, so issuing them together is safe and avoids
    // eight sequential await round-trips in application code.
    const [
      [companies],      // type 1
      [artists],        // type 40
      [sizes],          // type 50
      [themes],         // type 20 (+ subthemes, type 21)
      [categories],     // types 10/11/12/13, four levels deep
      [colors],
      [suppliers],
      [taxCategories]
    ] = await Promise.all([
      connection.query(`
        SELECT cat_id, name
        FROM product_categories
        WHERE type = 1
        ORDER BY name
      `),
      connection.query(`
        SELECT cat_id, name
        FROM product_categories
        WHERE type = 40
        ORDER BY name
      `),
      connection.query(`
        SELECT cat_id, name
        FROM product_categories
        WHERE type = 50
        ORDER BY name
      `),
      // Themes with subthemes: top-level rows first, then "Theme - Sub"
      // rows, all sorted by theme then subtheme.
      connection.query(`
        SELECT t.cat_id, t.name AS display_name, t.type, t.name AS sort_theme,
               '' AS sort_subtheme, 1 AS level_order
        FROM product_categories t
        WHERE t.type = 20
        UNION ALL
        SELECT ts.cat_id, CONCAT(t.name,' - ',ts.name) AS display_name, ts.type,
               t.name AS sort_theme, ts.name AS sort_subtheme, 2 AS level_order
        FROM product_categories ts
        JOIN product_categories t ON ts.master_cat_id = t.cat_id
        WHERE ts.type = 21 AND t.type = 20
        ORDER BY sort_theme, sort_subtheme
      `),
      // Categories: section (10) > category (11) > subcategory (12) >
      // sub-subcategory (13), each row labeled with its full path.
      connection.query(`
        SELECT s.cat_id, s.name AS display_name, s.type, s.name AS sort_section,
               '' AS sort_category, '' AS sort_subcategory, '' AS sort_subsubcategory,
               1 AS level_order
        FROM product_categories s
        WHERE s.type = 10
        UNION ALL
        SELECT c.cat_id, CONCAT(s.name,' - ',c.name) AS display_name, c.type,
               s.name AS sort_section, c.name AS sort_category, '' AS sort_subcategory,
               '' AS sort_subsubcategory, 2 AS level_order
        FROM product_categories c
        JOIN product_categories s ON c.master_cat_id = s.cat_id
        WHERE c.type = 11 AND s.type = 10
        UNION ALL
        SELECT sc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name) AS display_name,
               sc.type, s.name AS sort_section, c.name AS sort_category,
               sc.name AS sort_subcategory, '' AS sort_subsubcategory, 3 AS level_order
        FROM product_categories sc
        JOIN product_categories c ON sc.master_cat_id = c.cat_id
        JOIN product_categories s ON c.master_cat_id = s.cat_id
        WHERE sc.type = 12 AND c.type = 11 AND s.type = 10
        UNION ALL
        SELECT ssc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name,' - ',ssc.name) AS display_name,
               ssc.type, s.name AS sort_section, c.name AS sort_category,
               sc.name AS sort_subcategory, ssc.name AS sort_subsubcategory, 4 AS level_order
        FROM product_categories ssc
        JOIN product_categories sc ON ssc.master_cat_id = sc.cat_id
        JOIN product_categories c ON sc.master_cat_id = c.cat_id
        JOIN product_categories s ON c.master_cat_id = s.cat_id
        WHERE ssc.type = 13 AND sc.type = 12 AND c.type = 11 AND s.type = 10
        ORDER BY sort_section, sort_category, sort_subcategory, sort_subsubcategory
      `),
      // \`order\` is a reserved word, hence the backticks.
      connection.query(`
        SELECT color, name, hex_color
        FROM product_color_list
        ORDER BY \`order\`
      `),
      connection.query(`
        SELECT supplierid as value, companyname as label
        FROM suppliers
        WHERE companyname <> ''
        ORDER BY companyname
      `),
      // tax_code_id = 0 sorts first (default/none), then by name.
      connection.query(`
        SELECT tax_code_id as value, name as label
        FROM product_tax_codes
        ORDER BY tax_code_id = 0 DESC, name
      `)
    ]);

    res.json({
      companies: companies.map(toOption),
      artists: artists.map(toOption),
      sizes: sizes.map(toOption),
      themes: themes.map(t => ({
        label: t.display_name,
        value: t.cat_id.toString(),
        type: t.type,
        level: t.level_order
      })),
      categories: categories.map(c => ({
        label: c.display_name,
        value: c.cat_id.toString(),
        type: c.type,
        level: c.level_order
      })),
      colors: colors.map(c => ({
        label: c.name,
        value: c.color,
        hexColor: c.hex_color
      })),
      suppliers: suppliers,
      taxCategories: taxCategories,
      // Static list — values mirror the production enum for this field.
      shippingRestrictions: [
        { label: "None", value: "0" },
        { label: "US Only", value: "1" },
        { label: "Limited Quantity", value: "2" },
        { label: "US/CA Only", value: "3" },
        { label: "No FedEx 2 Day", value: "4" },
        { label: "North America Only", value: "5" }
      ]
    });
  } catch (error) {
    console.error('Error fetching import field options:', error);
    res.status(500).json({ error: 'Failed to fetch import field options' });
  } finally {
    if (connection) await connection.end();
    if (ssh) ssh.end();
  }
});
|
||||
|
||||
// GET /product-lines/:companyId
// Returns the product lines (type 2) belonging to the given company,
// as { label, value } options sorted by name.
router.get('/product-lines/:companyId', async (req, res) => {
  let ssh;
  let connection;

  try {
    // Open the SSH tunnel, then a MySQL connection over its stream.
    const tunnel = await setupSshTunnel();
    ssh = tunnel.ssh;
    connection = await mysql.createConnection({ ...tunnel.dbConfig, stream: tunnel.stream });

    const sql = `
      SELECT cat_id as value, name as label
      FROM product_categories
      WHERE type = 2
      AND master_cat_id = ?
      ORDER BY name
    `;
    const [rows] = await connection.query(sql, [req.params.companyId]);

    // Ids go out as strings for the client-side dropdowns.
    const options = rows.map(({ label, value }) => ({ label, value: value.toString() }));
    res.json(options);
  } catch (error) {
    console.error('Error fetching product lines:', error);
    res.status(500).json({ error: 'Failed to fetch product lines' });
  } finally {
    if (connection) await connection.end();
    if (ssh) ssh.end();
  }
});
|
||||
|
||||
// GET /sublines/:lineId
// Returns the sublines (type 3) nested under the given product line,
// as { label, value } options sorted by name.
router.get('/sublines/:lineId', async (req, res) => {
  let ssh;
  let connection;

  try {
    // Tunnel first, then a single MySQL connection riding on it.
    const tunnel = await setupSshTunnel();
    ssh = tunnel.ssh;

    connection = await mysql.createConnection({
      ...tunnel.dbConfig,
      stream: tunnel.stream
    });

    const [rows] = await connection.query(
      `
      SELECT cat_id as value, name as label
      FROM product_categories
      WHERE type = 3
      AND master_cat_id = ?
      ORDER BY name
    `,
      [req.params.lineId]
    );

    // Stringify ids for the client-side select components.
    const payload = [];
    for (const row of rows) {
      payload.push({ label: row.label, value: row.value.toString() });
    }
    res.json(payload);
  } catch (error) {
    console.error('Error fetching sublines:', error);
    res.status(500).json({ error: 'Failed to fetch sublines' });
  } finally {
    if (connection) await connection.end();
    if (ssh) ssh.end();
  }
});
|
||||
|
||||
// Expose the import routes for mounting by the app (e.g. app.use('/api/import', router)).
module.exports = router;
|
||||
@@ -17,6 +17,7 @@ const metricsRouter = require('./routes/metrics');
|
||||
const vendorsRouter = require('./routes/vendors');
|
||||
const categoriesRouter = require('./routes/categories');
|
||||
const testConnectionRouter = require('./routes/test-connection');
|
||||
const importRouter = require('./routes/import');
|
||||
|
||||
// Get the absolute path to the .env file
|
||||
const envPath = path.resolve(process.cwd(), '.env');
|
||||
@@ -65,58 +66,68 @@ app.use(corsMiddleware);
|
||||
app.use(express.json());
|
||||
app.use(express.urlencoded({ extended: true }));
|
||||
|
||||
// Initialize database pool
|
||||
const pool = initPool({
|
||||
host: process.env.DB_HOST,
|
||||
user: process.env.DB_USER,
|
||||
password: process.env.DB_PASSWORD,
|
||||
database: process.env.DB_NAME,
|
||||
waitForConnections: true,
|
||||
connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10,
|
||||
queueLimit: 0,
|
||||
enableKeepAlive: true,
|
||||
keepAliveInitialDelay: 0
|
||||
});
|
||||
// Initialize database pool and start server
|
||||
async function startServer() {
|
||||
try {
|
||||
// Initialize database pool
|
||||
const pool = await initPool({
|
||||
waitForConnections: true,
|
||||
connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10,
|
||||
queueLimit: 0,
|
||||
enableKeepAlive: true,
|
||||
keepAliveInitialDelay: 0
|
||||
});
|
||||
|
||||
// Make pool available to routes
|
||||
app.locals.pool = pool;
|
||||
// Make pool available to routes
|
||||
app.locals.pool = pool;
|
||||
|
||||
// Routes
|
||||
app.use('/api/products', productsRouter);
|
||||
app.use('/api/dashboard', dashboardRouter);
|
||||
app.use('/api/orders', ordersRouter);
|
||||
app.use('/api/csv', csvRouter);
|
||||
app.use('/api/analytics', analyticsRouter);
|
||||
app.use('/api/purchase-orders', purchaseOrdersRouter);
|
||||
app.use('/api/config', configRouter);
|
||||
app.use('/api/metrics', metricsRouter);
|
||||
app.use('/api/vendors', vendorsRouter);
|
||||
app.use('/api/categories', categoriesRouter);
|
||||
app.use('/api', testConnectionRouter);
|
||||
// Set up routes after pool is initialized
|
||||
app.use('/api/products', productsRouter);
|
||||
app.use('/api/dashboard', dashboardRouter);
|
||||
app.use('/api/orders', ordersRouter);
|
||||
app.use('/api/csv', csvRouter);
|
||||
app.use('/api/analytics', analyticsRouter);
|
||||
app.use('/api/purchase-orders', purchaseOrdersRouter);
|
||||
app.use('/api/config', configRouter);
|
||||
app.use('/api/metrics', metricsRouter);
|
||||
app.use('/api/vendors', vendorsRouter);
|
||||
app.use('/api/categories', categoriesRouter);
|
||||
app.use('/api/import', importRouter);
|
||||
app.use('/api', testConnectionRouter);
|
||||
|
||||
// Basic health check route
|
||||
app.get('/health', (req, res) => {
|
||||
res.json({
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
environment: process.env.NODE_ENV
|
||||
});
|
||||
});
|
||||
// Basic health check route
|
||||
app.get('/health', (req, res) => {
|
||||
res.json({
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
environment: process.env.NODE_ENV
|
||||
});
|
||||
});
|
||||
|
||||
// CORS error handler - must be before other error handlers
|
||||
app.use(corsErrorHandler);
|
||||
// CORS error handler - must be before other error handlers
|
||||
app.use(corsErrorHandler);
|
||||
|
||||
// Error handling middleware - MUST be after routes and CORS error handler
|
||||
app.use((err, req, res, next) => {
|
||||
console.error(`[${new Date().toISOString()}] Error:`, err);
|
||||
|
||||
// Send detailed error in development, generic in production
|
||||
const error = process.env.NODE_ENV === 'production'
|
||||
? 'An internal server error occurred'
|
||||
: err.message || err;
|
||||
|
||||
res.status(err.status || 500).json({ error });
|
||||
});
|
||||
// Error handling middleware - MUST be after routes and CORS error handler
|
||||
app.use((err, req, res, next) => {
|
||||
console.error(`[${new Date().toISOString()}] Error:`, err);
|
||||
|
||||
// Send detailed error in development, generic in production
|
||||
const error = process.env.NODE_ENV === 'production'
|
||||
? 'An internal server error occurred'
|
||||
: err.message || err;
|
||||
|
||||
res.status(err.status || 500).json({ error });
|
||||
});
|
||||
|
||||
const PORT = process.env.PORT || 3000;
|
||||
app.listen(PORT, () => {
|
||||
console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Failed to start server:', error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle uncaught exceptions
|
||||
process.on('uncaughtException', (err) => {
|
||||
@@ -128,17 +139,6 @@ process.on('unhandledRejection', (reason, promise) => {
|
||||
console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
|
||||
});
|
||||
|
||||
// Test database connection
|
||||
pool.getConnection()
|
||||
.then(connection => {
|
||||
console.log('[Database] Connected successfully');
|
||||
connection.release();
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('[Database] Error connecting:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Initialize client sets for SSE
|
||||
const importClients = new Set();
|
||||
const updateClients = new Set();
|
||||
@@ -189,62 +189,5 @@ const setupSSE = (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
// Update the status endpoint to include reset-metrics
|
||||
app.get('/csv/status', (req, res) => {
|
||||
res.json({
|
||||
active: !!currentOperation,
|
||||
type: currentOperation?.type || null,
|
||||
progress: currentOperation ? {
|
||||
status: currentOperation.status,
|
||||
operation: currentOperation.operation,
|
||||
current: currentOperation.current,
|
||||
total: currentOperation.total,
|
||||
percentage: currentOperation.percentage
|
||||
} : null
|
||||
});
|
||||
});
|
||||
|
||||
// Update progress endpoint mapping
|
||||
app.get('/csv/:type/progress', (req, res) => {
|
||||
const { type } = req.params;
|
||||
if (!['import', 'update', 'reset', 'reset-metrics'].includes(type)) {
|
||||
res.status(400).json({ error: 'Invalid operation type' });
|
||||
return;
|
||||
}
|
||||
|
||||
setupSSE(req, res);
|
||||
});
|
||||
|
||||
// Update the cancel endpoint to handle reset-metrics
|
||||
app.post('/csv/cancel', (req, res) => {
|
||||
const { operation } = req.query;
|
||||
|
||||
if (!currentOperation) {
|
||||
res.status(400).json({ error: 'No operation in progress' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (operation && operation.toLowerCase() !== currentOperation.type) {
|
||||
res.status(400).json({ error: 'Operation type mismatch' });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Handle cancellation based on operation type
|
||||
if (currentOperation.type === 'reset-metrics') {
|
||||
// Reset metrics doesn't need special cleanup
|
||||
currentOperation = null;
|
||||
res.json({ message: 'Reset metrics cancelled' });
|
||||
} else {
|
||||
// ... existing cancellation logic for other operations ...
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during cancellation:', error);
|
||||
res.status(500).json({ error: 'Failed to cancel operation' });
|
||||
}
|
||||
});
|
||||
|
||||
const PORT = process.env.PORT || 3000;
|
||||
app.listen(PORT, () => {
|
||||
console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
|
||||
});
|
||||
// Start the server
|
||||
startServer();
|
||||
@@ -1,10 +1,66 @@
|
||||
const mysql = require('mysql2/promise');
|
||||
const { Client } = require('ssh2');
|
||||
|
||||
let pool;
|
||||
|
||||
function initPool(config) {
|
||||
pool = mysql.createPool(config);
|
||||
return pool;
|
||||
// Open an SSH connection to the production host and forward a local
// stream to the production MySQL port (PROD_DB_HOST:PROD_DB_PORT).
// Resolves with { ssh, stream }; the caller owns both and must close
// the ssh client when finished. Rejects if the SSH connection or the
// port-forward fails.
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? require('fs').readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };

  return new Promise((resolve, reject) => {
    const ssh = new Client();

    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });

    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        process.env.PROD_DB_HOST || 'localhost',
        process.env.PROD_DB_PORT || 3306,
        (err, stream) => {
          if (err) {
            // BUGFIX: the original fell through and also called resolve();
            // close the SSH client so the failed forward doesn't leak it.
            ssh.end();
            reject(err);
            return;
          }
          resolve({ ssh, stream });
        }
      );
    }).connect(sshConfig);
  });
}
|
||||
|
||||
// Create the shared MySQL connection pool, routed through an SSH tunnel
// to the production database. Verifies the pool by acquiring and
// releasing one connection, then returns it. Logs and rethrows on any
// failure (tunnel setup, pool creation, or the test connection).
//
// NOTE(review): a single forwarded `stream` is handed to the whole pool;
// it looks like every pooled connection would share that one duplex, so
// concurrent connections beyond the first may fail — confirm under load.
async function initPool(config) {
  try {
    const tunnel = await setupSshTunnel();

    pool = mysql.createPool({
      ...config, // caller-supplied pool tuning (connectionLimit, keep-alive, …)
      stream: tunnel.stream, // route traffic through the SSH-forwarded stream
      host: process.env.PROD_DB_HOST || 'localhost',
      user: process.env.PROD_DB_USER,
      password: process.env.PROD_DB_PASSWORD,
      database: process.env.PROD_DB_NAME,
      port: process.env.PROD_DB_PORT || 3306
    });

    // Test the connection
    const connection = await pool.getConnection();
    console.log('[Database] Connected successfully through SSH tunnel');
    connection.release();

    return pool;
  } catch (error) {
    console.error('[Database] Error initializing pool:', error);
    throw error;
  }
}
|
||||
|
||||
async function getConnection() {
|
||||
|
||||
Reference in New Issue
Block a user