diff --git a/inventory-server/src/routes/import.js b/inventory-server/src/routes/import.js
new file mode 100644
index 0000000..09b9df6
--- /dev/null
+++ b/inventory-server/src/routes/import.js
@@ -0,0 +1,270 @@
+const express = require('express');
+const router = express.Router();
+const { Client } = require('ssh2');
+const mysql = require('mysql2/promise');
+
+// Helper function to setup SSH tunnel
+async function setupSshTunnel() {
+  const sshConfig = {
+    host: process.env.PROD_SSH_HOST,
+    port: process.env.PROD_SSH_PORT || 22,
+    username: process.env.PROD_SSH_USER,
+    privateKey: process.env.PROD_SSH_KEY_PATH
+      ? require('fs').readFileSync(process.env.PROD_SSH_KEY_PATH)
+      : undefined,
+    compress: true
+  };
+
+  const dbConfig = {
+    host: process.env.PROD_DB_HOST || 'localhost',
+    user: process.env.PROD_DB_USER,
+    password: process.env.PROD_DB_PASSWORD,
+    database: process.env.PROD_DB_NAME,
+    port: process.env.PROD_DB_PORT || 3306,
+    timezone: 'Z'
+  };
+
+  return new Promise((resolve, reject) => {
+    const ssh = new Client();
+
+    ssh.on('error', (err) => {
+      console.error('SSH connection error:', err);
+      reject(err);
+    });
+
+    ssh.on('ready', () => {
+      ssh.forwardOut(
+        '127.0.0.1',
+        0,
+        dbConfig.host,
+        dbConfig.port,
+        (err, stream) => {
+          if (err) return reject(err);
+          resolve({ ssh, stream, dbConfig });
+        }
+      );
+    }).connect(sshConfig);
+  });
+}
+
+// Get all options for import fields
+router.get('/field-options', async (req, res) => {
+  let ssh;
+  let connection;
+
+  try {
+    // Setup SSH tunnel and get database connection
+    const tunnel = await setupSshTunnel();
+    ssh = tunnel.ssh;
+
+    // Create MySQL connection over SSH tunnel
+    connection = await mysql.createConnection({
+      ...tunnel.dbConfig,
+      stream: tunnel.stream
+    });
+
+    // Fetch companies (type 1)
+    const [companies] = await connection.query(`
+      SELECT cat_id, name
+      FROM product_categories
+      WHERE type = 1
+      ORDER BY name
+    `);
+
+    // Fetch artists (type 40)
+    const [artists] = await connection.query(`
+      SELECT cat_id, name
+      FROM product_categories
+      WHERE type = 40
+      ORDER BY name
+    `);
+
+    // Fetch sizes (type 50)
+    const [sizes] = await connection.query(`
+      SELECT cat_id, name
+      FROM product_categories
+      WHERE type = 50
+      ORDER BY name
+    `);
+
+    // Fetch themes with subthemes
+    const [themes] = await connection.query(`
+      SELECT t.cat_id, t.name AS display_name, t.type, t.name AS sort_theme,
+             '' AS sort_subtheme, 1 AS level_order
+      FROM product_categories t
+      WHERE t.type = 20
+      UNION ALL
+      SELECT ts.cat_id, CONCAT(t.name,' - ',ts.name) AS display_name, ts.type,
+             t.name AS sort_theme, ts.name AS sort_subtheme, 2 AS level_order
+      FROM product_categories ts
+      JOIN product_categories t ON ts.master_cat_id = t.cat_id
+      WHERE ts.type = 21 AND t.type = 20
+      ORDER BY sort_theme, sort_subtheme
+    `);
+
+    // Fetch categories with all levels
+    const [categories] = await connection.query(`
+      SELECT s.cat_id, s.name AS display_name, s.type, s.name AS sort_section,
+             '' AS sort_category, '' AS sort_subcategory, '' AS sort_subsubcategory,
+             1 AS level_order
+      FROM product_categories s
+      WHERE s.type = 10
+      UNION ALL
+      SELECT c.cat_id, CONCAT(s.name,' - ',c.name) AS display_name, c.type,
+             s.name AS sort_section, c.name AS sort_category, '' AS sort_subcategory,
+             '' AS sort_subsubcategory, 2 AS level_order
+      FROM product_categories c
+      JOIN product_categories s ON c.master_cat_id = s.cat_id
+      WHERE c.type = 11 AND s.type = 10
+      UNION ALL
+      SELECT sc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name) AS display_name,
+             sc.type, s.name AS sort_section, c.name AS sort_category,
+             sc.name AS sort_subcategory, '' AS sort_subsubcategory, 3 AS level_order
+      FROM product_categories sc
+      JOIN product_categories c ON sc.master_cat_id = c.cat_id
+      JOIN product_categories s ON c.master_cat_id = s.cat_id
+      WHERE sc.type = 12 AND c.type = 11 AND s.type = 10
+      UNION ALL
+      SELECT ssc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name,' - ',ssc.name) AS display_name,
+             ssc.type, s.name AS sort_section, c.name AS sort_category,
+             sc.name AS sort_subcategory, ssc.name AS sort_subsubcategory, 4 AS level_order
+      FROM product_categories ssc
+      JOIN product_categories sc ON ssc.master_cat_id = sc.cat_id
+      JOIN product_categories c ON sc.master_cat_id = c.cat_id
+      JOIN product_categories s ON c.master_cat_id = s.cat_id
+      WHERE ssc.type = 13 AND sc.type = 12 AND c.type = 11 AND s.type = 10
+      ORDER BY sort_section, sort_category, sort_subcategory, sort_subsubcategory
+    `);
+
+    // Fetch colors
+    const [colors] = await connection.query(`
+      SELECT color, name, hex_color
+      FROM product_color_list
+      ORDER BY \`order\`
+    `);
+
+    // Fetch suppliers
+    const [suppliers] = await connection.query(`
+      SELECT supplierid as value, companyname as label
+      FROM suppliers
+      WHERE companyname <> ''
+      ORDER BY companyname
+    `);
+
+    // Fetch tax categories
+    const [taxCategories] = await connection.query(`
+      SELECT tax_code_id as value, name as label
+      FROM product_tax_codes
+      ORDER BY tax_code_id = 0 DESC, name
+    `);
+
+    res.json({
+      companies: companies.map(c => ({ label: c.name, value: c.cat_id.toString() })),
+      artists: artists.map(a => ({ label: a.name, value: a.cat_id.toString() })),
+      sizes: sizes.map(s => ({ label: s.name, value: s.cat_id.toString() })),
+      themes: themes.map(t => ({
+        label: t.display_name,
+        value: t.cat_id.toString(),
+        type: t.type,
+        level: t.level_order
+      })),
+      categories: categories.map(c => ({
+        label: c.display_name,
+        value: c.cat_id.toString(),
+        type: c.type,
+        level: c.level_order
+      })),
+      colors: colors.map(c => ({
+        label: c.name,
+        value: c.color,
+        hexColor: c.hex_color
+      })),
+      suppliers: suppliers,
+      taxCategories: taxCategories,
+      shippingRestrictions: [
+        { label: "None", value: "0" },
+        { label: "US Only", value: "1" },
+        { label: "Limited Quantity", value: "2" },
+        { label: "US/CA Only", value: "3" },
+        { label: "No FedEx 2 Day", value: "4" },
+        { label: "North America Only", value: "5" }
+      ]
+    });
+  } catch (error) {
+    console.error('Error fetching import field options:', error);
+    res.status(500).json({ error: 'Failed to fetch import field options' });
+  } finally {
+    if (connection) await connection.end();
+    if (ssh) ssh.end();
+  }
+});
+
+// Get product lines for a specific company
+router.get('/product-lines/:companyId', async (req, res) => {
+  let ssh;
+  let connection;
+
+  try {
+    // Setup SSH tunnel and get database connection
+    const tunnel = await setupSshTunnel();
+    ssh = tunnel.ssh;
+
+    // Create MySQL connection over SSH tunnel
+    connection = await mysql.createConnection({
+      ...tunnel.dbConfig,
+      stream: tunnel.stream
+    });
+
+    const [lines] = await connection.query(`
+      SELECT cat_id as value, name as label
+      FROM product_categories
+      WHERE type = 2
+        AND master_cat_id = ?
+      ORDER BY name
+    `, [req.params.companyId]);
+
+    res.json(lines.map(l => ({ label: l.label, value: l.value.toString() })));
+  } catch (error) {
+    console.error('Error fetching product lines:', error);
+    res.status(500).json({ error: 'Failed to fetch product lines' });
+  } finally {
+    if (connection) await connection.end();
+    if (ssh) ssh.end();
+  }
+});
+
+// Get sublines for a specific product line
+router.get('/sublines/:lineId', async (req, res) => {
+  let ssh;
+  let connection;
+
+  try {
+    // Setup SSH tunnel and get database connection
+    const tunnel = await setupSshTunnel();
+    ssh = tunnel.ssh;
+
+    // Create MySQL connection over SSH tunnel
+    connection = await mysql.createConnection({
+      ...tunnel.dbConfig,
+      stream: tunnel.stream
+    });
+
+    const [sublines] = await connection.query(`
+      SELECT cat_id as value, name as label
+      FROM product_categories
+      WHERE type = 3
+        AND master_cat_id = ?
+      ORDER BY name
+    `, [req.params.lineId]);
+
+    res.json(sublines.map(s => ({ label: s.label, value: s.value.toString() })));
+  } catch (error) {
+    console.error('Error fetching sublines:', error);
+    res.status(500).json({ error: 'Failed to fetch sublines' });
+  } finally {
+    if (connection) await connection.end();
+    if (ssh) ssh.end();
+  }
+});
+
+module.exports = router;
\ No newline at end of file
diff --git a/inventory-server/src/server.js b/inventory-server/src/server.js
index c58ad7d..7c1f5ca 100755
--- a/inventory-server/src/server.js
+++ b/inventory-server/src/server.js
@@ -17,6 +17,7 @@ const metricsRouter = require('./routes/metrics');
 const vendorsRouter = require('./routes/vendors');
 const categoriesRouter = require('./routes/categories');
 const testConnectionRouter = require('./routes/test-connection');
+const importRouter = require('./routes/import');
 
 // Get the absolute path to the .env file
 const envPath = path.resolve(process.cwd(), '.env');
@@ -65,58 +66,68 @@ app.use(corsMiddleware);
 app.use(express.json());
 app.use(express.urlencoded({ extended: true }));
 
-// Initialize database pool
-const pool = initPool({
-  host: process.env.DB_HOST,
-  user: process.env.DB_USER,
-  password: process.env.DB_PASSWORD,
-  database: process.env.DB_NAME,
-  waitForConnections: true,
-  connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10,
-  queueLimit: 0,
-  enableKeepAlive: true,
-  keepAliveInitialDelay: 0
-});
+// Initialize database pool and start server
+async function startServer() {
+  try {
+    // Initialize database pool
+    const pool = await initPool({
+      waitForConnections: true,
+      connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10,
+      queueLimit: 0,
+      enableKeepAlive: true,
+      keepAliveInitialDelay: 0
+    });
 
-// Make pool available to routes
-app.locals.pool = pool;
+    // Make pool available to routes
+    app.locals.pool = pool;
 
-// Routes
-app.use('/api/products', productsRouter);
-app.use('/api/dashboard', dashboardRouter);
-app.use('/api/orders', ordersRouter);
-app.use('/api/csv', csvRouter);
-app.use('/api/analytics', analyticsRouter);
-app.use('/api/purchase-orders', purchaseOrdersRouter);
-app.use('/api/config', configRouter);
-app.use('/api/metrics', metricsRouter);
-app.use('/api/vendors', vendorsRouter);
-app.use('/api/categories', categoriesRouter);
-app.use('/api', testConnectionRouter);
+    // Set up routes after pool is initialized
+    app.use('/api/products', productsRouter);
+    app.use('/api/dashboard', dashboardRouter);
+    app.use('/api/orders', ordersRouter);
+    app.use('/api/csv', csvRouter);
+    app.use('/api/analytics', analyticsRouter);
+    app.use('/api/purchase-orders', purchaseOrdersRouter);
+    app.use('/api/config', configRouter);
+    app.use('/api/metrics', metricsRouter);
+    app.use('/api/vendors', vendorsRouter);
+    app.use('/api/categories', categoriesRouter);
+    app.use('/api/import', importRouter);
+    app.use('/api', testConnectionRouter);
 
-// Basic health check route
-app.get('/health', (req, res) => {
-  res.json({
-    status: 'ok',
-    timestamp: new Date().toISOString(),
-    environment: process.env.NODE_ENV
-  });
-});
+    // Basic health check route
+    app.get('/health', (req, res) => {
+      res.json({
+        status: 'ok',
+        timestamp: new Date().toISOString(),
+        environment: process.env.NODE_ENV
+      });
+    });
 
-// CORS error handler - must be before other error handlers
-app.use(corsErrorHandler);
+    // CORS error handler - must be before other error handlers
+    app.use(corsErrorHandler);
 
-// Error handling middleware - MUST be after routes and CORS error handler
-app.use((err, req, res, next) => {
-  console.error(`[${new Date().toISOString()}] Error:`, err);
-
-  // Send detailed error in development, generic in production
-  const error = process.env.NODE_ENV === 'production'
-    ? 'An internal server error occurred'
-    : err.message || err;
-
-  res.status(err.status || 500).json({ error });
-});
+    // Error handling middleware - MUST be after routes and CORS error handler
+    app.use((err, req, res, next) => {
+      console.error(`[${new Date().toISOString()}] Error:`, err);
+
+      // Send detailed error in development, generic in production
+      const error = process.env.NODE_ENV === 'production'
+        ? 'An internal server error occurred'
+        : err.message || err;
+
+      res.status(err.status || 500).json({ error });
+    });
+
+    const PORT = process.env.PORT || 3000;
+    app.listen(PORT, () => {
+      console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
+    });
+  } catch (error) {
+    console.error('Failed to start server:', error);
+    process.exit(1);
+  }
+}
 
 // Handle uncaught exceptions
 process.on('uncaughtException', (err) => {
@@ -128,17 +139,6 @@ process.on('unhandledRejection', (reason, promise) => {
   console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
 });
 
-// Test database connection
-pool.getConnection()
-  .then(connection => {
-    console.log('[Database] Connected successfully');
-    connection.release();
-  })
-  .catch(err => {
-    console.error('[Database] Error connecting:', err);
-    process.exit(1);
-  });
-
 // Initialize client sets for SSE
 const importClients = new Set();
 const updateClients = new Set();
@@ -189,62 +189,5 @@ const setupSSE = (req, res) => {
   }
 };
 
-// Update the status endpoint to include reset-metrics
-app.get('/csv/status', (req, res) => {
-  res.json({
-    active: !!currentOperation,
-    type: currentOperation?.type || null,
-    progress: currentOperation ? {
-      status: currentOperation.status,
-      operation: currentOperation.operation,
-      current: currentOperation.current,
-      total: currentOperation.total,
-      percentage: currentOperation.percentage
-    } : null
-  });
-});
-
-// Update progress endpoint mapping
-app.get('/csv/:type/progress', (req, res) => {
-  const { type } = req.params;
-  if (!['import', 'update', 'reset', 'reset-metrics'].includes(type)) {
-    res.status(400).json({ error: 'Invalid operation type' });
-    return;
-  }
-
-  setupSSE(req, res);
-});
-
-// Update the cancel endpoint to handle reset-metrics
-app.post('/csv/cancel', (req, res) => {
-  const { operation } = req.query;
-
-  if (!currentOperation) {
-    res.status(400).json({ error: 'No operation in progress' });
-    return;
-  }
-
-  if (operation && operation.toLowerCase() !== currentOperation.type) {
-    res.status(400).json({ error: 'Operation type mismatch' });
-    return;
-  }
-
-  try {
-    // Handle cancellation based on operation type
-    if (currentOperation.type === 'reset-metrics') {
-      // Reset metrics doesn't need special cleanup
-      currentOperation = null;
-      res.json({ message: 'Reset metrics cancelled' });
-    } else {
-      // ... existing cancellation logic for other operations ...
-    }
-  } catch (error) {
-    console.error('Error during cancellation:', error);
-    res.status(500).json({ error: 'Failed to cancel operation' });
-  }
-});
-
-const PORT = process.env.PORT || 3000;
-app.listen(PORT, () => {
-  console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
-});
\ No newline at end of file
+// Start the server
+startServer();
\ No newline at end of file
diff --git a/inventory-server/src/utils/db.js b/inventory-server/src/utils/db.js
index 28f689f..653ebc8 100644
--- a/inventory-server/src/utils/db.js
+++ b/inventory-server/src/utils/db.js
@@ -1,10 +1,66 @@
 const mysql = require('mysql2/promise');
+const { Client } = require('ssh2');
 
 let pool;
 
-function initPool(config) {
-  pool = mysql.createPool(config);
-  return pool;
+async function setupSshTunnel() {
+  const sshConfig = {
+    host: process.env.PROD_SSH_HOST,
+    port: process.env.PROD_SSH_PORT || 22,
+    username: process.env.PROD_SSH_USER,
+    privateKey: process.env.PROD_SSH_KEY_PATH
+      ? require('fs').readFileSync(process.env.PROD_SSH_KEY_PATH)
+      : undefined,
+    compress: true
+  };
+
+  return new Promise((resolve, reject) => {
+    const ssh = new Client();
+
+    ssh.on('error', (err) => {
+      console.error('SSH connection error:', err);
+      reject(err);
+    });
+
+    ssh.on('ready', () => {
+      ssh.forwardOut(
+        '127.0.0.1',
+        0,
+        process.env.PROD_DB_HOST || 'localhost',
+        process.env.PROD_DB_PORT || 3306,
+        (err, stream) => {
+          if (err) return reject(err);
+          resolve({ ssh, stream });
+        }
+      );
+    }).connect(sshConfig);
+  });
+}
+
+async function initPool(config) {
+  try {
+    const tunnel = await setupSshTunnel();
+
+    pool = mysql.createPool({
+      ...config,
+      stream: tunnel.stream,
+      host: process.env.PROD_DB_HOST || 'localhost',
+      user: process.env.PROD_DB_USER,
+      password: process.env.PROD_DB_PASSWORD,
+      database: process.env.PROD_DB_NAME,
+      port: process.env.PROD_DB_PORT || 3306
+    });
+
+    // Test the connection
+    const connection = await pool.getConnection();
+    console.log('[Database] Connected successfully through SSH tunnel');
+    connection.release();
+
+    return pool;
+  } catch (error) {
+    console.error('[Database] Error initializing pool:', error);
+    throw error;
+  }
 }
 
 async function getConnection() {
diff --git a/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx b/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx
index 8659d4f..5d3dc2a 100644
--- a/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx
+++ b/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx
@@ -101,7 +101,6 @@ const EditableCell = ({ value, onChange, error, field }: CellProps) => {
   }
 
   const isRequired = field.validations?.some(v => v.rule === "required")
-  const isRequiredAndEmpty = isRequired && !value
 
   // Determine the current validation state
   const getValidationState = () => {
@@ -170,6 +169,7 @@ const EditableCell = ({ value, onChange, error, field }: CellProps) => {
           "w-full justify-between",
           currentError ? "border-destructive text-destructive" : "border-input"
         )}
+        disabled={field.disabled}
       >
         {value
           ? field.fieldType.options.find((option) => option.value === value)?.label
@@ -189,6 +189,9 @@ const EditableCell = ({ value, onChange, error, field }: CellProps) => {
                   value={option.value}
                   onSelect={(currentValue) => {
                     onChange(currentValue)
+                    if (field.onChange) {
+                      field.onChange(currentValue)
+                    }
                     setIsEditing(false)
                   }}
                 >
@@ -335,7 +338,7 @@ const EditableCell = ({ value, onChange, error, field }: CellProps) => {
   return (
       {
-        if (field.fieldType.type !== "checkbox") {
+        if (field.fieldType.type !== "checkbox" && !field.disabled) {
           setIsEditing(true)
           setInputValue(Array.isArray(value) ? value.join(", ") : value ?? "")
         }
@@ -343,14 +346,15 @@ const EditableCell = ({ value, onChange, error, field }: CellProps) => {
       className={cn(
         "min-h-[36px] cursor-text p-2 rounded-md border bg-background",
         currentError ? "border-destructive" : "border-input",
-        field.fieldType.type === "checkbox" ? "flex items-center" : "flex items-center justify-between"
+        field.fieldType.type === "checkbox" ? "flex items-center" : "flex items-center justify-between",
+        field.disabled && "opacity-50 cursor-not-allowed bg-muted"
       )}
     >
       {value ? getDisplayValue(value, field.fieldType) : ""}
       {(field.fieldType.type === "select" || field.fieldType.type === "multi-select") && (
-
+
       )}
       {currentError && (
@@ -376,7 +380,7 @@ const ColumnHeader = ({
       {field.label}
-      {data.length > 1 && (
+      {data.length > 1 && !field.disabled && (
+
@@ -412,9 +578,13 @@ export function Import() {
-        onClose={() => setIsOpen(false)}
+        onClose={() => {
+          setIsOpen(false);
+          setStartFromScratch(false);
+        }}
         onSubmit={handleData}
-        fields={IMPORT_FIELDS}
+        fields={importFields}
+        initialStepState={startFromScratch ? { type: StepType.validateData, data: [{}] } : undefined}
       />
     );
diff --git a/package-lock.json b/package-lock.json
index 2f387da..b114ea5 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -4,9 +4,11 @@
   "requires": true,
   "packages": {
     "": {
-      "name": "inventory",
       "dependencies": {
         "shadcn": "^1.0.0"
+      },
+      "devDependencies": {
+        "ts-essentials": "^10.0.4"
       }
     },
     "node_modules/shadcn": {
@@ -14,6 +16,21 @@
       "resolved": "https://registry.npmjs.org/shadcn/-/shadcn-1.0.0.tgz",
       "integrity": "sha512-kCxBIBiPS83WxrWkOQHamWpr9XlLtOtOlJM6QX90h9A5xZCBMhxu4ibcNT2ZnzZLdexkYbQrnijfPKdOsZxOpA==",
       "license": "ISC"
+    },
+    "node_modules/ts-essentials": {
+      "version": "10.0.4",
+      "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-10.0.4.tgz",
+      "integrity": "sha512-lwYdz28+S4nicm+jFi6V58LaAIpxzhg9rLdgNC1VsdP/xiFBseGhF1M/shwCk6zMmwahBZdXcl34LVHrEang3A==",
+      "dev": true,
+      "license": "MIT",
+      "peerDependencies": {
+        "typescript": ">=4.5.0"
+      },
+      "peerDependenciesMeta": {
+        "typescript": {
+          "optional": true
+        }
+      }
     }
   }
 }
diff --git a/package.json b/package.json
index 897fc78..65d0e40 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,8 @@
 {
   "dependencies": {
     "shadcn": "^1.0.0"
+  },
+  "devDependencies": {
+    "ts-essentials": "^10.0.4"
   }
 }
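
Testing notes: the tunnel code in db.js and import.js reads PROD_SSH_HOST, PROD_SSH_PORT, PROD_SSH_USER, PROD_SSH_KEY_PATH and the PROD_DB_* variables from .env; none of those values ship with this patch. The sketch below is one way to smoke-test the new /api/import routes once the server is running. The base URL, the example company id, and the helper script itself are assumptions for illustration, not part of the change.

// smoke-test-import-routes.js (hypothetical helper, not included in the patch)
// Assumes Node 18+ (global fetch) and the server from this patch listening locally
// with the PROD_SSH_* / PROD_DB_* variables set in .env.
const BASE = process.env.BASE_URL || 'http://localhost:3000';

async function main() {
  // Options used to populate the spreadsheet-import dropdowns
  const options = await fetch(`${BASE}/api/import/field-options`).then((r) => r.json());
  console.log('companies:', options.companies.length, 'suppliers:', options.suppliers.length);

  // Product lines are fetched per selected company; 123 is an arbitrary example id
  const lines = await fetch(`${BASE}/api/import/product-lines/123`).then((r) => r.json());
  console.log('product lines for company 123:', lines.length);
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});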