Optimize match columns step
@@ -13,6 +13,26 @@ fs.mkdirSync(uploadsDir, { recursive: true });
 // Create a Map to track image upload times and their scheduled deletion
 const imageUploadMap = new Map();
 
+// Connection pooling and cache configuration
+const connectionCache = {
+  ssh: null,
+  dbConnection: null,
+  lastUsed: 0,
+  isConnecting: false,
+  connectionPromise: null,
+  // Cache expiration time in milliseconds (5 minutes)
+  expirationTime: 5 * 60 * 1000,
+  // Cache for query results (key: query string, value: {data, timestamp})
+  queryCache: new Map(),
+  // Cache duration for different query types in milliseconds
+  cacheDuration: {
+    'field-options': 30 * 60 * 1000, // 30 minutes for field options
+    'product-lines': 10 * 60 * 1000, // 10 minutes for product lines
+    'sublines': 10 * 60 * 1000, // 10 minutes for sublines
+    'default': 60 * 1000 // 1 minute default
+  }
+};
+
 // Function to schedule image deletion after 24 hours
 const scheduleImageDeletion = (filename, filePath) => {
   // Delete any existing timeout for this file
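Note: the fields above are consumed by the getDbConnection/getCachedQuery helpers added in the next hunk. A minimal sketch of that lookup, for orientation only (the 'product-lines-42' key is a made-up example; the other identifiers come from this diff):

    // Sketch only: per-type TTL lookup with fallback, then a freshness check
    const ttl = connectionCache.cacheDuration['product-lines'] || connectionCache.cacheDuration.default;
    const entry = connectionCache.queryCache.get('product-lines-42'); // hypothetical cache key
    const isFresh = entry && (Date.now() - entry.timestamp < ttl);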
@@ -165,7 +185,114 @@ const upload = multer({
   }
 });
 
-// Helper function to setup SSH tunnel
+// Modified function to get a database connection with connection pooling
+async function getDbConnection() {
+  const now = Date.now();
+
+  // Check if we need to refresh the connection due to inactivity
+  const needsRefresh = !connectionCache.ssh ||
+    !connectionCache.dbConnection ||
+    (now - connectionCache.lastUsed > connectionCache.expirationTime);
+
+  // If connection is still valid, update last used time and return existing connection
+  if (!needsRefresh) {
+    connectionCache.lastUsed = now;
+    return {
+      ssh: connectionCache.ssh,
+      connection: connectionCache.dbConnection
+    };
+  }
+
+  // If another request is already establishing a connection, wait for that promise
+  if (connectionCache.isConnecting && connectionCache.connectionPromise) {
+    try {
+      await connectionCache.connectionPromise;
+      return {
+        ssh: connectionCache.ssh,
+        connection: connectionCache.dbConnection
+      };
+    } catch (error) {
+      // If that connection attempt failed, we'll try again below
+      console.error('Error waiting for existing connection:', error);
+    }
+  }
+
+  // Close existing connections if they exist
+  if (connectionCache.dbConnection) {
+    try {
+      await connectionCache.dbConnection.end();
+    } catch (error) {
+      console.error('Error closing existing database connection:', error);
+    }
+  }
+
+  if (connectionCache.ssh) {
+    try {
+      connectionCache.ssh.end();
+    } catch (error) {
+      console.error('Error closing existing SSH connection:', error);
+    }
+  }
+
+  // Mark that we're establishing a new connection
+  connectionCache.isConnecting = true;
+
+  // Create a new promise for this connection attempt
+  connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
+    const { ssh, stream, dbConfig } = tunnel;
+
+    return mysql.createConnection({
+      ...dbConfig,
+      stream
+    }).then(connection => {
+      // Store the new connections
+      connectionCache.ssh = ssh;
+      connectionCache.dbConnection = connection;
+      connectionCache.lastUsed = Date.now();
+      connectionCache.isConnecting = false;
+
+      return {
+        ssh,
+        connection
+      };
+    });
+  }).catch(error => {
+    connectionCache.isConnecting = false;
+    throw error;
+  });
+
+  // Wait for the connection to be established
+  return connectionCache.connectionPromise;
+}
+
+// Helper function to get cached query results or execute query if not cached
+async function getCachedQuery(cacheKey, queryType, queryFn) {
+  // Get cache duration based on query type
+  const cacheDuration = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;
+
+  // Check if we have a valid cached result
+  const cachedResult = connectionCache.queryCache.get(cacheKey);
+  const now = Date.now();
+
+  if (cachedResult && (now - cachedResult.timestamp < cacheDuration)) {
+    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
+    return cachedResult.data;
+  }
+
+  // No valid cache found, execute the query
+  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
+  const result = await queryFn();
+
+  // Cache the result
+  connectionCache.queryCache.set(cacheKey, {
+    data: result,
+    timestamp: now
+  });
+
+  return result;
+}
+
+// Helper function to setup SSH tunnel - ONLY USED BY getDbConnection NOW
 async function setupSshTunnel() {
   const sshConfig = {
     host: process.env.PROD_SSH_HOST,
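Note: the route changes in the hunk below all follow the same shape. A minimal sketch of that pattern, assuming the helpers above (cacheKey, queryType, sql and params are placeholders, not names from the diff):

    // Sketch only: reuse the pooled SSH/MySQL connection, then serve from the query cache
    const { connection } = await getDbConnection();
    const rows = await getCachedQuery(cacheKey, queryType, async () => {
      const [result] = await connection.query(sql, params);
      return result.map(r => ({ label: r.label, value: r.value.toString() }));
    });
    res.json(rows);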
@@ -303,221 +430,202 @@ router.delete('/delete-image', (req, res) => {
 
 // Get all options for import fields
 router.get('/field-options', async (req, res) => {
-  let ssh;
-  let connection;
-
   try {
-    // Setup SSH tunnel and get database connection
-    const tunnel = await setupSshTunnel();
-    ssh = tunnel.ssh;
-
-    // Create MySQL connection over SSH tunnel
-    connection = await mysql.createConnection({
-      ...tunnel.dbConfig,
-      stream: tunnel.stream
+    // Use cached connection
+    const { connection } = await getDbConnection();
+
+    const cacheKey = 'field-options';
+    const result = await getCachedQuery(cacheKey, 'field-options', async () => {
+      // Fetch companies (type 1)
+      const [companies] = await connection.query(`
+        SELECT cat_id, name
+        FROM product_categories
+        WHERE type = 1
+        ORDER BY name
+      `);
+
+      // Fetch artists (type 40)
+      const [artists] = await connection.query(`
+        SELECT cat_id, name
+        FROM product_categories
+        WHERE type = 40
+        ORDER BY name
+      `);
+
+      // Fetch sizes (type 50)
+      const [sizes] = await connection.query(`
+        SELECT cat_id, name
+        FROM product_categories
+        WHERE type = 50
+        ORDER BY name
+      `);
+
+      // Fetch themes with subthemes
+      const [themes] = await connection.query(`
+        SELECT t.cat_id, t.name AS display_name, t.type, t.name AS sort_theme,
+          '' AS sort_subtheme, 1 AS level_order
+        FROM product_categories t
+        WHERE t.type = 20
+        UNION ALL
+        SELECT ts.cat_id, CONCAT(t.name,' - ',ts.name) AS display_name, ts.type,
+          t.name AS sort_theme, ts.name AS sort_subtheme, 2 AS level_order
+        FROM product_categories ts
+        JOIN product_categories t ON ts.master_cat_id = t.cat_id
+        WHERE ts.type = 21 AND t.type = 20
+        ORDER BY sort_theme, sort_subtheme
+      `);
+
+      // Fetch categories with all levels
+      const [categories] = await connection.query(`
+        SELECT s.cat_id, s.name AS display_name, s.type, s.name AS sort_section,
+          '' AS sort_category, '' AS sort_subcategory, '' AS sort_subsubcategory,
+          1 AS level_order
+        FROM product_categories s
+        WHERE s.type = 10
+        UNION ALL
+        SELECT c.cat_id, CONCAT(s.name,' - ',c.name) AS display_name, c.type,
+          s.name AS sort_section, c.name AS sort_category, '' AS sort_subcategory,
+          '' AS sort_subsubcategory, 2 AS level_order
+        FROM product_categories c
+        JOIN product_categories s ON c.master_cat_id = s.cat_id
+        WHERE c.type = 11 AND s.type = 10
+        UNION ALL
+        SELECT sc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name) AS display_name,
+          sc.type, s.name AS sort_section, c.name AS sort_category,
+          sc.name AS sort_subcategory, '' AS sort_subsubcategory, 3 AS level_order
+        FROM product_categories sc
+        JOIN product_categories c ON sc.master_cat_id = c.cat_id
+        JOIN product_categories s ON c.master_cat_id = s.cat_id
+        WHERE sc.type = 12 AND c.type = 11 AND s.type = 10
+        UNION ALL
+        SELECT ssc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name,' - ',ssc.name) AS display_name,
+          ssc.type, s.name AS sort_section, c.name AS sort_category,
+          sc.name AS sort_subcategory, ssc.name AS sort_subsubcategory, 4 AS level_order
+        FROM product_categories ssc
+        JOIN product_categories sc ON ssc.master_cat_id = sc.cat_id
+        JOIN product_categories c ON sc.master_cat_id = c.cat_id
+        JOIN product_categories s ON c.master_cat_id = s.cat_id
+        WHERE ssc.type = 13 AND sc.type = 12 AND c.type = 11 AND s.type = 10
+        ORDER BY sort_section, sort_category, sort_subcategory, sort_subsubcategory
+      `);
+
+      // Fetch colors
+      const [colors] = await connection.query(`
+        SELECT color, name, hex_color
+        FROM product_color_list
+        ORDER BY \`order\`
+      `);
+
+      // Fetch suppliers
+      const [suppliers] = await connection.query(`
+        SELECT supplierid as value, companyname as label
+        FROM suppliers
+        WHERE companyname <> ''
+        ORDER BY companyname
+      `);
+
+      // Fetch tax categories
+      const [taxCategories] = await connection.query(`
+        SELECT tax_code_id as value, name as label
+        FROM product_tax_codes
+        ORDER BY tax_code_id = 0 DESC, name
+      `);
+
+      // Format and return all options
+      return {
+        companies: companies.map(c => ({ label: c.name, value: c.cat_id.toString() })),
+        artists: artists.map(a => ({ label: a.name, value: a.cat_id.toString() })),
+        sizes: sizes.map(s => ({ label: s.name, value: s.cat_id.toString() })),
+        themes: themes.map(t => ({
+          label: t.display_name,
+          value: t.cat_id.toString(),
+          type: t.type,
+          level: t.level_order
+        })),
+        categories: categories.map(c => ({
+          label: c.display_name,
+          value: c.cat_id.toString(),
+          type: c.type,
+          level: c.level_order
+        })),
+        colors: colors.map(c => ({
+          label: c.name,
+          value: c.color,
+          hexColor: c.hex_color
+        })),
+        suppliers: suppliers,
+        taxCategories: taxCategories,
+        shippingRestrictions: [
+          { label: "None", value: "0" },
+          { label: "US Only", value: "1" },
+          { label: "Limited Quantity", value: "2" },
+          { label: "US/CA Only", value: "3" },
+          { label: "No FedEx 2 Day", value: "4" },
+          { label: "North America Only", value: "5" }
+        ]
+      };
     });
 
-    // Fetch companies (type 1)
-    const [companies] = await connection.query(`
-      SELECT cat_id, name
-      FROM product_categories
-      WHERE type = 1
-      ORDER BY name
-    `);
-
-    // Fetch artists (type 40)
-    const [artists] = await connection.query(`
-      SELECT cat_id, name
-      FROM product_categories
-      WHERE type = 40
-      ORDER BY name
-    `);
-
-    // Fetch sizes (type 50)
-    const [sizes] = await connection.query(`
-      SELECT cat_id, name
-      FROM product_categories
-      WHERE type = 50
-      ORDER BY name
-    `);
-
-    // Fetch themes with subthemes
-    const [themes] = await connection.query(`
-      SELECT t.cat_id, t.name AS display_name, t.type, t.name AS sort_theme,
-        '' AS sort_subtheme, 1 AS level_order
-      FROM product_categories t
-      WHERE t.type = 20
-      UNION ALL
-      SELECT ts.cat_id, CONCAT(t.name,' - ',ts.name) AS display_name, ts.type,
-        t.name AS sort_theme, ts.name AS sort_subtheme, 2 AS level_order
-      FROM product_categories ts
-      JOIN product_categories t ON ts.master_cat_id = t.cat_id
-      WHERE ts.type = 21 AND t.type = 20
-      ORDER BY sort_theme, sort_subtheme
-    `);
-
-    // Fetch categories with all levels
-    const [categories] = await connection.query(`
-      SELECT s.cat_id, s.name AS display_name, s.type, s.name AS sort_section,
-        '' AS sort_category, '' AS sort_subcategory, '' AS sort_subsubcategory,
-        1 AS level_order
-      FROM product_categories s
-      WHERE s.type = 10
-      UNION ALL
-      SELECT c.cat_id, CONCAT(s.name,' - ',c.name) AS display_name, c.type,
-        s.name AS sort_section, c.name AS sort_category, '' AS sort_subcategory,
-        '' AS sort_subsubcategory, 2 AS level_order
-      FROM product_categories c
-      JOIN product_categories s ON c.master_cat_id = s.cat_id
-      WHERE c.type = 11 AND s.type = 10
-      UNION ALL
-      SELECT sc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name) AS display_name,
-        sc.type, s.name AS sort_section, c.name AS sort_category,
-        sc.name AS sort_subcategory, '' AS sort_subsubcategory, 3 AS level_order
-      FROM product_categories sc
-      JOIN product_categories c ON sc.master_cat_id = c.cat_id
-      JOIN product_categories s ON c.master_cat_id = s.cat_id
-      WHERE sc.type = 12 AND c.type = 11 AND s.type = 10
-      UNION ALL
-      SELECT ssc.cat_id, CONCAT(s.name,' - ',c.name,' - ',sc.name,' - ',ssc.name) AS display_name,
-        ssc.type, s.name AS sort_section, c.name AS sort_category,
-        sc.name AS sort_subcategory, ssc.name AS sort_subsubcategory, 4 AS level_order
-      FROM product_categories ssc
-      JOIN product_categories sc ON ssc.master_cat_id = sc.cat_id
-      JOIN product_categories c ON sc.master_cat_id = c.cat_id
-      JOIN product_categories s ON c.master_cat_id = s.cat_id
-      WHERE ssc.type = 13 AND sc.type = 12 AND c.type = 11 AND s.type = 10
-      ORDER BY sort_section, sort_category, sort_subcategory, sort_subsubcategory
-    `);
-
-    // Fetch colors
-    const [colors] = await connection.query(`
-      SELECT color, name, hex_color
-      FROM product_color_list
-      ORDER BY \`order\`
-    `);
-
-    // Fetch suppliers
-    const [suppliers] = await connection.query(`
-      SELECT supplierid as value, companyname as label
-      FROM suppliers
-      WHERE companyname <> ''
-      ORDER BY companyname
-    `);
-
-    // Fetch tax categories
-    const [taxCategories] = await connection.query(`
-      SELECT tax_code_id as value, name as label
-      FROM product_tax_codes
-      ORDER BY tax_code_id = 0 DESC, name
-    `);
-
-    res.json({
-      companies: companies.map(c => ({ label: c.name, value: c.cat_id.toString() })),
-      artists: artists.map(a => ({ label: a.name, value: a.cat_id.toString() })),
-      sizes: sizes.map(s => ({ label: s.name, value: s.cat_id.toString() })),
-      themes: themes.map(t => ({
-        label: t.display_name,
-        value: t.cat_id.toString(),
-        type: t.type,
-        level: t.level_order
-      })),
-      categories: categories.map(c => ({
-        label: c.display_name,
-        value: c.cat_id.toString(),
-        type: c.type,
-        level: c.level_order
-      })),
-      colors: colors.map(c => ({
-        label: c.name,
-        value: c.color,
-        hexColor: c.hex_color
-      })),
-      suppliers: suppliers,
-      taxCategories: taxCategories,
-      shippingRestrictions: [
-        { label: "None", value: "0" },
-        { label: "US Only", value: "1" },
-        { label: "Limited Quantity", value: "2" },
-        { label: "US/CA Only", value: "3" },
-        { label: "No FedEx 2 Day", value: "4" },
-        { label: "North America Only", value: "5" }
-      ]
-    });
+    res.json(result);
   } catch (error) {
     console.error('Error fetching import field options:', error);
     res.status(500).json({ error: 'Failed to fetch import field options' });
-  } finally {
-    if (connection) await connection.end();
-    if (ssh) ssh.end();
   }
 });
 
 // Get product lines for a specific company
 router.get('/product-lines/:companyId', async (req, res) => {
-  let ssh;
-  let connection;
-
   try {
-    // Setup SSH tunnel and get database connection
-    const tunnel = await setupSshTunnel();
-    ssh = tunnel.ssh;
-
-    // Create MySQL connection over SSH tunnel
-    connection = await mysql.createConnection({
-      ...tunnel.dbConfig,
-      stream: tunnel.stream
+    // Use cached connection
+    const { connection } = await getDbConnection();
+
+    const companyId = req.params.companyId;
+    const cacheKey = `product-lines-${companyId}`;
+
+    const lines = await getCachedQuery(cacheKey, 'product-lines', async () => {
+      const [queryResult] = await connection.query(`
+        SELECT cat_id as value, name as label
+        FROM product_categories
+        WHERE type = 2
+        AND master_cat_id = ?
+        ORDER BY name
+      `, [companyId]);
+
+      return queryResult.map(l => ({ label: l.label, value: l.value.toString() }));
     });
 
-    const [lines] = await connection.query(`
-      SELECT cat_id as value, name as label
-      FROM product_categories
-      WHERE type = 2
-      AND master_cat_id = ?
-      ORDER BY name
-    `, [req.params.companyId]);
-
-    res.json(lines.map(l => ({ label: l.label, value: l.value.toString() })));
+    res.json(lines);
   } catch (error) {
     console.error('Error fetching product lines:', error);
     res.status(500).json({ error: 'Failed to fetch product lines' });
-  } finally {
-    if (connection) await connection.end();
-    if (ssh) ssh.end();
   }
 });
 
 // Get sublines for a specific product line
 router.get('/sublines/:lineId', async (req, res) => {
-  let ssh;
-  let connection;
-
   try {
-    // Setup SSH tunnel and get database connection
-    const tunnel = await setupSshTunnel();
-    ssh = tunnel.ssh;
-
-    // Create MySQL connection over SSH tunnel
-    connection = await mysql.createConnection({
-      ...tunnel.dbConfig,
-      stream: tunnel.stream
+    // Use cached connection
+    const { connection } = await getDbConnection();
+
+    const lineId = req.params.lineId;
+    const cacheKey = `sublines-${lineId}`;
+
+    const sublines = await getCachedQuery(cacheKey, 'sublines', async () => {
+      const [queryResult] = await connection.query(`
+        SELECT cat_id as value, name as label
+        FROM product_categories
+        WHERE type = 3
+        AND master_cat_id = ?
+        ORDER BY name
+      `, [lineId]);
+
+      return queryResult.map(s => ({ label: s.label, value: s.value.toString() }));
     });
 
-    const [sublines] = await connection.query(`
-      SELECT cat_id as value, name as label
-      FROM product_categories
-      WHERE type = 3
-      AND master_cat_id = ?
-      ORDER BY name
-    `, [req.params.lineId]);
-
-    res.json(sublines.map(s => ({ label: s.label, value: s.value.toString() })));
+    res.json(sublines);
   } catch (error) {
     console.error('Error fetching sublines:', error);
     res.status(500).json({ error: 'Failed to fetch sublines' });
-  } finally {
-    if (connection) await connection.end();
-    if (ssh) ssh.end();
   }
 });
 
File diff suppressed because it is too large