Optimize product import with dynamic batching and memory management
@@ -357,29 +357,25 @@ async function importProducts(prodConnection, localConnection) {
       });
     }
 
-    // Now join with local temp tables
-    const [rows] = await localConnection.query(`
-      SELECT
-        p.*,
-        COALESCE(tis.stock_quantity, 0) as stock_quantity,
-        COALESCE(tis.preorder_count, 0) as preorder_count,
-        COALESCE(tis.notions_inv_count, 0) as notions_inv_count,
-        COALESCE(tpp.price, 0) as price,
-        COALESCE(tpp.regular_price, 0) as regular_price,
-        COALESCE(tpp.cost_price, 0) as cost_price
-      FROM temp_prod_data p
-      LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
-      LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
-    `);
-
-    // Drop the temporary production data table
-    await localConnection.query("DROP TEMPORARY TABLE IF EXISTS temp_prod_data");
-
-    // Process products in batches
-    const BATCH_SIZE = 10000;
+    // Now join with local temp tables and process in batches
+    const BATCH_SIZE = 2500;
     let processed = 0;
-    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
-      const batch = rows.slice(i, i + BATCH_SIZE);
+
+    while (processed < totalProducts) {
+      const [batch] = await localConnection.query(`
+        SELECT
+          p.*,
+          COALESCE(tis.stock_quantity, 0) as stock_quantity,
+          COALESCE(tis.preorder_count, 0) as preorder_count,
+          COALESCE(tis.notions_inv_count, 0) as notions_inv_count,
+          COALESCE(tpp.price, 0) as price,
+          COALESCE(tpp.regular_price, 0) as regular_price,
+          COALESCE(tpp.cost_price, 0) as cost_price
+        FROM temp_prod_data p
+        LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
+        LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
+        LIMIT ? OFFSET ?
+      `, [BATCH_SIZE, processed]);
 
       // Add image URLs
       batch.forEach(row => {
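Note: the new `while` loop is driven by `totalProducts`, which is defined above this hunk. A minimal sketch of how it could be computed, assuming a simple `COUNT(*)` over the temp table being paginated (the variable name `countRows` is illustrative, not taken from the file):

```js
// Hypothetical setup for the batch loop: count the rows once, then
// page through them with LIMIT/OFFSET. The array destructuring follows
// the mysql2/promise convention used elsewhere in this diff.
const [countRows] = await localConnection.query(
  "SELECT COUNT(*) AS total FROM temp_prod_data"
);
const totalProducts = countRows[0].total;
```

One caveat: `LIMIT ? OFFSET ?` without an `ORDER BY` does not guarantee a stable row order between queries, so a deterministic sort key (e.g. `ORDER BY p.pid`) is the usual way to ensure consecutive batches neither overlap nor skip rows.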
@@ -420,12 +416,20 @@ async function importProducts(prodConnection, localConnection) {
       outputProgress({
         status: "running",
         operation: "Products import",
-        message: `Processed ${processed} of ${rows.length} products`,
+        message: `Processed ${processed} of ${totalProducts} products`,
         current: processed,
-        total: rows.length
+        total: totalProducts
       });
+
+      // Force garbage collection between batches
+      if (global.gc) {
+        global.gc();
+      }
     }
 
+    // Drop temporary tables
+    await cleanupTemporaryTables(localConnection);
+
     // After successful import, update the sync status
     await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
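`global.gc` is only defined when Node.js is launched with the V8 flag `--expose-gc` (e.g. `node --expose-gc import.js`); the `if (global.gc)` guard makes the call degrade to a no-op otherwise. A sketch of an optional variant (the helper name `maybeGc` is illustrative, not part of this commit) that also logs heap usage, so the effect of collecting between batches can actually be observed:

```js
// Hypothetical helper: force a GC if available, then report heap size.
// process.memoryUsage().heapUsed is in bytes.
function maybeGc(label) {
  if (typeof global.gc === "function") {
    global.gc(); // only defined when Node runs under --expose-gc
  }
  const { heapUsed } = process.memoryUsage();
  console.error(`${label}: heap ${(heapUsed / 1024 / 1024).toFixed(1)} MB`);
}
```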
@@ -435,15 +439,12 @@ async function importProducts(prodConnection, localConnection) {
 
     return {
       status: "complete",
-      totalImported: rows.length,
+      totalImported: totalProducts,
       incrementalUpdate: true,
       lastSyncTime
     };
-  } catch (error) {
-    throw error;
   } finally {
     // Cleanup temporary tables
     await cleanupTemporaryTables(localConnection);
   }
 }
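`cleanupTemporaryTables` is called both after the batch loop and in the `finally` block, but its body is outside this diff. A plausible sketch, assuming it simply drops the three temporary tables named above (the real implementation may differ):

```js
// Hypothetical implementation of the helper referenced in the diff.
// DROP TEMPORARY TABLE IF EXISTS is idempotent, so running this twice
// (once on the success path, once in finally) is harmless.
async function cleanupTemporaryTables(connection) {
  for (const table of [
    "temp_prod_data",
    "temp_inventory_status",
    "temp_product_prices",
  ]) {
    await connection.query(`DROP TEMPORARY TABLE IF EXISTS ${table}`);
  }
}
```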