Fixes/improvements for import scripts

2025-03-24 22:27:44 -04:00
parent 75da2c6772
commit 87d4b9e804
5 changed files with 190 additions and 166 deletions


@@ -6,6 +6,9 @@ async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate) {
   let recordsUpdated = 0;
   try {
+    // Begin transaction for the entire import process
+    await localConnection.beginTransaction();
+
     // Get last sync info
     const [syncInfo] = await localConnection.query(
       "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
@@ -39,7 +42,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate) {
         FROM (
           SELECT DISTINCT pop.po_id, pop.pid
           FROM po p
-          USE INDEX (idx_date_created)
           JOIN po_products pop ON p.po_id = pop.po_id
           JOIN suppliers s ON p.supplier_id = s.supplierid
           WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
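
Removing the hard-coded USE INDEX (idx_date_created) hint hands index selection back to the optimizer, which is usually safer as table statistics drift. A quick way to confirm the unhinted query still picks the expected index is to EXPLAIN it on the production connection; this sketch assumes prodConnection is a MySQL-style (mysql2 promise) connection, as the DATE_SUB and USE INDEX syntax suggests, and must run inside an async context:

    // Sketch: inspect the plan for the unhinted query.
    const [plan] = await prodConnection.query(`
      EXPLAIN
      SELECT DISTINCT pop.po_id, pop.pid
      FROM po p
      JOIN po_products pop ON p.po_id = pop.po_id
      JOIN suppliers s ON p.supplier_id = s.supplierid
      WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
    `);
    // Each row's `key` column shows the chosen index; expect idx_date_created on p.
    console.log(plan.map(r => `${r.table}: ${r.key}`));
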
@@ -59,6 +61,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate) {
     console.log('Fetching purchase orders in batches...');
     const FETCH_BATCH_SIZE = 5000;
+    const INSERT_BATCH_SIZE = 200; // Process 200 records at a time for inserts
     let offset = 0;
     let allProcessed = false;
     let totalProcessed = 0;
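
The new INSERT_BATCH_SIZE decouples write batching from read batching: 5,000 rows are fetched per round trip, but inserts go out in groups of 200. At the 11 columns per row used below, that is 200 x 11 = 2,200 bound parameters per statement, comfortably under typical placeholder limits. A sketch of that sizing arithmetic (the 65,535 cap is an assumed driver/protocol limit, not something stated in this commit):

    // Hypothetical sizing check for a multi-row parameterized insert.
    const PARAM_CAP = 65535;      // assumption: common wire-protocol placeholder limit
    const COLUMNS_PER_ROW = 11;   // matches the temp_purchase_orders column list
    const maxRowsPerInsert = Math.floor(PARAM_CAP / COLUMNS_PER_ROW); // 5957
    const INSERT_BATCH_SIZE = Math.min(200, maxRowsPerInsert);        // stays at 200
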
@@ -101,64 +104,62 @@ async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate) {
       console.log(`Processing batch of ${poList.length} purchase order items (${offset}-${offset + poList.length})`);
-      let processed = 0;
-      // Process each PO in a separate insert to avoid parameter issues
-      for (let i = 0; i < poList.length; i++) {
-        const po = poList[i];
+      // Process in smaller batches for inserts
+      for (let i = 0; i < poList.length; i += INSERT_BATCH_SIZE) {
+        const batch = poList.slice(i, Math.min(i + INSERT_BATCH_SIZE, poList.length));
-        try {
-          // Single row insert
-          await localConnection.query(`
-            INSERT INTO temp_purchase_orders (
-              po_id, pid, sku, name, vendor, date, expected_date,
-              status, notes, ordered, cost_price
-            )
-            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
-            ON CONFLICT (po_id, pid) DO UPDATE SET
-              sku = EXCLUDED.sku,
-              name = EXCLUDED.name,
-              vendor = EXCLUDED.vendor,
-              date = EXCLUDED.date,
-              expected_date = EXCLUDED.expected_date,
-              status = EXCLUDED.status,
-              notes = EXCLUDED.notes,
-              ordered = EXCLUDED.ordered,
-              cost_price = EXCLUDED.cost_price
-          `, [
-            po.po_id,
-            po.pid,
-            po.sku,
-            po.name,
-            po.vendor,
-            po.date,
-            po.expected_date,
-            po.status,
-            po.notes,
-            po.ordered,
-            po.cost_price
-          ]);
-          processed++;
-          totalProcessed++;
-          // Only log occasionally
-          if (processed % 500 === 0 || processed === 1 || processed === poList.length) {
-            outputProgress({
-              status: "running",
-              operation: "Purchase orders import",
-              message: `Batch ${Math.floor(offset/FETCH_BATCH_SIZE) + 1}: ${processed}/${poList.length} (Total: ${totalProcessed}/${total})`,
-              current: totalProcessed,
-              total: total,
-              elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-              remaining: estimateRemaining(startTime, totalProcessed, total),
-              rate: calculateRate(startTime, totalProcessed)
-            });
-          }
-        } catch (error) {
-          console.error(`Error inserting PO #${po.po_id} product #${po.pid}:`, error.message);
-          console.log('PO data:', po);
-        }
+        // Create parameterized query with placeholders
+        const placeholders = batch.map((_, idx) => {
+          const base = idx * 11; // 11 columns
+          return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11})`;
+        }).join(',');
+        // Create flattened values array
+        const values = batch.flatMap(po => [
+          po.po_id,
+          po.pid,
+          po.sku,
+          po.name,
+          po.vendor,
+          po.date,
+          po.expected_date,
+          po.status,
+          po.notes,
+          po.ordered,
+          po.cost_price
+        ]);
+        // Execute batch insert
+        await localConnection.query(`
+          INSERT INTO temp_purchase_orders (
+            po_id, pid, sku, name, vendor, date, expected_date,
+            status, notes, ordered, cost_price
+          )
+          VALUES ${placeholders}
+          ON CONFLICT (po_id, pid) DO UPDATE SET
+            sku = EXCLUDED.sku,
+            name = EXCLUDED.name,
+            vendor = EXCLUDED.vendor,
+            date = EXCLUDED.date,
+            expected_date = EXCLUDED.expected_date,
+            status = EXCLUDED.status,
+            notes = EXCLUDED.notes,
+            ordered = EXCLUDED.ordered,
+            cost_price = EXCLUDED.cost_price
+        `, values);
+        totalProcessed += batch.length;
+        outputProgress({
+          status: "running",
+          operation: "Purchase orders import",
+          message: `Processed ${totalProcessed}/${total} purchase order items`,
+          current: totalProcessed,
+          total: total,
+          elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+          remaining: estimateRemaining(startTime, totalProcessed, total),
+          rate: calculateRate(startTime, totalProcessed)
+        });
       }
       // Update offset for next batch
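
The core of this rewrite is generating one placeholder group per row while flattening the row values in the same order, so a single statement carries the whole batch. The logic generalizes; here is a self-contained sketch with a hypothetical buildPlaceholders helper (not in the script) and a two-row example:

    // Hypothetical helper mirroring the diff's placeholder generation:
    // one "($1, $2, ...)" group per row, with numbering continuing across rows.
    function buildPlaceholders(rowCount, columnCount) {
      return Array.from({ length: rowCount }, (_, row) => {
        const base = row * columnCount;
        const cols = Array.from({ length: columnCount }, (_, c) => `$${base + c + 1}`);
        return `(${cols.join(', ')})`;
      }).join(',');
    }

    // Two rows of three columns -> "($1, $2, $3),($4, $5, $6)"
    console.log(buildPlaceholders(2, 3));

    // Values must be flattened in matching column order, as the diff does with flatMap:
    const rows = [{ a: 1, b: 2, c: 3 }, { a: 4, b: 5, c: 6 }];
    const values = rows.flatMap(r => [r.a, r.b, r.c]); // [1, 2, 3, 4, 5, 6]

Keeping the placeholder count and the values array in lockstep is what lets one ON CONFLICT upsert replace 200 single-row round trips, which also removes the per-row try/catch the old loop needed.
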
@@ -220,6 +221,9 @@ async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate) {
     // Clean up temporary tables
     await localConnection.query(`DROP TABLE IF EXISTS temp_purchase_orders;`);
+
+    // Commit transaction
+    await localConnection.commit();
     return {
       status: "complete",
@@ -230,11 +234,11 @@ async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate) {
   } catch (error) {
     console.error("Error during purchase orders import:", error);
-    // Attempt cleanup on error
+    // Rollback transaction
     try {
-      await localConnection.query(`DROP TABLE IF EXISTS temp_purchase_orders;`);
-    } catch (cleanupError) {
-      console.error('Error during cleanup:', cleanupError.message);
+      await localConnection.rollback();
+    } catch (rollbackError) {
+      console.error('Error during rollback:', rollbackError.message);
     }
     return {
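
After this commit the function's contract is: resolve with a { status: "complete", ... } summary on success, or roll back and return an error shape on failure. A hedged sketch of a caller (the module path and connection factories are assumptions, not part of this commit):

    // Hypothetical wiring; getProdConnection/getLocalConnection stand in for
    // whatever connection setup the surrounding import scripts provide.
    const { importPurchaseOrders } = require('./import-purchase-orders');

    async function run() {
      const prodConnection = await getProdConnection();
      const localConnection = await getLocalConnection();
      try {
        const result = await importPurchaseOrders(prodConnection, localConnection, true);
        console.log(`Import finished with status: ${result.status}`);
      } finally {
        await prodConnection.end();
        await localConnection.end();
      }
    }
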