Incremental import order fixes

2025-02-01 11:03:42 -05:00
parent 50b86d6d8a
commit 2d0089dc52
2 changed files with 24 additions and 3 deletions

View File

@@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders');
 dotenv.config({ path: path.join(__dirname, "../.env") });
 // Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
-const IMPORT_ORDERS = false;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
+const IMPORT_ORDERS = true;
 const IMPORT_PURCHASE_ORDERS = true;
 // Add flag for incremental updates
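The flipped constants re-enable the category, product, and order imports, so an incremental run covers all four data types rather than purchase orders alone. A minimal sketch of how a main script might gate each step on these flags; `runImports` and the first three import functions are assumed names, only `importPurchaseOrders` appears in the hunk header above:

    // Hypothetical wiring, assuming each import module exports an async function.
    async function runImports(prodConnection, localConnection, incrementalUpdate) {
      if (IMPORT_CATEGORIES) await importCategories(prodConnection, localConnection);
      if (IMPORT_PRODUCTS) await importProducts(prodConnection, localConnection);
      if (IMPORT_ORDERS) await importOrders(prodConnection, localConnection, incrementalUpdate);
      if (IMPORT_PURCHASE_ORDERS) await importPurchaseOrders(prodConnection, localConnection, incrementalUpdate);
    }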

View File

@@ -188,10 +188,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 // Get unique order IDs
 const orderIds = [...new Set(orderItems.map(item => item.order_id))];
+console.log('Total unique order IDs:', orderIds.length);
 // Get order metadata in batches
 for (let i = 0; i < orderIds.length; i += 5000) {
 const batchIds = orderIds.slice(i, i + 5000);
+console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
+console.log('Sample of batch IDs:', batchIds.slice(0, 5));
 const [orders] = await prodConnection.query(`
 SELECT
 o.order_id,
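The new logging exposes the batching that was already in place: order IDs are processed in slices of 5,000 so each `IN (?)` query stays a manageable size. The same pattern as a standalone sketch (the `chunk` helper is illustrative, not part of this commit):

    // Split an ID list into fixed-size batches for IN (?) queries.
    function chunk(ids, size = 5000) {
      const batches = [];
      for (let i = 0; i < ids.length; i += size) {
        batches.push(ids.slice(i, i + size));
      }
      return batches;
    }

Keeping each batch bounded avoids building one enormous statement that could exceed the server's packet limits.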
@@ -205,6 +209,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 WHERE o.order_id IN (?)
 `, [batchIds]);
+console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
+const duplicates = orders.filter((order, index, self) =>
+self.findIndex(o => o.order_id === order.order_id) !== index
+);
+if (duplicates.length > 0) {
+console.log('Found duplicates:', duplicates);
+}
 const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
 const values = orders.flatMap(order => [
 order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled
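The duplicate check added here is quadratic: `findIndex` rescans the whole batch for every order, which is acceptable for temporary debugging on batches of a few thousand rows. If the check were kept long-term, a Set gives the linear-time equivalent (a sketch, not code from this commit):

    // Linear-time duplicate detection keyed on order_id.
    const seen = new Set();
    const dupes = [];
    for (const order of orders) {
      if (seen.has(order.order_id)) dupes.push(order);
      else seen.add(order.order_id);
    }
    if (dupes.length > 0) console.log('Found duplicates:', dupes);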
@@ -212,6 +224,12 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 await localConnection.query(`
 INSERT INTO temp_order_meta VALUES ${placeholders}
+ON DUPLICATE KEY UPDATE
+date = VALUES(date),
+customer = VALUES(customer),
+customer_name = VALUES(customer_name),
+status = VALUES(status),
+canceled = VALUES(canceled)
 `, values);
 outputProgress({
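The `ON DUPLICATE KEY UPDATE` clause is what makes the incremental re-import safe: rows whose unique key already exists in `temp_order_meta` are updated in place instead of aborting the batch with a duplicate-key error, and `VALUES(col)` refers to the value that row would have inserted. A single-row illustration with example values, assuming `order_id` is the table's primary key:

    // Running this twice with the same order_id updates the row the second time.
    await localConnection.query(`
      INSERT INTO temp_order_meta VALUES (?, ?, ?, ?, ?, ?)
      ON DUPLICATE KEY UPDATE status = VALUES(status), canceled = VALUES(canceled)
    `, [1001, '2025-01-31', 42, 'Example Customer', 'shipped', 0]);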
@@ -239,6 +257,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 await localConnection.query(`
 INSERT INTO temp_order_discounts VALUES ${placeholders}
+ON DUPLICATE KEY UPDATE
+discount = VALUES(discount)
 `, values);
 }
 }
@@ -274,6 +294,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
 await localConnection.query(`
 INSERT INTO temp_order_taxes VALUES ${placeholders}
+ON DUPLICATE KEY UPDATE tax = VALUES(tax)
 `, values);
 }
 }
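One caveat worth noting: `VALUES()` inside `ON DUPLICATE KEY UPDATE` is deprecated as of MySQL 8.0.20 in favor of a row alias. If the local database runs a recent MySQL, the equivalent form would look like this (a sketch of the alternative, not what this commit uses):

    // Row-alias upsert, MySQL 8.0.19+; behaves the same as the VALUES() form.
    await localConnection.query(`
      INSERT INTO temp_order_taxes VALUES ${placeholders} AS new_rows
      ON DUPLICATE KEY UPDATE tax = new_rows.tax
    `, values);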