Fix import script order count issues
@@ -24,6 +24,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
let totalOrderItems = 0;
let totalUniqueOrders = 0;

// Add a cumulative counter for processed orders before the loop
let cumulativeProcessedOrders = 0;

try {
// Insert temporary table creation queries
await localConnection.query(`
@@ -372,6 +375,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
// Filter orders and track missing products - do this in a single pass
const validOrders = [];
const values = [];
const processedOrderItems = new Set(); // Track unique order items
const processedOrders = new Set(); // Track unique orders

for (const order of orders) {
if (!existingPids.has(order.pid)) {
missingProducts.add(order.pid);
@@ -380,6 +386,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
}
validOrders.push(order);
values.push(...columnNames.map(col => order[col] ?? null));
processedOrderItems.add(`${order.order_number}-${order.pid}`);
processedOrders.add(order.order_number);
}
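For reference, the two sets above are what the corrected counts are based on: order items are keyed by order_number plus pid, while orders are keyed by order_number alone. A minimal illustration with made-up sample data (not part of this commit):

const sample = [
  { order_number: 1001, pid: 7 },
  { order_number: 1001, pid: 9 },
  { order_number: 1002, pid: 7 },
];
const items = new Set(sample.map(o => `${o.order_number}-${o.pid}`));
const orders = new Set(sample.map(o => o.order_number));
console.log(items.size, orders.size); // 3 order items, 2 unique orders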

if (validOrders.length > 0) {
@@ -387,58 +395,102 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");

// First check which orders exist and get their current values
const [existingOrders] = await localConnection.query(
`SELECT ${columnNames.join(",")} FROM orders WHERE (order_number, pid) IN (${validOrders.map(() => "(?,?)").join(",")})`,
validOrders.flatMap(o => [o.order_number, o.pid])
);
const existingOrderMap = new Map(
existingOrders.map(o => [`${o.order_number}-${o.pid}`, o])
);
const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(",")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());

// Split into inserts and updates
const insertsAndUpdates = validOrders.reduce((acc, order) => {
const key = `${order.order_number}-${order.pid}`;
if (existingOrderMap.has(key)) {
const existing = existingOrderMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
const newVal = order[col] ?? null;
const oldVal = existing[col] ?? null;
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
return newVal !== oldVal;
});
if (hasChanges) {
acc.updates.push(order);
}
} else {
acc.inserts.push(order);
}
return acc;
}, { inserts: [], updates: [] });
const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
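For context on the two lines above: with INSERT ... ON DUPLICATE KEY UPDATE, MySQL reports affectedRows as 1 per newly inserted row, 2 per row that was actually changed by the update, and 0 per duplicate left unchanged. When the number of attempted rows is also known, the insert/update split can be recovered exactly instead of by halving. A hedged sketch (helper name is illustrative, not from this commit):

// attempted = rows sent in the upsert batch; affectedRows = result[0].affectedRows
// Assumes every attempted row was either inserted (counts 1) or updated (counts 2),
// i.e. no duplicate rows were left completely unchanged (those count 0).
function splitUpsertCounts(attempted, affectedRows) {
  const updates = affectedRows - attempted; // (inserts + 2*updates) - (inserts + updates)
  const inserts = attempted - updates;
  return { inserts, updates };
}
// splitUpsertCounts(10, 13) -> { inserts: 7, updates: 3 }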

// Handle inserts
if (insertsAndUpdates.inserts.length > 0) {
const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(singlePlaceholder).join(",");
recordsAdded += inserts;
recordsUpdated += updates;
importedCount += processedOrderItems.size; // Count unique order items processed
}

const insertResult = await localConnection.query(`
INSERT INTO orders (${columnNames.join(",")})
VALUES ${insertPlaceholders}
`, insertsAndUpdates.inserts.map(i => columnNames.map(col => i[col] ?? null)).flat());
// Update progress based on unique orders processed
cumulativeProcessedOrders += processedOrders.size;
outputProgress({
status: "running",
operation: "Orders import",
message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
current: cumulativeProcessedOrders,
total: totalUniqueOrders,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
rate: calculateRate(startTime, cumulativeProcessedOrders)
});
}

recordsAdded += insertResult[0].affectedRows;
importedCount += insertResult[0].affectedRows;
}
// Now try to import any orders that were skipped due to missing products
if (skippedOrders.size > 0) {
try {
outputProgress({
status: "running",
operation: "Orders import",
message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
});

// Handle updates - now we know these actually have changes
if (insertsAndUpdates.updates.length > 0) {
const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(singlePlaceholder).join(",");
// Get the orders that were skipped
const [skippedProdOrders] = await localConnection.query(`
SELECT DISTINCT
oi.order_id as order_number,
oi.pid,
oi.SKU,
om.date,
oi.price,
oi.quantity,
oi.base_discount + COALESCE(od.discount, 0) as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(tc.costeach, 0) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
`, [Array.from(skippedOrders)]);

const updateResult = await localConnection.query(`
INSERT INTO orders (${columnNames.join(",")})
VALUES ${updatePlaceholders}
// Check which products exist now
const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
[skippedPids]
) : [[]];
const existingPids = new Set(existingProducts.map(p => p.pid));
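The ternary guarding the products lookup above matters because an empty placeholder list would produce invalid SQL (MySQL rejects IN ()); when there are no pids to check, the code substitutes an empty result set instead of querying. The same guard in isolation, as a rough sketch (function and table names are placeholders, not from this commit):

// mysql2-style promise API; `ids` may be empty.
async function selectProductsByIds(conn, ids) {
  if (ids.length === 0) return []; // avoid `IN ()`, which MySQL rejects
  const [rows] = await conn.query(
    "SELECT pid FROM products WHERE pid IN (?)", // `?` expands to the array's values
    [ids]
  );
  return rows;
}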

// Filter orders that can now be imported
const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
const retryOrderItems = new Set(); // Track unique order items in retry

if (validOrders.length > 0) {
const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();

const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(", ")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
@@ -453,28 +505,35 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, insertsAndUpdates.updates.map(u => columnNames.map(col => u[col] ?? null)).flat());
`, values);

recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
importedCount += updateResult[0].affectedRows / 2;
const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

// Track unique order items
validOrders.forEach(order => {
retryOrderItems.add(`${order.order_number}-${order.pid}`);
});

outputProgress({
status: "running",
operation: "Orders import",
message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
});

// Update the main counters
recordsAdded += inserts;
recordsUpdated += updates;
importedCount += retryOrderItems.size;
}
} catch (error) {
console.warn('Warning: Failed to retry skipped orders:', error.message);
console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
}

// Update progress based on batch size - this is the number of order items we've processed
processedCount = i + batchIds.length * (totalOrderItems / totalUniqueOrders);
outputProgress({
status: "running",
operation: "Orders import",
message: `Imported ${Math.floor(importedCount)} orders (${Math.floor(processedCount)} of ${totalOrderItems} items processed)`,
current: Math.floor(processedCount),
total: totalOrderItems,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
rate: calculateRate(startTime, processedCount)
});
}
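The processedCount line above converts a batch of orders into an equivalent number of order items by scaling with the average items-per-order ratio. A small worked example of that arithmetic (the numbers are made up):

// Suppose 2,000 unique orders contain 5,000 order items in total.
const exampleTotalItems = 5000;
const exampleTotalOrders = 2000;
const itemsPerOrder = exampleTotalItems / exampleTotalOrders; // 2.5

// A batch of 400 orders then advances the item-based progress by about 1,000 items.
const exampleBatchSize = 400;
console.log(Math.floor(exampleBatchSize * itemsPerOrder)); // 1000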

// Clean up temporary tables
// Clean up temporary tables after ALL processing is complete
await localConnection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_order_items;
DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
@@ -483,132 +542,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
`);

// Import missing products if any
if (missingProducts.size > 0) {
try {
// Import missing products directly without materialization
await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts));

// Retry skipped orders after importing products
if (skippedOrders.size > 0) {
outputProgress({
status: "running",
operation: "Orders import",
message: `Retrying import of ${skippedOrders.size} orders with previously missing products`
});

const skippedOrdersArray = Array.from(skippedOrders);
const [skippedProdOrders] = skippedOrdersArray.length > 0 ? await prodConnection.query(`
SELECT
o.order_id,
CASE
WHEN o.date_placed = '0000-00-00 00:00:00' OR o.date_placed IS NULL THEN o.stamp
ELSE o.date_placed
END as date,
o.order_cid,
o.bill_firstname,
o.bill_lastname,
o.order_email,
o.order_status,
o.date_shipped,
o.date_cancelled,
oi.prod_pid,
oi.prod_itemnumber,
oi.prod_price,
oi.qty_ordered,
oi.qty_back,
oi.qty_placed,
oi.qty_placed_2,
oi.discounted,
oi.summary_cogs,
oi.summary_profit,
oi.summary_orderdate,
oi.summary_paiddate,
oi.date_added,
oi.stamp
FROM order_items oi
JOIN _order o ON oi.order_id = o.order_id
WHERE o.order_id IN (?)
`, [skippedOrdersArray]) : [[]];

// Prepare values for insertion
const skippedOrderValues = skippedProdOrders.flatMap(order => {
if (!order.date) {
console.log(`Warning: Skipped order ${order.order_id} has null date:`, JSON.stringify(order, null, 2));
return [];
}

const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0;
const customerName = `${order.bill_firstname} ${order.bill_lastname}`;

// Create an object with keys based on column names
const orderData = {
id: order.order_id,
order_number: order.order_id,
pid: order.prod_pid,
SKU: order.prod_itemnumber,
date: order.date ? (
order.date instanceof Date ?
order.date.toJSON()?.slice(0,10) || null :
(typeof order.date === 'string' ? order.date.split(' ')[0] : null)
) : null,
price: order.prod_price,
quantity: order.qty_ordered,
discount: order.discounted,
tax: 0, // Placeholder, will be calculated later
tax_included: 0, // Placeholder, will be calculated later
shipping: 0, // Placeholder, will be calculated later
customer: order.order_email,
customer_name: customerName,
status: order.order_status,
canceled: canceled,
};

// Map column names to values, handling missing columns
return [columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null)];
});
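The date field above is normalized to a plain YYYY-MM-DD string whether the driver returned a Date object or a DATETIME string. The same logic pulled out as a standalone helper for clarity (the helper name is illustrative, not part of this commit):

// Returns "YYYY-MM-DD", or null when the value can't be interpreted.
function normalizeOrderDate(value) {
  if (!value) return null;
  if (value instanceof Date) {
    return value.toJSON()?.slice(0, 10) || null; // toJSON() yields null for invalid dates
  }
  return typeof value === 'string' ? value.split(' ')[0] : null;
}

// normalizeOrderDate(new Date('2024-03-05T10:30:00Z')) -> "2024-03-05"
// normalizeOrderDate('2024-03-05 10:30:00')            -> "2024-03-05"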

// Construct the insert query dynamically
const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
const skippedInsertQuery = `
INSERT INTO orders (${columnNames.join(", ")})
VALUES ${skippedPlaceholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`;

// Execute the insert query
if (skippedOrderValues.length > 0) {
const result = await localConnection.query(skippedInsertQuery, skippedOrderValues.flat());
const addedOrUpdated = Math.floor(result[0].affectedRows / 2); // Round down to avoid fractional orders
importedCount += addedOrUpdated;
recordsUpdated += addedOrUpdated;

outputProgress({
status: "running",
operation: "Orders import",
message: `Successfully imported ${addedOrUpdated} previously skipped orders`,
});
}
}
} catch (error) {
console.warn('Warning: Failed to import missing products:', error.message);
console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
}
}

// Only update sync status if we get here (no errors thrown)
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
@@ -618,9 +551,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =

return {
status: "complete",
totalImported: Math.floor(importedCount), // Round down to avoid fractional orders
totalImported: Math.floor(importedCount),
recordsAdded: recordsAdded || 0,
recordsUpdated: Math.floor(recordsUpdated), // Round down to avoid fractional orders
recordsUpdated: Math.floor(recordsUpdated),
totalSkipped: skippedOrders.size,
missingProducts: missingProducts.size,
incrementalUpdate,

@@ -459,7 +459,15 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
VALUES ${insertPlaceholders}
`, insertsAndUpdates.inserts.map(i => i.values).flat());

recordsAdded += insertResult[0].affectedRows;
const affectedRows = insertResult[0].affectedRows;
// For an upsert, MySQL counts rows twice for updates
// So if affectedRows is odd, we have (updates * 2 + inserts)
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

recordsAdded += inserts;
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
processed += batchProcessed;
}

// Handle updates - now we know these actually have changes
@@ -477,10 +485,15 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
.join(",")};
`, insertsAndUpdates.updates.map(u => u.values).flat());

recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
}
const affectedRows = updateResult[0].affectedRows;
// For an upsert, MySQL counts rows twice for updates
// So if affectedRows is odd, we have (updates * 2 + inserts)
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

processed += batchProcessed;
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
processed += batchProcessed;
}

// Update progress based on time interval
const now = Date.now();