  const missingProducts = new Set();
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let processedCount = 0;
  let importedCount = 0;
  let totalOrderItems = 0;
  let totalUniqueOrders = 0;

  // Cumulative counter for orders processed across all batches
  let cumulativeProcessedOrders = 0;
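
  // Import pipeline: stage order items, order metadata, discounts, taxes and
  // costs into temp_order_* tables, combine them into the orders table with
  // bulk upserts, then retry anything skipped because its product was missing.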

  try {
    // Insert temporary table creation queries
    // ...

    console.log('Orders: Using last sync time:', lastSyncTime);
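
    // lastSyncTime is taken from the sync_status table (updated at the end of
    // this function on success), so incremental runs only pull rows that
    // changed since the last import.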

    // First get count of order items
    const [[{ total }]] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM order_items oi
      ...
      ` : ''}
    `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
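    // Note the double destructuring above: mysql2's query() resolves to
    // [rows, fields], so [[{ total }]] grabs rows[0].total directly.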

    totalOrderItems = total;
    console.log('Orders: Found changes:', totalOrderItems);

    // Get order items in batches
    const [orderItems] = await prodConnection.query(`
      ...
    `, /* ... */);

    console.log('Orders: Processing', orderItems.length, 'order items');

    // Insert order items in batches
    for (let i = 0; i < orderItems.length; i += 5000) {
      const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
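      // The elided statement bulk-upserts this batch into the temp_order_items
      // staging table; 5000-row chunks keep each multi-row INSERT comfortably
      // below typical max_allowed_packet limits.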
      // ...
          base_discount = VALUES(base_discount)
      `, values);

      processedCount = i + batch.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
        current: processedCount,
        total: totalOrderItems
      });
    }

    // Get unique order IDs
    const orderIds = [...new Set(orderItems.map(item => item.order_id))];
    totalUniqueOrders = orderIds.length;
    console.log('Total unique order IDs:', totalUniqueOrders);

    // Reset processed count for order processing phase
    processedCount = 0;

    // Get order metadata in batches
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);
      console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
      console.log('Sample of batch IDs:', batchIds.slice(0, 5));

      const [orders] = await prodConnection.query(`
        SELECT
          o.order_id,
          ...
        WHERE o.order_id IN (?)
      `, [batchIds]);

      console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
      const duplicates = orders.filter((order, index, self) =>
        self.findIndex(o => o.order_id === order.order_id) !== index
      );
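      // Log any order_id that appears more than once in this batch; a duplicate
      // here would mean the same metadata row gets written twice below.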
|
|
|
|
|
if (duplicates.length > 0) {
|
|
|
|
|
console.log('Found duplicates:', duplicates);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
|
|
|
|
|
const values = orders.flatMap(order => [
|
|
|
|
|
order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled
|
|
|
|
|
@@ -212,17 +233,27 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
|
|
|
|
|
|
|
|
|
await localConnection.query(`
|
|
|
|
|
INSERT INTO temp_order_meta VALUES ${placeholders}
|
|
|
|
|
ON DUPLICATE KEY UPDATE
|
|
|
|
|
date = VALUES(date),
|
|
|
|
|
customer = VALUES(customer),
|
|
|
|
|
customer_name = VALUES(customer_name),
|
|
|
|
|
status = VALUES(status),
|
|
|
|
|
canceled = VALUES(canceled)
|
|
|
|
|
`, values);
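      // VALUES(col) above refers to the value that row would have inserted;
      // MySQL deprecates this syntax as of 8.0.20 in favor of row aliases
      // (INSERT ... AS new ... UPDATE col = new.col), but it still works on 8.x.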

      processedCount = i + orders.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
        current: processedCount,
        total: totalUniqueOrders
      });
    }

    // Reset processed count for final phase
    processedCount = 0;

    // Get promotional discounts in batches
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);
      // ...

        await localConnection.query(`
          INSERT INTO temp_order_discounts VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE
            discount = VALUES(discount)
        `, values);
      }
    }

      // ...
        const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
        await localConnection.query(`
          INSERT INTO temp_order_taxes VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE tax = VALUES(tax)
        `, values);
      }
    }

    // ...
    }

    // Now combine all the data and insert into orders table
    // Pre-check all products at once instead of per batch
    const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
    const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
      "SELECT pid FROM products WHERE pid IN (?)",
      [allOrderPids]
    ) : [[]];
    const existingPids = new Set(existingProducts.map(p => p.pid));

    // ... (per-batch combine loop; `orders` holds the combined rows for the current batch)

      // Filter orders and track missing products - do this in a single pass
      const validOrders = [];
      const values = [];
      const processedOrderItems = new Set(); // Track unique order items
      const processedOrders = new Set(); // Track unique orders

      for (const order of orders) {
        if (!existingPids.has(order.pid)) {
          missingProducts.add(order.pid);
          skippedOrders.add(order.order_number);
          continue;
        }
        validOrders.push(order);
        values.push(...columnNames.map(col => order[col] ?? null));
        processedOrderItems.add(`${order.order_number}-${order.pid}`);
        processedOrders.add(order.order_number);
      }

      if (validOrders.length > 0) {
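        // One multi-row INSERT ... ON DUPLICATE KEY UPDATE upserts the whole
        // batch in a single round trip; this relies on the orders table having
        // a unique key covering (order_number, pid).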
        const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
        const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");

        const result = await localConnection.query(`
          INSERT INTO orders (${columnNames.join(",")})
          VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE
            SKU = VALUES(SKU),
            date = VALUES(date),
            price = VALUES(price),
            quantity = VALUES(quantity),
            discount = VALUES(discount),
            tax = VALUES(tax),
            tax_included = VALUES(tax_included),
            shipping = VALUES(shipping),
            customer = VALUES(customer),
            customer_name = VALUES(customer_name),
            status = VALUES(status),
            canceled = VALUES(canceled),
            costeach = VALUES(costeach)
        `, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());

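        // MySQL counts affectedRows as 1 per inserted row and 2 per row changed
        // by ON DUPLICATE KEY UPDATE (0 for a no-op duplicate), so the exact
        // insert/update split cannot be recovered from the total; the figures
        // below are an estimate that is only exact when the batch is all updates.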
        const affectedRows = result[0].affectedRows;
        const updates = Math.floor(affectedRows / 2);
        const inserts = affectedRows - (updates * 2);

        recordsAdded += inserts;
        recordsUpdated += updates;
        importedCount += processedOrderItems.size; // Count unique order items processed
      }

      // Update progress based on unique orders processed
      cumulativeProcessedOrders += processedOrders.size;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
        current: cumulativeProcessedOrders,
        total: totalUniqueOrders,
        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
        remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
        rate: calculateRate(startTime, cumulativeProcessedOrders)
      });
    }

    // Now try to import any orders that were skipped due to missing products
    if (skippedOrders.size > 0) {
      try {
        outputProgress({
          status: "running",
          operation: "Orders import",
          message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
        });

        // Get the orders that were skipped
        const [skippedProdOrders] = await localConnection.query(`
          SELECT DISTINCT
            oi.order_id as order_number,
            oi.pid,
            oi.SKU,
            om.date,
            oi.price,
            oi.quantity,
            oi.base_discount + COALESCE(od.discount, 0) as discount,
            COALESCE(ot.tax, 0) as tax,
            0 as tax_included,
            0 as shipping,
            om.customer,
            om.customer_name,
            om.status,
            om.canceled,
            COALESCE(tc.costeach, 0) as costeach
          FROM temp_order_items oi
          JOIN temp_order_meta om ON oi.order_id = om.order_id
          LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
          LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
          LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
          WHERE oi.order_id IN (?)
        `, [Array.from(skippedOrders)]);
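        // The LEFT JOINs plus COALESCE above default discount, tax and cost to
        // 0 for items that never got a row in the corresponding temp table.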

        // Check which products exist now
        const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
        const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
          "SELECT pid FROM products WHERE pid IN (?)",
          [skippedPids]
        ) : [[]];
        const existingPids = new Set(existingProducts.map(p => p.pid));

        // Filter orders that can now be imported
        const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
        const retryOrderItems = new Set(); // Track unique order items in retry

        if (validOrders.length > 0) {
          const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
          const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();

          const result = await localConnection.query(`
            INSERT INTO orders (${columnNames.join(", ")})
            VALUES ${placeholders}
            ON DUPLICATE KEY UPDATE
              SKU = VALUES(SKU),
              date = VALUES(date),
              price = VALUES(price),
              quantity = VALUES(quantity),
              discount = VALUES(discount),
              tax = VALUES(tax),
              tax_included = VALUES(tax_included),
              shipping = VALUES(shipping),
              customer = VALUES(customer),
              customer_name = VALUES(customer_name),
              status = VALUES(status),
              canceled = VALUES(canceled),
              costeach = VALUES(costeach)
          `, values);
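
          // Same affectedRows caveat as the main upsert above: the split below
          // is an estimate, not an exact insert/update count.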
          const affectedRows = result[0].affectedRows;
          const updates = Math.floor(affectedRows / 2);
          const inserts = affectedRows - (updates * 2);

          // Track unique order items
          validOrders.forEach(order => {
            retryOrderItems.add(`${order.order_number}-${order.pid}`);
          });

          outputProgress({
            status: "running",
            operation: "Orders import",
            message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
          });

          // Update the main counters
          recordsAdded += inserts;
          recordsUpdated += updates;
          importedCount += retryOrderItems.size;
        }
      } catch (error) {
        console.warn('Warning: Failed to retry skipped orders:', error.message);
        console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
      }
    }

    // Clean up temporary tables after ALL processing is complete
    await localConnection.query(`
      DROP TEMPORARY TABLE IF EXISTS temp_order_items;
      DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
      DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
      DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
      DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
    `);
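
    // Running several statements in one query() call requires the connection
    // to have been created with multipleStatements: true (off by default in
    // mysql2).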

    // Only update sync status if we get here (no errors thrown)
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      ...
    `);

    return {
      status: "complete",
      totalImported: Math.floor(importedCount),
      recordsAdded: recordsAdded || 0,
      recordsUpdated: Math.floor(recordsUpdated),
      totalSkipped: skippedOrders.size,
      missingProducts: missingProducts.size,
      incrementalUpdate,