// Orders import: syncs production order data into the local reporting database.
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
|
|
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');
|
|
|
|
// Rows are staged and merged in batches of this size to bound query/packet size.
const BATCH_SIZE = 5000;

// Session-scoped staging tables used to assemble production data before the
// final merge into `orders`.
const ORDER_TEMP_TABLES = [
  'temp_order_items',
  'temp_order_meta',
  'temp_order_discounts',
  'temp_order_taxes',
  'temp_order_costs',
];

// Column assignments shared by both `orders` upsert statements (main merge and
// skipped-order retry) so the two code paths cannot drift apart.
const ORDERS_UPSERT_UPDATES = `
  SKU = VALUES(SKU),
  date = VALUES(date),
  price = VALUES(price),
  quantity = VALUES(quantity),
  discount = VALUES(discount),
  tax = VALUES(tax),
  tax_included = VALUES(tax_included),
  shipping = VALUES(shipping),
  customer = VALUES(customer),
  customer_name = VALUES(customer_name),
  status = VALUES(status),
  canceled = VALUES(canceled),
  costeach = VALUES(costeach)
`;

/**
 * Builds the WHERE clause (plus bind parameters) selecting order items that
 * are eligible for import. Shared by the COUNT query and the fetch query so
 * their filters always match.
 *
 * @param {boolean} incrementalUpdate - true: 1-year window plus change filter;
 *   false: 5-year window, no change filter.
 * @param {string|Date} lastSyncTime - Lower bound for the change-stamp checks
 *   (only used when incrementalUpdate is true).
 * @returns {{sql: string, params: Array}} SQL fragment and its placeholder values.
 */
function buildOrderItemsFilter(incrementalUpdate, lastSyncTime) {
  let sql = `
    WHERE o.order_status >= 15
    AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
    AND o.date_placed_onlydate IS NOT NULL
  `;
  const params = [];
  if (incrementalUpdate) {
    sql += `
      AND (
        o.stamp > ?
        OR oi.stamp > ?
        OR EXISTS (
          SELECT 1 FROM order_discount_items odi
          WHERE odi.order_id = o.order_id
          AND odi.pid = oi.prod_pid
        )
        OR EXISTS (
          SELECT 1 FROM order_tax_info oti
          JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
          WHERE oti.order_id = o.order_id
          AND otip.pid = oi.prod_pid
          AND oti.stamp > ?
        )
      )
    `;
    params.push(lastSyncTime, lastSyncTime, lastSyncTime);
  }
  return { sql, params };
}

/**
 * Drops all staging tables (no-op for any that do not exist).
 * Requires a connection with multi-statement support, as in the original code.
 */
async function dropOrderTempTables(localConnection) {
  await localConnection.query(
    ORDER_TEMP_TABLES
      .map((table) => `DROP TEMPORARY TABLE IF EXISTS ${table};`)
      .join('\n')
  );
}

/** Creates the five staging tables with the schemas the merge query expects. */
async function createOrderTempTables(localConnection) {
  await localConnection.query(`
    CREATE TEMPORARY TABLE temp_order_items (
      order_id INT UNSIGNED NOT NULL,
      pid INT UNSIGNED NOT NULL,
      SKU VARCHAR(50) NOT NULL,
      price DECIMAL(10,2) NOT NULL,
      quantity INT NOT NULL,
      base_discount DECIMAL(10,2) DEFAULT 0,
      PRIMARY KEY (order_id, pid)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
  `);

  await localConnection.query(`
    CREATE TEMPORARY TABLE temp_order_meta (
      order_id INT UNSIGNED NOT NULL,
      date DATE NOT NULL,
      customer VARCHAR(100) NOT NULL,
      customer_name VARCHAR(150) NOT NULL,
      status INT,
      canceled TINYINT(1),
      summary_discount DECIMAL(10,2) DEFAULT 0.00,
      summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
      PRIMARY KEY (order_id)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
  `);

  await localConnection.query(`
    CREATE TEMPORARY TABLE temp_order_discounts (
      order_id INT UNSIGNED NOT NULL,
      pid INT UNSIGNED NOT NULL,
      discount DECIMAL(10,2) NOT NULL,
      PRIMARY KEY (order_id, pid)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
  `);

  await localConnection.query(`
    CREATE TEMPORARY TABLE temp_order_taxes (
      order_id INT UNSIGNED NOT NULL,
      pid INT UNSIGNED NOT NULL,
      tax DECIMAL(10,2) NOT NULL,
      PRIMARY KEY (order_id, pid)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
  `);

  await localConnection.query(`
    CREATE TEMPORARY TABLE temp_order_costs (
      order_id INT UNSIGNED NOT NULL,
      pid INT UNSIGNED NOT NULL,
      costeach DECIMAL(10,3) DEFAULT 0.000,
      PRIMARY KEY (order_id, pid)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
  `);
}

/** Flags products whose cached metrics must be recomputed after an orders change. */
async function markProductsForRecalculation(localConnection, pids) {
  if (pids.length === 0) return;
  await localConnection.query(`
    INSERT INTO product_metric_status (pid, needs_recalculation)
    VALUES ${pids.map(() => '(?, TRUE)').join(',')}
    ON DUPLICATE KEY UPDATE
      needs_recalculation = TRUE,
      updated_at = CURRENT_TIMESTAMP
  `, pids);
}

/**
 * Imports orders from a production MySQL database to a local MySQL database.
 *
 * It can run in two modes:
 * 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
 * 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
 *
 * Strategy: stage production rows into session-scoped TEMPORARY tables on the
 * local connection, then join the staged data together and upsert into `orders`.
 *
 * @param {object} prodConnection - A MySQL connection to production DB (MySQL 5.7).
 * @param {object} localConnection - A MySQL connection to local DB (MySQL 8.0).
 * @param {boolean} [incrementalUpdate=true] - Set to false for a full sync; true for incremental.
 * @returns {Promise<object>} Information about the sync operation.
 * @throws Re-throws any database error after logging and best-effort temp-table cleanup.
 */
async function importOrders(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();
  const skippedOrders = new Set();   // order ids skipped because a product was missing locally
  const missingProducts = new Set(); // pids referenced by orders but absent from local `products`
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let processedCount = 0;
  let importedCount = 0;
  let totalOrderItems = 0;
  let totalUniqueOrders = 0;

  // Cumulative count of unique orders merged, across all batches.
  let cumulativeProcessedOrders = 0;

  try {
    // Start from a clean slate, then (re)create the staging tables.
    await dropOrderTempTables(localConnection);
    await createOrderTempTables(localConnection);

    // Column names of the local `orders` table drive the dynamic upserts below.
    const [columns] = await localConnection.query(`
      SELECT COLUMN_NAME
      FROM INFORMATION_SCHEMA.COLUMNS
      WHERE TABLE_NAME = 'orders'
      ORDER BY ORDINAL_POSITION
    `);
    const columnNames = columns.map(col => col.COLUMN_NAME);

    // Get last sync info; fall back to the epoch when this is the first run.
    const [syncInfo] = await localConnection.query(
      "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
    );
    const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';

    console.log('Orders: Using last sync time:', lastSyncTime);

    // One shared filter for both the COUNT and the fetch queries below.
    const filter = buildOrderItemsFilter(incrementalUpdate, lastSyncTime);

    // Count first so progress reporting has a stable total.
    const [[{ total }]] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM order_items oi
      USE INDEX (PRIMARY)
      JOIN _order o ON oi.order_id = o.order_id
      ${filter.sql}
    `, filter.params);

    totalOrderItems = total;
    console.log('Orders: Found changes:', totalOrderItems);

    // Fetch the changed order items themselves.
    const [orderItems] = await prodConnection.query(`
      SELECT
        oi.order_id,
        oi.prod_pid as pid,
        oi.prod_itemnumber as SKU,
        oi.prod_price as price,
        oi.qty_ordered as quantity,
        COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount,
        oi.stamp as last_modified
      FROM order_items oi
      USE INDEX (PRIMARY)
      JOIN _order o ON oi.order_id = o.order_id
      ${filter.sql}
    `, filter.params);

    console.log('Orders: Processing', orderItems.length, 'order items');

    // Stage order items in batches.
    for (let i = 0; i < orderItems.length; i += BATCH_SIZE) {
      const batch = orderItems.slice(i, i + BATCH_SIZE);
      const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
      const values = batch.flatMap(item => [
        item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
      ]);

      await localConnection.query(`
        INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
        VALUES ${placeholders}
        ON DUPLICATE KEY UPDATE
          SKU = VALUES(SKU),
          price = VALUES(price),
          quantity = VALUES(quantity),
          base_discount = VALUES(base_discount)
      `, values);

      processedCount = i + batch.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
        current: processedCount,
        total: totalOrderItems
      });
    }

    // Unique order IDs drive every remaining phase.
    const orderIds = [...new Set(orderItems.map(item => item.order_id))];
    totalUniqueOrders = orderIds.length;
    console.log('Total unique order IDs:', totalUniqueOrders);

    // Reset processed count for the metadata phase.
    processedCount = 0;

    // Stage order metadata in batches.
    for (let i = 0; i < orderIds.length; i += BATCH_SIZE) {
      const batchIds = orderIds.slice(i, i + BATCH_SIZE);
      console.log(`Processing batch ${i / BATCH_SIZE + 1}, size: ${batchIds.length}`);
      console.log('Sample of batch IDs:', batchIds.slice(0, 5));

      const [orders] = await prodConnection.query(`
        SELECT
          o.order_id,
          o.date_placed_onlydate as date,
          o.order_cid as customer,
          CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name,
          o.order_status as status,
          CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled,
          o.summary_discount,
          o.summary_subtotal
        FROM _order o
        LEFT JOIN users u ON o.order_cid = u.cid
        WHERE o.order_id IN (?)
      `, [batchIds]);

      console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
      // A duplicate here means production returned the same order twice; log it
      // loudly since the staging PRIMARY KEY would otherwise silently mask it.
      const duplicates = orders.filter((order, index, self) =>
        self.findIndex(o => o.order_id === order.order_id) !== index
      );
      if (duplicates.length > 0) {
        console.log('Found duplicates:', duplicates);
      }

      const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?, ?, ?)").join(",");
      const values = orders.flatMap(order => [
        order.order_id,
        order.date,
        order.customer,
        order.customer_name,
        order.status,
        order.canceled,
        order.summary_discount,
        order.summary_subtotal
      ]);

      await localConnection.query(`
        INSERT INTO temp_order_meta (
          order_id, date, customer, customer_name,
          status, canceled, summary_discount, summary_subtotal
        ) VALUES ${placeholders}
        ON DUPLICATE KEY UPDATE
          date = VALUES(date),
          customer = VALUES(customer),
          customer_name = VALUES(customer_name),
          status = VALUES(status),
          canceled = VALUES(canceled),
          summary_discount = VALUES(summary_discount),
          summary_subtotal = VALUES(summary_subtotal)
      `, values);

      processedCount = i + orders.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
        current: processedCount,
        total: totalUniqueOrders
      });
    }

    // Reset processed count for the final phase.
    processedCount = 0;

    // Stage per-item promotional discounts in batches.
    for (let i = 0; i < orderIds.length; i += BATCH_SIZE) {
      const batchIds = orderIds.slice(i, i + BATCH_SIZE);
      const [discounts] = await prodConnection.query(`
        SELECT order_id, pid, SUM(amount) as discount
        FROM order_discount_items
        WHERE order_id IN (?)
        GROUP BY order_id, pid
      `, [batchIds]);

      if (discounts.length > 0) {
        const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
        const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);

        await localConnection.query(`
          INSERT INTO temp_order_discounts (order_id, pid, discount)
          VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE
            discount = VALUES(discount)
        `, values);
      }
    }

    // Stage tax info in batches; only the most recent tax record per order counts.
    for (let i = 0; i < orderIds.length; i += BATCH_SIZE) {
      const batchIds = orderIds.slice(i, i + BATCH_SIZE);
      const [taxes] = await prodConnection.query(`
        SELECT DISTINCT
          oti.order_id,
          otip.pid,
          otip.item_taxes_to_collect as tax
        FROM order_tax_info oti
        JOIN (
          SELECT order_id, MAX(stamp) as max_stamp
          FROM order_tax_info
          WHERE order_id IN (?)
          GROUP BY order_id
        ) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp
        JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
      `, [batchIds]);

      if (taxes.length > 0) {
        // Several taxinfo rows can share MAX(stamp): dedupe on (order_id, pid),
        // keeping the last row seen, before building the bulk insert.
        const uniqueTaxes = new Map();
        taxes.forEach(t => {
          uniqueTaxes.set(`${t.order_id}-${t.pid}`, t);
        });

        const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
        if (values.length > 0) {
          const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
          await localConnection.query(`
            INSERT INTO temp_order_taxes (order_id, pid, tax)
            VALUES ${placeholders}
            ON DUPLICATE KEY UPDATE tax = VALUES(tax)
          `, values);
        }
      }
    }

    // Stage per-item costs in batches, falling back to the inventory cost in
    // effect when the order was placed.
    for (let i = 0; i < orderIds.length; i += BATCH_SIZE) {
      const batchIds = orderIds.slice(i, i + BATCH_SIZE);
      const [costs] = await prodConnection.query(`
        SELECT
          oc.orderid as order_id,
          oc.pid,
          COALESCE(
            oc.costeach,
            (SELECT pi.costeach
             FROM product_inventory pi
             WHERE pi.pid = oc.pid
             AND pi.daterec <= o.date_placed
             ORDER BY pi.daterec DESC LIMIT 1)
          ) as costeach
        FROM order_costs oc
        JOIN _order o ON oc.orderid = o.order_id
        WHERE oc.orderid IN (?)
      `, [batchIds]);

      if (costs.length > 0) {
        const placeholders = costs.map(() => '(?, ?, ?)').join(",");
        // ?? instead of ||: only null/undefined fall back to 0.
        const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach ?? 0]);
        await localConnection.query(`
          INSERT INTO temp_order_costs (order_id, pid, costeach)
          VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE costeach = VALUES(costeach)
        `, values);
      }
    }

    // Pre-check all referenced products once, instead of per batch.
    const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
    const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
      "SELECT pid FROM products WHERE pid IN (?)",
      [allOrderPids]
    ) : [[]];
    const existingPids = new Set(existingProducts.map(p => p.pid));

    // Combine the staged data and upsert into the real `orders` table.
    for (let i = 0; i < orderIds.length; i += BATCH_SIZE) {
      const batchIds = orderIds.slice(i, i + BATCH_SIZE);

      const [orders] = await localConnection.query(`
        SELECT
          oi.order_id as order_number,
          oi.pid,
          oi.SKU,
          om.date,
          oi.price,
          oi.quantity,
          oi.base_discount + COALESCE(od.discount, 0) +
          CASE
            WHEN om.summary_discount > 0 THEN
              ROUND((om.summary_discount * (oi.price * oi.quantity)) /
                NULLIF(om.summary_subtotal, 0), 2)
            ELSE 0
          END as discount,
          COALESCE(ot.tax, 0) as tax,
          0 as tax_included,
          0 as shipping,
          om.customer,
          om.customer_name,
          om.status,
          om.canceled,
          COALESCE(tc.costeach, 0) as costeach
        FROM temp_order_items oi
        JOIN temp_order_meta om ON oi.order_id = om.order_id
        LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
        LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
        LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
        WHERE oi.order_id IN (?)
      `, [batchIds]);

      // Split into importable rows vs rows blocked by missing products,
      // tracking unique items/orders in a single pass.
      const validOrders = [];
      const processedOrderItems = new Set();
      const processedOrders = new Set();

      for (const order of orders) {
        if (!existingPids.has(order.pid)) {
          missingProducts.add(order.pid);
          skippedOrders.add(order.order_number);
          continue;
        }
        validOrders.push(order);
        processedOrderItems.add(`${order.order_number}-${order.pid}`);
        processedOrders.add(order.order_number);
      }

      if (validOrders.length > 0) {
        // Pre-compute the placeholders string once per batch.
        const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
        const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");

        const result = await localConnection.query(`
          INSERT INTO orders (${columnNames.join(",")})
          VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE ${ORDERS_UPSERT_UPDATES}
        `, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());

        // MySQL reports affectedRows as 1 per inserted row and 2 per updated
        // row, so this split is an approximation when a batch mixes both.
        const affectedRows = result[0].affectedRows;
        const updates = Math.floor(affectedRows / 2);
        const inserts = affectedRows - (updates * 2);

        recordsAdded += inserts;
        recordsUpdated += updates;
        importedCount += processedOrderItems.size;

        // Mark affected products for metric recalculation.
        await markProductsForRecalculation(
          localConnection,
          [...new Set(validOrders.map(o => o.pid))]
        );
      }

      // Progress is tracked by unique orders processed, not raw rows.
      cumulativeProcessedOrders += processedOrders.size;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
        current: cumulativeProcessedOrders,
        total: totalUniqueOrders,
        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
        remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
        rate: calculateRate(startTime, cumulativeProcessedOrders)
      });
    }

    // Retry orders that were skipped due to missing products — the products
    // may have been imported in the meantime.
    if (skippedOrders.size > 0) {
      try {
        outputProgress({
          status: "running",
          operation: "Orders import",
          message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
        });

        // Rebuild the skipped rows from the staging tables. Summary discount
        // data comes from temp_order_meta: the production `_order` table does
        // not exist on the local connection (the original join against it was
        // a bug), and temp_order_meta already stages those two columns.
        const [skippedProdOrders] = await localConnection.query(`
          SELECT DISTINCT
            oi.order_id as order_number,
            oi.pid,
            oi.SKU,
            om.date,
            oi.price,
            oi.quantity,
            oi.base_discount + COALESCE(od.discount, 0) +
            CASE
              WHEN om.summary_discount > 0 THEN
                ROUND((om.summary_discount * (oi.price * oi.quantity)) /
                  NULLIF(om.summary_subtotal, 0), 2)
              ELSE 0
            END as discount,
            COALESCE(ot.tax, 0) as tax,
            0 as tax_included,
            0 as shipping,
            om.customer,
            om.customer_name,
            om.status,
            om.canceled,
            COALESCE(tc.costeach, 0) as costeach
          FROM temp_order_items oi
          JOIN temp_order_meta om ON oi.order_id = om.order_id
          LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
          LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
          LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
          WHERE oi.order_id IN (?)
        `, [Array.from(skippedOrders)]);

        // Check which of the previously missing products exist now.
        const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
        const [nowExisting] = skippedPids.length > 0 ? await localConnection.query(
          "SELECT pid FROM products WHERE pid IN (?)",
          [skippedPids]
        ) : [[]];
        const nowExistingPids = new Set(nowExisting.map(p => p.pid));

        // Filter orders that can now be imported.
        const validOrders = skippedProdOrders.filter(order => nowExistingPids.has(order.pid));
        const retryOrderItems = new Set();

        if (validOrders.length > 0) {
          const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
          const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();

          const result = await localConnection.query(`
            INSERT INTO orders (${columnNames.join(", ")})
            VALUES ${placeholders}
            ON DUPLICATE KEY UPDATE ${ORDERS_UPSERT_UPDATES}
          `, values);

          // Same insert/update approximation as the main merge loop.
          const affectedRows = result[0].affectedRows;
          const updates = Math.floor(affectedRows / 2);
          const inserts = affectedRows - (updates * 2);

          validOrders.forEach(order => {
            retryOrderItems.add(`${order.order_number}-${order.pid}`);
          });

          outputProgress({
            status: "running",
            operation: "Orders import",
            message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
          });

          recordsAdded += inserts;
          recordsUpdated += updates;
          importedCount += retryOrderItems.size;

          await markProductsForRecalculation(
            localConnection,
            [...new Set(validOrders.map(o => o.pid))]
          );
        }
      } catch (error) {
        // Retry is best-effort: a failure here must not fail the whole import.
        console.warn('Warning: Failed to retry skipped orders:', error.message);
        console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
      }
    }

    // Clean up staging tables after ALL processing is complete.
    await dropOrderTempTables(localConnection);

    // Only advance the sync watermark when everything above succeeded.
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('orders', NOW())
      ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
    `);

    return {
      status: "complete",
      totalImported: importedCount,
      recordsAdded,
      recordsUpdated,
      totalSkipped: skippedOrders.size,
      missingProducts: missingProducts.size,
      incrementalUpdate,
      lastSyncTime
    };
  } catch (error) {
    console.error("Error during orders import:", error);
    // Best-effort cleanup so a failed run does not leave staging tables behind
    // on a pooled/reused connection; the original error is the one that matters.
    try {
      await dropOrderTempTables(localConnection);
    } catch (cleanupError) {
      console.warn('Warning: temp table cleanup after failure also failed:', cleanupError.message);
    }
    throw error;
  }
}
|
|
|
|
// Single default export: the orders import entry point.
module.exports = importOrders;
|