Update import scripts, working through categories
@@ -2,7 +2,7 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } =
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');

/**
 * Imports orders from a production MySQL database to a local MySQL database.
 * Imports orders from a production MySQL database to a local PostgreSQL database.
 * It can run in two modes:
 * 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
 * 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
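The docblock above describes the two modes. As a rough sketch of how this importer might be driven (the connection setup, module path, and drivers below are assumptions; only the importOrders(prodConnection, localConnection, incrementalUpdate) signature appears in this diff):

    // Hypothetical harness -- adjust paths and credentials to the real project layout.
    const mysql = require('mysql2/promise');      // production source (assumed driver)
    const { Pool } = require('pg');               // local PostgreSQL target (assumed driver)
    const { importOrders } = require('./orders'); // assumed module name for this file

    async function run() {
      const prodConnection = await mysql.createConnection({ host: 'prod-db-host', user: 'readonly', database: 'shop' });
      const localConnection = new Pool({ host: 'localhost', database: 'shop_local' });

      // Incremental mode (the documented default): only orders changed since the last sync.
      await importOrders(prodConnection, localConnection, true);

      // Full mode: re-fetch all eligible orders from the last 5 years.
      // await importOrders(prodConnection, localConnection, false);
    }

    run().catch(err => { console.error(err); process.exit(1); });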
@@ -23,93 +23,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
let importedCount = 0;
let totalOrderItems = 0;
let totalUniqueOrders = 0;

// Add a cumulative counter for processed orders before the loop
let cumulativeProcessedOrders = 0;

try {
// Clean up any existing temp tables first
await localConnection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_order_items;
DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
`);

// Create all temp tables with correct schema
await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_items (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
quantity INT NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_meta (
order_id INT UNSIGNED NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INT,
canceled TINYINT(1),
summary_discount DECIMAL(10,2) DEFAULT 0.00,
summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
PRIMARY KEY (order_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_discounts (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
discount DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_taxes (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
tax DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_costs (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

// Get column names from the local table
const [columns] = await localConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'orders'
AND COLUMN_NAME != 'updated' -- Exclude the updated column
ORDER BY ORDINAL_POSITION
`);
const columnNames = columns.map(col => col.COLUMN_NAME);

// Get last sync info
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
);
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';

console.log('Orders: Using last sync time:', lastSyncTime);

// First get count of order items
// First get count of order items - Keep MySQL compatible for production
const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total
FROM order_items oi
@@ -141,7 +66,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
totalOrderItems = total;
console.log('Orders: Found changes:', totalOrderItems);

// Get order items in batches
// Get order items - Keep MySQL compatible for production
const [orderItems] = await prodConnection.query(`
SELECT
oi.order_id,
@@ -179,10 +104,64 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =

console.log('Orders: Processing', orderItems.length, 'order items');

// Create temporary tables in PostgreSQL
await localConnection.query(`
DROP TABLE IF EXISTS temp_order_items;
DROP TABLE IF EXISTS temp_order_meta;
DROP TABLE IF EXISTS temp_order_discounts;
DROP TABLE IF EXISTS temp_order_taxes;
DROP TABLE IF EXISTS temp_order_costs;

CREATE TEMP TABLE temp_order_items (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
quantity INTEGER NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
PRIMARY KEY (order_id, pid)
);

CREATE TEMP TABLE temp_order_meta (
order_id INTEGER NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INTEGER,
canceled BOOLEAN,
summary_discount DECIMAL(10,2) DEFAULT 0.00,
summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
PRIMARY KEY (order_id)
);

CREATE TEMP TABLE temp_order_discounts (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
discount DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
);

CREATE TEMP TABLE temp_order_taxes (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
tax DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
);

CREATE TEMP TABLE temp_order_costs (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
PRIMARY KEY (order_id, pid)
);
`);
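A note on the block above: all five CREATE TEMP TABLE statements are sent in one query() call. With the stock pg driver a multi-statement string like this is only accepted when no parameters are bound, and TEMP tables are scoped to a single session, so if localConnection is a connection pool the temp tables and every later statement that touches them need to run on the same checked-out client. A minimal sketch of that pattern, assuming the pg Pool API:

    // Sketch only: pin one client for the whole import so the TEMP tables stay visible.
    const client = await localPool.connect();   // localPool is an assumed pg.Pool instance
    try {
      await client.query(`
        DROP TABLE IF EXISTS temp_order_items;
        CREATE TEMP TABLE temp_order_items (
          order_id INTEGER NOT NULL,
          pid INTEGER NOT NULL,
          PRIMARY KEY (order_id, pid)
        );
      `);
      // ...batched inserts and the final INSERT INTO orders, all via `client`...
    } finally {
      client.release();
    }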
// Insert order items in batches
for (let i = 0; i < orderItems.length; i += 5000) {
const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
const placeholders = batch.map((_, idx) =>
`($${idx * 6 + 1}, $${idx * 6 + 2}, $${idx * 6 + 3}, $${idx * 6 + 4}, $${idx * 6 + 5}, $${idx * 6 + 6})`
).join(",");
const values = batch.flatMap(item => [
item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
]);
@@ -190,11 +169,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
await localConnection.query(`
INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
price = VALUES(price),
quantity = VALUES(quantity),
base_discount = VALUES(base_discount)
ON CONFLICT (order_id, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
base_discount = EXCLUDED.base_discount
`, values);

processedCount = i + batch.length;
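The batch insert above shows the conversion this commit repeats for every upsert: MySQL's anonymous ? placeholders plus ON DUPLICATE KEY UPDATE ... VALUES(col) become PostgreSQL's numbered $n placeholders plus ON CONFLICT (...) DO UPDATE SET col = EXCLUDED.col. The hand-written index math could also be factored into a small helper; the function below is illustrative only and not part of the commit:

    // Builds "($1, $2, $3),($4, $5, $6)"-style groups for `rowCount` rows of `width` columns.
    function pgPlaceholders(rowCount, width) {
      return Array.from({ length: rowCount }, (_, row) => {
        const base = row * width;
        const group = Array.from({ length: width }, (_, col) => `$${base + col + 1}`);
        return `(${group.join(', ')})`;
      }).join(',');
    }

    // pgPlaceholders(2, 3) === '($1, $2, $3),($4, $5, $6)'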
@@ -215,11 +194,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
// Reset processed count for order processing phase
processedCount = 0;

// Get order metadata in batches
// Get order metadata in batches - Keep MySQL compatible for production
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
console.log('Sample of batch IDs:', batchIds.slice(0, 5));

const [orders] = await prodConnection.query(`
SELECT
@@ -235,48 +213,40 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
LEFT JOIN users u ON o.order_cid = u.cid
WHERE o.order_id IN (?)
`, [batchIds]);

console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
const duplicates = orders.filter((order, index, self) =>
self.findIndex(o => o.order_id === order.order_id) !== index
);
if (duplicates.length > 0) {
console.log('Found duplicates:', duplicates);

// Insert into PostgreSQL temp table
if (orders.length > 0) {
const placeholders = orders.map((_, idx) =>
`($${idx * 8 + 1}, $${idx * 8 + 2}, $${idx * 8 + 3}, $${idx * 8 + 4}, $${idx * 8 + 5}, $${idx * 8 + 6}, $${idx * 8 + 7}, $${idx * 8 + 8})`
).join(",");
const values = orders.flatMap(order => [
order.order_id,
order.date,
order.customer,
order.customer_name,
order.status,
order.canceled,
order.summary_discount,
order.summary_subtotal
]);

await localConnection.query(`
INSERT INTO temp_order_meta (
order_id, date, customer, customer_name, status, canceled,
summary_discount, summary_subtotal
)
VALUES ${placeholders}
ON CONFLICT (order_id) DO UPDATE SET
date = EXCLUDED.date,
customer = EXCLUDED.customer,
customer_name = EXCLUDED.customer_name,
status = EXCLUDED.status,
canceled = EXCLUDED.canceled,
summary_discount = EXCLUDED.summary_discount,
summary_subtotal = EXCLUDED.summary_subtotal
`, values);
}

const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?, ?, ?)").join(",");
const values = orders.flatMap(order => [
order.order_id,
order.date,
order.customer,
order.customer_name,
order.status,
order.canceled,
order.summary_discount,
order.summary_subtotal
]);

await localConnection.query(`
INSERT INTO temp_order_meta (
order_id,
date,
customer,
customer_name,
status,
canceled,
summary_discount,
summary_subtotal
) VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
date = VALUES(date),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
summary_discount = VALUES(summary_discount),
summary_subtotal = VALUES(summary_subtotal)
`, values);

processedCount = i + orders.length;
outputProgress({
status: "running",
@@ -287,10 +257,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
});
}

// Reset processed count for final phase
processedCount = 0;

// Get promotional discounts in batches
// Process promotional discounts in batches - Keep MySQL compatible for production
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const [discounts] = await prodConnection.query(`
@@ -301,18 +268,21 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
`, [batchIds]);

if (discounts.length > 0) {
const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
const placeholders = discounts.map((_, idx) =>
`($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
).join(",");
const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);

await localConnection.query(`
INSERT INTO temp_order_discounts VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
discount = VALUES(discount)
INSERT INTO temp_order_discounts (order_id, pid, discount)
VALUES ${placeholders}
ON CONFLICT (order_id, pid) DO UPDATE SET
discount = EXCLUDED.discount
`, values);
}
}

// Get tax information in batches
// Get tax information in batches - Keep MySQL compatible for production
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const [taxes] = await prodConnection.query(`
@@ -331,7 +301,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
`, [batchIds]);

if (taxes.length > 0) {
// Remove any duplicates before inserting
const uniqueTaxes = new Map();
taxes.forEach(t => {
const key = `${t.order_id}-${t.pid}`;
@@ -340,16 +309,21 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =

const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
if (values.length > 0) {
const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
const placeholders = Array.from({length: uniqueTaxes.size}, (_, idx) => {
const base = idx * 3;
return `($${base + 1}, $${base + 2}, $${base + 3})`;
}).join(",");
await localConnection.query(`
INSERT INTO temp_order_taxes VALUES ${placeholders}
ON DUPLICATE KEY UPDATE tax = VALUES(tax)
INSERT INTO temp_order_taxes (order_id, pid, tax)
VALUES ${placeholders}
ON CONFLICT (order_id, pid) DO UPDATE SET
tax = EXCLUDED.tax
`, values);
}
}
}
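The dedup step in the tax loop above is not just tidiness: a single multi-row INSERT ... ON CONFLICT DO UPDATE in PostgreSQL fails with "ON CONFLICT DO UPDATE command cannot affect row a second time" if the same (order_id, pid) appears twice in the VALUES list, whereas MySQL's ON DUPLICATE KEY UPDATE tolerates in-statement duplicates. In isolation the Map-by-key pattern looks roughly like this (last occurrence wins in this sketch; the original's choice is cut off by the hunk):

    // Collapse duplicate (order_id, pid) rows before building the multi-row insert.
    const uniqueTaxes = new Map();
    for (const t of taxes) {
      uniqueTaxes.set(`${t.order_id}-${t.pid}`, t);
    }
    const dedupedTaxes = [...uniqueTaxes.values()];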
// Get costeach values in batches
// Get costeach values in batches - Keep MySQL compatible for production
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const [costs] = await prodConnection.query(`
@@ -370,24 +344,27 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
`, [batchIds]);

if (costs.length > 0) {
const placeholders = costs.map(() => '(?, ?, ?)').join(",");
const placeholders = costs.map((_, idx) =>
`($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
).join(",");
const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach || 0]);

await localConnection.query(`
INSERT INTO temp_order_costs (order_id, pid, costeach)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE costeach = VALUES(costeach)
ON CONFLICT (order_id, pid) DO UPDATE SET
costeach = EXCLUDED.costeach
`, values);
}
}

// Now combine all the data and insert into orders table
// Pre-check all products at once instead of per batch
// Pre-check all products at once
const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
"SELECT pid FROM products WHERE pid = ANY($1)",
[allOrderPids]
) : [[]];
const existingPids = new Set(existingProducts.map(p => p.pid));
const existingPids = new Set(existingProducts.rows.map(p => p.pid));
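The product pre-check above also shows the IN (?) to = ANY($1) conversion: in PostgreSQL the whole pid array is bound as one parameter instead of being expanded into a list. Under the stock pg driver (the diff appears to wrap query results, so treat the exact result shape as an assumption) the call reduces to:

    // A JS array binds directly to = ANY($1); matching rows come back on result.rows.
    const result = await localConnection.query(
      'SELECT pid FROM products WHERE pid = ANY($1)',
      [allOrderPids] // one parameter: the entire array of product ids
    );
    const existingPids = new Set(result.rows.map(p => p.pid));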
// Process in larger batches
for (let i = 0; i < orderIds.length; i += 5000) {
@@ -410,7 +387,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
ELSE 0
END as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
false as tax_included,
0 as shipping,
om.customer,
om.customer_name,
@@ -422,61 +399,86 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
WHERE oi.order_id = ANY($1)
`, [batchIds]);

// Filter orders and track missing products - do this in a single pass
// Filter orders and track missing products
const validOrders = [];
const values = [];
const processedOrderItems = new Set(); // Track unique order items
const processedOrders = new Set(); // Track unique orders
const processedOrderItems = new Set();
const processedOrders = new Set();

for (const order of orders) {
for (const order of orders.rows) {
if (!existingPids.has(order.pid)) {
missingProducts.add(order.pid);
skippedOrders.add(order.order_number);
continue;
}
validOrders.push(order);
values.push(...columnNames.map(col => order[col] ?? null));
processedOrderItems.add(`${order.order_number}-${order.pid}`);
processedOrders.add(order.order_number);
}

if (validOrders.length > 0) {
// Pre-compute the placeholders string once
const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");
const placeholders = validOrders.map((_, idx) => {
const base = idx * 15;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
}).join(',');

const values = validOrders.flatMap(o => [
o.order_number,
o.pid,
o.SKU,
o.date,
o.price,
o.quantity,
o.discount,
o.tax,
o.tax_included,
o.shipping,
o.customer,
o.customer_name,
o.status,
o.canceled,
o.costeach
]);
const [result] = await localConnection.query(`
WITH inserted_orders AS (
INSERT INTO orders (
order_number, pid, SKU, date, price, quantity, discount,
tax, tax_included, shipping, customer, customer_name,
status, canceled, costeach
)
VALUES ${placeholders}
ON CONFLICT (order_number, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
date = EXCLUDED.date,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
discount = EXCLUDED.discount,
tax = EXCLUDED.tax,
tax_included = EXCLUDED.tax_included,
shipping = EXCLUDED.shipping,
customer = EXCLUDED.customer,
customer_name = EXCLUDED.customer_name,
status = EXCLUDED.status,
canceled = EXCLUDED.canceled,
costeach = EXCLUDED.costeach
RETURNING xmax, xmin
)
SELECT
COUNT(*) FILTER (WHERE xmax = 0) as inserted,
COUNT(*) FILTER (WHERE xmax <> 0) as updated
FROM inserted_orders
`, values);

const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(",")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());

const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

recordsAdded += inserts;
recordsUpdated += updates;
importedCount += processedOrderItems.size; // Count unique order items processed
const { inserted, updated } = result.rows[0];
recordsAdded += inserted;
recordsUpdated += updated;
importedCount += processedOrderItems.size;
}
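The CTE above replaces the old MySQL bookkeeping, where ON DUPLICATE KEY UPDATE reports affectedRows as 1 per inserted row and 2 per updated row, hence the divide-by-two arithmetic. In PostgreSQL a row that came straight from the INSERT arm of ON CONFLICT has xmax = 0, while a row rewritten by DO UPDATE carries a non-zero xmax, so the FILTER counts split inserts from updates directly. One caveat worth hedging: COUNT(*) is a bigint, which the stock pg driver returns as a string by default, so the counters may need an explicit conversion, roughly:

    // Assumes the stock pg driver; bigint counts arrive as strings unless a type parser is configured.
    const { inserted, updated } = result.rows[0];
    recordsAdded += Number(inserted);
    recordsUpdated += Number(updated);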
// Update progress based on unique orders processed
cumulativeProcessedOrders += processedOrders.size;
outputProgress({
status: "running",
@@ -490,123 +492,21 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
});
}

// Now try to import any orders that were skipped due to missing products
if (skippedOrders.size > 0) {
try {
outputProgress({
status: "running",
operation: "Orders import",
message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
});

// Get the orders that were skipped
const [skippedProdOrders] = await localConnection.query(`
SELECT DISTINCT
oi.order_id as order_number,
oi.pid,
oi.SKU,
om.date,
oi.price,
oi.quantity,
oi.base_discount + COALESCE(od.discount, 0) +
CASE
WHEN o.summary_discount > 0 THEN
ROUND((o.summary_discount * (oi.price * oi.quantity)) /
NULLIF(o.summary_subtotal, 0), 2)
ELSE 0
END as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(tc.costeach, 0) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN _order o ON oi.order_id = o.order_id
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
`, [Array.from(skippedOrders)]);
// Check which products exist now
const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
[skippedPids]
) : [[]];
const existingPids = new Set(existingProducts.map(p => p.pid));

// Filter orders that can now be imported
const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
const retryOrderItems = new Set(); // Track unique order items in retry

if (validOrders.length > 0) {
const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();

const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(", ")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, values);

const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

// Track unique order items
validOrders.forEach(order => {
retryOrderItems.add(`${order.order_number}-${order.pid}`);
});

outputProgress({
status: "running",
operation: "Orders import",
message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
});

// Update the main counters
recordsAdded += inserts;
recordsUpdated += updates;
importedCount += retryOrderItems.size;
}
} catch (error) {
console.warn('Warning: Failed to retry skipped orders:', error.message);
console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
}
}
// Clean up temporary tables after ALL processing is complete
// Clean up temporary tables
await localConnection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_order_items;
DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
DROP TABLE IF EXISTS temp_order_items;
DROP TABLE IF EXISTS temp_order_meta;
DROP TABLE IF EXISTS temp_order_discounts;
DROP TABLE IF EXISTS temp_order_taxes;
DROP TABLE IF EXISTS temp_order_costs;
`);

// Only update sync status if we get here (no errors thrown)
// Update sync status
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
VALUES ('orders', NOW())
ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
ON CONFLICT (table_name) DO UPDATE SET
last_sync_timestamp = NOW()
`);
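ON CONFLICT (table_name) in the sync-status upsert above only works if sync_status has a unique index or primary key on table_name. The table's definition is not part of this diff; a shape like the following is assumed:

    // Assumed DDL for the sync bookkeeping table (not shown in this commit).
    await localConnection.query(`
      CREATE TABLE IF NOT EXISTS sync_status (
        table_name VARCHAR(64) PRIMARY KEY,
        last_sync_timestamp TIMESTAMP NOT NULL DEFAULT '1970-01-01'
      );
    `);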
return {