Optimize order and product import scripts with improved performance and incremental update handling
- Refactor orders import to use temporary tables for more efficient data processing
- Improve batch processing and memory management in the order import script
- Update product import to use temporary tables for inventory status
- Modify purchase orders import to use the `date_updated` timestamp for incremental updates
- Enhance error handling and logging for import processes
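
The core idea behind the orders refactor in the diff below is to stage raw production rows in session-scoped temporary tables using multi-row inserts, then let MySQL join the staged tables instead of holding large lookup maps in Node.js memory. A minimal sketch of that pattern, assuming a mysql2/promise connection and an illustrative temp_order_items shape (not the full set of columns the script stages):

// Sketch only: stage rows into a temporary table in fixed-size batches.
// The real script stages items, metadata, discounts and taxes separately.
async function stageOrderItems(localConnection, rows, batchSize = 5000) {
  await localConnection.query(`
    CREATE TEMPORARY TABLE temp_order_items (
      order_id INT UNSIGNED,
      pid      INT UNSIGNED,
      price    DECIMAL(10,3),
      quantity INT,
      PRIMARY KEY (order_id, pid)
    ) ENGINE=InnoDB
  `);

  for (let i = 0; i < rows.length; i += batchSize) {
    const batch = rows.slice(i, i + batchSize);
    const placeholders = batch.map(() => "(?, ?, ?, ?)").join(",");
    const values = batch.flatMap(r => [r.order_id, r.pid, r.price, r.quantity]);
    // One multi-row INSERT per batch keeps round trips and memory bounded.
    await localConnection.query(
      `INSERT INTO temp_order_items VALUES ${placeholders}`,
      values
    );
  }
}

Temporary tables are scoped to the connection, so the staging inserts and the later joins must run on the same localConnection, which is also why the script drops them explicitly at the end of the run.
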
@@ -10,10 +10,10 @@ const importPurchaseOrders = require('./import/purchase-orders');
 dotenv.config({ path: path.join(__dirname, "../.env") });
 
 // Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
 const IMPORT_ORDERS = true;
-const IMPORT_PURCHASE_ORDERS = false;
+const IMPORT_PURCHASE_ORDERS = true;
 
 // Add flag for incremental updates
 const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE === 'true';
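
The hunk above only flips which importers run and reads INCREMENTAL_UPDATE from the environment. A hedged sketch of how such a flag is typically threaded into the queries further down; the helper name is hypothetical, while sync_status and the timestamp columns mirror the orders import below:

// Hypothetical helper: returns a SQL fragment plus matching bind parameters,
// so the fragment and its placeholders can never drift apart.
async function buildIncrementalFilter(localConnection, incrementalUpdate, tableName) {
  if (!incrementalUpdate) {
    return { clause: "", params: [] };
  }
  const [rows] = await localConnection.query(
    "SELECT last_sync_timestamp FROM sync_status WHERE table_name = ?",
    [tableName]
  );
  const lastSyncTime = rows?.[0]?.last_sync_timestamp || '1970-01-01';
  return {
    clause: "AND (o.stamp > ? OR o.date_placed > ? OR o.date_shipped > ? OR oi.stamp > ?)",
    params: [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime],
  };
}

// Usage (sketch): interpolate the fragment, spread the params.
// const { clause, params } = await buildIncrementalFilter(localConnection, INCREMENTAL_UPDATE, 'orders');
// const [rows] = await prodConnection.query(`SELECT ... FROM order_items oi JOIN _order o ... ${clause}`, params);
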
@@ -34,303 +34,384 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
   );
   const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
 
-  // Count the total number of orders to be imported
-  const [countResults] = await prodConnection.query(`
+  // Create temporary tables for staging data
+  await localConnection.query(`
+    CREATE TEMPORARY TABLE temp_order_items (
+      order_id INT UNSIGNED,
+      pid INT UNSIGNED,
+      SKU VARCHAR(50),
+      price DECIMAL(10,3),
+      quantity INT,
+      base_discount DECIMAL(10,3),
+      PRIMARY KEY (order_id, pid)
+    ) ENGINE=InnoDB;
+
+    CREATE TEMPORARY TABLE temp_order_meta (
+      order_id INT UNSIGNED PRIMARY KEY,
+      date DATE,
+      customer INT UNSIGNED,
+      customer_name VARCHAR(100),
+      status TINYINT UNSIGNED,
+      canceled TINYINT UNSIGNED
+    ) ENGINE=InnoDB;
+
+    CREATE TEMPORARY TABLE temp_order_discounts (
+      order_id INT UNSIGNED,
+      pid INT UNSIGNED,
+      discount DECIMAL(10,3),
+      PRIMARY KEY (order_id, pid)
+    ) ENGINE=InnoDB;
+
+    CREATE TEMPORARY TABLE temp_order_taxes (
+      order_id INT UNSIGNED,
+      pid INT UNSIGNED,
+      tax DECIMAL(10,3),
+      PRIMARY KEY (order_id, pid)
+    ) ENGINE=InnoDB;
+  `);
+
+  // Get base order items first
+  const [orderItems] = await prodConnection.query(`
     SELECT
-      COUNT(DISTINCT oi.order_id, oi.prod_pid) as total_all,
-      SUM(CASE
-        WHEN o.stamp > ? OR o.date_placed > ? OR o.date_shipped > ? OR oi.stamp > ?
-        THEN 1 ELSE 0
-      END) as total_incremental
+      oi.order_id,
+      oi.prod_pid as pid,
+      oi.prod_itemnumber as SKU,
+      oi.prod_price as price,
+      oi.qty_ordered as quantity,
+      COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount
     FROM order_items oi
     JOIN _order o ON oi.order_id = o.order_id
     WHERE o.order_status >= 15
       AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
      AND o.date_placed_onlydate IS NOT NULL
-  `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]);
+      ${incrementalUpdate ? `
+        AND (
+          o.stamp > ?
+          OR o.date_placed > ?
+          OR o.date_shipped > ?
+          OR oi.stamp > ?
+        )
+      ` : ''}
+  `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
 
-  console.log('Count details:', {
-    total_all: countResults[0].total_all,
-    total_incremental: countResults[0].total_incremental,
-    lastSyncTime,
-    incrementalUpdate
-  });
+  const totalOrders = orderItems.length;
+  let processed = 0;
 
-  const totalOrders = incrementalUpdate ? countResults[0].total_incremental : countResults[0].total_all;
+  // Insert order items in batches
+  for (let i = 0; i < orderItems.length; i += 5000) {
+    const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
+    const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
+    const values = batch.flatMap(item => [
+      item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
+    ]);
 
-  outputProgress({
-    status: "running",
-    operation: "Orders import",
-    message: `Starting ${incrementalUpdate ? 'incremental' : 'full'} import of ${totalOrders} orders`,
-    current: 0,
-    total: totalOrders
-  });
+    await localConnection.query(`
+      INSERT INTO temp_order_items VALUES ${placeholders}
+    `, values);
 
-  // Fetch orders in batches
-  const batchSize = 5000;
-  let offset = 0;
-  let importedCount = 0;
-  let lastProgressUpdate = Date.now();
+    processed += batch.length;
+    outputProgress({
+      status: "running",
+      operation: "Orders import",
+      message: `Loading order items: ${processed} of ${totalOrders}`,
+      current: processed,
+      total: totalOrders
+    });
+  }
 
-  while (offset < totalOrders) {
-    // First get the base order data
-    const [prodOrders] = await prodConnection.query(`
+  // Get unique order IDs
+  const orderIds = [...new Set(orderItems.map(item => item.order_id))];
+  // Get order metadata in batches
+  for (let i = 0; i < orderIds.length; i += 5000) {
+    const batchIds = orderIds.slice(i, i + 5000);
+    const [orders] = await prodConnection.query(`
      SELECT
-        oi.order_id as order_number,
-        oi.prod_pid as pid,
-        oi.prod_itemnumber as SKU,
+        o.order_id,
        o.date_placed_onlydate as date,
-        oi.prod_price as price,
-        oi.qty_ordered as quantity,
-        COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount,
        o.order_cid as customer,
        CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name,
        o.order_status as status,
        CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled
-      FROM order_items oi
-      JOIN _order o ON oi.order_id = o.order_id
+      FROM _order o
      LEFT JOIN users u ON o.order_cid = u.cid
-      WHERE o.order_status >= 15
-        AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
-        AND o.date_placed_onlydate IS NOT NULL
-        ${incrementalUpdate ? `
-          AND (
-            o.stamp > ?
-            OR o.date_placed > ?
-            OR o.date_shipped > ?
-            OR oi.stamp > ?
-          )
-        ` : ''}
-      ORDER BY oi.order_id, oi.prod_pid
-      LIMIT ? OFFSET ?
-    `, incrementalUpdate ?
-      [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, batchSize, offset] :
-      [batchSize, offset]
-    );
+      WHERE o.order_id IN (?)
+    `, [batchIds]);
 
-    if (prodOrders.length === 0) break;
+    const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
+    const values = orders.flatMap(order => [
+      order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled
+    ]);
 
-    // Get order numbers for this batch
-    const orderNumbers = [...new Set(prodOrders.map(o => o.order_number))];
-    const orderPids = prodOrders.map(o => o.pid);
+    await localConnection.query(`
+      INSERT INTO temp_order_meta VALUES ${placeholders}
+    `, values);
 
-    // Get promotional discounts in a separate query
-    const [promoDiscounts] = await prodConnection.query(`
-      SELECT order_id, pid, amount
+    outputProgress({
+      status: "running",
+      operation: "Orders import",
+      message: `Loading order metadata: ${i + orders.length} of ${orderIds.length}`,
+      current: i + orders.length,
+      total: orderIds.length
+    });
+  }
 
+  // Get promotional discounts in batches
+  for (let i = 0; i < orderIds.length; i += 5000) {
+    const batchIds = orderIds.slice(i, i + 5000);
+    const [discounts] = await prodConnection.query(`
+      SELECT order_id, pid, SUM(amount) as discount
      FROM order_discount_items
      WHERE order_id IN (?)
-    `, [orderNumbers]);
+      GROUP BY order_id, pid
+    `, [batchIds]);
 
-    // Create a map for quick discount lookups
-    const discountMap = new Map();
-    promoDiscounts.forEach(d => {
-      const key = `${d.order_id}-${d.pid}`;
-      discountMap.set(key, d.amount || 0);
-    });
+    if (discounts.length > 0) {
+      const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
+      const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);
 
-    // Get tax information in a separate query
-    const [taxInfo] = await prodConnection.query(`
-      SELECT oti.order_id, otip.pid, otip.item_taxes_to_collect
+      await localConnection.query(`
+        INSERT INTO temp_order_discounts VALUES ${placeholders}
+      `, values);
+    }
+  }
 
+  // Get tax information in batches
+  for (let i = 0; i < orderIds.length; i += 5000) {
+    const batchIds = orderIds.slice(i, i + 5000);
+    const [taxes] = await prodConnection.query(`
+      SELECT DISTINCT
+        oti.order_id,
+        otip.pid,
+        otip.item_taxes_to_collect as tax
      FROM order_tax_info oti
+      JOIN (
+        SELECT order_id, MAX(stamp) as max_stamp
+        FROM order_tax_info
+        WHERE order_id IN (?)
+        GROUP BY order_id
+      ) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp
      JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
-      WHERE oti.order_id IN (?)
-        AND (oti.order_id, oti.stamp) IN (
-          SELECT order_id, MAX(stamp)
-          FROM order_tax_info
-          WHERE order_id IN (?)
-          GROUP BY order_id
-        )
-    `, [orderNumbers, orderNumbers]);
+    `, [batchIds]);
 
-    // Create a map for quick tax lookups
-    const taxMap = new Map();
-    taxInfo.forEach(t => {
-      const key = `${t.order_id}-${t.pid}`;
-      taxMap.set(key, t.item_taxes_to_collect || 0);
-    });
+    if (taxes.length > 0) {
+      // Remove any duplicates before inserting
+      const uniqueTaxes = new Map();
+      taxes.forEach(t => {
+        const key = `${t.order_id}-${t.pid}`;
+        uniqueTaxes.set(key, t);
+      });
 
-    // Check for missing products
-    const [existingProducts] = await localConnection.query(
-      "SELECT pid FROM products WHERE pid IN (?)",
-      [orderPids]
-    );
-    const existingPids = new Set(existingProducts.map(p => p.pid));
+      const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
+      if (values.length > 0) {
+        const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
+        await localConnection.query(`
+          INSERT INTO temp_order_taxes VALUES ${placeholders}
+        `, values);
+      }
+    }
+  }
 
-    // Track missing products and filter orders
-    const validOrders = prodOrders.filter(order => {
-      if (!order.date) return false;
+  // Now combine all the data and insert into orders table
+  let importedCount = 0;
+
+  // Pre-check all products at once instead of per batch
+  const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
+  const [existingProducts] = await localConnection.query(
+    "SELECT pid FROM products WHERE pid IN (?)",
+    [allOrderPids]
+  );
+  const existingPids = new Set(existingProducts.map(p => p.pid));
+
+  // Process in larger batches
+  for (let i = 0; i < orderIds.length; i += 5000) {
+    const batchIds = orderIds.slice(i, i + 5000);
+
+    // Get combined data for this batch
+    const [orders] = await localConnection.query(`
+      SELECT
+        oi.order_id as order_number,
+        oi.pid,
+        oi.SKU,
+        om.date,
+        oi.price,
+        oi.quantity,
+        oi.base_discount + COALESCE(od.discount, 0) as discount,
+        COALESCE(ot.tax, 0) as tax,
+        0 as tax_included,
+        0 as shipping,
+        om.customer,
+        om.customer_name,
+        om.status,
+        om.canceled
+      FROM temp_order_items oi
+      JOIN temp_order_meta om ON oi.order_id = om.order_id
+      LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
+      LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
+      WHERE oi.order_id IN (?)
+    `, [batchIds]);
+
+    // Filter orders and track missing products - do this in a single pass
+    const validOrders = [];
+    const values = [];
+
+    for (const order of orders) {
      if (!existingPids.has(order.pid)) {
        missingProducts.add(order.pid);
        skippedOrders.add(order.order_number);
-        return false;
+        continue;
      }
-      return true;
-    });
+      validOrders.push(order);
+      values.push(...columnNames.map(col => order[col] ?? null));
+    }
 
-    // Prepare values for insertion
-    const orderValues = validOrders.map(order => {
-      const orderKey = `${order.order_number}-${order.pid}`;
-      const orderData = {
-        id: order.order_number,
-        order_number: order.order_number,
-        pid: order.pid,
-        SKU: order.SKU,
-        date: order.date,
-        price: order.price,
-        quantity: order.quantity,
-        discount: Number(order.base_discount || 0) + Number(discountMap.get(orderKey) || 0),
-        tax: Number(taxMap.get(orderKey) || 0),
-        tax_included: 0,
-        shipping: 0,
-        customer: order.customer,
-        customer_name: order.customer_name || '',
-        status: order.status,
-        canceled: order.canceled,
-      };
+    if (validOrders.length > 0) {
+      // Pre-compute the placeholders string once
+      const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
+      const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");
 
-      return columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null);
-    });
+      await localConnection.query(`
+        INSERT INTO orders (${columnNames.join(",")})
 
-    // Execute the insert
-    if (orderValues.length > 0) {
-      const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
-      const insertQuery = `
-        INSERT INTO orders (${columnNames.join(", ")})
        VALUES ${placeholders}
        ON DUPLICATE KEY UPDATE
-        ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")}
-      `;
+        ${columnNames.map(col => `${col} = VALUES(${col})`).join(",")}
+      `, values);
 
-      await localConnection.query(insertQuery, orderValues.flat());
+      importedCount += validOrders.length;
    }
 
-    importedCount += validOrders.length;
-    offset += batchSize;
-
-    // Update progress every second
-    const now = Date.now();
-    if (now - lastProgressUpdate >= 1000) {
-      outputProgress({
-        status: "running",
-        operation: "Orders import",
-        message: `Imported ${importedCount} of ${totalOrders} orders`,
-        current: importedCount,
-        total: totalOrders,
-        elapsed: formatElapsedTime((now - startTime) / 1000),
-        remaining: estimateRemaining(startTime, importedCount, totalOrders),
-        rate: calculateRate(startTime, importedCount)
-      });
-      lastProgressUpdate = now;
-    }
+    outputProgress({
+      status: "running",
+      operation: "Orders import",
+      message: `Imported ${importedCount} of ${totalOrders} orders`,
+      current: importedCount,
+      total: totalOrders,
+      elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+      remaining: estimateRemaining(startTime, importedCount, totalOrders),
+      rate: calculateRate(startTime, importedCount)
+    });
  }
 
+  // Clean up temporary tables
+  await localConnection.query(`
+    DROP TEMPORARY TABLE IF EXISTS temp_order_items;
+    DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
+    DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
+    DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
+  `);
+
   // Import missing products if any
   if (missingProducts.size > 0) {
-    await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts));
+    try {
+      await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts));
 
     // Retry skipped orders after importing products
     if (skippedOrders.size > 0) {
       outputProgress({
         status: "running",
         operation: "Orders import",
         message: `Retrying import of ${skippedOrders.size} orders with previously missing products`
       });
 
       const [skippedProdOrders] = await prodConnection.query(`
         SELECT
           o.order_id,
           CASE
             WHEN o.date_placed = '0000-00-00 00:00:00' OR o.date_placed IS NULL THEN o.stamp
             ELSE o.date_placed
           END as date,
           o.order_cid,
           o.bill_firstname,
           o.bill_lastname,
           o.order_email,
           o.order_status,
           o.date_shipped,
           o.date_cancelled,
           oi.prod_pid,
           oi.prod_itemnumber,
           oi.prod_price,
           oi.qty_ordered,
           oi.qty_back,
           oi.qty_placed,
           oi.qty_placed_2,
           oi.discounted,
           oi.summary_cogs,
           oi.summary_profit,
           oi.summary_orderdate,
           oi.summary_paiddate,
           oi.date_added,
           oi.stamp
         FROM order_items oi
         JOIN _order o ON oi.order_id = o.order_id
         WHERE o.order_id IN (?)
       `, [Array.from(skippedOrders)]);
 
       // Prepare values for insertion
       const skippedOrderValues = skippedProdOrders.flatMap(order => {
         if (!order.date) {
           console.log(`Warning: Skipped order ${order.order_id} has null date:`, JSON.stringify(order, null, 2));
           return [];
+          }
+
+          const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0;
+          const customerName = `${order.bill_firstname} ${order.bill_lastname}`;
+
+          // Create an object with keys based on column names
+          const orderData = {
+            id: order.order_id,
+            order_number: order.order_id,
+            pid: order.prod_pid,
+            SKU: order.prod_itemnumber,
+            date: order.date ? (
+              order.date instanceof Date ?
+                order.date.toJSON()?.slice(0,10) || null :
+                (typeof order.date === 'string' ? order.date.split(' ')[0] : null)
+            ) : null,
+            price: order.prod_price,
+            quantity: order.qty_ordered,
+            discount: order.discounted,
+            tax: 0, // Placeholder, will be calculated later
+            tax_included: 0, // Placeholder, will be calculated later
+            shipping: 0, // Placeholder, will be calculated later
+            customer: order.order_email,
+            customer_name: customerName,
+            status: order.order_status,
+            canceled: canceled,
+          };
+
+          // Map column names to values, handling missing columns
+          return [columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null)];
+        });
+
+        // Construct the insert query dynamically
+        const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
+        const skippedInsertQuery = `
+          INSERT INTO orders (${columnNames.join(", ")})
+          VALUES ${skippedPlaceholders}
+          ON DUPLICATE KEY UPDATE
+          ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")}
+        `;
+
+        // Execute the insert query
+        if (skippedOrderValues.length > 0) {
+          await localConnection.query(skippedInsertQuery, skippedOrderValues.flat());
        }
 
-        const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0;
-        const customerName = `${order.bill_firstname} ${order.bill_lastname}`;
+        importedCount += skippedProdOrders.length;
 
-        // Create an object with keys based on column names
-        const orderData = {
-          id: order.order_id,
-          order_number: order.order_id,
-          pid: order.prod_pid,
-          SKU: order.prod_itemnumber,
-          date: order.date ? (
-            order.date instanceof Date ?
-              order.date.toJSON()?.slice(0,10) || null :
-              (typeof order.date === 'string' ? order.date.split(' ')[0] : null)
-          ) : null,
-          price: order.prod_price,
-          quantity: order.qty_ordered,
-          discount: order.discounted,
-          tax: 0, // Placeholder, will be calculated later
-          tax_included: 0, // Placeholder, will be calculated later
-          shipping: 0, // Placeholder, will be calculated later
-          customer: order.order_email,
-          customer_name: customerName,
-          status: order.order_status,
-          canceled: canceled,
-        };
+        outputProgress({
+          status: "running",
+          operation: "Orders import",
+          message: `Successfully imported ${skippedProdOrders.length} previously skipped orders`,
+        });
 
-        // Map column names to values, handling missing columns
-        return [columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null)];
-      });
 
-      // Construct the insert query dynamically
-      const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
-      const skippedInsertQuery = `
-        INSERT INTO orders (${columnNames.join(", ")})
-        VALUES ${skippedPlaceholders}
-        ON DUPLICATE KEY UPDATE
-        ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")}
-      `;
 
-      // Execute the insert query
-      if (skippedOrderValues.length > 0) {
-        await localConnection.query(skippedInsertQuery, skippedOrderValues.flat());
      }
 
-      importedCount += skippedProdOrders.length;
-      outputProgress({
-        status: "running",
-        operation: "Orders import",
-        message: `Successfully imported ${skippedProdOrders.length} previously skipped orders`,
-      });
+    } catch (error) {
+      console.warn('Warning: Failed to import missing products:', error.message);
+      console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
    }
  }
 
-  // Update sync status
+  // Update sync status - do this even if missing products import fails
   await localConnection.query(`
     INSERT INTO sync_status (table_name, last_sync_timestamp)
     VALUES ('orders', NOW())
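
The orders hunk above ends with a dynamic bulk upsert driven by columnNames. Factored out, the pattern looks roughly like the sketch below, under the assumption that columnNames lists the local orders columns and each row object carries matching keys:

// Bulk upsert: one multi-row INSERT with ON DUPLICATE KEY UPDATE, so re-running
// the import refreshes existing rows instead of failing on the primary key.
async function upsertOrders(localConnection, columnNames, rows) {
  if (rows.length === 0) return;

  const rowPlaceholder = `(${columnNames.map(() => "?").join(",")})`;
  const placeholders = Array(rows.length).fill(rowPlaceholder).join(",");
  const values = rows.flatMap(row => columnNames.map(col => row[col] ?? null));

  await localConnection.query(
    `INSERT INTO orders (${columnNames.join(",")})
     VALUES ${placeholders}
     ON DUPLICATE KEY UPDATE ${columnNames.map(col => `${col} = VALUES(${col})`).join(",")}`,
    values
  );
}

Because the statement is an upsert, full and incremental runs can safely overlap: rows already imported are simply overwritten with the latest values.
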
@@ -508,9 +508,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
   // Setup temporary tables
   await setupTemporaryTables(localConnection);
 
-  // Materialize calculations for missing products
-  await localConnection.query(`
-    INSERT INTO temp_inventory_status
+  // Get inventory data from production first
+  const [prodInventory] = await prodConnection.query(`
     SELECT
       p.pid,
       COALESCE(si.available_local, 0) - COALESCE(ps.pending_qty, 0) as stock_quantity,
@@ -540,6 +539,22 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
     WHERE p.pid IN (?)
   `, [missingPids, missingPids]);
 
+  // Insert inventory data into temp table
+  if (prodInventory.length > 0) {
+    const placeholders = prodInventory.map(() => "(?, ?, ?, ?, ?)").join(",");
+    const values = prodInventory.flatMap(p => [
+      p.pid,
+      p.stock_quantity,
+      p.pending_qty,
+      p.preorder_count,
+      p.notions_inv_count
+    ]);
+
+    await localConnection.query(`
+      INSERT INTO temp_inventory_status VALUES ${placeholders}
+    `, values);
+  }
+
   // First get the column names from the table structure
   const [columns] = await localConnection.query(`
     SELECT COLUMN_NAME
@@ -560,21 +575,9 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
       p.date_created,
       p.datein AS first_received,
       p.location,
-      COALESCE(si.available_local, 0) - COALESCE(
-        (SELECT SUM(oi.qty_ordered - oi.qty_placed)
-         FROM order_items oi
-         JOIN _order o ON oi.order_id = o.order_id
-         WHERE oi.prod_pid = p.pid
-           AND o.date_placed != '0000-00-00 00:00:00'
-           AND o.date_shipped = '0000-00-00 00:00:00'
-           AND oi.pick_finished = 0
-           AND oi.qty_back = 0
-           AND o.order_status != 15
-           AND o.order_status < 90
-           AND oi.qty_ordered >= oi.qty_placed
-           AND oi.qty_ordered > 0), 0) AS stock_quantity,
-      ci.onpreorder AS preorder_count,
-      pnb.inventory AS notions_inv_count,
+      tis.stock_quantity,
+      tis.preorder_count,
+      tis.notions_inv_count,
       COALESCE(pcp.price_each, 0) as price,
       COALESCE(p.sellingprice, 0) AS regular_price,
       COALESCE((SELECT ROUND(AVG(costeach), 5)
@@ -630,8 +633,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
       pls.date_sold as date_last_sold,
       GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids
     FROM products p
-    LEFT JOIN current_inventory ci ON p.pid = ci.pid
-    LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
+    LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
     LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
     LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
     LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
@@ -644,12 +646,12 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
     LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
     LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
     LEFT JOIN product_last_sold pls ON p.pid = pls.pid
     LEFT JOIN (
       SELECT pid, MIN(price_each) as price_each
       FROM product_current_prices
       WHERE active = 1
       GROUP BY pid
     ) pcp ON p.pid = pcp.pid
     WHERE p.pid IN (?)
     GROUP BY p.pid
   `, [missingPids]);
@@ -29,7 +29,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
   // Build incremental conditions
   const incrementalWhereClause = incrementalUpdate
     ? `AND (
-        p.stamp > ?
+        p.date_updated > ?
         OR p.date_modified > ?
         OR p.date_ordered > ?
         OR p.date_estin > ?
@@ -81,7 +81,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
       USE INDEX (idx_date_created)
       WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
         AND (date_ordered > ?
-          OR stamp > ?
+          OR date_updated > ?
           OR date_modified > ?)
       UNION
       SELECT DISTINCT r.receiving_id as po_id