821 lines
31 KiB
JavaScript
821 lines
31 KiB
JavaScript
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
|
|
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');
|
|
|
|
/**
 * Imports orders from a production MySQL database to a local PostgreSQL database.
 * It can run in two modes:
 * 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
 * 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
 *
 * @param {object} prodConnection - A MySQL connection to production DB (MySQL 5.7).
 * @param {object} localConnection - A PostgreSQL connection to the local DB. (The queries
 *   below use $n placeholders, ON CONFLICT, CREATE TEMP TABLE, `.rows` results and `xmax`,
 *   all of which are PostgreSQL-specific — the previous "MySQL 8.0" description was wrong.)
 * @param {boolean} incrementalUpdate - Set to false for a full sync; true for incremental.
 *
 * @returns {object} Information about the sync operation.
 */
async function importOrders(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();
  // Orders skipped because at least one of their items references a product
  // that does not exist in the local `products` table.
  const skippedOrders = new Set();
  // Product ids referenced by order items but absent from the local `products` table.
  const missingProducts = new Set();

  // Map order status codes to text values (consistent with PO status mapping in purchase-orders.js)
  // NOTE: both 15 and 30 map to 'canceled'.
  const orderStatusMap = {
    0: 'created',
    10: 'unfinished',
    15: 'canceled',
    16: 'combined',
    20: 'placed',
    22: 'placed_incomplete',
    30: 'canceled',
    40: 'awaiting_payment',
    50: 'awaiting_products',
    55: 'shipping_later',
    56: 'shipping_together',
    60: 'ready',
    61: 'flagged',
    62: 'fix_before_pick',
    65: 'manual_picking',
    70: 'in_pt',
    80: 'picked',
    90: 'awaiting_shipment',
    91: 'remote_wait',
    92: 'awaiting_pickup',
    93: 'fix_before_ship',
    95: 'shipped_confirmed',
    100: 'shipped'
  };
  // Running counters reported back to the caller and via outputProgress().
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let processedCount = 0;
  let importedCount = 0;
  let totalOrderItems = 0;
  let totalUniqueOrders = 0;
  let cumulativeProcessedOrders = 0;

  try {
    // Get last sync info - NOT in a transaction anymore
    const [syncInfo] = await localConnection.query(
      "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
    );
    // Fall back to the epoch when no prior sync row exists (first-ever run).
    const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
    // Adjust for mysql2 driver timezone vs MySQL server timezone mismatch
    const mysqlSyncTime = prodConnection.adjustDateForMySQL
      ? prodConnection.adjustDateForMySQL(lastSyncTime)
      : lastSyncTime;

    console.log('Orders: Using last sync time:', lastSyncTime, '(adjusted:', mysqlSyncTime, ')');

    // First get count of order items - Keep MySQL compatible for production
    // Incremental mode looks back 1 year; full mode looks back 5 years.
    // The "changed" test covers the order row, the item row, and any tax-info
    // row touching this order/product pair.
    const [[{ total }]] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM order_items oi
      JOIN _order o ON oi.order_id = o.order_id
      WHERE o.order_status >= 15
      AND o.date_placed >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
      AND o.date_placed IS NOT NULL
      ${incrementalUpdate ? `
        AND (
          o.stamp > ?
          OR oi.stamp > ?
          OR EXISTS (
            SELECT 1 FROM order_tax_info oti
            JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
            WHERE oti.order_id = o.order_id
            AND otip.pid = oi.prod_pid
            AND oti.stamp > ?
          )
        )
      ` : ''}
    `, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);

    totalOrderItems = total;
    console.log('Orders: Found changes:', totalOrderItems);

    // Get order items - Keep MySQL compatible for production
    // NOTE: the WHERE clause here must stay in lockstep with the COUNT query above,
    // otherwise the progress totals will be wrong.
    console.log('Orders: Starting MySQL query...');
    const [orderItems] = await prodConnection.query(`
      SELECT
        oi.order_id,
        oi.prod_pid,
        COALESCE(NULLIF(TRIM(oi.prod_itemnumber), ''), 'NO-SKU') as SKU,
        oi.prod_price as price,
        oi.qty_ordered as quantity,
        COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount,
        oi.stamp as last_modified
      FROM order_items oi
      JOIN _order o ON oi.order_id = o.order_id
      WHERE o.order_status >= 15
      AND o.date_placed >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
      AND o.date_placed IS NOT NULL
      ${incrementalUpdate ? `
        AND (
          o.stamp > ?
          OR oi.stamp > ?
          OR EXISTS (
            SELECT 1 FROM order_tax_info oti
            JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
            WHERE oti.order_id = o.order_id
            AND otip.pid = oi.prod_pid
            AND oti.stamp > ?
          )
        )
      ` : ''}
    `, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);

    console.log('Orders: Found', orderItems.length, 'order items to process');

    // Create tables in PostgreSQL for data processing
    // Start a transaction just for creating the temp tables
    await localConnection.beginTransaction();
    try {
      await localConnection.query(`
        DROP TABLE IF EXISTS temp_order_items;
        DROP TABLE IF EXISTS temp_order_meta;
        DROP TABLE IF EXISTS temp_order_discounts;
        DROP TABLE IF EXISTS temp_order_taxes;
        DROP TABLE IF EXISTS temp_order_costs;
        DROP TABLE IF EXISTS temp_main_discounts;
        DROP TABLE IF EXISTS temp_item_discounts;

        CREATE TEMP TABLE temp_order_items (
          order_id INTEGER NOT NULL,
          pid INTEGER NOT NULL,
          sku TEXT NOT NULL,
          price NUMERIC(14, 4) NOT NULL,
          quantity INTEGER NOT NULL,
          base_discount NUMERIC(14, 4) DEFAULT 0,
          PRIMARY KEY (order_id, pid)
        );

        CREATE TEMP TABLE temp_order_meta (
          order_id INTEGER NOT NULL,
          date TIMESTAMP WITH TIME ZONE NOT NULL,
          customer TEXT NOT NULL,
          customer_name TEXT NOT NULL,
          status TEXT,
          canceled BOOLEAN,
          summary_discount NUMERIC(14, 4) DEFAULT 0.0000,
          summary_subtotal NUMERIC(14, 4) DEFAULT 0.0000,
          summary_discount_subtotal NUMERIC(14, 4) DEFAULT 0.0000,
          PRIMARY KEY (order_id)
        );

        CREATE TEMP TABLE temp_order_discounts (
          order_id INTEGER NOT NULL,
          pid INTEGER NOT NULL,
          discount NUMERIC(14, 4) NOT NULL,
          PRIMARY KEY (order_id, pid)
        );

        CREATE TEMP TABLE temp_main_discounts (
          order_id INTEGER NOT NULL,
          discount_id INTEGER NOT NULL,
          discount_amount_subtotal NUMERIC(14, 4) DEFAULT 0.0000,
          PRIMARY KEY (order_id, discount_id)
        );

        CREATE TEMP TABLE temp_item_discounts (
          order_id INTEGER NOT NULL,
          pid INTEGER NOT NULL,
          discount_id INTEGER NOT NULL,
          amount NUMERIC(14, 4) NOT NULL,
          PRIMARY KEY (order_id, pid, discount_id)
        );

        CREATE TEMP TABLE temp_order_taxes (
          order_id INTEGER NOT NULL,
          pid INTEGER NOT NULL,
          tax NUMERIC(14, 4) NOT NULL,
          PRIMARY KEY (order_id, pid)
        );

        CREATE TEMP TABLE temp_order_costs (
          order_id INTEGER NOT NULL,
          pid INTEGER NOT NULL,
          costeach NUMERIC(14, 4) DEFAULT 0.0000,
          PRIMARY KEY (order_id, pid)
        );

        CREATE INDEX idx_temp_order_items_pid ON temp_order_items(pid);
        CREATE INDEX idx_temp_order_meta_order_id ON temp_order_meta(order_id);
        CREATE INDEX idx_temp_order_discounts_order_pid ON temp_order_discounts(order_id, pid);
        CREATE INDEX idx_temp_order_taxes_order_pid ON temp_order_taxes(order_id, pid);
        CREATE INDEX idx_temp_order_costs_order_pid ON temp_order_costs(order_id, pid);
        CREATE INDEX idx_temp_main_discounts_discount_id ON temp_main_discounts(discount_id);
        CREATE INDEX idx_temp_item_discounts_order_pid ON temp_item_discounts(order_id, pid);
        CREATE INDEX idx_temp_item_discounts_discount_id ON temp_item_discounts(discount_id);
      `);
      await localConnection.commit();
    } catch (error) {
      await localConnection.rollback();
      throw error;
    }

    // Insert order items in batches - each batch gets its own transaction
    // so a failure only rolls back the current 5000-row chunk.
    for (let i = 0; i < orderItems.length; i += 5000) {
      await localConnection.beginTransaction();
      try {
        const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
        // Build one VALUES tuple per row: 6 positional parameters each.
        const placeholders = batch.map((_, idx) =>
          `($${idx * 6 + 1}, $${idx * 6 + 2}, $${idx * 6 + 3}, $${idx * 6 + 4}, $${idx * 6 + 5}, $${idx * 6 + 6})`
        ).join(",");
        const values = batch.flatMap(item => [
          item.order_id, item.prod_pid, item.SKU, item.price, item.quantity, item.base_discount
        ]);

        await localConnection.query(`
          INSERT INTO temp_order_items (order_id, pid, sku, price, quantity, base_discount)
          VALUES ${placeholders}
          ON CONFLICT (order_id, pid) DO UPDATE SET
            sku = EXCLUDED.sku,
            price = EXCLUDED.price,
            quantity = EXCLUDED.quantity,
            base_discount = EXCLUDED.base_discount
        `, values);

        await localConnection.commit();

        processedCount = i + batch.length;
        outputProgress({
          status: "running",
          operation: "Orders import",
          message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
          current: processedCount,
          total: totalOrderItems,
          elapsed: formatElapsedTime(startTime),
          remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
          rate: calculateRate(startTime, processedCount)
        });
      } catch (error) {
        await localConnection.rollback();
        throw error;
      }
    }

    // Get unique order IDs
    const orderIds = [...new Set(orderItems.map(item => item.order_id))];
    totalUniqueOrders = orderIds.length;
    console.log('Orders: Processing', totalUniqueOrders, 'unique orders');

    // Reset processed count for order processing phase
    processedCount = 0;

    // Process metadata, discounts, taxes, and costs in parallel
    // METADATA_BATCH_SIZE: how many order ids are pulled from MySQL per round trip.
    // PG_BATCH_SIZE: how many rows go into each multi-row PostgreSQL INSERT.
    const METADATA_BATCH_SIZE = 2000;
    const PG_BATCH_SIZE = 200;

    // Add a helper function for title case conversion
|
function toTitleCase(str) {
|
|
if (!str) return '';
|
|
return str.toLowerCase().split(' ').map(word => {
|
|
return word.charAt(0).toUpperCase() + word.slice(1);
|
|
}).join(' ');
|
|
}
|
|
|
|
const processMetadataBatch = async (batchIds) => {
|
|
const [orders] = await prodConnection.query(`
|
|
SELECT
|
|
o.order_id,
|
|
o.date_placed as date,
|
|
o.order_cid as customer,
|
|
CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name,
|
|
o.order_status as status,
|
|
CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled,
|
|
o.summary_discount,
|
|
o.summary_subtotal,
|
|
o.summary_discount_subtotal
|
|
FROM _order o
|
|
LEFT JOIN users u ON o.order_cid = u.cid
|
|
WHERE o.order_id IN (?)
|
|
`, [batchIds]);
|
|
|
|
// Process in sub-batches for PostgreSQL
|
|
await localConnection.beginTransaction();
|
|
try {
|
|
for (let j = 0; j < orders.length; j += PG_BATCH_SIZE) {
|
|
const subBatch = orders.slice(j, j + PG_BATCH_SIZE);
|
|
if (subBatch.length === 0) continue;
|
|
|
|
const placeholders = subBatch.map((_, idx) =>
|
|
`($${idx * 9 + 1}, $${idx * 9 + 2}, $${idx * 9 + 3}, $${idx * 9 + 4}, $${idx * 9 + 5}, $${idx * 9 + 6}, $${idx * 9 + 7}, $${idx * 9 + 8}, $${idx * 9 + 9})`
|
|
).join(",");
|
|
|
|
const values = subBatch.flatMap(order => [
|
|
order.order_id,
|
|
new Date(order.date), // Convert to TIMESTAMP WITH TIME ZONE
|
|
order.customer,
|
|
toTitleCase(order.customer_name) || '',
|
|
orderStatusMap[order.status] || order.status.toString(), // Map numeric status to text
|
|
order.canceled,
|
|
order.summary_discount || 0,
|
|
order.summary_subtotal || 0,
|
|
order.summary_discount_subtotal || 0
|
|
]);
|
|
|
|
await localConnection.query(`
|
|
INSERT INTO temp_order_meta (
|
|
order_id, date, customer, customer_name, status, canceled,
|
|
summary_discount, summary_subtotal, summary_discount_subtotal
|
|
)
|
|
VALUES ${placeholders}
|
|
ON CONFLICT (order_id) DO UPDATE SET
|
|
date = EXCLUDED.date,
|
|
customer = EXCLUDED.customer,
|
|
customer_name = EXCLUDED.customer_name,
|
|
status = EXCLUDED.status,
|
|
canceled = EXCLUDED.canceled,
|
|
summary_discount = EXCLUDED.summary_discount,
|
|
summary_subtotal = EXCLUDED.summary_subtotal,
|
|
summary_discount_subtotal = EXCLUDED.summary_discount_subtotal
|
|
`, values);
|
|
}
|
|
await localConnection.commit();
|
|
} catch (error) {
|
|
await localConnection.rollback();
|
|
throw error;
|
|
}
|
|
};
|
|
|
|
    /**
     * Loads discount data for a batch of order ids from production MySQL into
     * the PostgreSQL temp tables:
     *   1. order-level discount records -> temp_main_discounts
     *   2. item-level discount records  -> temp_item_discounts
     *   3. rebuilds temp_order_discounts as a per (order_id, pid) SUM of
     *      everything accumulated in temp_item_discounts so far.
     *
     * Each of the two write phases runs in its own transaction; a failure rolls
     * back only that phase and rethrows.
     *
     * @param {Array<number>} batchIds - Order ids to load.
     */
    const processDiscountsBatch = async (batchIds) => {
      // First, load main discount records
      const [mainDiscounts] = await prodConnection.query(`
        SELECT order_id, discount_id, discount_amount_subtotal
        FROM order_discounts
        WHERE order_id IN (?)
      `, [batchIds]);

      if (mainDiscounts.length > 0) {
        await localConnection.beginTransaction();
        try {
          for (let j = 0; j < mainDiscounts.length; j += PG_BATCH_SIZE) {
            const subBatch = mainDiscounts.slice(j, j + PG_BATCH_SIZE);
            if (subBatch.length === 0) continue;

            // 3 positional parameters per row.
            const placeholders = subBatch.map((_, idx) =>
              `($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
            ).join(",");

            const values = subBatch.flatMap(d => [
              d.order_id,
              d.discount_id,
              d.discount_amount_subtotal || 0
            ]);

            await localConnection.query(`
              INSERT INTO temp_main_discounts (order_id, discount_id, discount_amount_subtotal)
              VALUES ${placeholders}
              ON CONFLICT (order_id, discount_id) DO UPDATE SET
                discount_amount_subtotal = EXCLUDED.discount_amount_subtotal
            `, values);
          }
          await localConnection.commit();
        } catch (error) {
          await localConnection.rollback();
          throw error;
        }
      }

      // Then, load item discount records
      const [discounts] = await prodConnection.query(`
        SELECT order_id, pid, discount_id, amount
        FROM order_discount_items
        WHERE order_id IN (?)
      `, [batchIds]);

      // Early return is safe: temp_order_discounts keeps whatever aggregate a
      // previous batch built, and temp_item_discounts is untouched here.
      if (discounts.length === 0) return;

      // Process in memory to handle potential duplicates
      // (last row for a given (order_id, pid, discount_id) key wins).
      const discountMap = new Map();
      for (const d of discounts) {
        const key = `${d.order_id}-${d.pid}-${d.discount_id}`;
        discountMap.set(key, d);
      }

      const uniqueDiscounts = Array.from(discountMap.values());

      await localConnection.beginTransaction();
      try {
        for (let j = 0; j < uniqueDiscounts.length; j += PG_BATCH_SIZE) {
          const subBatch = uniqueDiscounts.slice(j, j + PG_BATCH_SIZE);
          if (subBatch.length === 0) continue;

          // 4 positional parameters per row.
          const placeholders = subBatch.map((_, idx) =>
            `($${idx * 4 + 1}, $${idx * 4 + 2}, $${idx * 4 + 3}, $${idx * 4 + 4})`
          ).join(",");

          const values = subBatch.flatMap(d => [
            d.order_id,
            d.pid,
            d.discount_id,
            d.amount || 0
          ]);

          await localConnection.query(`
            INSERT INTO temp_item_discounts (order_id, pid, discount_id, amount)
            VALUES ${placeholders}
            ON CONFLICT (order_id, pid, discount_id) DO UPDATE SET
              amount = EXCLUDED.amount
          `, values);
        }

        // Create aggregated view with a simpler, safer query that avoids duplicates
        // TRUNCATE + full rebuild: temp_item_discounts accumulates across ALL
        // batches processed so far, so after the last batch this table holds the
        // complete per-(order_id, pid) totals.
        await localConnection.query(`
          TRUNCATE temp_order_discounts;

          INSERT INTO temp_order_discounts (order_id, pid, discount)
          SELECT order_id, pid, SUM(amount) as discount
          FROM temp_item_discounts
          GROUP BY order_id, pid
        `);

        await localConnection.commit();
      } catch (error) {
        await localConnection.rollback();
        throw error;
      }
    };
|
|
|
|
const processTaxesBatch = async (batchIds) => {
|
|
// Optimized tax query to avoid subquery
|
|
const [taxes] = await prodConnection.query(`
|
|
SELECT oti.order_id, otip.pid, otip.item_taxes_to_collect as tax
|
|
FROM (
|
|
SELECT order_id, MAX(taxinfo_id) as latest_taxinfo_id
|
|
FROM order_tax_info
|
|
WHERE order_id IN (?)
|
|
GROUP BY order_id
|
|
) latest_info
|
|
JOIN order_tax_info oti ON oti.order_id = latest_info.order_id
|
|
AND oti.taxinfo_id = latest_info.latest_taxinfo_id
|
|
JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
|
|
`, [batchIds]);
|
|
|
|
if (taxes.length === 0) return;
|
|
|
|
await localConnection.beginTransaction();
|
|
try {
|
|
for (let j = 0; j < taxes.length; j += PG_BATCH_SIZE) {
|
|
const subBatch = taxes.slice(j, j + PG_BATCH_SIZE);
|
|
if (subBatch.length === 0) continue;
|
|
|
|
const placeholders = subBatch.map((_, idx) =>
|
|
`($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
|
|
).join(",");
|
|
|
|
const values = subBatch.flatMap(t => [
|
|
t.order_id,
|
|
t.pid,
|
|
t.tax || 0
|
|
]);
|
|
|
|
await localConnection.query(`
|
|
INSERT INTO temp_order_taxes (order_id, pid, tax)
|
|
VALUES ${placeholders}
|
|
ON CONFLICT (order_id, pid) DO UPDATE SET
|
|
tax = EXCLUDED.tax
|
|
`, values);
|
|
}
|
|
await localConnection.commit();
|
|
} catch (error) {
|
|
await localConnection.rollback();
|
|
throw error;
|
|
}
|
|
};
|
|
|
|
    /**
     * Loads per-item costs for a batch of order ids from production MySQL and
     * upserts them into temp_order_costs. Only non-pending cost rows are used,
     * and when multiple rows exist for the same (orderid, pid) the one with the
     * highest id wins. All PostgreSQL writes run in a single transaction.
     *
     * @param {Array<number>} batchIds - Order ids to load.
     */
    const processCostsBatch = async (batchIds) => {
      // Modified query to ensure one row per order_id/pid by using a subquery
      const [costs] = await prodConnection.query(`
        SELECT
          oc.orderid as order_id,
          oc.pid,
          oc.costeach
        FROM order_costs oc
        INNER JOIN (
          SELECT
            orderid,
            pid,
            MAX(id) as max_id
          FROM order_costs
          WHERE orderid IN (?)
          AND pending = 0
          GROUP BY orderid, pid
        ) latest ON oc.orderid = latest.orderid AND oc.pid = latest.pid AND oc.id = latest.max_id
      `, [batchIds]);

      if (costs.length === 0) return;

      await localConnection.beginTransaction();
      try {
        for (let j = 0; j < costs.length; j += PG_BATCH_SIZE) {
          const subBatch = costs.slice(j, j + PG_BATCH_SIZE);
          if (subBatch.length === 0) continue;

          // 3 positional parameters per row.
          const placeholders = subBatch.map((_, idx) =>
            `($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
          ).join(",");

          const values = subBatch.flatMap(c => [
            c.order_id,
            c.pid,
            c.costeach || 0
          ]);

          await localConnection.query(`
            INSERT INTO temp_order_costs (order_id, pid, costeach)
            VALUES ${placeholders}
            ON CONFLICT (order_id, pid) DO UPDATE SET
              costeach = EXCLUDED.costeach
          `, values);
        }
        await localConnection.commit();
      } catch (error) {
        await localConnection.rollback();
        throw error;
      }
    };
|
|
|
|
    // Process all data types for each batch
    // Note: these run sequentially because they share a single PG connection
    // and each manages its own transaction
    for (let i = 0; i < orderIds.length; i += METADATA_BATCH_SIZE) {
      const batchIds = orderIds.slice(i, i + METADATA_BATCH_SIZE);

      await processMetadataBatch(batchIds);
      await processDiscountsBatch(batchIds);
      await processTaxesBatch(batchIds);
      await processCostsBatch(batchIds);

      processedCount = i + batchIds.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order data: ${processedCount} of ${totalUniqueOrders}`,
        current: processedCount,
        total: totalUniqueOrders,
        elapsed: formatElapsedTime(startTime),
        remaining: estimateRemaining(startTime, processedCount, totalUniqueOrders),
        rate: calculateRate(startTime, processedCount)
      });
    }

    // Pre-check all products and preload cost_price into a temp table
    // This avoids joining public.products in every sub-batch query (was causing 2x slowdown)
    const allOrderPids = [...new Set(orderItems.map(item => item.prod_pid))];
    console.log('Orders: Checking', allOrderPids.length, 'unique products');

    const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
      "SELECT pid, cost_price FROM products WHERE pid = ANY($1::bigint[])",
      [allOrderPids]
    ) : [{ rows: [] }];

    // NOTE(review): the Set below is later compared against pid values returned
    // by the big SELECT (`oi.pid::bigint`); this assumes both queries return pid
    // in the same JS representation (node-postgres returns BIGINT as string) —
    // confirm against the driver configuration.
    const existingPids = new Set(existingProducts.rows.map(p => p.pid));

    // Create temp table with product cost_price for fast lookup in sub-batch queries
    await localConnection.query(`
      DROP TABLE IF EXISTS temp_product_costs;
      CREATE TEMP TABLE temp_product_costs (
        pid BIGINT PRIMARY KEY,
        cost_price NUMERIC(14, 4)
      )
    `);
    if (existingProducts.rows.length > 0) {
      // Only products with a known cost_price are preloaded; missing ones fall
      // back to order_costs or price * 0.5 in the query below.
      const costPids = existingProducts.rows.filter(p => p.cost_price != null).map(p => p.pid);
      const costPrices = existingProducts.rows.filter(p => p.cost_price != null).map(p => p.cost_price);
      if (costPids.length > 0) {
        await localConnection.query(`
          INSERT INTO temp_product_costs (pid, cost_price)
          SELECT * FROM UNNEST($1::bigint[], $2::numeric[])
        `, [costPids, costPrices]);
      }
    }

    // Process in smaller batches
    for (let i = 0; i < orderIds.length; i += 2000) { // Increased from 1000 to 2000
      const batchIds = orderIds.slice(i, i + 2000);

      // Get combined data for this batch in sub-batches
      // (shadows the outer PG_BATCH_SIZE; both are currently 200)
      const PG_BATCH_SIZE = 200; // Increased from 100 to 200
      for (let j = 0; j < batchIds.length; j += PG_BATCH_SIZE) {
        const subBatchIds = batchIds.slice(j, j + PG_BATCH_SIZE);

        // Start a transaction for this sub-batch
        await localConnection.beginTransaction();
        try {
          // Join all temp tables into one row per (order_id, pid) with the
          // final price/discount/tax/cost values to upsert into orders.
          const [orders] = await localConnection.query(`
            WITH order_totals AS (
              SELECT
                oi.order_id,
                oi.pid,
                -- Instead of using ARRAY_AGG which can cause duplicate issues, use SUM with a CASE
                SUM(CASE
                  WHEN COALESCE(md.discount_amount_subtotal, 0) > 0 THEN id.amount
                  ELSE 0
                END) as promo_discount_sum,
                COALESCE(ot.tax, 0) as total_tax,
                COALESCE(oc.costeach, pc.cost_price, oi.price * 0.5) as costeach
              FROM temp_order_items oi
              LEFT JOIN temp_item_discounts id ON oi.order_id = id.order_id AND oi.pid = id.pid
              LEFT JOIN temp_main_discounts md ON id.order_id = md.order_id AND id.discount_id = md.discount_id
              LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
              LEFT JOIN temp_order_costs oc ON oi.order_id = oc.order_id AND oi.pid = oc.pid
              LEFT JOIN temp_product_costs pc ON oi.pid = pc.pid
              WHERE oi.order_id = ANY($1)
              GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach, pc.cost_price
            )
            SELECT
              oi.order_id as order_number,
              oi.pid::bigint as pid,
              oi.sku,
              om.date,
              oi.price,
              oi.quantity,
              (
                -- Prorated Points Discount (e.g. loyalty points applied at order level)
                CASE
                  WHEN om.summary_discount_subtotal > 0 AND om.summary_subtotal > 0 THEN
                    COALESCE(ROUND((om.summary_discount_subtotal * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 4), 0)
                  ELSE 0
                END
                +
                -- Specific Item-Level Promo Discount (coupon codes, etc.)
                COALESCE(ot.promo_discount_sum, 0)
              )::NUMERIC(14, 4) as discount,
              COALESCE(ot.total_tax, 0)::NUMERIC(14, 4) as tax,
              false as tax_included,
              0 as shipping,
              om.customer,
              om.customer_name,
              om.status,
              om.canceled,
              COALESCE(ot.costeach, pc.cost_price, oi.price * 0.5)::NUMERIC(14, 4) as costeach
            FROM temp_order_items oi
            JOIN temp_order_meta om ON oi.order_id = om.order_id
            LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
            LEFT JOIN temp_product_costs pc ON oi.pid = pc.pid
            WHERE oi.order_id = ANY($1)
            ORDER BY oi.order_id, oi.pid
          `, [subBatchIds]);

          // Filter orders and track missing products
          const validOrders = [];
          // processedOrderItems is populated below but never read afterwards
          // in this function.
          const processedOrderItems = new Set();
          const processedOrders = new Set();

          for (const order of orders.rows) {
            // Skip items whose product is missing locally; record both the
            // product and the affected order for the summary report.
            if (!existingPids.has(order.pid)) {
              missingProducts.add(order.pid);
              skippedOrders.add(order.order_number);
              continue;
            }
            validOrders.push(order);
            processedOrderItems.add(`${order.order_number}-${order.pid}`);
            processedOrders.add(order.order_number);
          }

          // Process valid orders in smaller sub-batches
          const FINAL_BATCH_SIZE = 100; // Increased from 50 to 100
          for (let k = 0; k < validOrders.length; k += FINAL_BATCH_SIZE) {
            const subBatch = validOrders.slice(k, k + FINAL_BATCH_SIZE);

            const placeholders = subBatch.map((_, idx) => {
              const base = idx * 15; // 15 columns including costeach
              return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
            }).join(',');

            const batchValues = subBatch.flatMap(o => [
              o.order_number,
              o.pid,
              o.sku || 'NO-SKU',
              o.date, // This is now a TIMESTAMP WITH TIME ZONE
              o.price,
              o.quantity,
              o.discount,
              o.tax,
              o.tax_included,
              o.shipping,
              o.customer,
              o.customer_name,
              o.status, // Already mapped to text via orderStatusMap
              o.canceled,
              o.costeach
            ]);

            // Upsert into the real orders table. The WHERE clause makes the
            // UPDATE a no-op when nothing changed; RETURNING xmax = 0 flags
            // rows that were freshly inserted vs. updated so we can count both.
            const [result] = await localConnection.query(`
              WITH inserted_orders AS (
                INSERT INTO orders (
                  order_number, pid, sku, date, price, quantity, discount,
                  tax, tax_included, shipping, customer, customer_name,
                  status, canceled, costeach
                )
                VALUES ${placeholders}
                ON CONFLICT (order_number, pid) DO UPDATE SET
                  sku = EXCLUDED.sku,
                  date = EXCLUDED.date,
                  price = EXCLUDED.price,
                  quantity = EXCLUDED.quantity,
                  discount = EXCLUDED.discount,
                  tax = EXCLUDED.tax,
                  tax_included = EXCLUDED.tax_included,
                  shipping = EXCLUDED.shipping,
                  customer = EXCLUDED.customer,
                  customer_name = EXCLUDED.customer_name,
                  status = EXCLUDED.status,
                  canceled = EXCLUDED.canceled,
                  costeach = EXCLUDED.costeach
                WHERE -- Only update if at least one key field has changed
                  orders.price IS DISTINCT FROM EXCLUDED.price OR
                  orders.quantity IS DISTINCT FROM EXCLUDED.quantity OR
                  orders.discount IS DISTINCT FROM EXCLUDED.discount OR
                  orders.tax IS DISTINCT FROM EXCLUDED.tax OR
                  orders.status IS DISTINCT FROM EXCLUDED.status OR
                  orders.canceled IS DISTINCT FROM EXCLUDED.canceled OR
                  orders.costeach IS DISTINCT FROM EXCLUDED.costeach OR
                  orders.date IS DISTINCT FROM EXCLUDED.date
                RETURNING xmax = 0 as inserted
              )
              SELECT
                COUNT(*) FILTER (WHERE inserted) as inserted,
                COUNT(*) FILTER (WHERE NOT inserted) as updated
              FROM inserted_orders
            `, batchValues);

            const { inserted, updated } = result.rows[0];
            recordsAdded += parseInt(inserted) || 0;
            recordsUpdated += parseInt(updated) || 0;
            importedCount += subBatch.length;
          }

          await localConnection.commit();

          cumulativeProcessedOrders += processedOrders.size;
          outputProgress({
            status: "running",
            operation: "Orders import",
            message: `Importing orders: ${cumulativeProcessedOrders} of ${totalUniqueOrders}`,
            current: cumulativeProcessedOrders,
            total: totalUniqueOrders,
            elapsed: formatElapsedTime(startTime),
            remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
            rate: calculateRate(startTime, cumulativeProcessedOrders)
          });
        } catch (error) {
          await localConnection.rollback();
          throw error;
        }
      }
    }

    // Start a transaction for updating sync status and dropping temp tables
    await localConnection.beginTransaction();
    try {
      // Update sync status
      await localConnection.query(`
        INSERT INTO sync_status (table_name, last_sync_timestamp)
        VALUES ('orders', NOW())
        ON CONFLICT (table_name) DO UPDATE SET
          last_sync_timestamp = NOW()
      `);

      // Cleanup temporary tables
      await localConnection.query(`
        DROP TABLE IF EXISTS temp_order_items;
        DROP TABLE IF EXISTS temp_order_meta;
        DROP TABLE IF EXISTS temp_order_discounts;
        DROP TABLE IF EXISTS temp_order_taxes;
        DROP TABLE IF EXISTS temp_order_costs;
        DROP TABLE IF EXISTS temp_main_discounts;
        DROP TABLE IF EXISTS temp_item_discounts;
        DROP TABLE IF EXISTS temp_product_costs;
      `);

      // Commit final transaction
      await localConnection.commit();
    } catch (error) {
      await localConnection.rollback();
      throw error;
    }

    // Summary returned to the caller; counts mirror the accumulators above.
    return {
      status: "complete",
      totalImported: Math.floor(importedCount) || 0,
      recordsAdded: parseInt(recordsAdded) || 0,
      recordsUpdated: parseInt(recordsUpdated) || 0,
      totalSkipped: skippedOrders.size || 0,
      missingProducts: missingProducts.size || 0,
      totalProcessed: orderItems.length, // Total order items in source
      incrementalUpdate,
      lastSyncTime,
      details: {
        uniqueOrdersProcessed: cumulativeProcessedOrders,
        totalOrderItems: orderItems.length,
        skippedDueToMissingProducts: skippedOrders.size,
        missingProductIds: Array.from(missingProducts).slice(0, 100) // First 100 for debugging
      }
    };
  } catch (error) {
    // Log and rethrow; the caller owns final error handling and any retry.
    console.error("Error during orders import:", error);
    throw error;
  }
}
|
|
|
|
// Single export: the orders sync entry point.
module.exports = importOrders;