Compare commits
4 Commits
d0abe9d9a2 ... d57239c40c

| SHA1 |
|---|
| d57239c40c |
| 1c932e0df5 |
| a867117c3c |
| 996d3d36af |
@@ -184,6 +184,7 @@ CREATE TABLE IF NOT EXISTS import_history (
 start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
 end_time TIMESTAMP NULL,
 duration_seconds INT,
+duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
 records_added INT DEFAULT 0,
 records_updated INT DEFAULT 0,
 is_incremental BOOLEAN DEFAULT FALSE,
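A note on the hunk above: `duration_minutes` is a stored generated column, so MySQL derives and persists it from `duration_seconds` on every write, and inserts must not supply a value for it (an explicit value other than DEFAULT is rejected). A minimal sketch of the resulting behavior, assuming a `mysql2/promise` connection named `db` (a hypothetical name, not from this repo):

```js
// Sketch only: MySQL computes duration_minutes; the importer never writes it directly.
async function recordImport(db, durationSeconds, recordsAdded) {
  // Omit duration_minutes from the column list -- supplying a value would be rejected.
  await db.query(
    `INSERT INTO import_history (start_time, duration_seconds, records_added)
     VALUES (CURRENT_TIMESTAMP, ?, ?)`,
    [durationSeconds, recordsAdded]
  );
  const [rows] = await db.query(
    `SELECT duration_seconds, duration_minutes
     FROM import_history ORDER BY start_time DESC LIMIT 1`
  );
  return rows[0]; // e.g. { duration_seconds: 125, duration_minutes: '2.08' }
}
```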
@@ -52,7 +52,7 @@ CREATE TABLE products (
 notifies INT UNSIGNED DEFAULT 0,
 date_last_sold DATE,
 PRIMARY KEY (pid),
-UNIQUE KEY unique_sku (SKU),
+INDEX idx_sku (SKU),
 INDEX idx_vendor (vendor),
 INDEX idx_brand (brand),
 INDEX idx_location (location),
@@ -118,6 +118,7 @@ CREATE TABLE IF NOT EXISTS orders (
 status VARCHAR(20) DEFAULT 'pending',
 canceled TINYINT(1) DEFAULT 0,
 PRIMARY KEY (id),
+UNIQUE KEY unique_order_line (order_number, pid),
 KEY order_number (order_number),
 KEY pid (pid),
 KEY customer (customer),
@@ -147,7 +148,6 @@ CREATE TABLE purchase_orders (
 received_by INT,
 receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
 FOREIGN KEY (pid) REFERENCES products(pid),
-FOREIGN KEY (sku) REFERENCES products(SKU),
 INDEX idx_po_id (po_id),
 INDEX idx_vendor (vendor),
 INDEX idx_status (status),
@@ -10,8 +10,8 @@ const importPurchaseOrders = require('./import/purchase-orders');
 dotenv.config({ path: path.join(__dirname, "../.env") });

 // Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
 const IMPORT_ORDERS = true;
 const IMPORT_PURCHASE_ORDERS = true;

@@ -48,7 +48,6 @@ const sshConfig = {
 connectionLimit: 10,
 queueLimit: 0,
 namedPlaceholders: true,
-maxAllowedPacket: 64 * 1024 * 1024, // 64MB
 connectTimeout: 60000,
 enableKeepAlive: true,
 keepAliveInitialDelay: 10000,
@@ -162,32 +161,36 @@ async function main() {
 results.categories = await importCategories(prodConnection, localConnection);
 if (isImportCancelled) throw new Error("Import cancelled");
 completedSteps++;
-if (results.categories.recordsAdded) totalRecordsAdded += results.categories.recordsAdded;
-if (results.categories.recordsUpdated) totalRecordsUpdated += results.categories.recordsUpdated;
+console.log('Categories import result:', results.categories);
+totalRecordsAdded += results.categories?.recordsAdded || 0;
+totalRecordsUpdated += results.categories?.recordsUpdated || 0;
 }

 if (IMPORT_PRODUCTS) {
 results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
 if (isImportCancelled) throw new Error("Import cancelled");
 completedSteps++;
-if (results.products.recordsAdded) totalRecordsAdded += results.products.recordsAdded;
-if (results.products.recordsUpdated) totalRecordsUpdated += results.products.recordsUpdated;
+console.log('Products import result:', results.products);
+totalRecordsAdded += results.products?.recordsAdded || 0;
+totalRecordsUpdated += results.products?.recordsUpdated || 0;
 }

 if (IMPORT_ORDERS) {
 results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
 if (isImportCancelled) throw new Error("Import cancelled");
 completedSteps++;
-if (results.orders.recordsAdded) totalRecordsAdded += results.orders.recordsAdded;
-if (results.orders.recordsUpdated) totalRecordsUpdated += results.orders.recordsUpdated;
+console.log('Orders import result:', results.orders);
+totalRecordsAdded += results.orders?.recordsAdded || 0;
+totalRecordsUpdated += results.orders?.recordsUpdated || 0;
 }

 if (IMPORT_PURCHASE_ORDERS) {
 results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
 if (isImportCancelled) throw new Error("Import cancelled");
 completedSteps++;
-if (results.purchaseOrders.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded;
-if (results.purchaseOrders.recordsUpdated) totalRecordsUpdated += results.purchaseOrders.recordsUpdated;
+console.log('Purchase orders import result:', results.purchaseOrders);
+totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0;
+totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0;
 }

 const endTime = Date.now();
@@ -1,5 +1,5 @@
 const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
-const { importMissingProducts } = require('./products');
+const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');

 /**
 * Imports orders from a production MySQL database to a local MySQL database.
@@ -21,6 +21,46 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 let recordsUpdated = 0;

 try {
+// Insert temporary table creation queries
+await localConnection.query(`
+CREATE TABLE IF NOT EXISTS temp_order_items (
+order_id INT UNSIGNED NOT NULL,
+pid INT UNSIGNED NOT NULL,
+SKU VARCHAR(50) NOT NULL,
+price DECIMAL(10,2) NOT NULL,
+quantity INT NOT NULL,
+base_discount DECIMAL(10,2) DEFAULT 0,
+PRIMARY KEY (order_id, pid)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+`);
+await localConnection.query(`
+CREATE TABLE IF NOT EXISTS temp_order_meta (
+order_id INT UNSIGNED NOT NULL,
+date DATE NOT NULL,
+customer VARCHAR(100) NOT NULL,
+customer_name VARCHAR(150) NOT NULL,
+status INT,
+canceled TINYINT(1),
+PRIMARY KEY (order_id)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+`);
+await localConnection.query(`
+CREATE TABLE IF NOT EXISTS temp_order_discounts (
+order_id INT UNSIGNED NOT NULL,
+pid INT UNSIGNED NOT NULL,
+discount DECIMAL(10,2) NOT NULL,
+PRIMARY KEY (order_id, pid)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+`);
+await localConnection.query(`
+CREATE TABLE IF NOT EXISTS temp_order_taxes (
+order_id INT UNSIGNED NOT NULL,
+pid INT UNSIGNED NOT NULL,
+tax DECIMAL(10,2) NOT NULL,
+PRIMARY KEY (order_id, pid)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+`);
+
 // Get column names from the local table
 const [columns] = await localConnection.query(`
 SELECT COLUMN_NAME
@@ -36,51 +76,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 );
 const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';

-// Create temporary tables for staging data
-await localConnection.query(`
-CREATE TEMPORARY TABLE temp_order_items (
-order_id INT UNSIGNED,
-pid INT UNSIGNED,
-SKU VARCHAR(50),
-price DECIMAL(10,3),
-quantity INT,
-base_discount DECIMAL(10,3),
-PRIMARY KEY (order_id, pid)
-) ENGINE=InnoDB;
-
-CREATE TEMPORARY TABLE temp_order_meta (
-order_id INT UNSIGNED PRIMARY KEY,
-date DATE,
-customer INT UNSIGNED,
-customer_name VARCHAR(100),
-status TINYINT UNSIGNED,
-canceled TINYINT UNSIGNED
-) ENGINE=InnoDB;
-
-CREATE TEMPORARY TABLE temp_order_discounts (
-order_id INT UNSIGNED,
-pid INT UNSIGNED,
-discount DECIMAL(10,3),
-PRIMARY KEY (order_id, pid)
-) ENGINE=InnoDB;
-
-CREATE TEMPORARY TABLE temp_order_taxes (
-order_id INT UNSIGNED,
-pid INT UNSIGNED,
-tax DECIMAL(10,3),
-PRIMARY KEY (order_id, pid)
-) ENGINE=InnoDB;
-`);
-
-// Get base order items first
-const [orderItems] = await prodConnection.query(`
-SELECT
-oi.order_id,
-oi.prod_pid as pid,
-oi.prod_itemnumber as SKU,
-oi.prod_price as price,
-oi.qty_ordered as quantity,
-COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount
+console.log('Orders: Using last sync time:', lastSyncTime);
+
+// First get all relevant order items with basic info
+const [[{ total }]] = await prodConnection.query(`
+SELECT COUNT(*) as total
 FROM order_items oi
 USE INDEX (PRIMARY)
 JOIN _order o ON oi.order_id = o.order_id
@@ -89,15 +89,63 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 AND o.date_placed_onlydate IS NOT NULL
 ${incrementalUpdate ? `
 AND (
 o.stamp > ?
 OR oi.stamp > ?
-OR o.date_placed > ?
-OR o.date_shipped > ?
-OR o.date_cancelled > ?
-OR o.date_updated > ?
+OR EXISTS (
+SELECT 1 FROM order_discount_items odi
+WHERE odi.order_id = o.order_id
+AND odi.pid = oi.prod_pid
+)
+OR EXISTS (
+SELECT 1 FROM order_tax_info oti
+JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
+WHERE oti.order_id = o.order_id
+AND otip.pid = oi.prod_pid
+AND oti.stamp > ?
+)
 )
 ` : ''}
-`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
+`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);

+console.log('Orders: Found changes:', total);
+
+// Get order items in batches
+const [orderItems] = await prodConnection.query(`
+SELECT
+oi.order_id,
+oi.prod_pid as pid,
+oi.prod_itemnumber as SKU,
+oi.prod_price as price,
+oi.qty_ordered as quantity,
+COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount,
+oi.stamp as last_modified
+FROM order_items oi
+USE INDEX (PRIMARY)
+JOIN _order o ON oi.order_id = o.order_id
+WHERE o.order_status >= 15
+AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
+AND o.date_placed_onlydate IS NOT NULL
+${incrementalUpdate ? `
+AND (
+o.stamp > ?
+OR oi.stamp > ?
+OR EXISTS (
+SELECT 1 FROM order_discount_items odi
+WHERE odi.order_id = o.order_id
+AND odi.pid = oi.prod_pid
+)
+OR EXISTS (
+SELECT 1 FROM order_tax_info oti
+JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
+WHERE oti.order_id = o.order_id
+AND otip.pid = oi.prod_pid
+AND oti.stamp > ?
+)
+)
+` : ''}
+`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
+
+console.log('Orders: Processing', orderItems.length, 'order items');
+
 const totalOrders = orderItems.length;
 let processed = 0;
@@ -111,7 +159,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 ]);

 await localConnection.query(`
-INSERT INTO temp_order_items VALUES ${placeholders}
+INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
+VALUES ${placeholders}
+ON DUPLICATE KEY UPDATE
+SKU = VALUES(SKU),
+price = VALUES(price),
+quantity = VALUES(quantity),
+base_discount = VALUES(base_discount)
 `, values);

 processed += batch.length;
@@ -275,16 +329,82 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
 const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");

-const query = `
-INSERT INTO orders (${columnNames.join(",")})
-VALUES ${placeholders}
-ON DUPLICATE KEY UPDATE
-${columnNames.map(col => `${col} = VALUES(${col})`).join(",")}
-`;
-
-const result = await localConnection.query(query, values.flat());
-recordsAdded += result.affectedRows - result.changedRows;
-recordsUpdated += result.changedRows;
+// First check which orders exist and get their current values
+const [existingOrders] = await localConnection.query(
+`SELECT ${columnNames.join(',')} FROM orders WHERE (order_number, pid) IN (${validOrders.map(() => "(?,?)").join(",")})`,
+validOrders.flatMap(o => [o.order_number, o.pid])
+);
+const existingOrderMap = new Map(
+existingOrders.map(o => [`${o.order_number}-${o.pid}`, o])
+);
+
+// Split into inserts and updates
+const insertsAndUpdates = validOrders.reduce((acc, order) => {
+const key = `${order.order_number}-${order.pid}`;
+if (existingOrderMap.has(key)) {
+const existing = existingOrderMap.get(key);
+// Check if any values are different
+const hasChanges = columnNames.some(col => {
+const newVal = order[col] ?? null;
+const oldVal = existing[col] ?? null;
+// Special handling for numbers to avoid type coercion issues
+if (typeof newVal === 'number' && typeof oldVal === 'number') {
+return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
+}
+return newVal !== oldVal;
+});
+
+if (hasChanges) {
+acc.updates.push({
+order_number: order.order_number,
+pid: order.pid,
+values: columnNames.map(col => order[col] ?? null)
+});
+} else {
+acc.inserts.push({
+order_number: order.order_number,
+pid: order.pid,
+values: columnNames.map(col => order[col] ?? null)
+});
+}
+return acc;
+}, { inserts: [], updates: [] });
+
+// Handle inserts
+if (insertsAndUpdates.inserts.length > 0) {
+const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(singlePlaceholder).join(",");
+
+const insertResult = await localConnection.query(`
+INSERT INTO orders (${columnNames.join(",")})
+VALUES ${insertPlaceholders}
+`, insertsAndUpdates.inserts.map(i => i.values).flat());
+
+recordsAdded += insertResult[0].affectedRows;
+}
+
+// Handle updates - now we know these actually have changes
+if (insertsAndUpdates.updates.length > 0) {
+const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(singlePlaceholder).join(",");
+
+const updateResult = await localConnection.query(`
+INSERT INTO orders (${columnNames.join(",")})
+VALUES ${updatePlaceholders}
+ON DUPLICATE KEY UPDATE
+SKU = VALUES(SKU),
+date = VALUES(date),
+price = VALUES(price),
+quantity = VALUES(quantity),
+discount = VALUES(discount),
+tax = VALUES(tax),
+tax_included = VALUES(tax_included),
+shipping = VALUES(shipping),
+customer = VALUES(customer),
+customer_name = VALUES(customer_name),
+status = VALUES(status),
+canceled = VALUES(canceled)
+`, insertsAndUpdates.updates.map(u => u.values).flat());
+
+recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
+}
+
 importedCount += validOrders.length;
 }
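The hunk above replaces a single blanket `INSERT ... ON DUPLICATE KEY UPDATE` over every batch with a read-compare-split pass, so rows whose values have not changed are never rewritten. Below is a condensed sketch of the same pattern; it is an illustration only, using a hypothetical `items(id, name, qty)` table and a `mysql2/promise` pool rather than anything from this repo. The `affectedRows / 2` adjustment mirrors MySQL's documented behavior: with `ON DUPLICATE KEY UPDATE`, each row that is actually updated counts as 2 affected rows.

```js
// Sketch of the insert/update split, assuming a mysql2/promise pool `db`
// and a hypothetical table `items(id PRIMARY KEY, name, qty)`.
async function upsertItems(db, rows) {
  if (rows.length === 0) return { added: 0, updated: 0 };
  const cols = ['id', 'name', 'qty'];

  // Read the current values for the incoming keys.
  const [existing] = await db.query(
    'SELECT id, name, qty FROM items WHERE id IN (?)',
    [rows.map(r => r.id)]
  );
  const byId = new Map(existing.map(r => [r.id, r]));

  // Split into brand-new rows and rows whose values actually differ.
  const inserts = [];
  const updates = [];
  for (const row of rows) {
    const old = byId.get(row.id);
    if (!old) {
      inserts.push(row);
    } else if (cols.some(c => (row[c] ?? null) !== (old[c] ?? null))) {
      updates.push(row); // only rows with real differences are rewritten
    }
    // otherwise: identical row, skip it entirely
  }

  let added = 0, updated = 0;
  if (inserts.length) {
    const [res] = await db.query(
      'INSERT INTO items (id, name, qty) VALUES ?',
      [inserts.map(r => cols.map(c => r[c]))]
    );
    added = res.affectedRows;
  }
  if (updates.length) {
    const [res] = await db.query(
      `INSERT INTO items (id, name, qty) VALUES ?
       ON DUPLICATE KEY UPDATE name = VALUES(name), qty = VALUES(qty)`,
      [updates.map(r => cols.map(c => r[c]))]
    );
    updated = res.affectedRows / 2; // MySQL reports 2 per updated row here
  }
  return { added, updated };
}
```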
@@ -312,15 +432,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 // Import missing products if any
 if (missingProducts.size > 0) {
 try {
-// Setup temporary tables again since they were dropped
-await setupTemporaryTables(localConnection);
-await materializeCalculations(prodConnection, localConnection);
-
+// Import missing products directly without materialization
 await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts));

-// Clean up temporary tables after missing products import
-await cleanupTemporaryTables(localConnection);
-
 // Retry skipped orders after importing products
 if (skippedOrders.size > 0) {
 outputProgress({
@@ -438,8 +552,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 return {
 status: "complete",
 totalImported: importedCount,
-recordsAdded,
-recordsUpdated,
+recordsAdded: recordsAdded || 0,
+recordsUpdated: recordsUpdated || 0,
 totalSkipped: skippedOrders.size,
 missingProducts: missingProducts.size,
 incrementalUpdate,
@@ -13,40 +13,12 @@ const getImageUrls = (pid) => {
 };

 async function setupTemporaryTables(connection) {
-await connection.query(`
-CREATE TEMPORARY TABLE IF NOT EXISTS temp_categories (
-cat_id INT PRIMARY KEY,
-name VARCHAR(255)
-) ENGINE=InnoDB;
-
-CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_images (
-pid INT,
-iid INT,
-image_type ENUM('thumbnail', '175', 'full'),
-url VARCHAR(255),
-PRIMARY KEY (pid, image_type)
-) ENGINE=InnoDB;
-
-CREATE TEMPORARY TABLE IF NOT EXISTS temp_inventory_status (
-pid INT PRIMARY KEY,
-stock_quantity INT,
-pending_qty INT,
-preorder_count INT,
-notions_inv_count INT
-) ENGINE=InnoDB;
-
-CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_prices (
-pid INT PRIMARY KEY,
-price DECIMAL(10,2),
-regular_price DECIMAL(10,2),
-cost_price DECIMAL(10,5)
-) ENGINE=InnoDB;
-
-INSERT INTO temp_categories
-SELECT cat_id, name FROM categories;
-
-CREATE INDEX idx_temp_cat_id ON temp_categories(cat_id);
-`);
+await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_categories ( cat_id INT PRIMARY KEY, name VARCHAR(255) ) ENGINE=InnoDB;`);
+await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_images ( pid INT, iid INT, image_type ENUM('thumbnail', '175', 'full'), url VARCHAR(255), PRIMARY KEY (pid, image_type) ) ENGINE=InnoDB;`);
+await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_inventory_status ( pid INT PRIMARY KEY, stock_quantity INT, pending_qty INT, preorder_count INT, notions_inv_count INT, needs_update BOOLEAN ) ENGINE=InnoDB;`);
+await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_prices ( pid INT PRIMARY KEY, price DECIMAL(10,2), regular_price DECIMAL(10,2), cost_price DECIMAL(10,5), needs_update BOOLEAN ) ENGINE=InnoDB;`);
+await connection.query(`INSERT INTO temp_categories SELECT cat_id, name FROM categories;`);
+await connection.query(`CREATE INDEX idx_temp_cat_id ON temp_categories(cat_id);`);
 }

 async function cleanupTemporaryTables(connection) {
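In the rewritten `setupTemporaryTables` above, the single multi-statement template literal becomes a sequence of single-statement `connection.query()` calls. A plausible reason (an assumption on my part, not stated in the diff) is that the `mysql`/`mysql2` drivers reject strings containing several statements unless the connection was opened with `multipleStatements: true`, so the original combined block would fail on a default connection. A small sketch with hypothetical table names:

```js
// Sketch, assuming a mysql2/promise connection created with default options
// (multipleStatements is false, so each query() call may contain only one statement).
async function setupScratchTables(connection) {
  await connection.query(`
    CREATE TEMPORARY TABLE IF NOT EXISTS scratch_categories (
      cat_id INT PRIMARY KEY,
      name VARCHAR(255)
    ) ENGINE=InnoDB
  `);
  await connection.query(`
    CREATE TEMPORARY TABLE IF NOT EXISTS scratch_prices (
      pid INT PRIMARY KEY,
      price DECIMAL(10,2)
    ) ENGINE=InnoDB
  `);
  // Seeding a scratch table is likewise its own statement.
  await connection.query(`INSERT INTO scratch_categories SELECT cat_id, name FROM categories`);
}
```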
@@ -108,18 +80,20 @@ async function materializeCalculations(prodConnection, localConnection) {
 Math.max(0, row.stock_quantity - row.pending_qty), // Calculate final stock quantity
 row.pending_qty,
 row.preorder_count,
-row.notions_inv_count
+row.notions_inv_count,
+true // Mark as needing update
 ]);

 if (values.length > 0) {
 await localConnection.query(`
-INSERT INTO temp_inventory_status (pid, stock_quantity, pending_qty, preorder_count, notions_inv_count)
+INSERT INTO temp_inventory_status (pid, stock_quantity, pending_qty, preorder_count, notions_inv_count, needs_update)
 VALUES ?
 ON DUPLICATE KEY UPDATE
 stock_quantity = VALUES(stock_quantity),
 pending_qty = VALUES(pending_qty),
 preorder_count = VALUES(preorder_count),
-notions_inv_count = VALUES(notions_inv_count)
+notions_inv_count = VALUES(notions_inv_count),
+needs_update = TRUE
 `, [values]);
 }
@@ -168,17 +142,19 @@ async function materializeCalculations(prodConnection, localConnection) {
 row.pid,
 row.price,
 row.regular_price,
-row.cost_price
+row.cost_price,
+true // Mark as needing update
 ]);

 if (values.length > 0) {
 await localConnection.query(`
-INSERT INTO temp_product_prices (pid, price, regular_price, cost_price)
+INSERT INTO temp_product_prices (pid, price, regular_price, cost_price, needs_update)
 VALUES ?
 ON DUPLICATE KEY UPDATE
 price = VALUES(price),
 regular_price = VALUES(regular_price),
-cost_price = VALUES(cost_price)
+cost_price = VALUES(cost_price),
+needs_update = TRUE
 `, [values]);
 }
@@ -200,6 +176,8 @@ async function materializeCalculations(prodConnection, localConnection) {

 async function importProducts(prodConnection, localConnection, incrementalUpdate = true) {
 const startTime = Date.now();
+let recordsAdded = 0;
+let recordsUpdated = 0;

 try {
 // Get column names first
@@ -216,6 +194,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'products'"
 );
 const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';

+console.log('Products: Using last sync time:', lastSyncTime);
+
 // Setup temporary tables
 await setupTemporaryTables(localConnection);
@@ -227,23 +207,31 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 const [countResult] = await prodConnection.query(`
 SELECT COUNT(*) as total
 FROM products p
-WHERE p.stamp > ?
-OR EXISTS (
-SELECT 1 FROM product_last_sold pls
-WHERE p.pid = pls.pid
-AND pls.date_sold > ?
-)
-OR p.date_created > ?
-OR p.datein > ?
-`, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]);
+LEFT JOIN current_inventory ci ON p.pid = ci.pid
+LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
+LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
+LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
+LEFT JOIN product_last_sold pls ON p.pid = pls.pid
+WHERE ${incrementalUpdate ? `
+p.stamp > ? OR
+ci.stamp > ? OR
+pcp.date_deactive > ? OR
+pcp.date_active > ? OR
+sid.stamp > ? OR
+pnb.date_updated > ? OR
+pls.date_sold > ?
+` : 'TRUE'}
+`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
+
+console.log('Products: Found changes:', countResult[0].total);
+
 const totalProducts = countResult[0].total;

 // Main product query using materialized data - modified for incremental
 outputProgress({
 status: "running",
 operation: "Products import",
-message: "Fetching product data from production"
+message: `Fetching ${incrementalUpdate ? 'updated' : 'all'} product data from production`
 });

 // Create temporary table for production data
@@ -279,7 +267,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 country_of_origin VARCHAR(5),
 date_last_sold DATE,
 category_ids TEXT,
-needs_update BOOLEAN DEFAULT FALSE,
+needs_update BOOLEAN DEFAULT TRUE,
 PRIMARY KEY (pid)
 ) ENGINE=InnoDB
 `);
@@ -322,18 +310,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 p.totalsold AS total_sold,
 p.country_of_origin,
 pls.date_sold as date_last_sold,
-GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids,
-CASE WHEN
-${incrementalUpdate ? `
-p.stamp > ? OR
-ci.stamp > ? OR
-pcp.date_deactive > ? OR
-pcp.date_active > ? OR
-sid.stamp > ? OR
-pnb.date_updated > ? OR
-pls.date_sold > ?
-` : 'TRUE'}
-THEN 1 ELSE 0 END as needs_update
+GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids
 FROM products p
 LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
@@ -347,17 +324,61 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 LEFT JOIN current_inventory ci ON p.pid = ci.pid
 LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
 LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
+WHERE ${incrementalUpdate ? `
+p.stamp > ? OR
+ci.stamp > ? OR
+pcp.date_deactive > ? OR
+pcp.date_active > ? OR
+sid.stamp > ? OR
+pnb.date_updated > ? OR
+pls.date_sold > ?
+` : 'TRUE'}
 GROUP BY p.pid
 `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);

 // Insert production data in batches, but only for products that need updates
 for (let i = 0; i < prodData.length; i += 1000) {
 const batch = prodData.slice(i, i + 1000);
-const placeholders = batch.map(() => "(?)").join(",");
+const placeholders = batch.map(() => `(${Array(31).fill("?").join(",")})`).join(",");
+
+// Map each row to exactly match our temp table columns
+const values = batch.flatMap(row => [
+row.pid,
+row.title,
+row.description,
+row.SKU,
+row.date_created,
+row.first_received,
+row.location,
+row.barcode,
+row.harmonized_tariff_code,
+row.updated_at,
+row.visible,
+row.replenishable,
+row.vendor,
+row.vendor_reference,
+row.notions_reference,
+row.brand,
+row.line,
+row.subline,
+row.artist,
+row.moq,
+row.rating,
+row.reviews,
+row.weight,
+row.length,
+row.width,
+row.height,
+row.total_sold,
+row.country_of_origin,
+row.date_last_sold,
+row.category_ids,
+true // needs_update
+]);
+
 await localConnection.query(`
 INSERT INTO temp_prod_data VALUES ${placeholders}
-`, batch.map(row => Object.values(row)));
+`, values);

 outputProgress({
 status: "running",
@@ -374,7 +395,18 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 let recordsAdded = 0;
 let recordsUpdated = 0;

-while (processed < totalProducts) {
+// Get actual count from temp table - only count products that need updates
+const [[{ actualTotal }]] = await localConnection.query(`
+SELECT COUNT(DISTINCT p.pid) as actualTotal
+FROM temp_prod_data p
+LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
+LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
+WHERE p.needs_update = 1
+OR tis.needs_update = 1
+OR tpp.needs_update = 1
+`);
+
+while (processed < actualTotal) {
 const [batch] = await localConnection.query(`
 SELECT
 p.*,
@@ -387,9 +419,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 FROM temp_prod_data p
 LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
 LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
 WHERE p.needs_update = 1
+OR tis.needs_update = 1
+OR tpp.needs_update = 1
 LIMIT ? OFFSET ?
 `, [BATCH_SIZE, processed]);

+if (!batch || batch.length === 0) break; // Exit if no more records
+
 // Add image URLs
 batch.forEach(row => {
@@ -399,34 +435,94 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 row.image_full = urls.image_full;
 });

-// Prepare product values - now using columnNames from above
-const productValues = batch.flatMap(row =>
-columnNames.map(col => {
-const val = row[col] ?? null;
+if (batch.length > 0) {
+// MySQL 8.0 optimized insert with proper placeholders
+const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`;
+
+// First check which products already exist and get their current values
+const [existingProducts] = await localConnection.query(
+`SELECT ${columnNames.join(',')} FROM products WHERE pid IN (?)`,
+[batch.map(p => p.pid)]
+);
+const existingPidsMap = new Map(existingProducts.map(p => [p.pid, p]));
+
+// Helper function to map values consistently
+const mapValues = (product) => columnNames.map(col => {
+const val = product[col] ?? null;
 if (col === "managing_stock") return 1;
 if (typeof val === "number") return val || 0;
 return val;
-})
-);
+});

-// MySQL 8.0 optimized insert
-const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`;
-const productPlaceholders = Array(batch.length).fill(placeholderGroup).join(",");
-
-const insertQuery = `
-INSERT INTO products (${columnNames.join(",")})
-VALUES ${productPlaceholders}
-AS new_products
-ON DUPLICATE KEY UPDATE
-${columnNames
-.filter(col => col !== "pid")
-.map(col => `${col} = new_products.${col}`)
-.join(",")};
-`;
-
-const result = await localConnection.query(insertQuery, productValues);
-recordsAdded += result.affectedRows - result.changedRows;
-recordsUpdated += result.changedRows;
+// Split into inserts and updates, comparing values for updates
+const insertsAndUpdates = batch.reduce((acc, product) => {
+if (existingPidsMap.has(product.pid)) {
+const existing = existingPidsMap.get(product.pid);
+// Check if any values are different
+const hasChanges = columnNames.some(col => {
+const newVal = product[col] ?? null;
+const oldVal = existing[col] ?? null;
+// Special handling for numbers to avoid type coercion issues
+if (typeof newVal === 'number' && typeof oldVal === 'number') {
+// Handle NaN and Infinity
+if (isNaN(newVal) || isNaN(oldVal)) return isNaN(newVal) !== isNaN(oldVal);
+if (!isFinite(newVal) || !isFinite(oldVal)) return !isFinite(newVal) !== !isFinite(oldVal);
+// Allow for tiny floating point differences
+return Math.abs(newVal - oldVal) > 0.00001;
+}
+if (col === 'managing_stock') return false; // Skip this as it's always 1
+return newVal !== oldVal;
+});
+
+if (hasChanges) {
+acc.updates.push({
+pid: product.pid,
+values: mapValues(product)
+});
+}
+} else {
+acc.inserts.push({
+pid: product.pid,
+values: mapValues(product)
+});
+}
+return acc;
+}, { inserts: [], updates: [] });
+
+// Log summary for this batch
+if (insertsAndUpdates.inserts.length > 0 || insertsAndUpdates.updates.length > 0) {
+console.log(`Batch summary: ${insertsAndUpdates.inserts.length} new products, ${insertsAndUpdates.updates.length} updates`);
+}
+
+// Handle inserts
+if (insertsAndUpdates.inserts.length > 0) {
+const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
+
+const insertResult = await localConnection.query(`
+INSERT INTO products (${columnNames.join(",")})
+VALUES ${insertPlaceholders}
+`, insertsAndUpdates.inserts.map(i => i.values).flat());
+
+recordsAdded += insertResult[0].affectedRows;
+}
+
+// Handle updates - now we know these actually have changes
+if (insertsAndUpdates.updates.length > 0) {
+const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
+
+const updateResult = await localConnection.query(`
+INSERT INTO products (${columnNames.join(",")})
+VALUES ${updatePlaceholders}
+ON DUPLICATE KEY UPDATE
+${columnNames
+.filter(col => col !== "pid")
+.map(col => `${col} = VALUES(${col})`)
+.join(",")};
+`, insertsAndUpdates.updates.map(u => u.values).flat());
+
+recordsUpdated += insertsAndUpdates.updates.length;
+}
+}

 // Insert category relationships
 const categoryRelationships = [];
@@ -478,15 +574,16 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
 }
 }

-processed += batch.length;
+processed += batch.length; // Only increment by actual records processed

 outputProgress({
 status: "running",
 operation: "Products import",
-message: `Processed ${processed} of ${totalProducts} products`,
+message: `Processed ${processed} of ${actualTotal} products`,
 current: processed,
-total: totalProducts,
+total: actualTotal,
 elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-remaining: estimateRemaining(startTime, processed, totalProducts),
+remaining: estimateRemaining(startTime, processed, actualTotal),
 rate: calculateRate(startTime, processed)
 });

@@ -508,10 +605,10 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate

 return {
 status: "complete",
-totalImported: totalProducts,
-recordsAdded,
-recordsUpdated,
-incrementalUpdate: true,
+totalImported: actualTotal,
+recordsAdded: recordsAdded || 0,
+recordsUpdated: recordsUpdated || 0,
+incrementalUpdate,
 lastSyncTime
 };
 } catch (error) {
@@ -521,67 +618,16 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate

 async function importMissingProducts(prodConnection, localConnection, missingPids) {
 try {
-// Setup temporary tables
-await setupTemporaryTables(localConnection);
-
-// Get inventory data from production first
-const [prodInventory] = await prodConnection.query(`
-SELECT
-p.pid,
-COALESCE(si.available_local, 0) - COALESCE(ps.pending_qty, 0) as stock_quantity,
-COALESCE(ps.pending_qty, 0) as pending_qty,
-COALESCE(ci.onpreorder, 0) as preorder_count,
-COALESCE(pnb.inventory, 0) as notions_inv_count
-FROM products p
-LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
-LEFT JOIN current_inventory ci ON p.pid = ci.pid
-LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
-LEFT JOIN (
-SELECT oi.prod_pid,
-SUM(oi.qty_ordered - oi.qty_placed) as pending_qty
-FROM order_items oi
-JOIN _order o ON oi.order_id = o.order_id
-WHERE oi.prod_pid IN (?)
-AND o.date_placed != '0000-00-00 00:00:00'
-AND o.date_shipped = '0000-00-00 00:00:00'
-AND oi.pick_finished = 0
-AND oi.qty_back = 0
-AND o.order_status != 15
-AND o.order_status < 90
-AND oi.qty_ordered >= oi.qty_placed
-AND oi.qty_ordered > 0
-GROUP BY oi.prod_pid
-) ps ON p.pid = ps.prod_pid
-WHERE p.pid IN (?)
-`, [missingPids, missingPids]);
-
-// Insert inventory data into temp table
-if (prodInventory.length > 0) {
-const placeholders = prodInventory.map(() => "(?, ?, ?, ?, ?)").join(",");
-const values = prodInventory.flatMap(p => [
-p.pid,
-p.stock_quantity,
-p.pending_qty,
-p.preorder_count,
-p.notions_inv_count
-]);
-
-await localConnection.query(`
-INSERT INTO temp_inventory_status VALUES ${placeholders}
-`, values);
-}
-
-// First get the column names from the table structure
+// Get column names first
 const [columns] = await localConnection.query(`
 SELECT COLUMN_NAME
 FROM INFORMATION_SCHEMA.COLUMNS
 WHERE TABLE_NAME = 'products'
 ORDER BY ORDINAL_POSITION
 `);

 const columnNames = columns.map((col) => col.COLUMN_NAME);

-// Get the missing products from production
+// Get the missing products with all their data in one optimized query
 const [products] = await prodConnection.query(`
 SELECT
 p.pid,
@@ -591,9 +637,22 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 p.date_created,
 p.datein AS first_received,
 p.location,
-tis.stock_quantity,
-tis.preorder_count,
-tis.notions_inv_count,
+COALESCE(si.available_local, 0) - COALESCE(
+(SELECT SUM(oi.qty_ordered - oi.qty_placed)
+FROM order_items oi
+JOIN _order o ON oi.order_id = o.order_id
+WHERE oi.prod_pid = p.pid
+AND o.date_placed != '0000-00-00 00:00:00'
+AND o.date_shipped = '0000-00-00 00:00:00'
+AND oi.pick_finished = 0
+AND oi.qty_back = 0
+AND o.order_status != 15
+AND o.order_status < 90
+AND oi.qty_ordered >= oi.qty_placed
+AND oi.qty_ordered > 0), 0
+) as stock_quantity,
+COALESCE(ci.onpreorder, 0) as preorder_count,
+COALESCE(pnb.inventory, 0) as notions_inv_count,
 COALESCE(pcp.price_each, 0) as price,
 COALESCE(p.sellingprice, 0) AS regular_price,
 COALESCE((SELECT ROUND(AVG(costeach), 5)
@@ -610,21 +669,6 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference,
 sid.notions_itemnumber AS notions_reference,
 CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
-(SELECT CONCAT('https://sbing.com/i/products/0000/',
-SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/',
-p.pid, '-t-', MIN(PI.iid), '.jpg')
-FROM product_images PI
-WHERE PI.pid = p.pid AND PI.hidden = 0) AS image,
-(SELECT CONCAT('https://sbing.com/i/products/0000/',
-SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/',
-p.pid, '-175x175-', MIN(PI.iid), '.jpg')
-FROM product_images PI
-WHERE PI.pid = p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175,
-(SELECT CONCAT('https://sbing.com/i/products/0000/',
-SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/',
-p.pid, '-o-', MIN(PI.iid), '.jpg')
-FROM product_images PI
-WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full,
 pc1.name AS brand,
 pc2.name AS line,
 pc3.name AS subline,
@@ -649,7 +693,6 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 pls.date_sold as date_last_sold,
 GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids
 FROM products p
-LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
 LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
 LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
@@ -662,16 +705,24 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
 LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
 LEFT JOIN product_last_sold pls ON p.pid = pls.pid
-LEFT JOIN (
-SELECT pid, MIN(price_each) as price_each
-FROM product_current_prices
-WHERE active = 1
-GROUP BY pid
-) pcp ON p.pid = pcp.pid
+LEFT JOIN current_inventory ci ON p.pid = ci.pid
+LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
+LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
 WHERE p.pid IN (?)
 GROUP BY p.pid
 `, [missingPids]);

+// Add image URLs
+products.forEach(product => {
+const urls = getImageUrls(product.pid);
+product.image = urls.image;
+product.image_175 = urls.image_175;
+product.image_full = urls.image_full;
+});
+
+let recordsAdded = 0;
+let recordsUpdated = 0;
+
 if (products.length > 0) {
 // Map values in the same order as columns
 const productValues = products.flatMap(product =>
@@ -699,21 +750,13 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 `;

 const result = await localConnection.query(query, productValues);
-recordsAdded += result.affectedRows - result.changedRows;
-recordsUpdated += result.changedRows;
+recordsAdded = result.affectedRows - result.changedRows;
+recordsUpdated = result.changedRows;

-// Verify products were inserted before proceeding with categories
-const [insertedProducts] = await localConnection.query(
-"SELECT pid FROM products WHERE pid IN (?)",
-[products.map(p => p.pid)]
-);
-const insertedPids = new Set(insertedProducts.map(p => p.pid));
-
 // Handle category relationships if any
 const categoryRelationships = [];
 products.forEach(product => {
-// Only add category relationships for products that were successfully inserted
-if (insertedPids.has(product.pid) && product.category_ids) {
+if (product.category_ids) {
 const catIds = product.category_ids
 .split(",")
 .map(id => id.trim())
@@ -744,10 +787,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 .map(() => "(?, ?)")
 .join(",");
 await localConnection.query(
-`
-INSERT IGNORE INTO product_categories (cat_id, pid)
-VALUES ${catPlaceholders}
-`,
+`INSERT IGNORE INTO product_categories (cat_id, pid)
+VALUES ${catPlaceholders}`,
 validRelationships.flat()
 );
 }
@@ -758,15 +799,10 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 status: "complete",
 totalImported: products.length,
 recordsAdded,
-recordsUpdated,
-incrementalUpdate: true,
-lastSyncTime
+recordsUpdated
 };
 } catch (error) {
 throw error;
-} finally {
-// Cleanup temporary tables
-await cleanupTemporaryTables(localConnection);
 }
 }

@@ -12,6 +12,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
);
|
);
|
||||||
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
|
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
|
||||||
|
|
||||||
|
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
|
||||||
|
|
||||||
|
// Insert temporary table creation query for purchase orders
|
||||||
|
await localConnection.query(`
|
||||||
|
CREATE TABLE IF NOT EXISTS temp_purchase_orders (
|
||||||
|
po_id INT UNSIGNED NOT NULL,
|
||||||
|
pid INT UNSIGNED NOT NULL,
|
||||||
|
vendor VARCHAR(255),
|
||||||
|
date DATE,
|
||||||
|
expected_date DATE,
|
||||||
|
status INT,
|
||||||
|
notes TEXT,
|
||||||
|
PRIMARY KEY (po_id, pid)
|
||||||
|
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||||
|
`);
|
||||||
|
|
||||||
outputProgress({
|
outputProgress({
|
||||||
operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} purchase orders import`,
|
operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} purchase orders import`,
|
||||||
status: "running",
|
status: "running",
|
||||||
@@ -82,6 +98,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
|
lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
|
||||||
] : []);
|
] : []);
|
||||||
|
|
||||||
|
console.log('Purchase Orders: Found changes:', total);
|
||||||
|
|
||||||
const [poList] = await prodConnection.query(`
|
const [poList] = await prodConnection.query(`
|
||||||
SELECT DISTINCT
|
SELECT DISTINCT
|
||||||
COALESCE(p.po_id, r.receiving_id) as po_id,
|
COALESCE(p.po_id, r.receiving_id) as po_id,
|
||||||
@@ -221,6 +239,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
     const values = [];
     let batchProcessed = 0;

+    // First check which PO lines already exist and get their current values
+    const poLines = Array.from(poProductMap.values())
+      .filter(p => validPids.has(p.pid))
+      .map(p => [p.po_id, p.pid]);
+
+    const [existingPOs] = await localConnection.query(
+      `SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`,
+      poLines.flat()
+    );
+    const existingPOMap = new Map(
+      existingPOs.map(po => [`${po.po_id}-${po.pid}`, po])
+    );
+
+    // Split into inserts and updates
+    const insertsAndUpdates = { inserts: [], updates: [] };
+
     for (const po of batch) {
       const poProducts = Array.from(poProductMap.values())
         .filter(p => p.po_id === po.po_id && validPids.has(p.pid));
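One detail worth noting about the lookup added here: the `(po_id, pid) IN ((?,?),(?,?),…)` predicate is built from the key pairs and the pairs are flattened into the parameter list, but an empty poLines array would render as `IN ()`, which MySQL rejects as a syntax error. A guarded sketch of the same lookup (standalone, same column conventions as above):

// Sketch: fetch current values for a set of (po_id, pid) keys, guarding the empty case.
async function fetchExistingPOLines(db, columnNames, poLines) {
  if (poLines.length === 0) return new Map();     // "IN ()" would be a MySQL syntax error
  const tuples = poLines.map(() => "(?,?)").join(",");
  const [rows] = await db.query(
    `SELECT ${columnNames.join(",")} FROM purchase_orders WHERE (po_id, pid) IN (${tuples})`,
    poLines.flat()
  );
  return new Map(rows.map(r => [`${r.po_id}-${r.pid}`, r]));
}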
@@ -280,7 +314,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           const firstReceiving = allReceivings[0] || {};
           const lastReceiving = allReceivings[allReceivings.length - 1] || {};

-          values.push(columnNames.map(col => {
+          const rowValues = columnNames.map(col => {
             switch (col) {
               case 'po_id': return po.po_id;
               case 'vendor': return po.vendor;
@@ -309,28 +343,75 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
               });
             default: return null;
           }
-        }));
+        });
+
+        if (existingPOMap.has(key)) {
+          const existing = existingPOMap.get(key);
+          // Check if any values are different
+          const hasChanges = columnNames.some(col => {
+            const newVal = rowValues[columnNames.indexOf(col)];
+            const oldVal = existing[col] ?? null;
+            // Special handling for numbers to avoid type coercion issues
+            if (typeof newVal === 'number' && typeof oldVal === 'number') {
+              return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
+            }
+            // Special handling for receiving_history - parse and compare
+            if (col === 'receiving_history') {
+              const newHistory = JSON.parse(newVal || '{}');
+              const oldHistory = JSON.parse(oldVal || '{}');
+              return JSON.stringify(newHistory) !== JSON.stringify(oldHistory);
+            }
+            return newVal !== oldVal;
+          });
+
+          if (hasChanges) {
+            insertsAndUpdates.updates.push({
+              po_id: po.po_id,
+              pid: product.pid,
+              values: rowValues
+            });
+          }
+        } else {
+          insertsAndUpdates.inserts.push({
+            po_id: po.po_id,
+            pid: product.pid,
+            values: rowValues
+          });
+        }
         batchProcessed++;
       }
     }

-    if (values.length > 0) {
-      const placeholders = values.map(() =>
-        `(${Array(columnNames.length).fill("?").join(",")})`
-      ).join(",");
+    // Handle inserts
+    if (insertsAndUpdates.inserts.length > 0) {
+      const insertPlaceholders = insertsAndUpdates.inserts
+        .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
+        .join(",");

-      const query = `
+      const insertResult = await localConnection.query(`
         INSERT INTO purchase_orders (${columnNames.join(",")})
-        VALUES ${placeholders}
+        VALUES ${insertPlaceholders}
+      `, insertsAndUpdates.inserts.map(i => i.values).flat());
+
+      recordsAdded += insertResult[0].affectedRows;
+    }
+
+    // Handle updates - now we know these actually have changes
+    if (insertsAndUpdates.updates.length > 0) {
+      const updatePlaceholders = insertsAndUpdates.updates
+        .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
+        .join(",");
+
+      const updateResult = await localConnection.query(`
+        INSERT INTO purchase_orders (${columnNames.join(",")})
+        VALUES ${updatePlaceholders}
         ON DUPLICATE KEY UPDATE ${columnNames
           .filter((col) => col !== "po_id" && col !== "pid")
           .map((col) => `${col} = VALUES(${col})`)
           .join(",")};
-      `;
-
-      const result = await localConnection.query(query, values.flat());
-      recordsAdded += result.affectedRows - result.changedRows;
-      recordsUpdated += result.changedRows;
+      `, insertsAndUpdates.updates.map(u => u.values).flat());
+
+      recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
     }

     processed += batchProcessed;
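The `affectedRows / 2` at the end of the update branch reflects how MySQL reports `INSERT … ON DUPLICATE KEY UPDATE`: each inserted row counts as 1, each existing row that actually changed counts as 2, and a duplicate whose values are identical counts as 0. Because the update batch only contains rows the change-detection step already flagged as different, dividing by two recovers the number of updated PO lines. A small sketch of that accounting (the helper name is illustrative):

// Sketch: derive the updated-row count from a mysql2 ResultSetHeader for
// INSERT ... ON DUPLICATE KEY UPDATE, assuming every row in the batch truly changed.
function countUpdatedRows(result, batchSize) {
  const updated = result.affectedRows / 2;        // +2 per updated row, +0 per unchanged duplicate
  if (!Number.isInteger(updated) || updated > batchSize) {
    console.warn(`Unexpected affectedRows=${result.affectedRows} for a batch of ${batchSize}`);
  }
  return updated;
}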
@@ -364,8 +445,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
     return {
       status: "complete",
       totalImported: totalItems,
-      recordsAdded,
-      recordsUpdated,
+      recordsAdded: recordsAdded || 0,
+      recordsUpdated: recordsUpdated || 0,
       incrementalUpdate,
       lastSyncTime
     };
inventory-server/scripts/import/purchase_orders.js (new file, 82 lines)
@@ -0,0 +1,82 @@
+// Split into inserts and updates
+const insertsAndUpdates = batch.reduce((acc, po) => {
+  const key = `${po.po_id}-${po.pid}`;
+  if (existingPOMap.has(key)) {
+    const existing = existingPOMap.get(key);
+    // Check if any values are different
+    const hasChanges = columnNames.some(col => {
+      const newVal = po[col] ?? null;
+      const oldVal = existing[col] ?? null;
+      // Special handling for numbers to avoid type coercion issues
+      if (typeof newVal === 'number' && typeof oldVal === 'number') {
+        return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
+      }
+      // Special handling for receiving_history JSON
+      if (col === 'receiving_history') {
+        return JSON.stringify(newVal) !== JSON.stringify(oldVal);
+      }
+      return newVal !== oldVal;
+    });
+
+    if (hasChanges) {
+      console.log(`PO line changed: ${key}`, {
+        po_id: po.po_id,
+        pid: po.pid,
+        changes: columnNames.filter(col => {
+          const newVal = po[col] ?? null;
+          const oldVal = existing[col] ?? null;
+          if (typeof newVal === 'number' && typeof oldVal === 'number') {
+            return Math.abs(newVal - oldVal) > 0.00001;
+          }
+          if (col === 'receiving_history') {
+            return JSON.stringify(newVal) !== JSON.stringify(oldVal);
+          }
+          return newVal !== oldVal;
+        })
+      });
+      acc.updates.push({
+        po_id: po.po_id,
+        pid: po.pid,
+        values: columnNames.map(col => po[col] ?? null)
+      });
+    }
+  } else {
+    console.log(`New PO line: ${key}`);
+    acc.inserts.push({
+      po_id: po.po_id,
+      pid: po.pid,
+      values: columnNames.map(col => po[col] ?? null)
+    });
+  }
+  return acc;
+}, { inserts: [], updates: [] });
+
+// Handle inserts
+if (insertsAndUpdates.inserts.length > 0) {
+  const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
+
+  const insertResult = await localConnection.query(`
+    INSERT INTO purchase_orders (${columnNames.join(",")})
+    VALUES ${insertPlaceholders}
+  `, insertsAndUpdates.inserts.map(i => i.values).flat());
+
+  recordsAdded += insertResult[0].affectedRows;
+}
+
+// Handle updates
+if (insertsAndUpdates.updates.length > 0) {
+  const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
+
+  const updateResult = await localConnection.query(`
+    INSERT INTO purchase_orders (${columnNames.join(",")})
+    VALUES ${updatePlaceholders}
+    ON DUPLICATE KEY UPDATE
+      ${columnNames
+        .filter(col => col !== "po_id" && col !== "pid")
+        .map(col => `${col} = VALUES(${col})`)
+        .join(",")};
+  `, insertsAndUpdates.updates.map(u => u.values).flat());
+
+  // ON DUPLICATE KEY UPDATE reports 2 affected rows per changed line, so count the
+  // queued updates directly rather than relying on affectedRows.
+  recordsUpdated += insertsAndUpdates.updates.length;
+}
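The change-detection rules used in both versions of this import (epsilon comparison for numbers, serialized-JSON comparison for receiving_history, strict equality otherwise) can be captured in one comparator. A standalone sketch follows; note that comparing JSON.stringify output is key-order sensitive, so two equivalent histories serialized with different key order would still be reported as a change.

// Sketch: column-aware comparison used to decide whether a PO line needs an update.
function rowHasChanges(columnNames, newRow, oldRow) {
  return columnNames.some(col => {
    const newVal = newRow[col] ?? null;
    const oldVal = oldRow[col] ?? null;
    // Numeric columns: tolerate tiny floating point drift in DECIMAL costs/quantities.
    if (typeof newVal === 'number' && typeof oldVal === 'number') {
      return Math.abs(newVal - oldVal) > 0.00001;
    }
    // receiving_history: compare serialized JSON (order-sensitive).
    if (col === 'receiving_history') {
      return JSON.stringify(newVal) !== JSON.stringify(oldVal);
    }
    return newVal !== oldVal;
  });
}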