Merge branch 'master' into add-product-upload-page

2025-02-23 15:40:54 -05:00
parent 3f16413769
commit f628774267
47 changed files with 4674 additions and 3199 deletions


@@ -10,22 +10,42 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
);
- const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
+ const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
- // Insert temporary table creation query for purchase orders
+ // Create temporary tables with PostgreSQL syntax
await localConnection.query(`
- CREATE TABLE IF NOT EXISTS temp_purchase_orders (
- po_id INT UNSIGNED NOT NULL,
- pid INT UNSIGNED NOT NULL,
+ DROP TABLE IF EXISTS temp_purchase_orders;
+ DROP TABLE IF EXISTS temp_po_receivings;
+ CREATE TEMP TABLE temp_purchase_orders (
+ po_id INTEGER NOT NULL,
+ pid INTEGER NOT NULL,
sku VARCHAR(50),
name VARCHAR(255),
vendor VARCHAR(255),
- date DATE,
- expected_date DATE,
- status INT,
+ date TIMESTAMP WITH TIME ZONE,
+ expected_date TIMESTAMP WITH TIME ZONE,
+ status INTEGER,
notes TEXT,
ordered INTEGER,
cost_price DECIMAL(10,3),
PRIMARY KEY (po_id, pid)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+ );
+ CREATE TEMP TABLE temp_po_receivings (
+ po_id INTEGER,
+ pid INTEGER NOT NULL,
+ receiving_id INTEGER NOT NULL,
+ qty_each INTEGER,
+ cost_each DECIMAL(10,3),
+ received_date TIMESTAMP WITH TIME ZONE,
+ received_by INTEGER,
+ received_by_name VARCHAR(255),
+ is_alt_po INTEGER,
+ PRIMARY KEY (receiving_id, pid)
+ );
`);
outputProgress({
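The `syncInfo?.rows?.[0]` change above follows from the driver switch on the local side: node-postgres resolves `query()` to a result object carrying a `rows` array, while mysql2's promise API resolves to a `[rows, fields]` tuple. A minimal normalizing helper, sketch only (`queryRows` is not part of this commit), keeps call sites uniform in files that talk to both drivers:

    // Sketch: normalize result shapes across mysql2 and node-postgres.
    // `conn` may be a mysql2 promise connection or a pg Client/Pool.
    async function queryRows(conn, sql, params = []) {
      const result = await conn.query(sql, params);
      if (result && Array.isArray(result.rows)) return result.rows; // pg: { rows: [...] }
      if (Array.isArray(result)) return result[0];                  // mysql2: [rows, fields]
      return [];
    }

With such a wrapper, `syncInfo?.rows?.[0]` and `syncInfo?.[0]` collapse to the same `rows[0]` access.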
@@ -33,8 +53,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
status: "running",
});
- // Get column names first
- const [columns] = await localConnection.query(`
+ // Get column names - Keep MySQL compatible for production
+ const [columns] = await prodConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'purchase_orders'
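Pointing the column introspection at `prodConnection` keeps the `INFORMATION_SCHEMA` query on MySQL, where its semantics are known, and the resulting column list drives the dynamic SQL built later in this file. A sketch of the pattern, assuming a mysql2 connection (the `TABLE_SCHEMA = DATABASE()` guard is an added precaution, not shown in this hunk):

    // Sketch: introspect columns once, then reuse the list for dynamic SQL.
    const [columns] = await prodConnection.query(`
      SELECT COLUMN_NAME
      FROM INFORMATION_SCHEMA.COLUMNS
      WHERE TABLE_NAME = 'purchase_orders'
        AND TABLE_SCHEMA = DATABASE()
      ORDER BY ORDINAL_POSITION
    `);
    const columnNames = columns.map(c => c.COLUMN_NAME);
    const selectList = columnNames.join(', '); // e.g. "po_id, pid, sku, ..."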
@@ -60,7 +80,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]
: [];
- // First get all relevant PO IDs with basic info
+ // First get all relevant PO IDs with basic info - Keep MySQL compatible for production
const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total
FROM (
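The incremental branch binds `lastSyncTime` eight times because the full statement (parts of it fall outside this hunk) repeats the same cutoff across several date columns and UNION arms; the bound values must match the `?` placeholders one for one. Building the array programmatically avoids miscounting when the query changes, as in this sketch:

    // Sketch: one copy of the cutoff per `?` in the incremental WHERE clauses.
    const PLACEHOLDER_COUNT = 8; // keep in sync with the query above
    const params = incrementalUpdate
      ? Array(PLACEHOLDER_COUNT).fill(lastSyncTime)
      : [];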
@@ -99,6 +119,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
console.log('Purchase Orders: Found changes:', total);
+ // Get PO list - Keep MySQL compatible for production
const [poList] = await prodConnection.query(`
SELECT DISTINCT
COALESCE(p.po_id, r.receiving_id) as po_id,
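`COALESCE(p.po_id, r.receiving_id)` hands receivings that never belonged to a purchase order a synthetic po_id so they can ride through the same pipeline; further down, `is_alt_po = 2` marks them as PO-less. Illustrative values only:

    // Hypothetical row: a receiving with no purchase order attached.
    const receivingRow = { po_id: null, receiving_id: 9041, is_alt_po: 2 };
    // The SQL COALESCE above surfaces it keyed by its receiving_id:
    const unifiedPoId = receivingRow.po_id ?? receivingRow.receiving_id; // 9041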
@@ -109,12 +130,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
) as vendor,
CASE
WHEN p.po_id IS NOT NULL THEN
- DATE(COALESCE(
+ COALESCE(
NULLIF(p.date_ordered, '0000-00-00 00:00:00'),
p.date_created
- ))
+ )
WHEN r.receiving_id IS NOT NULL THEN
- DATE(r.date_created)
+ r.date_created
END as date,
CASE
WHEN p.date_estin = '0000-00-00' THEN NULL
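Dropping the DATE() wrappers preserves the full timestamps that the new TIMESTAMP WITH TIME ZONE columns expect, while the NULLIF calls keep stripping MySQL's '0000-00-00' sentinels, which PostgreSQL rejects outright. The removed JS-side formatDate guard (visible later in this diff) enforced the same rule; condensed as a sketch:

    // Sketch: map MySQL zero-dates and unparseable values to NULL
    // before handing them to PostgreSQL.
    function sanitizeMysqlDate(value) {
      if (!value || value === '0000-00-00' || value === '0000-00-00 00:00:00') return null;
      const d = new Date(value);
      return Number.isNaN(d.getTime()) ? null : d; // pg serializes Date as timestamptz
    }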
@@ -185,14 +206,14 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const batch = poList.slice(i, Math.min(i + BATCH_SIZE, poList.length));
const poIds = batch.map(po => po.po_id);
- // Get all products for these POs in one query
+ // Get all products for these POs in one query - Keep MySQL compatible for production
const [poProducts] = await prodConnection.query(`
SELECT
pop.po_id,
pop.pid,
pr.itemnumber as sku,
pr.description as name,
- pop.cost_each,
+ pop.cost_each as cost_price,
pop.qty_each as ordered
FROM po_products pop
USE INDEX (PRIMARY)
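Aliasing `cost_each` to `cost_price` lines MySQL's result rows up with the temp-table columns, so the insert loop later in this diff can push values positionally and emit numbered placeholders in matching order. The placeholder arithmetic, isolated as a sketch (the 11 fields are the temp_purchase_orders columns above):

    // Sketch: build ($1,$2,...) groups for a multi-row PostgreSQL INSERT.
    const FIELDS = 11; // po_id, pid, sku, name, vendor, date, expected_date,
                       // status, notes, ordered, cost_price
    function buildPlaceholders(rowCount) {
      const groups = [];
      for (let row = 0; row < rowCount; row++) {
        const offset = row * FIELDS;
        const group = Array.from({ length: FIELDS }, (_, i) => `$${offset + i + 1}`);
        groups.push(`(${group.join(', ')})`);
      }
      return groups.join(',');
    }
    // buildPlaceholders(2) -> "($1, ..., $11),($12, ..., $22)"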
@@ -232,317 +253,397 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
ORDER BY r.po_id, rp.pid, rp.received_date
`, [batchPoIds, productPids]);
- // Create maps for this sub-batch
- const poProductMap = new Map();
- productBatch.forEach(product => {
- const key = `${product.po_id}-${product.pid}`;
- poProductMap.set(key, product);
- });
- const receivingMap = new Map();
- const altReceivingMap = new Map();
- const noPOReceivingMap = new Map();
- receivings.forEach(receiving => {
- const key = `${receiving.po_id}-${receiving.pid}`;
- if (receiving.is_alt_po === 2) {
- // No PO
- if (!noPOReceivingMap.has(receiving.pid)) {
- noPOReceivingMap.set(receiving.pid, []);
- }
- noPOReceivingMap.get(receiving.pid).push(receiving);
- } else if (receiving.is_alt_po === 1) {
- // Different PO
- if (!altReceivingMap.has(receiving.pid)) {
- altReceivingMap.set(receiving.pid, []);
- }
- altReceivingMap.get(receiving.pid).push(receiving);
- } else {
- // Original PO
- if (!receivingMap.has(key)) {
- receivingMap.set(key, []);
- }
- receivingMap.get(key).push(receiving);
- }
- });
- // Verify PIDs exist
- const [existingPids] = await localConnection.query(
- 'SELECT pid FROM products WHERE pid IN (?)',
- [productPids]
- );
- const validPids = new Set(existingPids.map(p => p.pid));
- // First check which PO lines already exist and get their current values
- const poLines = Array.from(poProductMap.values())
- .filter(p => validPids.has(p.pid))
- .map(p => [p.po_id, p.pid]);
- const [existingPOs] = await localConnection.query(
- `SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`,
- poLines.flat()
- );
- const existingPOMap = new Map(
- existingPOs.map(po => [`${po.po_id}-${po.pid}`, po])
- );
- // Split into inserts and updates
- const insertsAndUpdates = { inserts: [], updates: [] };
- let batchProcessed = 0;
- for (const po of batch) {
- const poProducts = Array.from(poProductMap.values())
- .filter(p => p.po_id === po.po_id && validPids.has(p.pid));
- for (const product of poProducts) {
- const key = `${po.po_id}-${product.pid}`;
- const receivingHistory = receivingMap.get(key) || [];
- const altReceivingHistory = altReceivingMap.get(product.pid) || [];
- const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || [];
+ // Insert receivings into temp table
+ if (receivings.length > 0) {
+ // Process in smaller chunks to avoid parameter limits
+ const CHUNK_SIZE = 100; // Reduce chunk size to avoid parameter limits
+ for (let i = 0; i < receivings.length; i += CHUNK_SIZE) {
+ const chunk = receivings.slice(i, Math.min(i + CHUNK_SIZE, receivings.length));
- // Combine all receivings and sort by date
- const allReceivings = [
- ...receivingHistory.map(r => ({ ...r, type: 'original' })),
- ...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })),
- ...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' }))
- ].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31'));
- // Split receivings into original PO and others
- const originalPOReceivings = allReceivings.filter(r => r.type === 'original');
- const otherReceivings = allReceivings.filter(r => r.type !== 'original');
- // Track FIFO fulfillment
- let remainingToFulfill = product.ordered;
- const fulfillmentTracking = [];
- let totalReceived = 0;
- let actualCost = null; // Will store the cost of the first receiving that fulfills this PO
- let firstFulfillmentReceiving = null;
- let lastFulfillmentReceiving = null;
- for (const receiving of allReceivings) {
- // Convert quantities to base units using supplier data
- const baseQtyReceived = receiving.qty_each * (
- receiving.type === 'original' ? 1 :
- Math.max(1, product.supplier_qty_per_unit || 1)
- );
- const qtyToApply = Math.min(remainingToFulfill, baseQtyReceived);
- if (qtyToApply > 0) {
- // If this is the first receiving being applied, use its cost
- if (actualCost === null && receiving.cost_each > 0) {
- actualCost = receiving.cost_each;
- firstFulfillmentReceiving = receiving;
- }
- lastFulfillmentReceiving = receiving;
- fulfillmentTracking.push({
- receiving_id: receiving.receiving_id,
- qty_applied: qtyToApply,
- qty_total: baseQtyReceived,
- cost: receiving.cost_each || actualCost || product.cost_each,
- date: receiving.received_date,
- received_by: receiving.received_by,
- received_by_name: receiving.received_by_name || 'Unknown',
- type: receiving.type,
- remaining_qty: baseQtyReceived - qtyToApply
- });
- remainingToFulfill -= qtyToApply;
- } else {
- // Track excess receivings
- fulfillmentTracking.push({
- receiving_id: receiving.receiving_id,
- qty_applied: 0,
- qty_total: baseQtyReceived,
- cost: receiving.cost_each || actualCost || product.cost_each,
- date: receiving.received_date,
- received_by: receiving.received_by,
- received_by_name: receiving.received_by_name || 'Unknown',
- type: receiving.type,
- is_excess: true
- });
- }
- totalReceived += baseQtyReceived;
- }
- const receiving_status = !totalReceived ? 1 : // created
- remainingToFulfill > 0 ? 30 : // partial
- 40; // full
- function formatDate(dateStr) {
- if (!dateStr) return null;
- if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null;
- if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null;
- try {
- const date = new Date(dateStr);
- if (isNaN(date.getTime())) return null;
- if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null;
- return date.toISOString().split('T')[0];
- } catch (e) {
- return null;
- }
- }
- const rowValues = columnNames.map(col => {
- switch (col) {
- case 'po_id': return po.po_id;
- case 'vendor': return po.vendor;
- case 'date': return formatDate(po.date);
- case 'expected_date': return formatDate(po.expected_date);
- case 'pid': return product.pid;
- case 'sku': return product.sku;
- case 'name': return product.name;
- case 'cost_price': return actualCost || product.cost_each;
- case 'po_cost_price': return product.cost_each;
- case 'status': return po.status;
- case 'notes': return po.notes;
- case 'long_note': return po.long_note;
- case 'ordered': return product.ordered;
- case 'received': return totalReceived;
- case 'unfulfilled': return remainingToFulfill;
- case 'excess_received': return Math.max(0, totalReceived - product.ordered);
- case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date);
- case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date);
- case 'received_by': return firstFulfillmentReceiving?.received_by_name || null;
- case 'receiving_status': return receiving_status;
- case 'receiving_history': return JSON.stringify({
- fulfillment: fulfillmentTracking,
- ordered_qty: product.ordered,
- total_received: totalReceived,
- remaining_unfulfilled: remainingToFulfill,
- excess_received: Math.max(0, totalReceived - product.ordered),
- po_cost: product.cost_each,
- actual_cost: actualCost || product.cost_each
- });
- default: return null;
- }
+ const values = [];
+ const placeholders = [];
+ chunk.forEach((r, idx) => {
+ values.push(
+ r.po_id,
+ r.pid,
+ r.receiving_id,
+ r.qty_each,
+ r.cost_each,
+ r.received_date,
+ r.received_by,
+ r.received_by_name || null,
+ r.is_alt_po
+ );
+ const offset = idx * 9;
+ placeholders.push(`($${offset + 1}, $${offset + 2}, $${offset + 3}, $${offset + 4}, $${offset + 5}, $${offset + 6}, $${offset + 7}, $${offset + 8}, $${offset + 9})`);
+ });
- if (existingPOMap.has(key)) {
- const existing = existingPOMap.get(key);
- // Check if any values are different
- const hasChanges = columnNames.some(col => {
- const newVal = rowValues[columnNames.indexOf(col)];
- const oldVal = existing[col] ?? null;
- // Special handling for numbers to avoid type coercion issues
- if (typeof newVal === 'number' && typeof oldVal === 'number') {
- return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
- }
- // Special handling for receiving_history - parse and compare
- if (col === 'receiving_history') {
- const newHistory = JSON.parse(newVal || '{}');
- const oldHistory = JSON.parse(oldVal || '{}');
- return JSON.stringify(newHistory) !== JSON.stringify(oldHistory);
- }
- return newVal !== oldVal;
- });
- if (hasChanges) {
- insertsAndUpdates.updates.push({
- po_id: po.po_id,
- pid: product.pid,
- values: rowValues
- });
- }
- } else {
- insertsAndUpdates.inserts.push({
- po_id: po.po_id,
- pid: product.pid,
- values: rowValues
- });
- }
- batchProcessed++;
+ await localConnection.query(`
+ INSERT INTO temp_po_receivings (
+ po_id, pid, receiving_id, qty_each, cost_each, received_date,
+ received_by, received_by_name, is_alt_po
+ )
+ VALUES ${placeholders.join(',')}
+ ON CONFLICT (receiving_id, pid) DO UPDATE SET
+ po_id = EXCLUDED.po_id,
+ qty_each = EXCLUDED.qty_each,
+ cost_each = EXCLUDED.cost_each,
+ received_date = EXCLUDED.received_date,
+ received_by = EXCLUDED.received_by,
+ received_by_name = EXCLUDED.received_by_name,
+ is_alt_po = EXCLUDED.is_alt_po
+ `, values);
}
}
- // Handle inserts
- if (insertsAndUpdates.inserts.length > 0) {
- const insertPlaceholders = insertsAndUpdates.inserts
- .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
- .join(",");
- const insertResult = await localConnection.query(`
- INSERT INTO purchase_orders (${columnNames.join(",")})
- VALUES ${insertPlaceholders}
- `, insertsAndUpdates.inserts.map(i => i.values).flat());
- const affectedRows = insertResult[0].affectedRows;
- // For an upsert, MySQL counts rows twice for updates
- // So if affectedRows is odd, we have (updates * 2 + inserts)
- const updates = Math.floor(affectedRows / 2);
- const inserts = affectedRows - (updates * 2);
+ // Process each PO product in chunks
+ const PRODUCT_CHUNK_SIZE = 100;
+ for (let i = 0; i < productBatch.length; i += PRODUCT_CHUNK_SIZE) {
+ const chunk = productBatch.slice(i, Math.min(i + PRODUCT_CHUNK_SIZE, productBatch.length));
+ const values = [];
+ const placeholders = [];
- recordsAdded += inserts;
- recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
- processed += batchProcessed;
}
- // Handle updates - now we know these actually have changes
- if (insertsAndUpdates.updates.length > 0) {
- const updatePlaceholders = insertsAndUpdates.updates
- .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
- .join(",");
- const updateResult = await localConnection.query(`
- INSERT INTO purchase_orders (${columnNames.join(",")})
- VALUES ${updatePlaceholders}
- ON DUPLICATE KEY UPDATE ${columnNames
- .filter((col) => col !== "po_id" && col !== "pid")
- .map((col) => `${col} = VALUES(${col})`)
- .join(",")};
- `, insertsAndUpdates.updates.map(u => u.values).flat());
- const affectedRows = updateResult[0].affectedRows;
- // For an upsert, MySQL counts rows twice for updates
- // So if affectedRows is odd, we have (updates * 2 + inserts)
- const updates = Math.floor(affectedRows / 2);
- const inserts = affectedRows - (updates * 2);
- recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
- processed += batchProcessed;
- }
- // Update progress based on time interval
- const now = Date.now();
- if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) {
- outputProgress({
- status: "running",
- operation: "Purchase orders import",
- current: processed,
- total: totalItems,
- elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
- remaining: estimateRemaining(startTime, processed, totalItems),
- rate: calculateRate(startTime, processed)
+ chunk.forEach((product, idx) => {
+ const po = batch.find(p => p.po_id === product.po_id);
+ if (!po) return;
+ values.push(
+ product.po_id,
+ product.pid,
+ product.sku,
+ product.name,
+ po.vendor,
+ po.date,
+ po.expected_date,
+ po.status,
+ po.notes || po.long_note,
+ product.ordered,
+ product.cost_price
+ );
+ const offset = idx * 11; // Updated to match 11 fields
+ placeholders.push(`($${offset + 1}, $${offset + 2}, $${offset + 3}, $${offset + 4}, $${offset + 5}, $${offset + 6}, $${offset + 7}, $${offset + 8}, $${offset + 9}, $${offset + 10}, $${offset + 11})`);
+ });
- lastProgressUpdate = now;
+ if (placeholders.length > 0) {
+ await localConnection.query(`
+ INSERT INTO temp_purchase_orders (
+ po_id, pid, sku, name, vendor, date, expected_date,
+ status, notes, ordered, cost_price
+ )
+ VALUES ${placeholders.join(',')}
+ ON CONFLICT (po_id, pid) DO UPDATE SET
+ sku = EXCLUDED.sku,
+ name = EXCLUDED.name,
+ vendor = EXCLUDED.vendor,
+ date = EXCLUDED.date,
+ expected_date = EXCLUDED.expected_date,
+ status = EXCLUDED.status,
+ notes = EXCLUDED.notes,
+ ordered = EXCLUDED.ordered,
+ cost_price = EXCLUDED.cost_price
+ `, values);
+ }
+ processed += chunk.length;
+ // Update progress based on time interval
+ const now = Date.now();
+ if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) {
+ outputProgress({
+ status: "running",
+ operation: "Purchase orders import",
+ current: processed,
+ total: totalItems,
+ elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+ remaining: estimateRemaining(startTime, processed, totalItems),
+ rate: calculateRate(startTime, processed)
+ });
+ lastProgressUpdate = now;
+ }
}
}
}
- // Only update sync status if we get here (no errors thrown)
+ // Insert final data into purchase_orders table in chunks
+ const FINAL_CHUNK_SIZE = 1000;
+ let totalProcessed = 0;
+ const totalPosResult = await localConnection.query('SELECT COUNT(*) as total_pos FROM temp_purchase_orders');
+ const total_pos = parseInt(totalPosResult.rows?.[0]?.total_pos || '0', 10);
+ outputProgress({
+ status: "running",
+ operation: "Purchase orders final import",
+ message: `Processing ${total_pos} purchase orders for final import`,
+ current: 0,
+ total: total_pos
+ });
+ // Process in chunks using cursor-based pagination
+ let lastPoId = 0;
+ let lastPid = 0;
+ let recordsAdded = 0;
+ let recordsUpdated = 0;
+ while (true) {
+ console.log('Fetching next chunk with lastPoId:', lastPoId, 'lastPid:', lastPid);
+ const chunkResult = await localConnection.query(`
+ SELECT po_id, pid FROM temp_purchase_orders
+ WHERE (po_id, pid) > ($1, $2)
+ ORDER BY po_id, pid
+ LIMIT $3
+ `, [lastPoId, lastPid, FINAL_CHUNK_SIZE]);
+ if (!chunkResult?.rows) {
+ console.error('No rows returned from chunk query:', chunkResult);
+ break;
+ }
+ const chunk = chunkResult.rows;
+ console.log('Got chunk of size:', chunk.length);
+ if (chunk.length === 0) break;
+ const result = await localConnection.query(`
+ WITH inserted_pos AS (
+ INSERT INTO purchase_orders (
+ po_id, pid, sku, name, cost_price, po_cost_price,
+ vendor, date, expected_date, status, notes,
+ ordered, received, receiving_status,
+ received_date, last_received_date, received_by,
+ receiving_history
+ )
+ SELECT
+ po.po_id,
+ po.pid,
+ po.sku,
+ po.name,
+ COALESCE(
+ (
+ SELECT cost_each
+ FROM temp_po_receivings r2
+ WHERE r2.pid = po.pid
+ AND r2.po_id = po.po_id
+ AND r2.is_alt_po = 0
+ AND r2.cost_each > 0
+ ORDER BY r2.received_date
+ LIMIT 1
+ ),
+ po.cost_price
+ ) as cost_price,
+ po.cost_price as po_cost_price,
+ po.vendor,
+ po.date,
+ po.expected_date,
+ po.status,
+ po.notes,
+ po.ordered,
+ COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0) as received,
+ CASE
+ WHEN COUNT(r.receiving_id) = 0 THEN 1 -- created
+ WHEN SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END) < po.ordered THEN 30 -- partial
+ ELSE 40 -- full
+ END as receiving_status,
+ MIN(CASE WHEN r.is_alt_po = 0 THEN r.received_date END) as received_date,
+ MAX(CASE WHEN r.is_alt_po = 0 THEN r.received_date END) as last_received_date,
+ (
+ SELECT r2.received_by_name
+ FROM temp_po_receivings r2
+ WHERE r2.pid = po.pid
+ AND r2.is_alt_po = 0
+ ORDER BY r2.received_date
+ LIMIT 1
+ ) as received_by,
+ jsonb_build_object(
+ 'ordered_qty', po.ordered,
+ 'total_received', COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0),
+ 'remaining_unfulfilled', GREATEST(0, po.ordered - COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0)),
+ 'excess_received', GREATEST(0, COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0) - po.ordered),
+ 'po_cost', po.cost_price,
+ 'actual_cost', COALESCE(
+ (
+ SELECT cost_each
+ FROM temp_po_receivings r2
+ WHERE r2.pid = po.pid
+ AND r2.is_alt_po = 0
+ AND r2.cost_each > 0
+ ORDER BY r2.received_date
+ LIMIT 1
+ ),
+ po.cost_price
+ ),
+ 'fulfillment', (
+ SELECT jsonb_agg(
+ jsonb_build_object(
+ 'receiving_id', r2.receiving_id,
+ 'qty_applied', CASE
+ WHEN r2.running_total <= po.ordered THEN r2.qty_each
+ WHEN r2.running_total - r2.qty_each < po.ordered THEN po.ordered - (r2.running_total - r2.qty_each)
+ ELSE 0
+ END,
+ 'qty_total', r2.qty_each,
+ 'cost', r2.cost_each,
+ 'date', r2.received_date,
+ 'received_by', r2.received_by,
+ 'received_by_name', r2.received_by_name,
+ 'type', CASE r2.is_alt_po
+ WHEN 0 THEN 'original'
+ WHEN 1 THEN 'alternate'
+ ELSE 'no_po'
+ END,
+ 'remaining_qty', CASE
+ WHEN r2.running_total <= po.ordered THEN 0
+ WHEN r2.running_total - r2.qty_each < po.ordered THEN r2.running_total - po.ordered
+ ELSE r2.qty_each
+ END,
+ 'is_excess', r2.running_total > po.ordered
+ )
+ ORDER BY r2.received_date
+ )
+ FROM (
+ SELECT
+ r2.*,
+ SUM(r2.qty_each) OVER (
+ PARTITION BY r2.pid
+ ORDER BY r2.received_date
+ ROWS UNBOUNDED PRECEDING
+ ) as running_total
+ FROM temp_po_receivings r2
+ WHERE r2.pid = po.pid
+ ) r2
+ ),
+ 'alternate_po_receivings', (
+ SELECT jsonb_agg(
+ jsonb_build_object(
+ 'receiving_id', r2.receiving_id,
+ 'qty', r2.qty_each,
+ 'cost', r2.cost_each,
+ 'date', r2.received_date,
+ 'received_by', r2.received_by,
+ 'received_by_name', r2.received_by_name
+ )
+ ORDER BY r2.received_date
+ )
+ FROM temp_po_receivings r2
+ WHERE r2.pid = po.pid AND r2.is_alt_po = 1
+ ),
+ 'no_po_receivings', (
+ SELECT jsonb_agg(
+ jsonb_build_object(
+ 'receiving_id', r2.receiving_id,
+ 'qty', r2.qty_each,
+ 'cost', r2.cost_each,
+ 'date', r2.received_date,
+ 'received_by', r2.received_by,
+ 'received_by_name', r2.received_by_name
+ )
+ ORDER BY r2.received_date
+ )
+ FROM temp_po_receivings r2
+ WHERE r2.pid = po.pid AND r2.is_alt_po = 2
+ )
+ ) as receiving_history
+ FROM temp_purchase_orders po
+ LEFT JOIN temp_po_receivings r ON po.pid = r.pid
+ WHERE (po.po_id, po.pid) IN (
+ SELECT po_id, pid FROM UNNEST($1::int[], $2::int[]) AS t(po_id, pid)
+ )
+ GROUP BY po.po_id, po.pid, po.sku, po.name, po.vendor, po.date,
+ po.expected_date, po.status, po.notes, po.ordered, po.cost_price
+ ON CONFLICT (po_id, pid) DO UPDATE SET
+ vendor = EXCLUDED.vendor,
+ date = EXCLUDED.date,
+ expected_date = EXCLUDED.expected_date,
+ status = EXCLUDED.status,
+ notes = EXCLUDED.notes,
+ ordered = EXCLUDED.ordered,
+ received = EXCLUDED.received,
+ receiving_status = EXCLUDED.receiving_status,
+ received_date = EXCLUDED.received_date,
+ last_received_date = EXCLUDED.last_received_date,
+ received_by = EXCLUDED.received_by,
+ receiving_history = EXCLUDED.receiving_history,
+ cost_price = EXCLUDED.cost_price,
+ po_cost_price = EXCLUDED.po_cost_price
+ RETURNING xmax
+ )
+ SELECT
+ COUNT(*) FILTER (WHERE xmax = 0) as inserted,
+ COUNT(*) FILTER (WHERE xmax <> 0) as updated
+ FROM inserted_pos
+ `, [
+ chunk.map(r => r.po_id),
+ chunk.map(r => r.pid)
+ ]);
+ // Add debug logging
+ console.log('Insert result:', result?.rows?.[0]);
+ // Handle the result properly for PostgreSQL with more defensive coding
+ const resultRow = result?.rows?.[0] || {};
+ const insertCount = parseInt(resultRow.inserted || '0', 10);
+ const updateCount = parseInt(resultRow.updated || '0', 10);
+ recordsAdded += insertCount;
+ recordsUpdated += updateCount;
+ totalProcessed += chunk.length;
+ // Update progress
+ outputProgress({
+ status: "running",
+ operation: "Purchase orders final import",
+ message: `Processed ${totalProcessed} of ${total_pos} purchase orders`,
+ current: totalProcessed,
+ total: total_pos,
+ elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+ remaining: estimateRemaining(startTime, totalProcessed, total_pos),
+ rate: calculateRate(startTime, totalProcessed)
+ });
+ // Update last processed IDs for next chunk with safety check
+ if (chunk.length > 0) {
+ const lastItem = chunk[chunk.length - 1];
+ if (lastItem) {
+ lastPoId = lastItem.po_id;
+ lastPid = lastItem.pid;
+ }
+ }
+ }
// Update sync status
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
VALUES ('purchase_orders', NOW())
- ON DUPLICATE KEY UPDATE
- last_sync_timestamp = NOW(),
- last_sync_id = LAST_INSERT_ID(last_sync_id)
+ ON CONFLICT (table_name) DO UPDATE SET
+ last_sync_timestamp = NOW()
`);
+ // Clean up temporary tables
+ await localConnection.query(`
+ DROP TABLE IF EXISTS temp_purchase_orders;
+ DROP TABLE IF EXISTS temp_po_receivings;
+ `);
return {
status: "complete",
totalImported: totalItems,
- recordsAdded: recordsAdded || 0,
- recordsUpdated: recordsUpdated || 0,
incrementalUpdate,
- lastSyncTime
+ lastSyncTime,
+ recordsAdded,
+ recordsUpdated,
+ totalRecords: processed
};
} catch (error) {
outputProgress({
operation: `${incrementalUpdate ? 'Incremental' : 'Full'} purchase orders import failed`,
status: "error",
error: error.message,
});
console.error("Error during purchase orders import:", error);
+ // Attempt cleanup on error
+ try {
+ await localConnection.query(`
+ DROP TABLE IF EXISTS temp_purchase_orders;
+ DROP TABLE IF EXISTS temp_po_receivings;
+ `);
+ } catch (cleanupError) {
+ console.error('Error during cleanup:', cleanupError);
+ }
throw error;
}
}
module.exports = importPurchaseOrders;
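Two PostgreSQL techniques in the final-import loop deserve a note. Chunking walks temp_purchase_orders with keyset pagination on the composite key, WHERE (po_id, pid) > ($1, $2), which stays fast at any depth where OFFSET would degrade; and the insert/update split comes from the system column xmax, which is 0 for freshly inserted rows and non-zero for rows rewritten by ON CONFLICT ... DO UPDATE. A condensed, self-contained sketch of the same loop shape, with only a few columns shown and `localConnection` assumed to be a node-postgres client:

    // Sketch: keyset-paginated upsert with insert/update counts via xmax.
    async function upsertInChunks(localConnection, chunkSize = 1000) {
      let lastPoId = 0, lastPid = 0, added = 0, updated = 0;
      for (;;) {
        const { rows: chunk } = await localConnection.query(`
          SELECT po_id, pid FROM temp_purchase_orders
          WHERE (po_id, pid) > ($1, $2)
          ORDER BY po_id, pid
          LIMIT $3
        `, [lastPoId, lastPid, chunkSize]);
        if (chunk.length === 0) break;
        const { rows: [counts] } = await localConnection.query(`
          WITH upserted AS (
            INSERT INTO purchase_orders (po_id, pid, sku, name)
            SELECT po_id, pid, sku, name FROM temp_purchase_orders
            WHERE (po_id, pid) IN (
              SELECT * FROM UNNEST($1::int[], $2::int[]) AS t(po_id, pid)
            )
            ON CONFLICT (po_id, pid) DO UPDATE SET
              sku = EXCLUDED.sku,
              name = EXCLUDED.name
            RETURNING xmax
          )
          SELECT COUNT(*) FILTER (WHERE xmax = 0)  AS inserted,
                 COUNT(*) FILTER (WHERE xmax <> 0) AS updated
          FROM upserted
        `, [chunk.map(r => r.po_id), chunk.map(r => r.pid)]);
        added += Number(counts.inserted);
        updated += Number(counts.updated);
        ({ po_id: lastPoId, pid: lastPid } = chunk[chunk.length - 1]);
      }
      return { added, updated };
    }

One caveat: xmax-based counting treats a row as updated even when DO UPDATE wrote back identical values, so the totals measure rows touched, not rows materially changed.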