21 Commits

SHA1 Message Date
9e1989ac66 Cleanup 2025-02-01 14:08:27 -05:00
5bfd6f6d04 Fix import script order count issues 2025-02-01 12:54:33 -05:00
1003ff3cf2 More incremental import fixes 2025-02-01 11:42:51 -05:00
2d0089dc52 Incremental import order fixes 2025-02-01 11:03:42 -05:00
50b86d6d8a Fix/add data to PO script 2025-02-01 10:51:47 -05:00
07f14c0017 Fix/add data to orders script and fix other import errors 2025-02-01 01:06:45 -05:00
e77b488cd4 Fix/add data to products script 2025-01-31 18:44:11 -05:00
d57239c40c Finish up import script incremental and reliability updates 2025-01-31 16:01:21 -05:00
1c932e0df5 More import script updates and fixes, better import_history tracking 2025-01-31 13:12:43 -05:00
a867117c3c Import script incremental fixes 2025-01-31 11:12:38 -05:00
996d3d36af Streamline incremental imports 2025-01-31 10:01:50 -05:00
d0abe9d9a2 - Modify import scripts to handle edge cases with empty arrays and null conditions
- Improve parameter handling in incremental update queries for purchase orders and products
2025-01-31 01:50:21 -05:00
5e4d1c3bd8 Improve import scripts with enhanced incremental update tracking and performance
- Add record tracking for added and updated records in import scripts
- Modify products import to use a dynamic 'needs_update' flag for selective updates
- Enhance order import with more comprehensive timestamp checks
- Update import-from-prod.js to handle and clean up previously running imports
- Improve error handling and connection management in import processes
2025-01-31 01:39:48 -05:00
1be97d6610 Enhance purchase order import with advanced receiving tracking and fulfillment logic
- Implement FIFO-based receiving fulfillment tracking
- Add detailed receiving history with excess and partial fulfillment support
- Improve vendor name resolution and fallback handling
- Optimize incremental update queries by removing redundant conditions
- Enhance receiving status calculation with more granular tracking
2025-01-31 01:25:48 -05:00
b506f89dd7 Optimize order and product import scripts with improved performance and incremental update handling
- Refactor orders import to use temporary tables for more efficient data processing
- Improve batch processing and memory management in order import script
- Update product import to use temporary tables for inventory status
- Modify purchase orders import to use updated timestamp for incremental updates
- Enhance error handling and logging for import processes
2025-01-30 21:13:53 -05:00
c433f1aae8 Enhance import scripts with incremental update support and improved error handling
- Update import-from-prod.js to support granular incremental updates for different import types
- Modify orders.js to handle complex order data retrieval with better performance and error tracking
- Add support for incremental updates in products.js import function
- Improve logging and progress tracking for import processes
2025-01-30 15:49:47 -05:00
31d4011902 Add back product-category import and product time estimates 2025-01-30 00:00:30 -05:00
6c5f119ee5 Import fixes/optimizations 2025-01-29 21:48:56 -05:00
3c5fb9e435 Optimize product import with dynamic batching and memory management 2025-01-29 19:14:58 -05:00
2b329a55a4 Increase product import batch size 2025-01-29 18:51:55 -05:00
0d377466aa Optimize database import queries with improved index selection 2025-01-29 18:42:29 -05:00
9 changed files with 1616 additions and 788 deletions

.gitignore

@@ -57,3 +57,4 @@ csv/**/*
 **/csv/**/*
 !csv/.gitkeep
 inventory/tsconfig.tsbuildinfo
+inventory-server/scripts/.fuse_hidden00000fa20000000a


@@ -184,6 +184,7 @@ CREATE TABLE IF NOT EXISTS import_history (
   start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
   end_time TIMESTAMP NULL,
   duration_seconds INT,
+  duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
   records_added INT DEFAULT 0,
   records_updated INT DEFAULT 0,
   is_incremental BOOLEAN DEFAULT FALSE,


@@ -39,7 +39,7 @@ CREATE TABLE products (
   tags TEXT,
   moq INT DEFAULT 1,
   uom INT DEFAULT 1,
-  rating TINYINT UNSIGNED DEFAULT 0,
+  rating DECIMAL(10,2) DEFAULT 0.00,
   reviews INT UNSIGNED DEFAULT 0,
   weight DECIMAL(10,3),
   length DECIMAL(10,3),
@@ -52,7 +52,7 @@ CREATE TABLE products (
   notifies INT UNSIGNED DEFAULT 0,
   date_last_sold DATE,
   PRIMARY KEY (pid),
-  UNIQUE KEY unique_sku (SKU),
+  INDEX idx_sku (SKU),
   INDEX idx_vendor (vendor),
   INDEX idx_brand (brand),
   INDEX idx_location (location),
@@ -113,11 +113,13 @@ CREATE TABLE IF NOT EXISTS orders (
   tax DECIMAL(10,3) DEFAULT 0.000,
   tax_included TINYINT(1) DEFAULT 0,
   shipping DECIMAL(10,3) DEFAULT 0.000,
+  costeach DECIMAL(10,3) DEFAULT 0.000,
   customer VARCHAR(50) NOT NULL,
   customer_name VARCHAR(100),
   status VARCHAR(20) DEFAULT 'pending',
   canceled TINYINT(1) DEFAULT 0,
   PRIMARY KEY (id),
+  UNIQUE KEY unique_order_line (order_number, pid),
   KEY order_number (order_number),
   KEY pid (pid),
   KEY customer (customer),
@@ -135,7 +137,9 @@ CREATE TABLE purchase_orders (
   expected_date DATE,
   pid BIGINT NOT NULL,
   sku VARCHAR(50) NOT NULL,
+  name VARCHAR(100) NOT NULL COMMENT 'Product name from products.description',
   cost_price DECIMAL(10, 3) NOT NULL,
+  po_cost_price DECIMAL(10, 3) NOT NULL COMMENT 'Original cost from PO, before receiving adjustments',
   status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done',
   receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid',
   notes TEXT,
@@ -147,7 +151,6 @@ CREATE TABLE purchase_orders (
   received_by INT,
   receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
   FOREIGN KEY (pid) REFERENCES products(pid),
-  FOREIGN KEY (sku) REFERENCES products(SKU),
   INDEX idx_po_id (po_id),
   INDEX idx_vendor (vendor),
   INDEX idx_status (status),
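The new `unique_order_line (order_number, pid)` key is what lets the importers use `INSERT ... ON DUPLICATE KEY UPDATE` safely: re-importing the same order line collapses onto one row instead of duplicating it. A sketch of that upsert path (hypothetical mysql2/promise connection `conn`; only a few of the orders columns are shown, the rest omitted for brevity):

```js
// Sketch: the composite unique key turns a repeated import into an update.
async function upsertOrderLine(conn, line) {
  await conn.query(
    `INSERT INTO orders (order_number, pid, SKU, price, quantity, customer, costeach)
     VALUES (?, ?, ?, ?, ?, ?, ?)
     ON DUPLICATE KEY UPDATE
       price = VALUES(price),
       quantity = VALUES(quantity),
       costeach = VALUES(costeach)`,
    [line.order_number, line.pid, line.SKU, line.price, line.quantity, line.customer, line.costeach]
  );
}

// Calling this twice with the same order_number/pid leaves exactly one row;
// without the unique key, the second call would insert a duplicate line.
```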


@@ -10,13 +10,13 @@ const importPurchaseOrders = require('./import/purchase-orders');
 dotenv.config({ path: path.join(__dirname, "../.env") });
 
 // Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
 const IMPORT_ORDERS = true;
 const IMPORT_PURCHASE_ORDERS = true;
 
 // Add flag for incremental updates
-const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE === 'true';
+const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
 
 // SSH configuration
 // In import-from-prod.js
@@ -48,7 +48,6 @@ const sshConfig = {
   connectionLimit: 10,
   queueLimit: 0,
   namedPlaceholders: true,
-  maxAllowedPacket: 64 * 1024 * 1024, // 64MB
   connectTimeout: 60000,
   enableKeepAlive: true,
   keepAliveInitialDelay: 10000,
@@ -103,6 +102,17 @@ async function main() {
     if (isImportCancelled) throw new Error("Import cancelled");
 
+    // Clean up any previously running imports that weren't completed
+    await localConnection.query(`
+      UPDATE import_history
+      SET
+        status = 'cancelled',
+        end_time = NOW(),
+        duration_seconds = TIMESTAMPDIFF(SECOND, start_time, NOW()),
+        error_message = 'Previous import was not completed properly'
+      WHERE status = 'running'
+    `);
+
     // Initialize sync_status table if it doesn't exist
     await localConnection.query(`
       CREATE TABLE IF NOT EXISTS sync_status (
@@ -151,32 +161,36 @@ async function main() {
       results.categories = await importCategories(prodConnection, localConnection);
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
-      if (results.categories.recordsAdded) totalRecordsAdded += results.categories.recordsAdded;
-      if (results.categories.recordsUpdated) totalRecordsUpdated += results.categories.recordsUpdated;
+      console.log('Categories import result:', results.categories);
+      totalRecordsAdded += results.categories?.recordsAdded || 0;
+      totalRecordsUpdated += results.categories?.recordsUpdated || 0;
     }
 
     if (IMPORT_PRODUCTS) {
-      results.products = await importProducts(prodConnection, localConnection);
+      results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
-      if (results.products.recordsAdded) totalRecordsAdded += results.products.recordsAdded;
-      if (results.products.recordsUpdated) totalRecordsUpdated += results.products.recordsUpdated;
+      console.log('Products import result:', results.products);
+      totalRecordsAdded += results.products?.recordsAdded || 0;
+      totalRecordsUpdated += results.products?.recordsUpdated || 0;
     }
 
     if (IMPORT_ORDERS) {
-      results.orders = await importOrders(prodConnection, localConnection);
+      results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
-      if (results.orders.recordsAdded) totalRecordsAdded += results.orders.recordsAdded;
-      if (results.orders.recordsUpdated) totalRecordsUpdated += results.orders.recordsUpdated;
+      console.log('Orders import result:', results.orders);
+      totalRecordsAdded += results.orders?.recordsAdded || 0;
+      totalRecordsUpdated += results.orders?.recordsUpdated || 0;
     }
 
     if (IMPORT_PURCHASE_ORDERS) {
-      results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection);
+      results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
-      if (results.purchaseOrders.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded;
-      if (results.purchaseOrders.recordsUpdated) totalRecordsUpdated += results.purchaseOrders.recordsUpdated;
+      console.log('Purchase orders import result:', results.purchaseOrders);
+      totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0;
+      totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0;
     }
 
     const endTime = Date.now();
@@ -240,8 +254,8 @@ async function main() {
     const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
 
     // Update import history with error
-    if (importHistoryId) {
-      await connections?.localConnection?.query(`
+    if (importHistoryId && connections?.localConnection) {
+      await connections.localConnection.query(`
         UPDATE import_history
         SET
           end_time = NOW(),
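One behavioral note on the flag flip above: any value other than the literal string 'false' (including an unset variable) now selects incremental mode. A tiny self-contained illustration of that parsing:

```js
// The new default: only the exact string 'false' opts out of incremental mode.
function isIncremental(env) {
  return env.INCREMENTAL_UPDATE !== 'false';
}

console.log(isIncremental({}));                              // true  (default)
console.log(isIncremental({ INCREMENTAL_UPDATE: 'true' }));  // true
console.log(isIncremental({ INCREMENTAL_UPDATE: 'false' })); // false
console.log(isIncremental({ INCREMENTAL_UPDATE: '0' }));     // true — only 'false' opts out
```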


@@ -1,303 +1,568 @@
 const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
-const { importMissingProducts } = require('./products');
+const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');
 
-async function importOrders(prodConnection, localConnection) {
-  outputProgress({
-    operation: "Starting orders import - Getting total count",
-    status: "running",
-  });
-
+/**
+ * Imports orders from a production MySQL database to a local MySQL database.
+ * It can run in two modes:
+ *   1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
+ *   2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
+ *
+ * @param {object} prodConnection - A MySQL connection to production DB (MySQL 5.7).
+ * @param {object} localConnection - A MySQL connection to local DB (MySQL 8.0).
+ * @param {boolean} incrementalUpdate - Set to false for a full sync; true for incremental.
+ *
+ * @returns {object} Information about the sync operation.
+ */
+async function importOrders(prodConnection, localConnection, incrementalUpdate = true) {
   const startTime = Date.now();
-  const skippedOrders = new Set(); // Store orders that need to be retried
-  const missingProducts = new Set(); // Store products that need to be imported
+  const skippedOrders = new Set();
+  const missingProducts = new Set();
+  let recordsAdded = 0;
+  let recordsUpdated = 0;
+  let processedCount = 0;
+  let importedCount = 0;
+  let totalOrderItems = 0;
+  let totalUniqueOrders = 0;
+  // Add a cumulative counter for processed orders before the loop
+  let cumulativeProcessedOrders = 0;
 
   try {
-    // Get last sync info
-    const [syncInfo] = await localConnection.query(
-      "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
-    );
-    const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
+    // Insert temporary table creation queries
+    await localConnection.query(`
+      CREATE TABLE IF NOT EXISTS temp_order_items (
+        order_id INT UNSIGNED NOT NULL,
+        pid INT UNSIGNED NOT NULL,
+        SKU VARCHAR(50) NOT NULL,
+        price DECIMAL(10,2) NOT NULL,
+        quantity INT NOT NULL,
+        base_discount DECIMAL(10,2) DEFAULT 0,
+        PRIMARY KEY (order_id, pid)
+      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+    `);
+
+    await localConnection.query(`
+      CREATE TABLE IF NOT EXISTS temp_order_meta (
+        order_id INT UNSIGNED NOT NULL,
+        date DATE NOT NULL,
+        customer VARCHAR(100) NOT NULL,
+        customer_name VARCHAR(150) NOT NULL,
+        status INT,
+        canceled TINYINT(1),
+        PRIMARY KEY (order_id)
+      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+    `);
+
+    await localConnection.query(`
+      CREATE TABLE IF NOT EXISTS temp_order_discounts (
+        order_id INT UNSIGNED NOT NULL,
+        pid INT UNSIGNED NOT NULL,
+        discount DECIMAL(10,2) NOT NULL,
+        PRIMARY KEY (order_id, pid)
+      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+    `);
+
+    await localConnection.query(`
+      CREATE TABLE IF NOT EXISTS temp_order_taxes (
+        order_id INT UNSIGNED NOT NULL,
+        pid INT UNSIGNED NOT NULL,
+        tax DECIMAL(10,2) NOT NULL,
+        PRIMARY KEY (order_id, pid)
+      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+    `);
+
+    await localConnection.query(`
+      CREATE TABLE IF NOT EXISTS temp_order_costs (
+        order_id INT UNSIGNED NOT NULL,
+        pid INT UNSIGNED NOT NULL,
+        costeach DECIMAL(10,3) DEFAULT 0.000,
+        PRIMARY KEY (order_id, pid)
+      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+    `);
 
-    // First get the column names from the table structure
+    // Get column names from the local table
     const [columns] = await localConnection.query(`
       SELECT COLUMN_NAME
       FROM INFORMATION_SCHEMA.COLUMNS
       WHERE TABLE_NAME = 'orders'
       ORDER BY ORDINAL_POSITION
     `);
+    const columnNames = columns.map(col => col.COLUMN_NAME);
 
-    const columnNames = columns
-      .map((col) => col.COLUMN_NAME)
-      .filter((name) => name !== "id"); // Skip auto-increment ID
+    // Get last sync info
+    const [syncInfo] = await localConnection.query(
+      "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
+    );
+    const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
+    console.log('Orders: Using last sync time:', lastSyncTime);
 
-    // Get total count first for progress indication - modified for incremental
-    const [countResult] = await prodConnection.query(`
-      SELECT COUNT(*) as total
-      FROM order_items oi FORCE INDEX (PRIMARY)
-      JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id
-      WHERE o.order_status >= 15
-      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
-      AND (o.date_placed_onlydate > ?
-           OR o.stamp > ?)
-    `, [lastSyncTime, lastSyncTime]);
-
-    const totalOrders = countResult[0].total;
-
-    outputProgress({
-      operation: `Starting orders import - Fetching ${totalOrders} orders from production`,
-      status: "running",
-    });
-
-    const total = countResult[0].total;
-    let processed = 0;
-
-    // Process in batches
-    const batchSize = 20000; // Increased from 1000 since order records are small
-    let offset = 0;
-
-    while (offset < total) {
-      // First get orders without tax info
-      const [orders] = await prodConnection.query(`
-        SELECT
-          oi.order_id as order_number,
-          oi.prod_pid as pid,
-          oi.prod_itemnumber as SKU,
-          o.date_placed_onlydate as date,
-          oi.prod_price_reg as price,
-          oi.qty_ordered as quantity,
-          (oi.prod_price_reg - oi.prod_price) as discount,
-          0 as tax,
-          0 as tax_included,
-          ROUND(
-            ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) *
-            (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2
-          ) as shipping,
-          o.order_cid as customer,
-          CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name,
-          'pending' as status,
-          CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled
-        FROM order_items oi
-        FORCE INDEX (PRIMARY)
-        JOIN _order o USE INDEX (date_placed_onlydate, idx_status)
-          ON oi.order_id = o.order_id
-        WHERE o.order_status >= 15
-        AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
-        AND (o.date_placed_onlydate > ?
-             OR o.stamp > ?)
-        LIMIT ? OFFSET ?
-      `, [lastSyncTime, lastSyncTime, batchSize, offset]);
-
-      // Then get tax info for these orders
-      if (orders.length > 0) {
-        const orderIds = [...new Set(orders.map(o => o.order_number))];
-        const [taxInfo] = await prodConnection.query(`
-          SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect
-          FROM (
-            SELECT order_id, MAX(stamp) as latest_stamp
-            FROM order_tax_info USE INDEX (order_id, stamp)
-            WHERE order_id IN (?)
-            GROUP BY order_id
-          ) latest
-          JOIN order_tax_info oti USE INDEX (order_id, stamp)
-            ON oti.order_id = latest.order_id
-            AND oti.stamp = latest.latest_stamp
-          JOIN order_tax_info_products otp FORCE INDEX (PRIMARY)
-            ON oti.taxinfo_id = otp.taxinfo_id
-        `, [orderIds]);
-
-        // Create a map for quick tax lookup
-        const taxMap = new Map();
-        taxInfo.forEach(t => {
-          taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect);
-        });
-
-        // Add tax info to orders
-        orders.forEach(order => {
-          const taxKey = `${order.order_number}-${order.pid}`;
-          order.tax = taxMap.get(taxKey) || 0;
-        });
-      }
-
-      // Check if all products exist before inserting orders
-      const orderProductPids = [...new Set(orders.map((o) => o.pid))];
-      const [existingProducts] = await localConnection.query(
-        "SELECT pid FROM products WHERE pid IN (?)",
-        [orderProductPids]
-      );
-      const existingPids = new Set(existingProducts.map((p) => p.pid));
-
-      // Filter out orders with missing products and track them
-      const validOrders = orders.filter((order) => {
-        if (!existingPids.has(order.pid)) {
-          missingProducts.add(order.pid);
-          skippedOrders.add(order.order_number);
-          return false;
-        }
-        return true;
-      });
-
-      if (validOrders.length > 0) {
-        const placeholders = validOrders
-          .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
-          .join(",");
-
-        const updateClauses = columnNames
-          .filter((col) => col !== "order_number") // Don't update primary key
-          .map((col) => `${col} = VALUES(${col})`)
-          .join(",");
-
-        const query = `
-          INSERT INTO orders (${columnNames.join(",")})
-          VALUES ${placeholders}
-          ON DUPLICATE KEY UPDATE ${updateClauses}
-        `;
-
-        await localConnection.query(
-          query,
-          validOrders.flatMap(order => columnNames.map(col => order[col]))
-        );
-      }
-
-      processed += orders.length;
-      offset += batchSize;
+    // First get count of order items
+    const [[{ total }]] = await prodConnection.query(`
+      SELECT COUNT(*) as total
+      FROM order_items oi
+      USE INDEX (PRIMARY)
+      JOIN _order o ON oi.order_id = o.order_id
+      WHERE o.order_status >= 15
+      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
+      AND o.date_placed_onlydate IS NOT NULL
+      ${incrementalUpdate ? `
+        AND (
+          o.stamp > ?
+          OR oi.stamp > ?
+          OR EXISTS (
+            SELECT 1 FROM order_discount_items odi
+            WHERE odi.order_id = o.order_id
+            AND odi.pid = oi.prod_pid
+          )
+          OR EXISTS (
+            SELECT 1 FROM order_tax_info oti
+            JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
+            WHERE oti.order_id = o.order_id
+            AND otip.pid = oi.prod_pid
+            AND oti.stamp > ?
+          )
+        )
+      ` : ''}
+    `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
+
+    totalOrderItems = total;
+    console.log('Orders: Found changes:', totalOrderItems);
+
+    // Get order items in batches
+    const [orderItems] = await prodConnection.query(`
+      SELECT
+        oi.order_id,
+        oi.prod_pid as pid,
+        oi.prod_itemnumber as SKU,
+        oi.prod_price as price,
+        oi.qty_ordered as quantity,
+        COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount,
+        oi.stamp as last_modified
+      FROM order_items oi
+      USE INDEX (PRIMARY)
+      JOIN _order o ON oi.order_id = o.order_id
+      WHERE o.order_status >= 15
+      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
+      AND o.date_placed_onlydate IS NOT NULL
+      ${incrementalUpdate ? `
+        AND (
+          o.stamp > ?
+          OR oi.stamp > ?
+          OR EXISTS (
+            SELECT 1 FROM order_discount_items odi
+            WHERE odi.order_id = o.order_id
+            AND odi.pid = oi.prod_pid
+          )
+          OR EXISTS (
+            SELECT 1 FROM order_tax_info oti
+            JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
+            WHERE oti.order_id = o.order_id
+            AND otip.pid = oi.prod_pid
+            AND oti.stamp > ?
+          )
+        )
+      ` : ''}
+    `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
+
+    console.log('Orders: Processing', orderItems.length, 'order items');
+
+    // Insert order items in batches
+    for (let i = 0; i < orderItems.length; i += 5000) {
+      const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
+      const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
+      const values = batch.flatMap(item => [
+        item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
+      ]);
+
+      await localConnection.query(`
+        INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
+        VALUES ${placeholders}
+        ON DUPLICATE KEY UPDATE
+          SKU = VALUES(SKU),
+          price = VALUES(price),
+          quantity = VALUES(quantity),
+          base_discount = VALUES(base_discount)
+      `, values);
+
+      processedCount = i + batch.length;
       outputProgress({
         status: "running",
         operation: "Orders import",
-        current: processed,
-        total,
-        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-        remaining: estimateRemaining(startTime, processed, total),
-        rate: calculateRate(startTime, processed)
+        message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
+        current: processedCount,
+        total: totalOrderItems
       });
     }
 
-    // Now handle missing products and retry skipped orders
-    if (missingProducts.size > 0) {
-      outputProgress({
-        operation: `Found ${missingProducts.size} missing products, importing them now`,
-        status: "running",
-      });
-
-      await importMissingProducts(prodConnection, localConnection, [
-        ...missingProducts,
-      ]);
-
-      // Retry skipped orders
-      if (skippedOrders.size > 0) {
-        outputProgress({
-          operation: `Retrying ${skippedOrders.size} skipped orders`,
-          status: "running",
-        });
-
-        const [retryOrders] = await prodConnection.query(`
-          SELECT
-            oi.order_id as order_number,
-            oi.prod_pid as pid,
-            oi.prod_itemnumber as SKU,
-            o.date_placed_onlydate as date,
-            oi.prod_price_reg as price,
-            oi.qty_ordered as quantity,
-            (oi.prod_price_reg - oi.prod_price) as discount,
-            0 as tax,
-            0 as tax_included,
-            ROUND(
-              ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) *
-              (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2
-            ) as shipping,
-            o.order_cid as customer,
-            CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name,
-            'pending' as status,
-            CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled
-          FROM order_items oi
-          JOIN _order o ON oi.order_id = o.order_id
-          WHERE oi.order_id IN (?)
-        `, [[...skippedOrders]]);
-
-        if (retryOrders.length > 0) {
-          const retryOrderIds = [...new Set(retryOrders.map(o => o.order_number))];
-          const [retryTaxInfo] = await prodConnection.query(`
-            SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect
-            FROM (
-              SELECT order_id, MAX(stamp) as latest_stamp
-              FROM order_tax_info USE INDEX (order_id, stamp)
-              WHERE order_id IN (?)
-              GROUP BY order_id
-            ) latest
-            JOIN order_tax_info oti USE INDEX (order_id, stamp)
-              ON oti.order_id = latest.order_id
-              AND oti.stamp = latest.latest_stamp
-            JOIN order_tax_info_products otp FORCE INDEX (PRIMARY)
-              ON oti.taxinfo_id = otp.taxinfo_id
-          `, [retryOrderIds]);
-
-          // Create a map for quick tax lookup
-          const taxMap = new Map();
-          retryTaxInfo.forEach(t => {
-            taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect);
-          });
-
-          // Add tax info to orders
-          retryOrders.forEach(order => {
-            const taxKey = `${order.order_number}-${order.pid}`;
-            order.tax = taxMap.get(taxKey) || 0;
-          });
-        }
-
-        const placeholders = retryOrders
-          .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
-          .join(",");
-
-        const updateClauses = columnNames
-          .filter((col) => col !== "order_number") // Don't update primary key
-          .map((col) => `${col} = VALUES(${col})`)
-          .join(",");
-
-        const query = `
-          INSERT INTO orders (${columnNames.join(",")})
-          VALUES ${placeholders}
-          ON DUPLICATE KEY UPDATE ${updateClauses}
-        `;
-
-        await localConnection.query(
-          query,
-          retryOrders.flatMap(order => columnNames.map(col => order[col]))
-        );
-      }
-    }
-
-    // After successful import, update the sync status
+    // Get unique order IDs
+    const orderIds = [...new Set(orderItems.map(item => item.order_id))];
+    totalUniqueOrders = orderIds.length;
+    console.log('Total unique order IDs:', totalUniqueOrders);
+
+    // Reset processed count for order processing phase
+    processedCount = 0;
+
+    // Get order metadata in batches
+    for (let i = 0; i < orderIds.length; i += 5000) {
+      const batchIds = orderIds.slice(i, i + 5000);
+      console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
+      console.log('Sample of batch IDs:', batchIds.slice(0, 5));
+
+      const [orders] = await prodConnection.query(`
+        SELECT
+          o.order_id,
+          o.date_placed_onlydate as date,
+          o.order_cid as customer,
+          CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name,
+          o.order_status as status,
+          CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled
+        FROM _order o
+        LEFT JOIN users u ON o.order_cid = u.cid
+        WHERE o.order_id IN (?)
+      `, [batchIds]);
+
+      console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
+      const duplicates = orders.filter((order, index, self) =>
+        self.findIndex(o => o.order_id === order.order_id) !== index
+      );
+      if (duplicates.length > 0) {
+        console.log('Found duplicates:', duplicates);
+      }
+
+      const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
+      const values = orders.flatMap(order => [
+        order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled
+      ]);
+
+      await localConnection.query(`
+        INSERT INTO temp_order_meta VALUES ${placeholders}
+        ON DUPLICATE KEY UPDATE
+          date = VALUES(date),
+          customer = VALUES(customer),
+          customer_name = VALUES(customer_name),
+          status = VALUES(status),
+          canceled = VALUES(canceled)
+      `, values);
+
+      processedCount = i + orders.length;
+      outputProgress({
+        status: "running",
+        operation: "Orders import",
+        message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
+        current: processedCount,
+        total: totalUniqueOrders
+      });
+    }
+
+    // Reset processed count for final phase
+    processedCount = 0;
+
+    // Get promotional discounts in batches
+    for (let i = 0; i < orderIds.length; i += 5000) {
+      const batchIds = orderIds.slice(i, i + 5000);
+      const [discounts] = await prodConnection.query(`
+        SELECT order_id, pid, SUM(amount) as discount
+        FROM order_discount_items
+        WHERE order_id IN (?)
+        GROUP BY order_id, pid
+      `, [batchIds]);
+
+      if (discounts.length > 0) {
+        const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
+        const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);
+
+        await localConnection.query(`
+          INSERT INTO temp_order_discounts VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE
+            discount = VALUES(discount)
+        `, values);
+      }
+    }
+
+    // Get tax information in batches
+    for (let i = 0; i < orderIds.length; i += 5000) {
+      const batchIds = orderIds.slice(i, i + 5000);
+      const [taxes] = await prodConnection.query(`
+        SELECT DISTINCT
+          oti.order_id,
+          otip.pid,
+          otip.item_taxes_to_collect as tax
+        FROM order_tax_info oti
+        JOIN (
+          SELECT order_id, MAX(stamp) as max_stamp
+          FROM order_tax_info
+          WHERE order_id IN (?)
+          GROUP BY order_id
+        ) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp
+        JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
+      `, [batchIds]);
+
+      if (taxes.length > 0) {
+        // Remove any duplicates before inserting
+        const uniqueTaxes = new Map();
+        taxes.forEach(t => {
+          const key = `${t.order_id}-${t.pid}`;
+          uniqueTaxes.set(key, t);
+        });
+
+        const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
+        if (values.length > 0) {
+          const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
+          await localConnection.query(`
+            INSERT INTO temp_order_taxes VALUES ${placeholders}
+            ON DUPLICATE KEY UPDATE tax = VALUES(tax)
+          `, values);
+        }
+      }
+    }
+
+    // Get costeach values in batches
+    for (let i = 0; i < orderIds.length; i += 5000) {
+      const batchIds = orderIds.slice(i, i + 5000);
+      const [costs] = await prodConnection.query(`
+        SELECT orderid as order_id, pid, costeach
+        FROM order_costs
+        WHERE orderid IN (?)
+      `, [batchIds]);
+
+      if (costs.length > 0) {
+        const placeholders = costs.map(() => '(?, ?, ?)').join(",");
+        const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach]);
+
+        await localConnection.query(`
+          INSERT INTO temp_order_costs (order_id, pid, costeach)
+          VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE costeach = VALUES(costeach)
+        `, values);
+      }
+    }
+
+    // Now combine all the data and insert into orders table
+    // Pre-check all products at once instead of per batch
+    const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
+    const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
+      "SELECT pid FROM products WHERE pid IN (?)",
+      [allOrderPids]
+    ) : [[]];
+    const existingPids = new Set(existingProducts.map(p => p.pid));
+
+    // Process in larger batches
+    for (let i = 0; i < orderIds.length; i += 5000) {
+      const batchIds = orderIds.slice(i, i + 5000);
+
+      // Get combined data for this batch
+      const [orders] = await localConnection.query(`
+        SELECT
+          oi.order_id as order_number,
+          oi.pid,
+          oi.SKU,
+          om.date,
+          oi.price,
+          oi.quantity,
+          oi.base_discount + COALESCE(od.discount, 0) as discount,
+          COALESCE(ot.tax, 0) as tax,
+          0 as tax_included,
+          0 as shipping,
+          om.customer,
+          om.customer_name,
+          om.status,
+          om.canceled,
+          COALESCE(tc.costeach, 0) as costeach
+        FROM temp_order_items oi
+        JOIN temp_order_meta om ON oi.order_id = om.order_id
+        LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
+        LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
+        LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
+        WHERE oi.order_id IN (?)
+      `, [batchIds]);
+
+      // Filter orders and track missing products - do this in a single pass
+      const validOrders = [];
+      const values = [];
+      const processedOrderItems = new Set(); // Track unique order items
+      const processedOrders = new Set(); // Track unique orders
+
+      for (const order of orders) {
+        if (!existingPids.has(order.pid)) {
+          missingProducts.add(order.pid);
+          skippedOrders.add(order.order_number);
+          continue;
+        }
+        validOrders.push(order);
+        values.push(...columnNames.map(col => order[col] ?? null));
+        processedOrderItems.add(`${order.order_number}-${order.pid}`);
+        processedOrders.add(order.order_number);
+      }
+
+      if (validOrders.length > 0) {
+        // Pre-compute the placeholders string once
+        const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
+        const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");
+
+        const result = await localConnection.query(`
+          INSERT INTO orders (${columnNames.join(",")})
+          VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE
+            SKU = VALUES(SKU),
+            date = VALUES(date),
+            price = VALUES(price),
+            quantity = VALUES(quantity),
+            discount = VALUES(discount),
+            tax = VALUES(tax),
+            tax_included = VALUES(tax_included),
+            shipping = VALUES(shipping),
+            customer = VALUES(customer),
+            customer_name = VALUES(customer_name),
+            status = VALUES(status),
+            canceled = VALUES(canceled),
+            costeach = VALUES(costeach)
+        `, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());
+
+        const affectedRows = result[0].affectedRows;
+        const updates = Math.floor(affectedRows / 2);
+        const inserts = affectedRows - (updates * 2);
+        recordsAdded += inserts;
+        recordsUpdated += updates;
+        importedCount += processedOrderItems.size; // Count unique order items processed
+      }
+
+      // Update progress based on unique orders processed
+      cumulativeProcessedOrders += processedOrders.size;
+      outputProgress({
+        status: "running",
+        operation: "Orders import",
+        message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
+        current: cumulativeProcessedOrders,
+        total: totalUniqueOrders,
+        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+        remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
+        rate: calculateRate(startTime, cumulativeProcessedOrders)
+      });
+    }
+
+    // Now try to import any orders that were skipped due to missing products
+    if (skippedOrders.size > 0) {
+      try {
+        outputProgress({
+          status: "running",
+          operation: "Orders import",
+          message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
+        });
+
+        // Get the orders that were skipped
+        const [skippedProdOrders] = await localConnection.query(`
+          SELECT DISTINCT
+            oi.order_id as order_number,
+            oi.pid,
+            oi.SKU,
+            om.date,
+            oi.price,
+            oi.quantity,
+            oi.base_discount + COALESCE(od.discount, 0) as discount,
+            COALESCE(ot.tax, 0) as tax,
+            0 as tax_included,
+            0 as shipping,
+            om.customer,
+            om.customer_name,
+            om.status,
+            om.canceled,
+            COALESCE(tc.costeach, 0) as costeach
+          FROM temp_order_items oi
+          JOIN temp_order_meta om ON oi.order_id = om.order_id
+          LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
+          LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
+          LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
+          WHERE oi.order_id IN (?)
+        `, [Array.from(skippedOrders)]);
+
+        // Check which products exist now
+        const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
+        const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
+          "SELECT pid FROM products WHERE pid IN (?)",
+          [skippedPids]
+        ) : [[]];
+        const existingPids = new Set(existingProducts.map(p => p.pid));
+
+        // Filter orders that can now be imported
+        const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
+        const retryOrderItems = new Set(); // Track unique order items in retry
+
+        if (validOrders.length > 0) {
+          const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
+          const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();
+
+          const result = await localConnection.query(`
+            INSERT INTO orders (${columnNames.join(", ")})
+            VALUES ${placeholders}
+            ON DUPLICATE KEY UPDATE
+              SKU = VALUES(SKU),
+              date = VALUES(date),
+              price = VALUES(price),
+              quantity = VALUES(quantity),
+              discount = VALUES(discount),
+              tax = VALUES(tax),
+              tax_included = VALUES(tax_included),
+              shipping = VALUES(shipping),
+              customer = VALUES(customer),
+              customer_name = VALUES(customer_name),
+              status = VALUES(status),
+              canceled = VALUES(canceled),
+              costeach = VALUES(costeach)
+          `, values);
+
+          const affectedRows = result[0].affectedRows;
+          const updates = Math.floor(affectedRows / 2);
+          const inserts = affectedRows - (updates * 2);
+
+          // Track unique order items
+          validOrders.forEach(order => {
+            retryOrderItems.add(`${order.order_number}-${order.pid}`);
+          });
+
+          outputProgress({
+            status: "running",
+            operation: "Orders import",
+            message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
+          });
+
+          // Update the main counters
+          recordsAdded += inserts;
+          recordsUpdated += updates;
+          importedCount += retryOrderItems.size;
+        }
+      } catch (error) {
+        console.warn('Warning: Failed to retry skipped orders:', error.message);
+        console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
+      }
+    }
+
+    // Clean up temporary tables after ALL processing is complete
+    await localConnection.query(`
+      DROP TEMPORARY TABLE IF EXISTS temp_order_items;
+      DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
+      DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
+      DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
+      DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
+    `);
+
+    // Only update sync status if we get here (no errors thrown)
     await localConnection.query(`
       INSERT INTO sync_status (table_name, last_sync_timestamp)
       VALUES ('orders', NOW())
       ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
     `);
 
-    const endTime = Date.now();
-    const durationSeconds = Math.round((endTime - startTime) / 1000);
-
-    outputProgress({
-      status: "complete",
-      operation: "Orders import completed",
-      current: total,
-      total,
-      duration: formatElapsedTime((Date.now() - startTime) / 1000),
-    });
-
     return {
       status: "complete",
-      totalImported: total,
+      totalImported: Math.floor(importedCount),
+      recordsAdded: recordsAdded || 0,
+      recordsUpdated: Math.floor(recordsUpdated),
+      totalSkipped: skippedOrders.size,
       missingProducts: missingProducts.size,
-      retriedOrders: skippedOrders.size,
-      incrementalUpdate: true,
+      incrementalUpdate,
       lastSyncTime
     };
   } catch (error) {
-    outputProgress({
-      operation: "Orders import failed",
-      status: "error",
-      error: error.message,
-    });
+    console.error("Error during orders import:", error);
    throw error;
  }
 }
 
 module.exports = importOrders;
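A worked example of the `affectedRows` bookkeeping used in the upserts above. For a batch `INSERT ... ON DUPLICATE KEY UPDATE`, MySQL (without the CLIENT_FOUND_ROWS flag) reports +1 for each inserted row, +2 for each updated row, and +0 for a duplicate row whose values were already identical; the numbers below are illustrative:

```js
// A batch of 5 rows where 3 insert and 2 update yields:
const affectedRows = 3 * 1 + 2 * 2; // 7

// The import decodes that single number back into counts:
const updates = Math.floor(affectedRows / 2); // 3
const inserts = affectedRows - updates * 2;   // 1

// Note the decode is a heuristic: 7 could equally mean 3 inserts + 2 updates,
// so batches that mix many inserts with updates can be misattributed.
// Splitting inserts from updates before writing (as the purchase-orders
// importer below now does) removes that ambiguity.
console.log({ affectedRows, inserts, updates });
```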

File diff suppressed because it is too large
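The suppressed diff is presumably import/products.js, given the other files shown; commit 5e4d1c3bd8 mentions a dynamic 'needs_update' flag for selective updates. This is only a guess at that pattern, with illustrative names throughout: compare incoming rows to what is already stored and upsert just the ones that changed.

```js
// Hypothetical sketch of a needs_update flag: not the importer's actual code.
function flagNeedsUpdate(incoming, existingByPid, fields) {
  return incoming.map(row => {
    const current = existingByPid.get(row.pid);
    // New rows, or rows where any watched field differs, need a write.
    const needs_update = !current || fields.some(f => (row[f] ?? null) !== (current[f] ?? null));
    return { ...row, needs_update };
  });
}

// Example: only pid 2 differs, so only it would be sent to the upsert,
// keeping incremental runs cheap when most products are unchanged.
const existingByPid = new Map([
  [1, { pid: 1, price: 9.99, stock: 3 }],
  [2, { pid: 2, price: 4.5, stock: 0 }],
]);
const incoming = [
  { pid: 1, price: 9.99, stock: 3 },
  { pid: 2, price: 4.75, stock: 0 },
];
const toWrite = flagNeedsUpdate(incoming, existingByPid, ['price', 'stock'])
  .filter(r => r.needs_update);
console.log(toWrite.map(r => r.pid)); // [2]
```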


@@ -1,7 +1,9 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
async function importPurchaseOrders(prodConnection, localConnection) { async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) {
const startTime = Date.now(); const startTime = Date.now();
let recordsAdded = 0;
let recordsUpdated = 0;
try { try {
// Get last sync info // Get last sync info
@@ -10,8 +12,24 @@ async function importPurchaseOrders(prodConnection, localConnection) {
); );
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
// Insert temporary table creation query for purchase orders
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_purchase_orders (
po_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
vendor VARCHAR(255),
date DATE,
expected_date DATE,
status INT,
notes TEXT,
PRIMARY KEY (po_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
outputProgress({ outputProgress({
operation: "Starting purchase orders import - Initializing", operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} purchase orders import`,
status: "running", status: "running",
}); });
@@ -26,67 +44,134 @@ async function importPurchaseOrders(prodConnection, localConnection) {
.map((col) => col.COLUMN_NAME) .map((col) => col.COLUMN_NAME)
.filter((name) => name !== "id"); .filter((name) => name !== "id");
// First get all relevant PO IDs with basic info - modified for incremental // Build incremental conditions
const incrementalWhereClause = incrementalUpdate
? `AND (
p.date_updated > ?
OR p.date_ordered > ?
OR p.date_estin > ?
OR r.date_updated > ?
OR r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)`
: "";
const incrementalParams = incrementalUpdate
? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]
: [];
// First get all relevant PO IDs with basic info
const [[{ total }]] = await prodConnection.query(` const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total SELECT COUNT(*) as total
FROM ( FROM (
SELECT DISTINCT pop.po_id, pop.pid SELECT DISTINCT pop.po_id, pop.pid
FROM po p FROM po p
FORCE INDEX (idx_date_created) USE INDEX (idx_date_created)
JOIN po_products pop ON p.po_id = pop.po_id JOIN po_products pop ON p.po_id = pop.po_id
JOIN suppliers s ON p.supplier_id = s.supplierid JOIN suppliers s ON p.supplier_id = s.supplierid
WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
AND (p.date_ordered > ? ${incrementalUpdate ? `
OR p.stamp > ? AND (
OR p.date_modified > ?) p.date_updated > ?
OR p.date_ordered > ?
OR p.date_estin > ?
)
` : ''}
UNION UNION
SELECT DISTINCT r.receiving_id as po_id, rp.pid SELECT DISTINCT r.receiving_id as po_id, rp.pid
FROM receivings_products rp FROM receivings_products rp
USE INDEX (received_date)
LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
AND (rp.received_date > ? ${incrementalUpdate ? `
OR rp.stamp > ?) AND (
r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)
` : ''}
) all_items ) all_items
`, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); `, incrementalUpdate ? [
lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
] : []);
console.log('Purchase Orders: Found changes:', total);
const [poList] = await prodConnection.query(` const [poList] = await prodConnection.query(`
SELECT DISTINCT SELECT DISTINCT
COALESCE(p.po_id, r.receiving_id) as po_id, COALESCE(p.po_id, r.receiving_id) as po_id,
COALESCE(
NULLIF(s1.companyname, ''),
NULLIF(s2.companyname, ''),
'Unknown Vendor'
) as vendor,
CASE CASE
WHEN p.po_id IS NOT NULL THEN s1.companyname WHEN p.po_id IS NOT NULL THEN
WHEN r.supplier_id IS NOT NULL THEN s2.companyname DATE(COALESCE(
ELSE 'No Supplier' NULLIF(p.date_ordered, '0000-00-00 00:00:00'),
END as vendor, p.date_created
CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_ordered) END as date, ))
CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_estin) END as expected_date, WHEN r.receiving_id IS NOT NULL THEN
DATE(r.date_created)
END as date,
CASE
WHEN p.date_estin = '0000-00-00' THEN NULL
WHEN p.date_estin IS NULL THEN NULL
WHEN p.date_estin NOT REGEXP '^[0-9]{4}-[0-9]{2}-[0-9]{2}$' THEN NULL
ELSE p.date_estin
END as expected_date,
COALESCE(p.status, 50) as status, COALESCE(p.status, 50) as status,
COALESCE(p.short_note, '') as notes, p.short_note as notes,
COALESCE(p.notes, '') as long_note p.notes as long_note
FROM ( FROM (
SELECT po_id FROM po SELECT po_id FROM po
WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) USE INDEX (idx_date_created)
AND (date_ordered > ? WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
OR stamp > ? ${incrementalUpdate ? `
OR date_modified > ?) AND (
date_ordered > ?
OR date_updated > ?
OR date_estin > ?
)
` : ''}
UNION UNION
SELECT DISTINCT r.receiving_id as po_id SELECT DISTINCT r.receiving_id as po_id
FROM receivings r FROM receivings r
JOIN receivings_products rp ON r.receiving_id = rp.receiving_id JOIN receivings_products rp USE INDEX (received_date) ON r.receiving_id = rp.receiving_id
WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
AND (rp.received_date > ? ${incrementalUpdate ? `
OR rp.stamp > ?) AND (
r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)
` : ''}
) ids ) ids
LEFT JOIN po p ON ids.po_id = p.po_id LEFT JOIN po p ON ids.po_id = p.po_id
LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid
LEFT JOIN receivings r ON ids.po_id = r.receiving_id LEFT JOIN receivings r ON ids.po_id = r.receiving_id
LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid
ORDER BY po_id ORDER BY po_id
`, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); `, incrementalUpdate ? [
lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
] : []);
console.log('Sample PO dates:', poList.slice(0, 5).map(po => ({
po_id: po.po_id,
raw_date_ordered: po.raw_date_ordered,
raw_date_created: po.raw_date_created,
raw_date_estin: po.raw_date_estin,
computed_date: po.date,
expected_date: po.expected_date
})));
const totalItems = total; const totalItems = total;
let processed = 0; let processed = 0;
let recordsAdded = 0;
let recordsUpdated = 0;
const BATCH_SIZE = 5000; const BATCH_SIZE = 5000;
const PROGRESS_INTERVAL = 500; const PROGRESS_INTERVAL = 500;
@@ -107,10 +192,11 @@ async function importPurchaseOrders(prodConnection, localConnection) {
pop.po_id, pop.po_id,
pop.pid, pop.pid,
pr.itemnumber as sku, pr.itemnumber as sku,
pop.cost_each as cost_price, pr.description as name,
pop.cost_each,
pop.qty_each as ordered pop.qty_each as ordered
FROM po_products pop FROM po_products pop
FORCE INDEX (PRIMARY) USE INDEX (PRIMARY)
JOIN products pr ON pop.pid = pr.pid JOIN products pr ON pop.pid = pr.pid
WHERE pop.po_id IN (?) WHERE pop.po_id IN (?)
`, [poIds]); `, [poIds]);
@@ -122,7 +208,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
const productPids = [...new Set(productBatch.map(p => p.pid))]; const productPids = [...new Set(productBatch.map(p => p.pid))];
const batchPoIds = [...new Set(productBatch.map(p => p.po_id))]; const batchPoIds = [...new Set(productBatch.map(p => p.po_id))];
// Get receivings for this batch // Get receivings for this batch with employee names
const [receivings] = await prodConnection.query(` const [receivings] = await prodConnection.query(`
SELECT SELECT
r.po_id, r.po_id,
@@ -130,17 +216,20 @@ async function importPurchaseOrders(prodConnection, localConnection) {
rp.receiving_id, rp.receiving_id,
rp.qty_each, rp.qty_each,
rp.cost_each, rp.cost_each,
DATE(NULLIF(rp.received_date, '0000-00-00 00:00:00')) as received_date, COALESCE(rp.received_date, r.date_created) as received_date,
rp.received_by, rp.received_by,
CONCAT(e.firstname, ' ', e.lastname) as received_by_name,
CASE CASE
WHEN r.po_id IS NULL THEN 2 -- No PO WHEN r.po_id IS NULL THEN 2 -- No PO
WHEN r.po_id IN (?) THEN 0 -- Original PO WHEN r.po_id IN (?) THEN 0 -- Original PO
ELSE 1 -- Different PO ELSE 1 -- Different PO
END as is_alt_po END as is_alt_po
FROM receivings_products rp FROM receivings_products rp
USE INDEX (received_date)
LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
LEFT JOIN employees e ON rp.received_by = e.employeeid
WHERE rp.pid IN (?) WHERE rp.pid IN (?)
AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
ORDER BY r.po_id, rp.pid, rp.received_date ORDER BY r.po_id, rp.pid, rp.received_date
`, [batchPoIds, productPids]); `, [batchPoIds, productPids]);
@@ -185,8 +274,21 @@ async function importPurchaseOrders(prodConnection, localConnection) {
); );
const validPids = new Set(existingPids.map(p => p.pid)); const validPids = new Set(existingPids.map(p => p.pid));
// Prepare values for this sub-batch // First check which PO lines already exist and get their current values
const values = []; const poLines = Array.from(poProductMap.values())
.filter(p => validPids.has(p.pid))
.map(p => [p.po_id, p.pid]);
const [existingPOs] = await localConnection.query(
`SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`,
poLines.flat()
);
const existingPOMap = new Map(
existingPOs.map(po => [`${po.po_id}-${po.pid}`, po])
);
// Split into inserts and updates
const insertsAndUpdates = { inserts: [], updates: [] };
let batchProcessed = 0; let batchProcessed = 0;
for (const po of batch) { for (const po of batch) {
@@ -199,80 +301,199 @@ async function importPurchaseOrders(prodConnection, localConnection) {
const altReceivingHistory = altReceivingMap.get(product.pid) || []; const altReceivingHistory = altReceivingMap.get(product.pid) || [];
const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || []; const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || [];
const received = receivingHistory.reduce((sum, r) => sum + r.qty_each, 0); // Combine all receivings and sort by date
const altReceived = altReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); const allReceivings = [
const noPOReceived = noPOReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); ...receivingHistory.map(r => ({ ...r, type: 'original' })),
const totalReceived = received + altReceived + noPOReceived; ...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })),
...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' }))
].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31'));
// Split receivings into original PO and others
const originalPOReceivings = allReceivings.filter(r => r.type === 'original');
const otherReceivings = allReceivings.filter(r => r.type !== 'original');
// Track FIFO fulfillment
let remainingToFulfill = product.ordered;
const fulfillmentTracking = [];
let totalReceived = 0;
let actualCost = null; // Will store the cost of the first receiving that fulfills this PO
let firstFulfillmentReceiving = null;
let lastFulfillmentReceiving = null;
for (const receiving of allReceivings) {
const qtyToApply = Math.min(remainingToFulfill, receiving.qty_each);
if (qtyToApply > 0) {
// If this is the first receiving being applied, use its cost
if (actualCost === null) {
actualCost = receiving.cost_each;
firstFulfillmentReceiving = receiving;
}
lastFulfillmentReceiving = receiving;
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: qtyToApply,
qty_total: receiving.qty_each,
cost: receiving.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
remaining_qty: receiving.qty_each - qtyToApply
});
remainingToFulfill -= qtyToApply;
} else {
// Track excess receivings
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: 0,
qty_total: receiving.qty_each,
cost: receiving.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
is_excess: true
});
}
totalReceived += receiving.qty_each;
}
const receiving_status = !totalReceived ? 1 : // created const receiving_status = !totalReceived ? 1 : // created
totalReceived < product.ordered ? 30 : // partial remainingToFulfill > 0 ? 30 : // partial
40; // full 40; // full
const allReceivings = [...receivingHistory]; function formatDate(dateStr) {
if (altReceivingHistory.length > 0) { if (!dateStr) return null;
allReceivings.push(...altReceivingHistory); if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null;
if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null;
try {
const date = new Date(dateStr);
if (isNaN(date.getTime())) return null;
if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null;
return date.toISOString().split('T')[0];
} catch (e) {
return null;
}
} }
if (noPOReceivingHistory.length > 0) {
allReceivings.push(...noPOReceivingHistory);
}
allReceivings.sort((a, b) => new Date(a.received_date) - new Date(b.received_date));
const firstReceiving = allReceivings[0] || {}; const rowValues = columnNames.map(col => {
const lastReceiving = allReceivings[allReceivings.length - 1] || {};
values.push(columnNames.map(col => {
switch (col) { switch (col) {
case 'po_id': return po.po_id; case 'po_id': return po.po_id;
case 'vendor': return po.vendor; case 'vendor': return po.vendor;
case 'date': return po.date; case 'date': return formatDate(po.date);
case 'expected_date': return po.expected_date; case 'expected_date': return formatDate(po.expected_date);
case 'pid': return product.pid; case 'pid': return product.pid;
case 'sku': return product.sku; case 'sku': return product.sku;
case 'cost_price': return product.cost_price; case 'name': return product.name;
case 'cost_price': return actualCost || product.cost_each;
case 'po_cost_price': return product.cost_each;
case 'status': return po.status; case 'status': return po.status;
case 'notes': return po.notes; case 'notes': return po.notes;
case 'long_note': return po.long_note; case 'long_note': return po.long_note;
case 'ordered': return product.ordered; case 'ordered': return product.ordered;
case 'received': return totalReceived; case 'received': return totalReceived;
case 'received_date': return firstReceiving.received_date || null; case 'unfulfilled': return remainingToFulfill;
case 'last_received_date': return lastReceiving.received_date || null; case 'excess_received': return Math.max(0, totalReceived - product.ordered);
case 'received_by': return firstReceiving.received_by || null; case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date);
case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date);
case 'received_by': return firstFulfillmentReceiving?.received_by_name || null;
case 'receiving_status': return receiving_status; case 'receiving_status': return receiving_status;
case 'receiving_history': return JSON.stringify(allReceivings.map(r => ({ case 'receiving_history': return JSON.stringify({
receiving_id: r.receiving_id, fulfillment: fulfillmentTracking,
qty: r.qty_each, ordered_qty: product.ordered,
cost: r.cost_each, total_received: totalReceived,
date: r.received_date, remaining_unfulfilled: remainingToFulfill,
received_by: r.received_by, excess_received: Math.max(0, totalReceived - product.ordered),
alt_po: r.is_alt_po po_cost: product.cost_each,
}))); actual_cost: actualCost || product.cost_each
});
default: return null; default: return null;
} }
})); });
if (existingPOMap.has(key)) {
const existing = existingPOMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
const newVal = rowValues[columnNames.indexOf(col)];
const oldVal = existing[col] ?? null;
// Special handling for numbers to avoid type coercion issues
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
// Special handling for receiving_history - parse and compare
if (col === 'receiving_history') {
const newHistory = JSON.parse(newVal || '{}');
const oldHistory = JSON.parse(oldVal || '{}');
return JSON.stringify(newHistory) !== JSON.stringify(oldHistory);
}
return newVal !== oldVal;
});
if (hasChanges) {
insertsAndUpdates.updates.push({
po_id: po.po_id,
pid: product.pid,
values: rowValues
});
}
} else {
insertsAndUpdates.inserts.push({
po_id: po.po_id,
pid: product.pid,
values: rowValues
});
}
batchProcessed++;
}
}
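// --- Sketch (assumption): existingPOMap is built earlier, outside this hunk.
// A minimal version keys one SELECT by the same `${po_id}-${pid}` pair used
// in the comparison above (mysql2 expands the array for the IN clause):
async function sketchBuildExistingPOMap(localConnection, columnNames, poIds) {
  const [rows] = await localConnection.query(
    `SELECT ${columnNames.join(",")} FROM purchase_orders WHERE po_id IN (?)`,
    [poIds]
  );
  return new Map(rows.map(row => [`${row.po_id}-${row.pid}`, row]));
}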
// Handle inserts
if (insertsAndUpdates.inserts.length > 0) {
  const insertPlaceholders = insertsAndUpdates.inserts
    .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
    .join(",");
  const insertResult = await localConnection.query(`
    INSERT INTO purchase_orders (${columnNames.join(",")})
    VALUES ${insertPlaceholders}
  `, insertsAndUpdates.inserts.map(i => i.values).flat());
  // Plain INSERT (no ON DUPLICATE KEY), so affectedRows is exactly the
  // number of new rows; no doubled-row correction applies here.
  recordsAdded += insertResult[0].affectedRows;
}
// Handle updates - these are known to have real changes
if (insertsAndUpdates.updates.length > 0) {
  const updatePlaceholders = insertsAndUpdates.updates
    .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
    .join(",");
  await localConnection.query(`
    INSERT INTO purchase_orders (${columnNames.join(",")})
    VALUES ${updatePlaceholders}
    ON DUPLICATE KEY UPDATE ${columnNames
      .filter((col) => col !== "po_id" && col !== "pid")
      .map((col) => `${col} = VALUES(${col})`)
      .join(",")};
  `, insertsAndUpdates.updates.map(u => u.values).flat());
  // MySQL reports affectedRows = 2 per row changed by ON DUPLICATE KEY
  // UPDATE, so the known batch size is the reliable update count here.
  recordsUpdated += insertsAndUpdates.updates.length;
}
processed += batchProcessed;
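// Reference note on the counter math above: with ON DUPLICATE KEY UPDATE,
// MySQL's affectedRows counts 1 per inserted row, 2 per changed row, and 0
// per row left identical. The old halving heuristic therefore misread mixed
// batches (e.g. 1 insert + 1 change + 1 no-op => affectedRows = 3, which
// halving reads as 1 update + 1 insert). Pre-splitting the batch into known
// inserts and known updates sidesteps that entirely.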
// Update progress based on time interval
const now = Date.now();
@@ -291,24 +512,27 @@ async function importPurchaseOrders(prodConnection, localConnection) {
  }
}
// Only update sync status if we get here (no errors thrown)
await localConnection.query(`
  INSERT INTO sync_status (table_name, last_sync_timestamp)
  VALUES ('purchase_orders', NOW())
  ON DUPLICATE KEY UPDATE
    last_sync_timestamp = NOW(),
    last_sync_id = LAST_INSERT_ID(last_sync_id)
`);
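// Note on LAST_INSERT_ID(last_sync_id): assigning a column to itself through
// LAST_INSERT_ID() is a standard MySQL idiom — the value is unchanged, but it
// becomes readable via SELECT LAST_INSERT_ID() on this same connection.
// Hypothetical follow-up read (not part of this script):
//   const [[{ id }]] = await localConnection.query('SELECT LAST_INSERT_ID() AS id');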
return {
  status: "complete",
  totalImported: totalItems,
  recordsAdded: recordsAdded || 0,
  recordsUpdated: recordsUpdated || 0,
  incrementalUpdate,
  lastSyncTime
};
} catch (error) {
  outputProgress({
    operation: `${incrementalUpdate ? 'Incremental' : 'Full'} purchase orders import failed`,
    status: "error",
    error: error.message,
  });


@@ -0,0 +1,82 @@
// Split into inserts and updates
const insertsAndUpdates = batch.reduce((acc, po) => {
const key = `${po.po_id}-${po.pid}`;
if (existingPOMap.has(key)) {
const existing = existingPOMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
const newVal = po[col] ?? null;
const oldVal = existing[col] ?? null;
// Special handling for numbers to avoid type coercion issues
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
// Special handling for receiving_history JSON
if (col === 'receiving_history') {
return JSON.stringify(newVal) !== JSON.stringify(oldVal);
}
return newVal !== oldVal;
});
if (hasChanges) {
console.log(`PO line changed: ${key}`, {
po_id: po.po_id,
pid: po.pid,
changes: columnNames.filter(col => {
const newVal = po[col] ?? null;
const oldVal = existing[col] ?? null;
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001;
}
if (col === 'receiving_history') {
return JSON.stringify(newVal) !== JSON.stringify(oldVal);
}
return newVal !== oldVal;
})
});
acc.updates.push({
po_id: po.po_id,
pid: po.pid,
values: columnNames.map(col => po[col] ?? null)
});
}
} else {
console.log(`New PO line: ${key}`);
acc.inserts.push({
po_id: po.po_id,
pid: po.pid,
values: columnNames.map(col => po[col] ?? null)
});
}
return acc;
}, { inserts: [], updates: [] });
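// --- Sketch (not applied): the same per-column comparison runs twice above —
// once for hasChanges and once for the changed-column log. Extracting it
// would keep the two in lockstep:
function sketchColumnDiffers(col, newVal, oldVal) {
  if (typeof newVal === 'number' && typeof oldVal === 'number') {
    return Math.abs(newVal - oldVal) > 0.00001; // tolerate float noise
  }
  if (col === 'receiving_history') {
    return JSON.stringify(newVal) !== JSON.stringify(oldVal);
  }
  return newVal !== oldVal;
}
// Usage: columnNames.some(col => sketchColumnDiffers(col, po[col] ?? null, existing[col] ?? null));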
// Handle inserts
if (insertsAndUpdates.inserts.length > 0) {
const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
const insertResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${insertPlaceholders}
`, insertsAndUpdates.inserts.map(i => i.values).flat());
recordsAdded += insertResult[0].affectedRows;
}
// Handle updates
if (insertsAndUpdates.updates.length > 0) {
const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
const updateResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${updatePlaceholders}
ON DUPLICATE KEY UPDATE
${columnNames
.filter(col => col !== "po_id" && col !== "pid")
.map(col => `${col} = VALUES(${col})`)
.join(",")};
`, insertsAndUpdates.updates.map(u => u.values).flat());
// MySQL reports affectedRows = 2 for each row changed by ON DUPLICATE KEY
// UPDATE, so count the known batch size instead of dividing affectedRows
recordsUpdated += insertsAndUpdates.updates.length;
}


@@ -0,0 +1,180 @@
const readline = require('readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const question = (query) => new Promise((resolve) => rl.question(query, resolve));
async function loadScript(name) {
  try {
    // require() is synchronous; the async wrapper just gives uniform error handling
    return require(name);
  } catch (error) {
    console.error(`Failed to load script ${name}:`, error);
    return null;
  }
}
async function runWithTimeout(scriptPath) {
  return new Promise((resolve, reject) => {
    // Run the script in a child process so a crash can't take down the menu.
    // (Despite the name, no timeout is enforced yet — see the sketch below.)
    const child = require('child_process').fork(scriptPath, [], {
      stdio: 'inherit'
    });
    child.on('exit', (code) => {
      if (code === 0) {
        resolve();
      } else {
        reject(new Error(`Script exited with code ${code}`));
      }
    });
    child.on('error', (err) => {
      reject(err);
    });
  });
}
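// A minimal sketch of how a real timeout could be layered on top (the
// 10-minute default and SIGTERM choice are assumptions, not existing code):
function runWithHardTimeoutSketch(scriptPath, ms = 10 * 60 * 1000) {
  return new Promise((resolve, reject) => {
    const child = require('child_process').fork(scriptPath, [], { stdio: 'inherit' });
    const timer = setTimeout(() => {
      child.kill('SIGTERM'); // let the script clean up before dying
      reject(new Error(`Script timed out after ${ms}ms`));
    }, ms);
    child.on('exit', (code) => {
      clearTimeout(timer);
      code === 0 ? resolve() : reject(new Error(`Script exited with code ${code}`));
    });
    child.on('error', (err) => {
      clearTimeout(timer);
      reject(err);
    });
  });
}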
function clearScreen() {
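// '\x1Bc' is the ESC-c (RIS) sequence: a full terminal reset, which clears
// the screen and scrollback on most terminal emulators.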
process.stdout.write('\x1Bc');
}
const scripts = {
'Import Scripts': {
'1': { name: 'Full Import From Production', path: './import-from-prod' },
'2': { name: 'Individual Import Scripts ▸', submenu: {
'1': { name: 'Import Orders', path: './import/orders', key: 'importOrders' },
'2': { name: 'Import Products', path: './import/products', key: 'importProducts' },
'3': { name: 'Import Purchase Orders', path: './import/purchase-orders' },
'4': { name: 'Import Categories', path: './import/categories' },
'b': { name: 'Back to Main Menu' }
}}
},
'Metrics': {
'3': { name: 'Calculate All Metrics', path: './calculate-metrics' },
'4': { name: 'Individual Metric Scripts ▸', submenu: {
'1': { name: 'Brand Metrics', path: './metrics/brand-metrics' },
'2': { name: 'Category Metrics', path: './metrics/category-metrics' },
'3': { name: 'Financial Metrics', path: './metrics/financial-metrics' },
'4': { name: 'Product Metrics', path: './metrics/product-metrics' },
'5': { name: 'Sales Forecasts', path: './metrics/sales-forecasts' },
'6': { name: 'Time Aggregates', path: './metrics/time-aggregates' },
'7': { name: 'Vendor Metrics', path: './metrics/vendor-metrics' },
'b': { name: 'Back to Main Menu' }
}}
},
'Database Management': {
'5': { name: 'Test Production Connection', path: './test-prod-connection' }
},
'Reset Scripts': {
'6': { name: 'Reset Database', path: './reset-db' },
'7': { name: 'Reset Metrics', path: './reset-metrics' }
}
};
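// Extending the menu is just another entry in a category — illustrative only,
// './export/csv-export' is a made-up path that does not exist in this repo:
//   'Database Management': {
//     '5': { name: 'Test Production Connection', path: './test-prod-connection' },
//     '8': { name: 'Export CSV', path: './export/csv-export' }
//   }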
let lastRun = null;
async function displayMenu(menuItems, title = 'Inventory Management Script Runner') {
clearScreen();
console.log(`\n${title}\n`);
for (const [category, items] of Object.entries(menuItems)) {
console.log(`\n${category}:`);
Object.entries(items).forEach(([key, script]) => {
console.log(`${key}. ${script.name}`);
});
}
if (lastRun) {
console.log('\nQuick Access:');
console.log(`r. Repeat Last Script (${lastRun.name})`);
}
console.log('\nq. Quit\n');
}
async function handleSubmenu(submenu, title) {
while (true) {
await displayMenu({"Individual Scripts": submenu}, title);
const choice = await question('Select an option (or b to go back): ');
if (choice.toLowerCase() === 'b') {
return null;
}
if (submenu[choice]) {
return submenu[choice];
}
console.log('Invalid selection. Please try again.');
await new Promise(resolve => setTimeout(resolve, 1000));
}
}
async function runScript(script) {
console.log(`\nRunning: ${script.name}`);
try {
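// require.resolve() turns the relative menu path into an absolute file path,
// resolving it relative to this file rather than the current working directory.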
const scriptPath = require.resolve(script.path);
await runWithTimeout(scriptPath);
console.log('\nScript completed successfully');
lastRun = script;
} catch (error) {
console.error('\nError running script:', error);
}
await question('\nPress Enter to continue...');
}
async function main() {
while (true) {
await displayMenu(scripts);
const choice = await question('Select an option: ');
if (choice.toLowerCase() === 'q') {
break;
}
if (choice.toLowerCase() === 'r' && lastRun) {
await runScript(lastRun);
continue;
}
let selectedScript = null;
for (const category of Object.values(scripts)) {
if (category[choice]) {
selectedScript = category[choice];
break;
}
}
if (!selectedScript) {
console.log('Invalid selection. Please try again.');
await new Promise(resolve => setTimeout(resolve, 1000));
continue;
}
if (selectedScript.submenu) {
const submenuChoice = await handleSubmenu(
selectedScript.submenu,
selectedScript.name
);
if (submenuChoice && submenuChoice.path) {
await runScript(submenuChoice);
}
} else if (selectedScript.path) {
await runScript(selectedScript);
}
}
rl.close();
process.exit(0);
}
if (require.main === module) {
main().catch(error => {
console.error('Fatal error:', error);
process.exit(1);
});
}