25 Commits

SHA1 Message Date
9e1989ac66 Cleanup 2025-02-01 14:08:27 -05:00
5bfd6f6d04 Fix import script order count issues 2025-02-01 12:54:33 -05:00
1003ff3cf2 More incremental import fixes 2025-02-01 11:42:51 -05:00
2d0089dc52 Incremental import order fixes 2025-02-01 11:03:42 -05:00
50b86d6d8a Fix/add data to PO script 2025-02-01 10:51:47 -05:00
07f14c0017 Fix/add data to orders script and fix other import errors 2025-02-01 01:06:45 -05:00
e77b488cd4 Fix/add data to products script 2025-01-31 18:44:11 -05:00
d57239c40c Finish up import script incremental and reliability updates 2025-01-31 16:01:21 -05:00
1c932e0df5 More import script updates and fixes, better import_history tracking 2025-01-31 13:12:43 -05:00
a867117c3c Import script incremental fixes 2025-01-31 11:12:38 -05:00
996d3d36af Streamline incremental imports 2025-01-31 10:01:50 -05:00
d0abe9d9a2 - Modify import scripts to handle edge cases with empty arrays and null conditions
- Improve parameter handling in incremental update queries for purchase orders and products
2025-01-31 01:50:21 -05:00
5e4d1c3bd8 Improve import scripts with enhanced incremental update tracking and performance
- Add record tracking for added and updated records in import scripts
- Modify products import to use a dynamic 'needs_update' flag for selective updates
- Enhance order import with more comprehensive timestamp checks
- Update import-from-prod.js to handle and clean up previously running imports
- Improve error handling and connection management in import processes
2025-01-31 01:39:48 -05:00
1be97d6610 Enhance purchase order import with advanced receiving tracking and fulfillment logic
- Implement FIFO-based receiving fulfillment tracking
- Add detailed receiving history with excess and partial fulfillment support
- Improve vendor name resolution and fallback handling
- Optimize incremental update queries by removing redundant conditions
- Enhance receiving status calculation with more granular tracking
2025-01-31 01:25:48 -05:00
b506f89dd7 Optimize order and product import scripts with improved performance and incremental update handling
- Refactor orders import to use temporary tables for more efficient data processing
- Improve batch processing and memory management in order import script
- Update product import to use temporary tables for inventory status
- Modify purchase orders import to use updated timestamp for incremental updates
- Enhance error handling and logging for import processes
2025-01-30 21:13:53 -05:00
c433f1aae8 Enhance import scripts with incremental update support and improved error handling
- Update import-from-prod.js to support granular incremental updates for different import types
- Modify orders.js to handle complex order data retrieval with better performance and error tracking
- Add support for incremental updates in products.js import function
- Improve logging and progress tracking for import processes
2025-01-30 15:49:47 -05:00
31d4011902 Add back product-category import and product time estimates 2025-01-30 00:00:30 -05:00
6c5f119ee5 Import fixes/optimizations 2025-01-29 21:48:56 -05:00
3c5fb9e435 Optimize product import with dynamic batching and memory management 2025-01-29 19:14:58 -05:00
2b329a55a4 Increase product import batch size 2025-01-29 18:51:55 -05:00
0d377466aa Optimize database import queries with improved index selection 2025-01-29 18:42:29 -05:00
fb5bf4a144 Optimize order import with improved tax info retrieval and performance 2025-01-29 18:21:31 -05:00
4d8a677c5b Remove import history tracking from purchase orders import script 2025-01-29 16:33:37 -05:00
655c071960 Limit order and purchase order imports to last 5 years 2025-01-29 16:25:24 -05:00
d2a2dbc812 Add incremental import support and tracking for database synchronization 2025-01-29 16:22:00 -05:00
11 changed files with 1880 additions and 673 deletions

.gitignore

@@ -57,3 +57,4 @@ csv/**/*
**/csv/**/*
!csv/.gitkeep
inventory/tsconfig.tsbuildinfo
inventory-server/scripts/.fuse_hidden00000fa20000000a


@@ -169,4 +169,28 @@ ORDER BY
ELSE 4
END,
c.name,
st.vendor;
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT,
INDEX idx_last_sync (last_sync_timestamp)
);
CREATE TABLE IF NOT EXISTS import_history (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP NULL,
duration_seconds INT,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
records_added INT DEFAULT 0,
records_updated INT DEFAULT 0,
is_incremental BOOLEAN DEFAULT FALSE,
status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running',
error_message TEXT,
additional_info JSON,
INDEX idx_table_time (table_name, start_time),
INDEX idx_status (status)
);
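
For reference, a minimal sketch (not part of the diff) of how the new import_history table can be inspected once populated, assuming a mysql2/promise connection named `db`:

async function recentImports(db, limit = 10) {
  // duration_minutes is the generated column defined above
  const [rows] = await db.query(
    `SELECT table_name, status, is_incremental, records_added, records_updated,
            duration_minutes, error_message
     FROM import_history
     ORDER BY start_time DESC
     LIMIT ?`,
    [limit]
  );
  return rows;
}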


@@ -39,7 +39,7 @@ CREATE TABLE products (
tags TEXT,
moq INT DEFAULT 1,
uom INT DEFAULT 1,
-  rating TINYINT UNSIGNED DEFAULT 0,
+  rating DECIMAL(10,2) DEFAULT 0.00,
reviews INT UNSIGNED DEFAULT 0,
weight DECIMAL(10,3),
length DECIMAL(10,3),
@@ -52,7 +52,7 @@ CREATE TABLE products (
notifies INT UNSIGNED DEFAULT 0,
date_last_sold DATE,
PRIMARY KEY (pid),
-  UNIQUE KEY unique_sku (SKU),
+  INDEX idx_sku (SKU),
INDEX idx_vendor (vendor),
INDEX idx_brand (brand),
INDEX idx_location (location),
@@ -113,11 +113,13 @@ CREATE TABLE IF NOT EXISTS orders (
tax DECIMAL(10,3) DEFAULT 0.000,
tax_included TINYINT(1) DEFAULT 0,
shipping DECIMAL(10,3) DEFAULT 0.000,
+  costeach DECIMAL(10,3) DEFAULT 0.000,
customer VARCHAR(50) NOT NULL,
customer_name VARCHAR(100),
status VARCHAR(20) DEFAULT 'pending',
canceled TINYINT(1) DEFAULT 0,
PRIMARY KEY (id),
+  UNIQUE KEY unique_order_line (order_number, pid),
KEY order_number (order_number),
KEY pid (pid),
KEY customer (customer),
@@ -135,7 +137,9 @@ CREATE TABLE purchase_orders (
expected_date DATE,
pid BIGINT NOT NULL,
sku VARCHAR(50) NOT NULL,
+  name VARCHAR(100) NOT NULL COMMENT 'Product name from products.description',
cost_price DECIMAL(10, 3) NOT NULL,
+  po_cost_price DECIMAL(10, 3) NOT NULL COMMENT 'Original cost from PO, before receiving adjustments',
status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done',
receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid',
notes TEXT,
@@ -147,7 +151,6 @@ CREATE TABLE purchase_orders (
received_by INT,
receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
FOREIGN KEY (pid) REFERENCES products(pid),
-  FOREIGN KEY (sku) REFERENCES products(SKU),
INDEX idx_po_id (po_id),
INDEX idx_vendor (vendor),
INDEX idx_status (status),
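
The index changes above work together: SKU loses its UNIQUE constraint, while orders gains the composite UNIQUE KEY unique_order_line (order_number, pid) that the import scripts rely on for their upserts. A sketch of that upsert (illustrative values, column list abbreviated):

// Sketch only: re-importing the same (order_number, pid) pair updates the
// existing row instead of inserting a duplicate.
await localConnection.query(`
  INSERT INTO orders (order_number, pid, SKU, price, quantity, customer)
  VALUES (?, ?, ?, ?, ?, ?)
  ON DUPLICATE KEY UPDATE
    price = VALUES(price),
    quantity = VALUES(quantity)
`, [1001, 42, 'SKU-42', 9.99, 3, 'C123']);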


@@ -10,12 +10,16 @@ const importPurchaseOrders = require('./import/purchase-orders');
dotenv.config({ path: path.join(__dirname, "../.env") });
// Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
-const IMPORT_ORDERS = false;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
+const IMPORT_ORDERS = true;
const IMPORT_PURCHASE_ORDERS = true;
// Add flag for incremental updates
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
// SSH configuration
// In import-from-prod.js
const sshConfig = {
ssh: {
host: process.env.PROD_SSH_HOST,
@@ -24,16 +28,16 @@ const sshConfig = {
privateKey: process.env.PROD_SSH_KEY_PATH
? require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH)
: undefined,
compress: true, // Enable SSH compression
},
// Production database configuration
prodDbConfig: {
host: process.env.PROD_DB_HOST || "localhost",
user: process.env.PROD_DB_USER,
password: process.env.PROD_DB_PASSWORD,
database: process.env.PROD_DB_NAME,
port: process.env.PROD_DB_PORT || 3306,
timezone: 'Z',
},
// Local database configuration
localDbConfig: {
host: process.env.DB_HOST,
user: process.env.DB_USER,
@@ -44,6 +48,12 @@ const sshConfig = {
connectionLimit: 10,
queueLimit: 0,
namedPlaceholders: true,
connectTimeout: 60000,
enableKeepAlive: true,
keepAliveInitialDelay: 10000,
compress: true,
timezone: 'Z',
stringifyObjects: false,
}
};
@@ -68,6 +78,7 @@ async function main() {
const startTime = Date.now();
let connections;
let completedSteps = 0;
let importHistoryId;
const totalSteps = [
IMPORT_CATEGORIES,
IMPORT_PRODUCTS,
@@ -80,7 +91,7 @@ async function main() {
outputProgress({
status: "running",
operation: "Import process",
message: "Initializing SSH tunnel...",
message: `Initializing SSH tunnel for ${INCREMENTAL_UPDATE ? 'incremental' : 'full'} import...`,
current: completedSteps,
total: totalSteps,
elapsed: formatElapsedTime(startTime)
@@ -91,6 +102,50 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
// Clean up any previously running imports that weren't completed
await localConnection.query(`
UPDATE import_history
SET
status = 'cancelled',
end_time = NOW(),
duration_seconds = TIMESTAMPDIFF(SECOND, start_time, NOW()),
error_message = 'Previous import was not completed properly'
WHERE status = 'running'
`);
// Initialize sync_status table if it doesn't exist
await localConnection.query(`
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT,
INDEX idx_last_sync (last_sync_timestamp)
);
`);
// Create import history record for the overall session
const [historyResult] = await localConnection.query(`
INSERT INTO import_history (
table_name,
start_time,
is_incremental,
status,
additional_info
) VALUES (
'all_tables',
NOW(),
?,
'running',
JSON_OBJECT(
'categories_enabled', ?,
'products_enabled', ?,
'orders_enabled', ?,
'purchase_orders_enabled', ?
)
)
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]);
importHistoryId = historyResult.insertId;
const results = {
categories: null,
products: null,
@@ -98,37 +153,88 @@ async function main() {
purchaseOrders: null
};
let totalRecordsAdded = 0;
let totalRecordsUpdated = 0;
// Run each import based on constants
if (IMPORT_CATEGORIES) {
results.categories = await importCategories(prodConnection, localConnection);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Categories import result:', results.categories);
totalRecordsAdded += results.categories?.recordsAdded || 0;
totalRecordsUpdated += results.categories?.recordsUpdated || 0;
}
if (IMPORT_PRODUCTS) {
-      results.products = await importProducts(prodConnection, localConnection);
+      results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Products import result:', results.products);
totalRecordsAdded += results.products?.recordsAdded || 0;
totalRecordsUpdated += results.products?.recordsUpdated || 0;
}
if (IMPORT_ORDERS) {
-      results.orders = await importOrders(prodConnection, localConnection);
+      results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Orders import result:', results.orders);
totalRecordsAdded += results.orders?.recordsAdded || 0;
totalRecordsUpdated += results.orders?.recordsUpdated || 0;
}
if (IMPORT_PURCHASE_ORDERS) {
-      results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection);
+      results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Purchase orders import result:', results.purchaseOrders);
totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0;
totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0;
}
const endTime = Date.now();
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
// Update import history with final stats
await localConnection.query(`
UPDATE import_history
SET
end_time = NOW(),
duration_seconds = ?,
records_added = ?,
records_updated = ?,
status = 'completed',
additional_info = JSON_OBJECT(
'categories_enabled', ?,
'products_enabled', ?,
'orders_enabled', ?,
'purchase_orders_enabled', ?,
'categories_result', CAST(? AS JSON),
'products_result', CAST(? AS JSON),
'orders_result', CAST(? AS JSON),
'purchase_orders_result', CAST(? AS JSON)
)
WHERE id = ?
`, [
totalElapsedSeconds,
totalRecordsAdded,
totalRecordsUpdated,
IMPORT_CATEGORIES,
IMPORT_PRODUCTS,
IMPORT_ORDERS,
IMPORT_PURCHASE_ORDERS,
JSON.stringify(results.categories),
JSON.stringify(results.products),
JSON.stringify(results.orders),
JSON.stringify(results.purchaseOrders),
importHistoryId
]);
outputProgress({
status: "complete",
operation: "Import process",
-      message: `All imports completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`,
+      message: `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`,
current: completedSteps,
total: totalSteps,
elapsed: formatElapsedTime(startTime),
@@ -146,13 +252,27 @@ async function main() {
} catch (error) {
const endTime = Date.now();
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
// Update import history with error
if (importHistoryId && connections?.localConnection) {
await connections.localConnection.query(`
UPDATE import_history
SET
end_time = NOW(),
duration_seconds = ?,
status = ?,
error_message = ?
WHERE id = ?
`, [totalElapsedSeconds, error.message === "Import cancelled" ? 'cancelled' : 'failed', error.message, importHistoryId]);
}
console.error("Error during import process:", error);
outputProgress({
status: error.message === "Import cancelled" ? "cancelled" : "error",
operation: "Import process",
message: error.message === "Import cancelled"
-        ? `Import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}`
-        : `Import failed after ${formatElapsedTime(totalElapsedSeconds)}`,
+        ? `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}`
+        : `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import failed after ${formatElapsedTime(totalElapsedSeconds)}`,
error: error.message,
current: completedSteps,
total: totalSteps,
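
Since INCREMENTAL_UPDATE defaults to true, a full rebuild has to be requested explicitly through the environment. A usage sketch (invocation path assumed):

// node import-from-prod.js                          -> incremental import
// INCREMENTAL_UPDATE=false node import-from-prod.js -> full import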


@@ -1,240 +1,568 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
-const { importMissingProducts } = require('./products');
-async function importOrders(prodConnection, localConnection) {
-  outputProgress({
-    operation: "Starting orders import - Getting total count",
-    status: "running",
-  });
+const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');
/**
* Imports orders from a production MySQL database to a local MySQL database.
* It can run in two modes:
* 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
* 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
*
* @param {object} prodConnection - A MySQL connection to production DB (MySQL 5.7).
* @param {object} localConnection - A MySQL connection to local DB (MySQL 8.0).
* @param {boolean} incrementalUpdate - Set to false for a full sync; true for incremental.
*
* @returns {object} Information about the sync operation.
*/
+async function importOrders(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();
-  const skippedOrders = new Set(); // Store orders that need to be retried
-  const missingProducts = new Set(); // Store products that need to be imported
+  const skippedOrders = new Set();
+  const missingProducts = new Set();
let recordsAdded = 0;
let recordsUpdated = 0;
let processedCount = 0;
let importedCount = 0;
let totalOrderItems = 0;
let totalUniqueOrders = 0;
// Add a cumulative counter for processed orders before the loop
let cumulativeProcessedOrders = 0;
try {
-    // First get the column names from the table structure
+    // Insert temporary table creation queries
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_order_items (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
quantity INT NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_order_meta (
order_id INT UNSIGNED NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INT,
canceled TINYINT(1),
PRIMARY KEY (order_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_order_discounts (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
discount DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_order_taxes (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
tax DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_order_costs (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
// Get column names from the local table
const [columns] = await localConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'orders'
ORDER BY ORDINAL_POSITION
`);
-    const columnNames = columns.map(col => col.COLUMN_NAME);
+    const columnNames = columns
+      .map((col) => col.COLUMN_NAME)
+      .filter((name) => name !== "id"); // Skip auto-increment ID
// Get last sync info
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
);
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
-    // Get total count first for progress indication
-    const [countResult] = await prodConnection.query(`
-      SELECT COUNT(*) as total
-      FROM order_items oi FORCE INDEX (PRIMARY)
-      JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id
-      WHERE o.order_status >= 15
-      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
-    `);
-    const totalOrders = countResult[0].total;
-    outputProgress({
-      operation: `Starting orders import - Fetching ${totalOrders} orders from production`,
-      status: "running",
-    });
-    const total = countResult[0].total;
-    let processed = 0;
+    console.log('Orders: Using last sync time:', lastSyncTime);
+    // First get count of order items
+    const [[{ total }]] = await prodConnection.query(`
+      SELECT COUNT(*) as total
+      FROM order_items oi
+      USE INDEX (PRIMARY)
+      JOIN _order o ON oi.order_id = o.order_id
+      WHERE o.order_status >= 15
+      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
+      AND o.date_placed_onlydate IS NOT NULL
+      ${incrementalUpdate ? `
+      AND (
+        o.stamp > ?
+        OR oi.stamp > ?
+        OR EXISTS (
+          SELECT 1 FROM order_discount_items odi
+          WHERE odi.order_id = o.order_id
+          AND odi.pid = oi.prod_pid
+        )
+        OR EXISTS (
+          SELECT 1 FROM order_tax_info oti
+          JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
+          WHERE oti.order_id = o.order_id
+          AND otip.pid = oi.prod_pid
+          AND oti.stamp > ?
+        )
+      )
+      ` : ''}
+    `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
+    totalOrderItems = total;
+    console.log('Orders: Found changes:', totalOrderItems);
// Get order items in batches
const [orderItems] = await prodConnection.query(`
SELECT
oi.order_id,
oi.prod_pid as pid,
oi.prod_itemnumber as SKU,
oi.prod_price as price,
oi.qty_ordered as quantity,
COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount,
oi.stamp as last_modified
FROM order_items oi
USE INDEX (PRIMARY)
JOIN _order o ON oi.order_id = o.order_id
WHERE o.order_status >= 15
AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
AND o.date_placed_onlydate IS NOT NULL
${incrementalUpdate ? `
AND (
o.stamp > ?
OR oi.stamp > ?
OR EXISTS (
SELECT 1 FROM order_discount_items odi
WHERE odi.order_id = o.order_id
AND odi.pid = oi.prod_pid
)
OR EXISTS (
SELECT 1 FROM order_tax_info oti
JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
WHERE oti.order_id = o.order_id
AND otip.pid = oi.prod_pid
AND oti.stamp > ?
)
)
` : ''}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
// Process in batches
const batchSize = 1000;
let offset = 0;
console.log('Orders: Processing', orderItems.length, 'order items');
while (offset < total) {
const [orders] = await prodConnection.query(`
SELECT
oi.order_id as order_number,
oi.prod_pid as pid,
oi.prod_itemnumber as SKU,
o.date_placed_onlydate as date,
oi.prod_price_reg as price,
oi.qty_ordered as quantity,
(oi.prod_price_reg - oi.prod_price) as discount,
(
SELECT
otp.item_taxes_to_collect
FROM
order_tax_info oti
JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id
WHERE
oti.order_id = o.order_id
AND otp.pid = oi.prod_pid
ORDER BY
oti.stamp DESC
LIMIT 1
) as tax,
0 as tax_included,
ROUND(
((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) *
(oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2
) as shipping,
o.order_cid as customer,
CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name,
'pending' as status,
CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled
FROM order_items oi
JOIN _order o ON oi.order_id = o.order_id
WHERE o.order_status >= 15
AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
LIMIT ? OFFSET ?
`, [batchSize, offset]);
// Insert order items in batches
for (let i = 0; i < orderItems.length; i += 5000) {
const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
const values = batch.flatMap(item => [
item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
]);
// Check if all products exist before inserting orders
const orderProductPids = [...new Set(orders.map((o) => o.pid))];
const [existingProducts] = await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
[orderProductPids]
);
const existingPids = new Set(existingProducts.map((p) => p.pid));
// Filter out orders with missing products and track them
const validOrders = orders.filter((order) => {
if (!existingPids.has(order.pid)) {
missingProducts.add(order.pid);
skippedOrders.add(order.order_number);
return false;
}
return true;
});
if (validOrders.length > 0) {
const placeholders = validOrders
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
.join(",");
const updateClauses = columnNames
.filter((col) => col !== "order_number") // Don't update primary key
.map((col) => `${col} = VALUES(${col})`)
.join(",");
const query = `
INSERT INTO orders (${columnNames.join(",")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE ${updateClauses}
`;
await localConnection.query(
query,
validOrders.flatMap(order => columnNames.map(col => order[col]))
);
}
processed += orders.length;
offset += batchSize;
await localConnection.query(`
INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
price = VALUES(price),
quantity = VALUES(quantity),
base_discount = VALUES(base_discount)
`, values);
processedCount = i + batch.length;
outputProgress({
status: "running",
operation: "Orders import",
-        current: processed,
-        total,
-        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-        remaining: estimateRemaining(startTime, processed, total),
-        rate: calculateRate(startTime, processed)
+        message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
+        current: processedCount,
+        total: totalOrderItems
});
}
// Now handle missing products and retry skipped orders
if (missingProducts.size > 0) {
outputProgress({
operation: `Found ${missingProducts.size} missing products, importing them now`,
status: "running",
});
// Get unique order IDs
const orderIds = [...new Set(orderItems.map(item => item.order_id))];
totalUniqueOrders = orderIds.length;
console.log('Total unique order IDs:', totalUniqueOrders);
await importMissingProducts(prodConnection, localConnection, [
...missingProducts,
// Reset processed count for order processing phase
processedCount = 0;
// Get order metadata in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
console.log('Sample of batch IDs:', batchIds.slice(0, 5));
const [orders] = await prodConnection.query(`
SELECT
o.order_id,
o.date_placed_onlydate as date,
o.order_cid as customer,
CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name,
o.order_status as status,
CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled
FROM _order o
LEFT JOIN users u ON o.order_cid = u.cid
WHERE o.order_id IN (?)
`, [batchIds]);
console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
const duplicates = orders.filter((order, index, self) =>
self.findIndex(o => o.order_id === order.order_id) !== index
);
if (duplicates.length > 0) {
console.log('Found duplicates:', duplicates);
}
const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
const values = orders.flatMap(order => [
order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled
]);
// Retry skipped orders
if (skippedOrders.size > 0) {
outputProgress({
operation: `Retrying ${skippedOrders.size} skipped orders`,
status: "running",
});
await localConnection.query(`
INSERT INTO temp_order_meta VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
date = VALUES(date),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled)
`, values);
const [retryOrders] = await prodConnection.query(`
SELECT
oi.order_id as order_number,
oi.prod_pid as pid,
oi.prod_itemnumber as SKU,
o.date_placed_onlydate as date,
oi.prod_price_reg as price,
oi.qty_ordered as quantity,
(oi.prod_price_reg - oi.prod_price) as discount,
(
SELECT
otp.item_taxes_to_collect
FROM
order_tax_info oti
JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id
WHERE
oti.order_id = o.order_id
AND otp.pid = oi.prod_pid
ORDER BY
oti.stamp DESC
LIMIT 1
) as tax,
0 as tax_included,
ROUND(
((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) *
(oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2
) as shipping,
o.order_cid as customer,
CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name,
'pending' as status,
CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled
FROM order_items oi
JOIN _order o ON oi.order_id = o.order_id
WHERE oi.order_id IN (?)
`, [[...skippedOrders]]);
processedCount = i + orders.length;
outputProgress({
status: "running",
operation: "Orders import",
message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
current: processedCount,
total: totalUniqueOrders
});
}
const placeholders = retryOrders
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
.join(",");
const updateClauses = columnNames
.filter((col) => col !== "order_number") // Don't update primary key
.map((col) => `${col} = VALUES(${col})`)
.join(",");
// Reset processed count for final phase
processedCount = 0;
const query = `
INSERT INTO orders (${columnNames.join(",")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE ${updateClauses}
`;
// Get promotional discounts in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const [discounts] = await prodConnection.query(`
SELECT order_id, pid, SUM(amount) as discount
FROM order_discount_items
WHERE order_id IN (?)
GROUP BY order_id, pid
`, [batchIds]);
await localConnection.query(
query,
retryOrders.flatMap(order => columnNames.map(col => order[col]))
);
if (discounts.length > 0) {
const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);
await localConnection.query(`
INSERT INTO temp_order_discounts VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
discount = VALUES(discount)
`, values);
}
}
outputProgress({
status: "complete",
operation: "Orders import completed",
current: total,
total,
duration: formatElapsedTime((Date.now() - startTime) / 1000),
});
// Get tax information in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const [taxes] = await prodConnection.query(`
SELECT DISTINCT
oti.order_id,
otip.pid,
otip.item_taxes_to_collect as tax
FROM order_tax_info oti
JOIN (
SELECT order_id, MAX(stamp) as max_stamp
FROM order_tax_info
WHERE order_id IN (?)
GROUP BY order_id
) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp
JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
`, [batchIds]);
if (taxes.length > 0) {
// Remove any duplicates before inserting
const uniqueTaxes = new Map();
taxes.forEach(t => {
const key = `${t.order_id}-${t.pid}`;
uniqueTaxes.set(key, t);
});
const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
if (values.length > 0) {
const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
await localConnection.query(`
INSERT INTO temp_order_taxes VALUES ${placeholders}
ON DUPLICATE KEY UPDATE tax = VALUES(tax)
`, values);
}
}
}
// Get costeach values in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const [costs] = await prodConnection.query(`
SELECT orderid as order_id, pid, costeach
FROM order_costs
WHERE orderid IN (?)
`, [batchIds]);
if (costs.length > 0) {
const placeholders = costs.map(() => '(?, ?, ?)').join(",");
const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach]);
await localConnection.query(`
INSERT INTO temp_order_costs (order_id, pid, costeach)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE costeach = VALUES(costeach)
`, values);
}
}
// Now combine all the data and insert into orders table
// Pre-check all products at once instead of per batch
const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
[allOrderPids]
) : [[]];
const existingPids = new Set(existingProducts.map(p => p.pid));
// Process in larger batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
// Get combined data for this batch
const [orders] = await localConnection.query(`
SELECT
oi.order_id as order_number,
oi.pid,
oi.SKU,
om.date,
oi.price,
oi.quantity,
oi.base_discount + COALESCE(od.discount, 0) as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(tc.costeach, 0) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
`, [batchIds]);
// Filter orders and track missing products - do this in a single pass
const validOrders = [];
const values = [];
const processedOrderItems = new Set(); // Track unique order items
const processedOrders = new Set(); // Track unique orders
for (const order of orders) {
if (!existingPids.has(order.pid)) {
missingProducts.add(order.pid);
skippedOrders.add(order.order_number);
continue;
}
validOrders.push(order);
values.push(...columnNames.map(col => order[col] ?? null));
processedOrderItems.add(`${order.order_number}-${order.pid}`);
processedOrders.add(order.order_number);
}
if (validOrders.length > 0) {
// Pre-compute the placeholders string once
const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");
const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(",")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());
const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
recordsAdded += inserts;
recordsUpdated += updates;
importedCount += processedOrderItems.size; // Count unique order items processed
}
// Update progress based on unique orders processed
cumulativeProcessedOrders += processedOrders.size;
outputProgress({
status: "running",
operation: "Orders import",
message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
current: cumulativeProcessedOrders,
total: totalUniqueOrders,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
rate: calculateRate(startTime, cumulativeProcessedOrders)
});
}
// Now try to import any orders that were skipped due to missing products
if (skippedOrders.size > 0) {
try {
outputProgress({
status: "running",
operation: "Orders import",
message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
});
// Get the orders that were skipped
const [skippedProdOrders] = await localConnection.query(`
SELECT DISTINCT
oi.order_id as order_number,
oi.pid,
oi.SKU,
om.date,
oi.price,
oi.quantity,
oi.base_discount + COALESCE(od.discount, 0) as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(tc.costeach, 0) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
`, [Array.from(skippedOrders)]);
// Check which products exist now
const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
[skippedPids]
) : [[]];
const existingPids = new Set(existingProducts.map(p => p.pid));
// Filter orders that can now be imported
const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
const retryOrderItems = new Set(); // Track unique order items in retry
if (validOrders.length > 0) {
const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();
const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(", ")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, values);
const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
// Track unique order items
validOrders.forEach(order => {
retryOrderItems.add(`${order.order_number}-${order.pid}`);
});
outputProgress({
status: "running",
operation: "Orders import",
message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
});
// Update the main counters
recordsAdded += inserts;
recordsUpdated += updates;
importedCount += retryOrderItems.size;
}
} catch (error) {
console.warn('Warning: Failed to retry skipped orders:', error.message);
console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
}
}
// Clean up temporary tables after ALL processing is complete
await localConnection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_order_items;
DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
`);
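    // Editor note, not in the diff: the temp_order_* tables above were created
    // with CREATE TABLE IF NOT EXISTS rather than CREATE TEMPORARY TABLE, and
    // DROP TEMPORARY TABLE only drops true temporary tables, so these regular
    // tables survive this cleanup between runs.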
// Only update sync status if we get here (no errors thrown)
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
VALUES ('orders', NOW())
ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
`);
return {
status: "complete",
-      totalImported: total,
+      totalImported: Math.floor(importedCount),
recordsAdded: recordsAdded || 0,
recordsUpdated: Math.floor(recordsUpdated),
totalSkipped: skippedOrders.size,
missingProducts: missingProducts.size,
      retriedOrders: skippedOrders.size,
incrementalUpdate,
lastSyncTime
};
} catch (error) {
outputProgress({
operation: "Orders import failed",
status: "error",
error: error.message,
});
console.error("Error during orders import:", error);
throw error;
}
}
module.exports = importOrders;
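
The recordsAdded / recordsUpdated split relies on MySQL's affected-rows convention for INSERT ... ON DUPLICATE KEY UPDATE: 1 per inserted row, 2 per updated row, 0 for a row left unchanged. Given the number of rows sent, the split can be recovered exactly when no row is a no-op; a sketch (helper name hypothetical, not from the diff):

function splitUpsertCounts(rowsSent, affectedRows) {
  // affectedRows = inserts + 2 * updates; rowsSent = inserts + updates
  const updates = affectedRows - rowsSent;
  const inserts = rowsSent - updates;
  return { inserts, updates };
}
// splitUpsertCounts(7, 9) -> { inserts: 5, updates: 2 }

The floor(affectedRows / 2) form used in the script matches this only when at most one row in the batch was an insert; with unchanged rows in the mix, both are estimates.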

File diff suppressed because it is too large.


@@ -1,14 +1,38 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
-async function importPurchaseOrders(prodConnection, localConnection) {
-  outputProgress({
-    operation: "Starting purchase orders import - Initializing",
-    status: "running",
-  });
+async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) {
const startTime = Date.now();
let recordsAdded = 0;
let recordsUpdated = 0;
try {
// Get last sync info
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
);
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
// Insert temporary table creation query for purchase orders
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_purchase_orders (
po_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
vendor VARCHAR(255),
date DATE,
expected_date DATE,
status INT,
notes TEXT,
PRIMARY KEY (po_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
outputProgress({
operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} purchase orders import`,
status: "running",
});
// Get column names for the insert
const [columns] = await localConnection.query(`
SELECT COLUMN_NAME
@@ -20,52 +44,131 @@ async function importPurchaseOrders(prodConnection, localConnection) {
.map((col) => col.COLUMN_NAME)
.filter((name) => name !== "id");
-    // First get all relevant PO IDs with basic info - this is much faster than the full join
// Build incremental conditions
const incrementalWhereClause = incrementalUpdate
? `AND (
p.date_updated > ?
OR p.date_ordered > ?
OR p.date_estin > ?
OR r.date_updated > ?
OR r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)`
: "";
const incrementalParams = incrementalUpdate
? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]
: [];
// First get all relevant PO IDs with basic info
const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total
FROM (
SELECT DISTINCT pop.po_id, pop.pid
FROM po p
-        FORCE INDEX (idx_date_created)
+        USE INDEX (idx_date_created)
JOIN po_products pop ON p.po_id = pop.po_id
JOIN suppliers s ON p.supplier_id = s.supplierid
-        WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
p.date_updated > ?
OR p.date_ordered > ?
OR p.date_estin > ?
)
` : ''}
UNION
SELECT DISTINCT r.receiving_id as po_id, rp.pid
FROM receivings_products rp
USE INDEX (received_date)
LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
-        WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)
` : ''}
) all_items
-    `);
+    `, incrementalUpdate ? [
+      lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
+      lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
+    ] : []);
console.log('Purchase Orders: Found changes:', total);
const [poList] = await prodConnection.query(`
SELECT DISTINCT
COALESCE(p.po_id, r.receiving_id) as po_id,
-      COALESCE(
-        NULLIF(s1.companyname, ''),
-        NULLIF(s2.companyname, ''),
-        'Unknown Vendor'
-      ) as vendor,
+      CASE
+        WHEN p.po_id IS NOT NULL THEN s1.companyname
+        WHEN r.supplier_id IS NOT NULL THEN s2.companyname
+        ELSE 'No Supplier'
+      END as vendor,
-      CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_ordered) END as date,
-      CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_estin) END as expected_date,
+      CASE
+        WHEN p.po_id IS NOT NULL THEN
+          DATE(COALESCE(
+            NULLIF(p.date_ordered, '0000-00-00 00:00:00'),
+            p.date_created
+          ))
+        WHEN r.receiving_id IS NOT NULL THEN
+          DATE(r.date_created)
+      END as date,
+      CASE
+        WHEN p.date_estin = '0000-00-00' THEN NULL
+        WHEN p.date_estin IS NULL THEN NULL
+        WHEN p.date_estin NOT REGEXP '^[0-9]{4}-[0-9]{2}-[0-9]{2}$' THEN NULL
+        ELSE p.date_estin
+      END as expected_date,
       COALESCE(p.status, 50) as status,
-      COALESCE(p.short_note, '') as notes,
-      COALESCE(p.notes, '') as long_note
+      p.short_note as notes,
+      p.notes as long_note
FROM (
SELECT po_id FROM po
-        WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        USE INDEX (idx_date_created)
+        WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
date_ordered > ?
OR date_updated > ?
OR date_estin > ?
)
` : ''}
UNION
SELECT DISTINCT r.receiving_id as po_id
FROM receivings r
-        JOIN receivings_products rp ON r.receiving_id = rp.receiving_id
-        WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        JOIN receivings_products rp USE INDEX (received_date) ON r.receiving_id = rp.receiving_id
+        WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)
` : ''}
) ids
LEFT JOIN po p ON ids.po_id = p.po_id
LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid
LEFT JOIN receivings r ON ids.po_id = r.receiving_id
LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid
ORDER BY po_id
-    `);
+    `, incrementalUpdate ? [
+      lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
+      lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
+    ] : []);
console.log('Sample PO dates:', poList.slice(0, 5).map(po => ({
po_id: po.po_id,
raw_date_ordered: po.raw_date_ordered,
raw_date_created: po.raw_date_created,
raw_date_estin: po.raw_date_estin,
computed_date: po.date,
expected_date: po.expected_date
})));
const totalItems = total;
let processed = 0;
@@ -89,10 +192,11 @@ async function importPurchaseOrders(prodConnection, localConnection) {
pop.po_id,
pop.pid,
pr.itemnumber as sku,
-          pop.cost_each as cost_price,
+          pr.description as name,
+          pop.cost_each,
pop.qty_each as ordered
FROM po_products pop
-        FORCE INDEX (PRIMARY)
+        USE INDEX (PRIMARY)
JOIN products pr ON pop.pid = pr.pid
WHERE pop.po_id IN (?)
`, [poIds]);
@@ -104,7 +208,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
const productPids = [...new Set(productBatch.map(p => p.pid))];
const batchPoIds = [...new Set(productBatch.map(p => p.po_id))];
// Get receivings for this batch
// Get receivings for this batch with employee names
const [receivings] = await prodConnection.query(`
SELECT
r.po_id,
@@ -112,17 +216,20 @@ async function importPurchaseOrders(prodConnection, localConnection) {
rp.receiving_id,
rp.qty_each,
rp.cost_each,
-          DATE(NULLIF(rp.received_date, '0000-00-00 00:00:00')) as received_date,
+          COALESCE(rp.received_date, r.date_created) as received_date,
rp.received_by,
CONCAT(e.firstname, ' ', e.lastname) as received_by_name,
CASE
WHEN r.po_id IS NULL THEN 2 -- No PO
WHEN r.po_id IN (?) THEN 0 -- Original PO
ELSE 1 -- Different PO
END as is_alt_po
FROM receivings_products rp
USE INDEX (received_date)
LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
LEFT JOIN employees e ON rp.received_by = e.employeeid
WHERE rp.pid IN (?)
-          AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+          AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
ORDER BY r.po_id, rp.pid, rp.received_date
`, [batchPoIds, productPids]);
@@ -167,8 +274,21 @@ async function importPurchaseOrders(prodConnection, localConnection) {
);
const validPids = new Set(existingPids.map(p => p.pid));
-          // Prepare values for this sub-batch
-          const values = [];
// First check which PO lines already exist and get their current values
const poLines = Array.from(poProductMap.values())
.filter(p => validPids.has(p.pid))
.map(p => [p.po_id, p.pid]);
const [existingPOs] = await localConnection.query(
`SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`,
poLines.flat()
);
const existingPOMap = new Map(
existingPOs.map(po => [`${po.po_id}-${po.pid}`, po])
);
// Split into inserts and updates
const insertsAndUpdates = { inserts: [], updates: [] };
let batchProcessed = 0;
for (const po of batch) {
@@ -181,78 +301,199 @@ async function importPurchaseOrders(prodConnection, localConnection) {
const altReceivingHistory = altReceivingMap.get(product.pid) || [];
const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || [];
-            const received = receivingHistory.reduce((sum, r) => sum + r.qty_each, 0);
-            const altReceived = altReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0);
-            const noPOReceived = noPOReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0);
-            const totalReceived = received + altReceived + noPOReceived;
// Combine all receivings and sort by date
const allReceivings = [
...receivingHistory.map(r => ({ ...r, type: 'original' })),
...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })),
...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' }))
].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31'));
// Split receivings into original PO and others
const originalPOReceivings = allReceivings.filter(r => r.type === 'original');
const otherReceivings = allReceivings.filter(r => r.type !== 'original');
// Track FIFO fulfillment
let remainingToFulfill = product.ordered;
const fulfillmentTracking = [];
let totalReceived = 0;
let actualCost = null; // Will store the cost of the first receiving that fulfills this PO
let firstFulfillmentReceiving = null;
let lastFulfillmentReceiving = null;
for (const receiving of allReceivings) {
const qtyToApply = Math.min(remainingToFulfill, receiving.qty_each);
if (qtyToApply > 0) {
// If this is the first receiving being applied, use its cost
if (actualCost === null) {
actualCost = receiving.cost_each;
firstFulfillmentReceiving = receiving;
}
lastFulfillmentReceiving = receiving;
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: qtyToApply,
qty_total: receiving.qty_each,
cost: receiving.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
remaining_qty: receiving.qty_each - qtyToApply
});
remainingToFulfill -= qtyToApply;
} else {
// Track excess receivings
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: 0,
qty_total: receiving.qty_each,
cost: receiving.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
is_excess: true
});
}
totalReceived += receiving.qty_each;
}
const receiving_status = !totalReceived ? 1 : // created
-              totalReceived < product.ordered ? 30 : // partial
+              remainingToFulfill > 0 ? 30 : // partial
40; // full
-            const allReceivings = [...receivingHistory];
-            if (altReceivingHistory.length > 0) {
-              allReceivings.push(...altReceivingHistory);
-            }
-            if (noPOReceivingHistory.length > 0) {
-              allReceivings.push(...noPOReceivingHistory);
-            }
-            allReceivings.sort((a, b) => new Date(a.received_date) - new Date(b.received_date));
-            const firstReceiving = allReceivings[0] || {};
-            const lastReceiving = allReceivings[allReceivings.length - 1] || {};
-            values.push(columnNames.map(col => {
+            function formatDate(dateStr) {
+              if (!dateStr) return null;
+              if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null;
+              if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null;
+              try {
+                const date = new Date(dateStr);
+                if (isNaN(date.getTime())) return null;
+                if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null;
+                return date.toISOString().split('T')[0];
+              } catch (e) {
+                return null;
+              }
+            }
+            const rowValues = columnNames.map(col => {
switch (col) {
case 'po_id': return po.po_id;
case 'vendor': return po.vendor;
-                case 'date': return po.date;
-                case 'expected_date': return po.expected_date;
+                case 'date': return formatDate(po.date);
+                case 'expected_date': return formatDate(po.expected_date);
case 'pid': return product.pid;
case 'sku': return product.sku;
-                case 'cost_price': return product.cost_price;
+                case 'name': return product.name;
+                case 'cost_price': return actualCost || product.cost_each;
+                case 'po_cost_price': return product.cost_each;
case 'status': return po.status;
case 'notes': return po.notes;
case 'long_note': return po.long_note;
case 'ordered': return product.ordered;
case 'received': return totalReceived;
-                case 'received_date': return firstReceiving.received_date || null;
-                case 'last_received_date': return lastReceiving.received_date || null;
-                case 'received_by': return firstReceiving.received_by || null;
+                case 'unfulfilled': return remainingToFulfill;
+                case 'excess_received': return Math.max(0, totalReceived - product.ordered);
+                case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date);
+                case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date);
+                case 'received_by': return firstFulfillmentReceiving?.received_by_name || null;
case 'receiving_status': return receiving_status;
-                case 'receiving_history': return JSON.stringify(allReceivings.map(r => ({
-                  receiving_id: r.receiving_id,
-                  qty: r.qty_each,
-                  cost: r.cost_each,
-                  date: r.received_date,
-                  received_by: r.received_by,
-                  alt_po: r.is_alt_po
-                })));
+                case 'receiving_history': return JSON.stringify({
+                  fulfillment: fulfillmentTracking,
+                  ordered_qty: product.ordered,
+                  total_received: totalReceived,
+                  remaining_unfulfilled: remainingToFulfill,
+                  excess_received: Math.max(0, totalReceived - product.ordered),
+                  po_cost: product.cost_each,
+                  actual_cost: actualCost || product.cost_each
+                });
default: return null;
}
-              }));
+              });
if (existingPOMap.has(key)) {
const existing = existingPOMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
const newVal = rowValues[columnNames.indexOf(col)];
const oldVal = existing[col] ?? null;
// Special handling for numbers to avoid type coercion issues
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
// Special handling for receiving_history - parse and compare
if (col === 'receiving_history') {
const newHistory = JSON.parse(newVal || '{}');
const oldHistory = JSON.parse(oldVal || '{}');
return JSON.stringify(newHistory) !== JSON.stringify(oldHistory);
}
return newVal !== oldVal;
});
if (hasChanges) {
insertsAndUpdates.updates.push({
po_id: po.po_id,
pid: product.pid,
values: rowValues
});
}
} else {
insertsAndUpdates.inserts.push({
po_id: po.po_id,
pid: product.pid,
values: rowValues
});
}
batchProcessed++;
}
}
-          if (values.length > 0) {
-            const placeholders = values.map(() =>
-              `(${Array(columnNames.length).fill("?").join(",")})`
-            ).join(",");
-            const query = `
-              INSERT INTO purchase_orders (${columnNames.join(",")})
-              VALUES ${placeholders}
+          // Handle inserts
+          if (insertsAndUpdates.inserts.length > 0) {
+            const insertPlaceholders = insertsAndUpdates.inserts
+              .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
+              .join(",");
+            const insertResult = await localConnection.query(`
+              INSERT INTO purchase_orders (${columnNames.join(",")})
+              VALUES ${insertPlaceholders}
+            `, insertsAndUpdates.inserts.map(i => i.values).flat());
const affectedRows = insertResult[0].affectedRows;
            // For an upsert, MySQL counts each updated row twice in affectedRows,
            // so affectedRows = inserts + 2 * updates
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
recordsAdded += inserts;
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
processed += batchProcessed;
}
// Handle updates - now we know these actually have changes
if (insertsAndUpdates.updates.length > 0) {
const updatePlaceholders = insertsAndUpdates.updates
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
.join(",");
const updateResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${updatePlaceholders}
ON DUPLICATE KEY UPDATE ${columnNames
.filter((col) => col !== "po_id" && col !== "pid")
.map((col) => `${col} = VALUES(${col})`)
.join(",")};
-            `;
-            await localConnection.query(query, values.flat());
+            `, insertsAndUpdates.updates.map(u => u.values).flat());
const affectedRows = updateResult[0].affectedRows;
            // For an upsert, MySQL counts each updated row twice in affectedRows,
            // so affectedRows = inserts + 2 * updates
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
processed += batchProcessed;
}
processed += batchProcessed;
// Update progress based on time interval
const now = Date.now();
@@ -271,21 +512,27 @@ async function importPurchaseOrders(prodConnection, localConnection) {
}
}
-    outputProgress({
-      status: "complete",
-      operation: "Purchase orders import completed",
-      current: totalItems,
-      total: totalItems,
-      duration: formatElapsedTime((Date.now() - startTime) / 1000),
-    });
// Only update sync status if we get here (no errors thrown)
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
VALUES ('purchase_orders', NOW())
ON DUPLICATE KEY UPDATE
last_sync_timestamp = NOW(),
last_sync_id = LAST_INSERT_ID(last_sync_id)
`);
return {
status: "complete",
-      totalImported: totalItems
+      totalImported: totalItems,
recordsAdded: recordsAdded || 0,
recordsUpdated: recordsUpdated || 0,
incrementalUpdate,
lastSyncTime
};
} catch (error) {
outputProgress({
operation: "Purchase orders import failed",
operation: `${incrementalUpdate ? 'Incremental' : 'Full'} purchase orders import failed`,
status: "error",
error: error.message,
});
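
The FIFO logic above applies receivings in date order against the ordered quantity, tags anything beyond it as excess, and keeps the first and last fulfilling receivings for the cost and date columns. The core idea, reduced to a standalone sketch (field names simplified):

function fulfillFifo(orderedQty, receivings) {
  let remaining = orderedQty;
  const tracking = [];
  const sorted = [...receivings].sort(
    (a, b) => new Date(a.date || '9999-12-31') - new Date(b.date || '9999-12-31')
  );
  for (const r of sorted) {
    const applied = Math.min(remaining, r.qty);
    tracking.push({ receiving_id: r.id, applied, excess: r.qty - applied });
    remaining -= applied;
  }
  return { tracking, unfulfilled: remaining };
}
// fulfillFifo(10, [{ id: 1, qty: 6, date: '2025-01-02' },
//                  { id: 2, qty: 8, date: '2025-01-05' }])
// -> receiving 1 applies 6, receiving 2 applies 4 (excess 4), unfulfilled 0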


@@ -0,0 +1,82 @@
// Split into inserts and updates
const insertsAndUpdates = batch.reduce((acc, po) => {
const key = `${po.po_id}-${po.pid}`;
if (existingPOMap.has(key)) {
const existing = existingPOMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
const newVal = po[col] ?? null;
const oldVal = existing[col] ?? null;
// Special handling for numbers to avoid type coercion issues
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
// Special handling for receiving_history JSON
if (col === 'receiving_history') {
return JSON.stringify(newVal) !== JSON.stringify(oldVal);
}
return newVal !== oldVal;
});
if (hasChanges) {
console.log(`PO line changed: ${key}`, {
po_id: po.po_id,
pid: po.pid,
changes: columnNames.filter(col => {
const newVal = po[col] ?? null;
const oldVal = existing[col] ?? null;
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001;
}
if (col === 'receiving_history') {
return JSON.stringify(newVal) !== JSON.stringify(oldVal);
}
return newVal !== oldVal;
})
});
acc.updates.push({
po_id: po.po_id,
pid: po.pid,
values: columnNames.map(col => po[col] ?? null)
});
}
} else {
console.log(`New PO line: ${key}`);
acc.inserts.push({
po_id: po.po_id,
pid: po.pid,
values: columnNames.map(col => po[col] ?? null)
});
}
return acc;
}, { inserts: [], updates: [] });
// Handle inserts
if (insertsAndUpdates.inserts.length > 0) {
const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
const insertResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${insertPlaceholders}
`, insertsAndUpdates.inserts.map(i => i.values).flat());
recordsAdded += insertResult[0].affectedRows;
}
// Handle updates
if (insertsAndUpdates.updates.length > 0) {
const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
const updateResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${updatePlaceholders}
ON DUPLICATE KEY UPDATE
${columnNames
.filter(col => col !== "po_id" && col !== "pid")
.map(col => `${col} = VALUES(${col})`)
.join(",")};
`, insertsAndUpdates.updates.map(u => u.values).flat());
// Each update affects 2 rows in affectedRows, so we divide by 2 to get actual count
recordsUpdated += insertsAndUpdates.updates.length;
}
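
The point of this extracted snippet is that existing rows are read back and diffed column by column before the upsert, so unchanged rows never reach ON DUPLICATE KEY UPDATE and the update counters stay meaningful. The comparison, reduced to a standalone sketch:

function rowChanged(columnNames, next, prev) {
  return columnNames.some(col => {
    const a = next[col] ?? null;
    const b = prev[col] ?? null;
    // epsilon compare for numerics, canonical-string compare for the JSON column
    if (typeof a === 'number' && typeof b === 'number') return Math.abs(a - b) > 0.00001;
    if (col === 'receiving_history') return JSON.stringify(a) !== JSON.stringify(b);
    return a !== b;
  });
}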


@@ -156,7 +156,7 @@ async function resetDatabase() {
SELECT GROUP_CONCAT(table_name) as tables
FROM information_schema.tables
WHERE table_schema = DATABASE()
-        AND table_name != 'users'
+        AND table_name NOT IN ('users', 'import_history')
`);
if (!tables[0].tables) {


@@ -0,0 +1,180 @@
const readline = require('readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const question = (query) => new Promise((resolve) => rl.question(query, resolve));
async function loadScript(name) {
try {
return await require(name);
} catch (error) {
console.error(`Failed to load script ${name}:`, error);
return null;
}
}
async function runWithTimeout(fn) {
return new Promise((resolve, reject) => {
// Create a child process for the script
const child = require('child_process').fork(fn.toString(), [], {
stdio: 'inherit'
});
child.on('exit', (code) => {
if (code === 0) {
resolve();
} else {
reject(new Error(`Script exited with code ${code}`));
}
});
child.on('error', (err) => {
reject(err);
});
});
}
function clearScreen() {
process.stdout.write('\x1Bc');
}
const scripts = {
'Import Scripts': {
'1': { name: 'Full Import From Production', path: './import-from-prod' },
'2': { name: 'Individual Import Scripts ▸', submenu: {
'1': { name: 'Import Orders', path: './import/orders', key: 'importOrders' },
'2': { name: 'Import Products', path: './import/products', key: 'importProducts' },
'3': { name: 'Import Purchase Orders', path: './import/purchase-orders' },
'4': { name: 'Import Categories', path: './import/categories' },
'b': { name: 'Back to Main Menu' }
}}
},
'Metrics': {
'3': { name: 'Calculate All Metrics', path: './calculate-metrics' },
'4': { name: 'Individual Metric Scripts ▸', submenu: {
'1': { name: 'Brand Metrics', path: './metrics/brand-metrics' },
'2': { name: 'Category Metrics', path: './metrics/category-metrics' },
'3': { name: 'Financial Metrics', path: './metrics/financial-metrics' },
'4': { name: 'Product Metrics', path: './metrics/product-metrics' },
'5': { name: 'Sales Forecasts', path: './metrics/sales-forecasts' },
'6': { name: 'Time Aggregates', path: './metrics/time-aggregates' },
'7': { name: 'Vendor Metrics', path: './metrics/vendor-metrics' },
'b': { name: 'Back to Main Menu' }
}}
},
'Database Management': {
'5': { name: 'Test Production Connection', path: './test-prod-connection' }
},
'Reset Scripts': {
'6': { name: 'Reset Database', path: './reset-db' },
'7': { name: 'Reset Metrics', path: './reset-metrics' }
}
};
let lastRun = null;
async function displayMenu(menuItems, title = 'Inventory Management Script Runner') {
clearScreen();
console.log(`\n${title}\n`);
for (const [category, items] of Object.entries(menuItems)) {
console.log(`\n${category}:`);
Object.entries(items).forEach(([key, script]) => {
console.log(`${key}. ${script.name}`);
});
}
if (lastRun) {
console.log('\nQuick Access:');
console.log(`r. Repeat Last Script (${lastRun.name})`);
}
console.log('\nq. Quit\n');
}
async function handleSubmenu(submenu, title) {
while (true) {
await displayMenu({"Individual Scripts": submenu}, title);
const choice = await question('Select an option (or b to go back): ');
if (choice.toLowerCase() === 'b') {
return null;
}
if (submenu[choice]) {
return submenu[choice];
}
console.log('Invalid selection. Please try again.');
await new Promise(resolve => setTimeout(resolve, 1000));
}
}
async function runScript(script) {
console.log(`\nRunning: ${script.name}`);
try {
const scriptPath = require.resolve(script.path);
await runWithTimeout(scriptPath);
console.log('\nScript completed successfully');
lastRun = script;
} catch (error) {
console.error('\nError running script:', error);
}
await question('\nPress Enter to continue...');
}
async function main() {
while (true) {
await displayMenu(scripts);
const choice = await question('Select an option: ');
if (choice.toLowerCase() === 'q') {
break;
}
if (choice.toLowerCase() === 'r' && lastRun) {
await runScript(lastRun);
continue;
}
let selectedScript = null;
for (const category of Object.values(scripts)) {
if (category[choice]) {
selectedScript = category[choice];
break;
}
}
if (!selectedScript) {
console.log('Invalid selection. Please try again.');
await new Promise(resolve => setTimeout(resolve, 1000));
continue;
}
if (selectedScript.submenu) {
const submenuChoice = await handleSubmenu(
selectedScript.submenu,
selectedScript.name
);
if (submenuChoice && submenuChoice.path) {
await runScript(submenuChoice);
}
} else if (selectedScript.path) {
await runScript(selectedScript);
}
}
rl.close();
process.exit(0);
}
if (require.main === module) {
main().catch(error => {
console.error('Fatal error:', error);
process.exit(1);
});
}
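
runWithTimeout forks each script into a child process so a crashing script cannot take the menu down; despite the name, it never arms a timer. A sketch of the same pattern with an actual timeout (duration arbitrary):

const { fork } = require('child_process');

function runForked(path, ms = 10 * 60 * 1000) {
  return new Promise((resolve, reject) => {
    const child = fork(path, [], { stdio: 'inherit' });
    const timer = setTimeout(() => {
      child.kill();
      reject(new Error(`Script timed out after ${ms} ms`));
    }, ms);
    child.on('exit', (code) => {
      clearTimeout(timer);
      code === 0 ? resolve() : reject(new Error(`Script exited with code ${code}`));
    });
    child.on('error', (err) => {
      clearTimeout(timer);
      reject(err);
    });
  });
}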


@@ -13,7 +13,7 @@ import {
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Loader2, RefreshCw, Upload, X, Database } from "lucide-react";
import { Loader2, RefreshCw, X, Database } from "lucide-react";
import config from '../../config';
import { toast } from "sonner";
@@ -36,11 +36,6 @@ interface ImportProgress {
duration?: string;
}
-interface ImportLimits {
-  products: number;
-  orders: number;
-  purchaseOrders: number;
-}
export function DataManagement() {
const [isImportingProd, setIsImportingProd] = useState(false);