Add/update initial try of order components and add csv update script + update import script

This commit is contained in:
2025-01-10 00:01:43 -05:00
parent afe8510751
commit 8bdd188dfe
17 changed files with 38513 additions and 37881 deletions

View File

@@ -4,8 +4,10 @@ const csv = require('csv-parse');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
// For testing purposes, limit the number of rows to import
const TEST_ROW_LIMIT = 5000;
// For testing purposes, limit the number of rows to import (0 = no limit)
const PRODUCTS_TEST_LIMIT = 0;
const ORDERS_TEST_LIMIT = 5000;
const PURCHASE_ORDERS_TEST_LIMIT = 0;
dotenv.config({ path: path.join(__dirname, '../.env') });
@@ -17,8 +19,46 @@ const dbConfig = {
multipleStatements: true
};
// Helper function to count total rows in a CSV file
// Count the data rows in a CSV file (header row excluded).
// Streams the file through the csv parser so large files are never
// loaded into memory at once.
//
// @param {string} filePath - Path to the CSV file to count.
// @returns {Promise<number>} Number of data rows (>= 0); rejects on read/parse errors.
async function countRows(filePath) {
    return new Promise((resolve, reject) => {
        let count = 0;
        fs.createReadStream(filePath)
            .pipe(csv.parse())
            .on('data', () => count++)
            .on('error', reject)
            // Subtract 1 for the header row, but clamp at 0 so an
            // empty file reports 0 rows instead of -1.
            .on('end', () => resolve(Math.max(0, count - 1)));
    });
}
// Helper function to format time duration
// Format a duration given in (possibly fractional) seconds as a short
// human-readable string, e.g. 45 -> "45s", 90 -> "1m 30s".
//
// @param {number} seconds - Duration in seconds.
// @returns {string} Formatted duration.
function formatDuration(seconds) {
    // Round once up front so the remainder can never round up to 60:
    // the previous Math.round(seconds % 60) turned 119.7s into "1m 60s"
    // instead of "2m 0s".
    const total = Math.round(seconds);
    if (total < 60) return `${total}s`;
    const minutes = Math.floor(total / 60);
    const secs = total % 60;
    return `${minutes}m ${secs}s`;
}
// Helper function to update progress with time estimate
// Render a single-line progress indicator (carriage-return overwrite) with
// throughput and a time-remaining estimate. Writes directly to stdout.
//
// @param {number} current - Rows processed so far.
// @param {number} total - Total rows expected (may be 0).
// @param {string} operation - Label shown before the counts, e.g. "Products".
// @param {number} startTime - Epoch millis when the operation started.
function updateProgress(current, total, operation, startTime) {
    const elapsed = (Date.now() - startTime) / 1000;
    // Guard the divisions: total can be 0 (empty file) and elapsed can be 0
    // on the very first call, which previously printed "NaN%" / "Infinity/s".
    const percentage = total > 0 ? ((current / total) * 100).toFixed(1) : '0.0';
    const rate = elapsed > 0 ? current / elapsed : 0; // rows per second
    const remaining = rate > 0 ? (total - current) / rate : 0;
    process.stdout.write(
        `\r${operation}: ${current.toLocaleString()}/${total.toLocaleString()} rows ` +
        `(${percentage}%) - Rate: ${Math.round(rate)}/s - ` +
        `Elapsed: ${formatDuration(elapsed)} - ` +
        `Est. remaining: ${formatDuration(remaining)}`
    );
}
async function importProducts(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting products import (${totalRows.toLocaleString()} total rows${PRODUCTS_TEST_LIMIT > 0 ? ` - limited to ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -29,14 +69,22 @@ async function importProducts(connection, filePath) {
let updated = 0;
let added = 0;
let rowCount = 0;
let lastUpdate = Date.now();
for await (const record of parser) {
// if (rowCount >= TEST_ROW_LIMIT) {
// console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
// break;
// }
if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`);
break;
}
rowCount++;
// Update progress every 100ms to avoid console flooding
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Products', startTime);
lastUpdate = now;
}
// Check if product exists
const [existing] = await connection.query('SELECT product_id FROM products WHERE product_id = ?', [record.product_id]);
@@ -69,14 +117,19 @@ async function importProducts(connection, filePath) {
]);
existing.length ? updated++ : added++;
} catch (error) {
console.error(`Error importing product ${record.product_id}:`, error.message);
console.error(`\nError importing product ${record.product_id}:`, error.message);
}
}
console.log(`Products import completed: ${added} added, ${updated} updated (processed ${rowCount} rows)`);
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nProducts import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated (processed ${rowCount.toLocaleString()} rows)`);
}
async function importOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting orders import (${totalRows.toLocaleString()} total rows${ORDERS_TEST_LIMIT > 0 ? ` - limited to ${ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -92,14 +145,22 @@ async function importOrders(connection, filePath) {
let updated = 0;
let added = 0;
let rowCount = 0;
let lastUpdate = Date.now();
for await (const record of parser) {
if (rowCount >= TEST_ROW_LIMIT) {
console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
if (ORDERS_TEST_LIMIT > 0 && rowCount >= ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`);
break;
}
rowCount++;
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Orders', startTime);
lastUpdate = now;
}
if (!validProductIds.has(record.product_id)) {
skipped++;
continue;
@@ -128,15 +189,20 @@ async function importOrders(connection, filePath) {
]);
existing.length ? updated++ : added++;
} catch (error) {
console.error(`Error importing order ${record.order_number}, product ${record.product_id}:`, error.message);
console.error(`\nError importing order ${record.order_number}, product ${record.product_id}:`, error.message);
skipped++;
}
}
console.log(`Orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nOrders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
}
async function importPurchaseOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PURCHASE_ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PURCHASE_ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting purchase orders import (${totalRows.toLocaleString()} total rows${PURCHASE_ORDERS_TEST_LIMIT > 0 ? ` - limited to ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -152,14 +218,22 @@ async function importPurchaseOrders(connection, filePath) {
let updated = 0;
let added = 0;
let rowCount = 0;
let lastUpdate = Date.now();
for await (const record of parser) {
if (rowCount >= TEST_ROW_LIMIT) {
console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
if (PURCHASE_ORDERS_TEST_LIMIT > 0 && rowCount >= PURCHASE_ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`);
break;
}
rowCount++;
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Purchase Orders', startTime);
lastUpdate = now;
}
if (!validProductIds.has(record.product_id)) {
skipped++;
continue;
@@ -188,14 +262,18 @@ async function importPurchaseOrders(connection, filePath) {
]);
existing.length ? updated++ : added++;
} catch (error) {
console.error(`Error importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
console.error(`\nError importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
skipped++;
}
}
console.log(`Purchase orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nPurchase orders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
}
async function main() {
console.log('Starting import process...');
const startTime = Date.now();
const connection = await mysql.createConnection(dbConfig);
try {
@@ -205,18 +283,14 @@ async function main() {
await connection.query(schemaSQL);
// Import products first since they're referenced by other tables
console.log('Importing products...');
await importProducts(connection, path.join(__dirname, '../csv/39f2x83-products.csv'));
console.log('Importing orders...');
await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));
console.log('Importing purchase orders...');
await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
console.log('All imports completed successfully');
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nAll imports completed successfully in ${duration} seconds`);
} catch (error) {
console.error('Error during import:', error);
console.error('\nError during import:', error);
process.exit(1);
} finally {
await connection.end();