Add/update initial try of order components and add CSV update script + update import script

2025-01-10 00:01:43 -05:00
parent afe8510751
commit 8bdd188dfe
17 changed files with 38513 additions and 37881 deletions

View File

@@ -4,8 +4,10 @@ const csv = require('csv-parse');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
// For testing purposes, limit the number of rows to import
const TEST_ROW_LIMIT = 5000;
// For testing purposes, limit the number of rows to import (0 = no limit)
const PRODUCTS_TEST_LIMIT = 0;
const ORDERS_TEST_LIMIT = 5000;
const PURCHASE_ORDERS_TEST_LIMIT = 0;
dotenv.config({ path: path.join(__dirname, '../.env') });
@@ -17,8 +19,46 @@ const dbConfig = {
multipleStatements: true
};
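Worth noting: the multipleStatements: true flag above is what lets main() execute an entire .sql file (many semicolon-separated statements) with a single connection.query call; mysql2 rejects multi-statement strings by default. A minimal illustration, assuming an open connection as in this script:

// With multipleStatements enabled, several semicolon-separated
// statements run in one round trip:
await connection.query('DROP TABLE IF EXISTS tmp; CREATE TABLE tmp (id INT);');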
// Helper function to count total rows in a CSV file
async function countRows(filePath) {
return new Promise((resolve, reject) => {
let count = 0;
fs.createReadStream(filePath)
.pipe(csv.parse())
.on('data', () => count++)
.on('error', reject)
.on('end', () => resolve(count - 1)); // Subtract 1 for header row
});
}
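Because countRows fully parses the CSV, every import pass reads each file twice: once to count, once to import. If the files are known to contain no quoted newlines and end with a trailing newline, counting raw line breaks is cheaper; a sketch under those assumptions (the helper name is illustrative, not part of the commit):

// Hypothetical fast row count: counts '\n' bytes without CSV parsing.
// Only valid if no quoted field contains a newline and the file ends
// with a trailing newline.
function countRowsFast(filePath) {
  return new Promise((resolve, reject) => {
    let count = 0;
    fs.createReadStream(filePath)
      .on('data', chunk => {
        for (let i = 0; i < chunk.length; i++) {
          if (chunk[i] === 10) count++; // 10 === '\n'
        }
      })
      .on('error', reject)
      .on('end', () => resolve(count - 1)); // subtract header row
  });
}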
// Helper function to format time duration
function formatDuration(seconds) {
if (seconds < 60) return `${Math.round(seconds)}s`;
const minutes = Math.floor(seconds / 60);
seconds = Math.round(seconds % 60);
return `${minutes}m ${seconds}s`;
}
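For reference, some sample outputs of formatDuration as written (note it never breaks minutes out into hours):

formatDuration(42);   // "42s"
formatDuration(125);  // "2m 5s"
formatDuration(3725); // "62m 5s"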
// Helper function to update progress with time estimate
function updateProgress(current, total, operation, startTime) {
const percentage = ((current / total) * 100).toFixed(1);
const elapsed = (Date.now() - startTime) / 1000;
const rate = current / elapsed; // rows per second
const remaining = (total - current) / rate;
process.stdout.write(
`\r${operation}: ${current.toLocaleString()}/${total.toLocaleString()} rows ` +
`(${percentage}%) - Rate: ${Math.round(rate)}/s - ` +
`Elapsed: ${formatDuration(elapsed)} - ` +
`Est. remaining: ${formatDuration(remaining)}`
);
}
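updateProgress rewrites a single console line via the leading carriage return (\r), which is why the log and error messages elsewhere in this commit gained a leading \n: without it they would be appended onto the half-overwritten progress line. With illustrative numbers, a rendered progress line looks like:

Orders: 3,200/5,000 rows (64.0%) - Rate: 457/s - Elapsed: 7s - Est. remaining: 4s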
async function importProducts(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting products import (${totalRows.toLocaleString()} total rows${PRODUCTS_TEST_LIMIT > 0 ? ` - limited to ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -29,14 +69,22 @@ async function importProducts(connection, filePath) {
let updated = 0;
let added = 0;
let rowCount = 0;
let lastUpdate = Date.now();
for await (const record of parser) {
// if (rowCount >= TEST_ROW_LIMIT) {
// console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
// break;
// }
if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`);
break;
}
rowCount++;
// Update progress every 100ms to avoid console flooding
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Products', startTime);
lastUpdate = now;
}
// Check if product exists
const [existing] = await connection.query('SELECT product_id FROM products WHERE product_id = ?', [record.product_id]);
@@ -69,14 +117,19 @@ async function importProducts(connection, filePath) {
]);
existing.length ? updated++ : added++;
} catch (error) {
console.error(`Error importing product ${record.product_id}:`, error.message);
console.error(`\nError importing product ${record.product_id}:`, error.message);
}
}
console.log(`Products import completed: ${added} added, ${updated} updated (processed ${rowCount} rows)`);
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nProducts import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated (processed ${rowCount.toLocaleString()} rows)`);
}
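Each row currently costs two round trips to MySQL: a SELECT to check existence, then an INSERT or UPDATE. Since product_id is the key the existence check uses, a single upsert could halve that. A sketch with illustrative column names, since the actual statement is truncated out of this hunk:

// Hypothetical upsert; the real column list is not shown in the diff.
// MySQL reports affectedRows as 1 for a fresh insert, 2 for an update
// (0 if the row existed and nothing changed).
const [result] = await connection.query(
  `INSERT INTO products (product_id, name, updated_at)
     VALUES (?, ?, ?)
   ON DUPLICATE KEY UPDATE name = VALUES(name), updated_at = VALUES(updated_at)`,
  [record.product_id, record.name, convertDate(record.updated_at)]
);
result.affectedRows === 1 ? added++ : updated++;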
async function importOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting orders import (${totalRows.toLocaleString()} total rows${ORDERS_TEST_LIMIT > 0 ? ` - limited to ${ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -92,14 +145,22 @@ async function importOrders(connection, filePath) {
let updated = 0;
let added = 0;
let rowCount = 0;
let lastUpdate = Date.now();
for await (const record of parser) {
if (rowCount >= TEST_ROW_LIMIT) {
console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
if (ORDERS_TEST_LIMIT > 0 && rowCount >= ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`);
break;
}
rowCount++;
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Orders', startTime);
lastUpdate = now;
}
if (!validProductIds.has(record.product_id)) {
skipped++;
continue;
@@ -128,15 +189,20 @@ async function importOrders(connection, filePath) {
]);
existing.length ? updated++ : added++;
} catch (error) {
console.error(`Error importing order ${record.order_number}, product ${record.product_id}:`, error.message);
console.error(`\nError importing order ${record.order_number}, product ${record.product_id}:`, error.message);
skipped++;
}
}
console.log(`Orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nOrders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
}
async function importPurchaseOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PURCHASE_ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PURCHASE_ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting purchase orders import (${totalRows.toLocaleString()} total rows${PURCHASE_ORDERS_TEST_LIMIT > 0 ? ` - limited to ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -152,14 +218,22 @@ async function importPurchaseOrders(connection, filePath) {
let updated = 0;
let added = 0;
let rowCount = 0;
let lastUpdate = Date.now();
for await (const record of parser) {
if (rowCount >= TEST_ROW_LIMIT) {
console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
if (PURCHASE_ORDERS_TEST_LIMIT > 0 && rowCount >= PURCHASE_ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`);
break;
}
rowCount++;
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Purchase Orders', startTime);
lastUpdate = now;
}
if (!validProductIds.has(record.product_id)) {
skipped++;
continue;
@@ -188,14 +262,18 @@ async function importPurchaseOrders(connection, filePath) {
]);
existing.length ? updated++ : added++;
} catch (error) {
console.error(`Error importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
console.error(`\nError importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
skipped++;
}
}
console.log(`Purchase orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nPurchase orders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
}
async function main() {
console.log('Starting import process...');
const startTime = Date.now();
const connection = await mysql.createConnection(dbConfig);
try {
@@ -205,18 +283,14 @@ async function main() {
await connection.query(schemaSQL);
// Import products first since they're referenced by other tables
console.log('Importing products...');
await importProducts(connection, path.join(__dirname, '../csv/39f2x83-products.csv'));
console.log('Importing orders...');
await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));
console.log('Importing purchase orders...');
await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
console.log('All imports completed successfully');
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nAll imports completed successfully in ${duration} seconds`);
} catch (error) {
console.error('Error during import:', error);
console.error('\nError during import:', error);
process.exit(1);
} finally {
await connection.end();

View File

@@ -24,10 +24,10 @@ async function setupDatabase() {
console.log('Schema created successfully');
// Create stored procedures
console.log('Setting up stored procedures...');
const proceduresSQL = fs.readFileSync(path.join(__dirname, '../db/procedures.sql'), 'utf8');
await connection.query(proceduresSQL);
console.log('Stored procedures created successfully');
// console.log('Setting up stored procedures...');
// const proceduresSQL = fs.readFileSync(path.join(__dirname, '../db/procedures.sql'), 'utf8');
// await connection.query(proceduresSQL);
// console.log('Stored procedures created successfully');
console.log('Database setup completed successfully');
} catch (error) {

View File

@@ -0,0 +1,97 @@
const fs = require('fs');
const path = require('path');
const https = require('https');
// Configuration
const FILES = [
{
name: '39f2x83-products.csv',
url: 'https://feeds.acherryontop.com/39f2x83-products.csv'
},
{
name: '39f2x83-orders.csv',
url: 'https://feeds.acherryontop.com/39f2x83-orders.csv'
},
{
name: '39f2x83-purchase_orders.csv',
url: 'https://feeds.acherryontop.com/39f2x83-purchase_orders.csv'
}
];
const CSV_DIR = path.join(__dirname, '..', 'csv');
// Ensure CSV directory exists
if (!fs.existsSync(CSV_DIR)) {
fs.mkdirSync(CSV_DIR, { recursive: true });
}
// Function to download a file
function downloadFile(url, filePath) {
return new Promise((resolve, reject) => {
const file = fs.createWriteStream(filePath);
https.get(url, response => {
if (response.statusCode !== 200) {
reject(new Error(`Failed to download: ${response.statusCode} ${response.statusMessage}`));
return;
}
const totalSize = parseInt(response.headers['content-length'], 10);
let downloadedSize = 0;
response.on('data', chunk => {
downloadedSize += chunk.length;
const progress = (downloadedSize / totalSize * 100).toFixed(2);
process.stdout.write(`\rDownloading ${path.basename(filePath)}: ${progress}%`);
});
response.pipe(file);
file.on('finish', () => {
process.stdout.write('\n');
file.close();
resolve();
});
}).on('error', error => {
fs.unlink(filePath, () => {}); // Delete the file if download failed
reject(error);
});
file.on('error', error => {
fs.unlink(filePath, () => {}); // Delete the file if there was an error
reject(error);
});
});
}
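Two caveats with downloadFile as written: Node's bare https.get does not follow HTTP redirects (a 301/302 from the feed host would surface as the "Failed to download" rejection above), and if the server omits a Content-Length header, totalSize is NaN and every progress update prints "NaN%". A minimal guard for the second case, as a drop-in replacement for the data handler:

// Fall back to a byte count when Content-Length is missing.
response.on('data', chunk => {
  downloadedSize += chunk.length;
  const progress = Number.isFinite(totalSize)
    ? `${(downloadedSize / totalSize * 100).toFixed(2)}%`
    : `${(downloadedSize / 1048576).toFixed(1)} MB`;
  process.stdout.write(`\rDownloading ${path.basename(filePath)}: ${progress}`);
});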
// Main function to update all files
async function updateFiles() {
console.log('Starting CSV file updates...');
for (const file of FILES) {
const filePath = path.join(CSV_DIR, file.name);
try {
// Delete existing file if it exists
if (fs.existsSync(filePath)) {
console.log(`Removing existing file: ${file.name}`);
fs.unlinkSync(filePath);
}
// Download new file
console.log(`Downloading ${file.name}...`);
await downloadFile(file.url, filePath);
console.log(`Successfully updated ${file.name}`);
} catch (error) {
console.error(`Error updating ${file.name}:`, error.message);
}
}
console.log('CSV file update complete!');
}
// Run the update
updateFiles().catch(error => {
console.error('Update failed:', error);
process.exit(1);
});
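The loop in updateFiles awaits each download in turn, so files transfer one at a time and a failure on one file is logged without aborting the rest. If parallel transfers were ever wanted, a sketch using Promise.allSettled follows, though the single-line \r progress writer would then interleave across files and need reworking:

// Hypothetical parallel variant; the per-chunk progress output above
// would garble when several downloads write \r lines concurrently.
async function updateFilesParallel() {
  const results = await Promise.allSettled(
    FILES.map(file => downloadFile(file.url, path.join(CSV_DIR, file.name)))
  );
  results.forEach((result, i) => {
    if (result.status === 'rejected') {
      console.error(`Error updating ${FILES[i].name}:`, result.reason.message);
    }
  });
}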