Fix CSV update/import on settings page + lots of CORS work

This commit is contained in:
2025-01-10 14:17:07 -05:00
parent dbdf77331c
commit a1f4e57394
9 changed files with 957 additions and 329 deletions

View File

@@ -4,10 +4,10 @@ const csv = require('csv-parse');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
// For testing purposes, limit the number of rows to import (0 = no limit)
const PRODUCTS_TEST_LIMIT = 0;
const ORDERS_TEST_LIMIT = 10000;
const PURCHASE_ORDERS_TEST_LIMIT = 10000;
// Get test limits from environment variables
const PRODUCTS_TEST_LIMIT = parseInt(process.env.PRODUCTS_TEST_LIMIT || '0');
const ORDERS_TEST_LIMIT = parseInt(process.env.ORDERS_TEST_LIMIT || '10000');
const PURCHASE_ORDERS_TEST_LIMIT = parseInt(process.env.PURCHASE_ORDERS_TEST_LIMIT || '10000');
dotenv.config({ path: path.join(__dirname, '../.env') });
@@ -19,6 +19,17 @@ const dbConfig = {
multipleStatements: true
};
// Helper function to output progress in JSON format
// Helper function to emit a progress event as one JSON line on stdout.
// Payloads without a truthy `status` are tagged as 'running'; spreading the
// caller's data last preserves any explicit (even falsy) status it carried.
function outputProgress(data) {
  const payload = data.status ? data : { status: 'running', ...data };
  console.log(JSON.stringify(payload));
}
// Helper function to count total rows in a CSV file
async function countRows(filePath) {
return new Promise((resolve, reject) => {
@@ -41,24 +52,33 @@ function formatDuration(seconds) {
// Helper function to update progress with time estimate
function updateProgress(current, total, operation, startTime) {
const percentage = ((current / total) * 100).toFixed(1);
const elapsed = (Date.now() - startTime) / 1000;
const rate = current / elapsed; // rows per second
const remaining = (total - current) / rate;
process.stdout.write(
`\r${operation}: ${current.toLocaleString()}/${total.toLocaleString()} rows ` +
`(${percentage}%) - Rate: ${Math.round(rate)}/s - ` +
`Elapsed: ${formatDuration(elapsed)} - ` +
`Est. remaining: ${formatDuration(remaining)}`
);
outputProgress({
status: 'running',
operation,
current,
total,
rate,
elapsed: formatDuration(elapsed),
remaining: formatDuration(remaining),
percentage: ((current / total) * 100).toFixed(1)
});
}
async function importProducts(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting products import (${totalRows.toLocaleString()} total rows${PRODUCTS_TEST_LIMIT > 0 ? ` - limited to ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
outputProgress({
operation: 'Starting products import',
current: 0,
total: totalRows,
testLimit: PRODUCTS_TEST_LIMIT,
percentage: '0'
});
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -73,7 +93,12 @@ async function importProducts(connection, filePath) {
for await (const record of parser) {
if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`);
outputProgress({
operation: 'Products import',
message: `Reached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
@@ -81,7 +106,7 @@ async function importProducts(connection, filePath) {
// Update progress every 100ms to avoid console flooding
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Products', startTime);
updateProgress(rowCount, totalRows, 'Products import', startTime);
lastUpdate = now;
}
@@ -121,15 +146,29 @@ async function importProducts(connection, filePath) {
}
}
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nProducts import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated (processed ${rowCount.toLocaleString()} rows)`);
outputProgress({
status: 'running',
operation: 'Products import completed',
current: rowCount,
total: totalRows,
added,
updated,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
}
async function importOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting orders import (${totalRows.toLocaleString()} total rows${ORDERS_TEST_LIMIT > 0 ? ` - limited to ${ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
outputProgress({
operation: 'Starting orders import',
current: 0,
total: totalRows,
testLimit: ORDERS_TEST_LIMIT,
percentage: '0'
});
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -149,7 +188,12 @@ async function importOrders(connection, filePath) {
for await (const record of parser) {
if (ORDERS_TEST_LIMIT > 0 && rowCount >= ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`);
outputProgress({
operation: 'Orders import',
message: `Reached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
@@ -157,7 +201,7 @@ async function importOrders(connection, filePath) {
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Orders', startTime);
updateProgress(rowCount, totalRows, 'Orders import', startTime);
lastUpdate = now;
}
@@ -194,15 +238,30 @@ async function importOrders(connection, filePath) {
}
}
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nOrders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
outputProgress({
status: 'running',
operation: 'Orders import completed',
current: rowCount,
total: totalRows,
added,
updated,
skipped,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
}
async function importPurchaseOrders(connection, filePath) {
const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
const totalRows = PURCHASE_ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PURCHASE_ORDERS_TEST_LIMIT) : await countRows(filePath);
const startTime = Date.now();
console.log(`\nStarting purchase orders import (${totalRows.toLocaleString()} total rows${PURCHASE_ORDERS_TEST_LIMIT > 0 ? ` - limited to ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);
outputProgress({
operation: 'Starting purchase orders import',
current: 0,
total: totalRows,
testLimit: PURCHASE_ORDERS_TEST_LIMIT,
percentage: '0'
});
function convertDate(dateStr) {
if (!dateStr) return null;
@@ -222,7 +281,12 @@ async function importPurchaseOrders(connection, filePath) {
for await (const record of parser) {
if (PURCHASE_ORDERS_TEST_LIMIT > 0 && rowCount >= PURCHASE_ORDERS_TEST_LIMIT) {
console.log(`\nReached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`);
outputProgress({
operation: 'Purchase orders import',
message: `Reached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`,
current: rowCount,
total: totalRows
});
break;
}
rowCount++;
@@ -230,7 +294,7 @@ async function importPurchaseOrders(connection, filePath) {
// Update progress every 100ms
const now = Date.now();
if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Purchase Orders', startTime);
updateProgress(rowCount, totalRows, 'Purchase orders import', startTime);
lastUpdate = now;
}
@@ -267,18 +331,35 @@ async function importPurchaseOrders(connection, filePath) {
}
}
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nPurchase orders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
outputProgress({
status: 'running',
operation: 'Purchase orders import completed',
current: rowCount,
total: totalRows,
added,
updated,
skipped,
duration: formatDuration((Date.now() - startTime) / 1000),
percentage: '100'
});
}
async function main() {
console.log('Starting import process...');
outputProgress({
operation: 'Starting import process',
message: 'Connecting to database...'
});
const startTime = Date.now();
const connection = await mysql.createConnection(dbConfig);
try {
// Check if tables exist, if not create them
console.log('Checking database schema...');
outputProgress({
operation: 'Checking database schema',
message: 'Creating tables if needed...'
});
const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
await connection.query(schemaSQL);
@@ -287,14 +368,21 @@ async function main() {
await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));
await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
const duration = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nAll imports completed successfully in ${duration} seconds`);
outputProgress({
status: 'complete',
operation: 'Import process completed',
duration: formatDuration((Date.now() - startTime) / 1000)
});
} catch (error) {
console.error('\nError during import:', error);
outputProgress({
status: 'error',
error: error.message
});
process.exit(1);
} finally {
await connection.end();
}
}
// Run the import
main();

View File

@@ -38,17 +38,42 @@ function downloadFile(url, filePath) {
const totalSize = parseInt(response.headers['content-length'], 10);
let downloadedSize = 0;
let lastProgressUpdate = Date.now();
const startTime = Date.now();
response.on('data', chunk => {
downloadedSize += chunk.length;
const progress = (downloadedSize / totalSize * 100).toFixed(2);
process.stdout.write(`\rDownloading ${path.basename(filePath)}: ${progress}%`);
const now = Date.now();
// Update progress at most every 100ms to avoid console flooding
if (now - lastProgressUpdate > 100) {
const elapsed = (now - startTime) / 1000;
const rate = downloadedSize / elapsed;
const remaining = (totalSize - downloadedSize) / rate;
console.log(JSON.stringify({
status: 'running',
operation: `Downloading ${path.basename(filePath)}`,
current: downloadedSize,
total: totalSize,
rate: (rate / 1024 / 1024).toFixed(2), // MB/s
elapsed: formatDuration(elapsed),
remaining: formatDuration(remaining),
percentage: ((downloadedSize / totalSize) * 100).toFixed(1)
}));
lastProgressUpdate = now;
}
});
response.pipe(file);
file.on('finish', () => {
process.stdout.write('\n');
console.log(JSON.stringify({
status: 'running',
operation: `Completed ${path.basename(filePath)}`,
current: totalSize,
total: totalSize,
percentage: '100'
}));
file.close();
resolve();
});
@@ -64,34 +89,79 @@ function downloadFile(url, filePath) {
});
}
// Helper function to format duration
function formatDuration(seconds) {
if (seconds < 60) return `${Math.round(seconds)}s`;
const minutes = Math.floor(seconds / 60);
seconds = Math.round(seconds % 60);
return `${minutes}m ${seconds}s`;
}
// Main function to update all files
async function updateFiles() {
console.log('Starting CSV file updates...');
console.log(JSON.stringify({
status: 'running',
operation: 'Starting CSV file updates',
total: FILES.length,
current: 0
}));
for (const file of FILES) {
for (let i = 0; i < FILES.length; i++) {
const file = FILES[i];
const filePath = path.join(CSV_DIR, file.name);
try {
// Delete existing file if it exists
if (fs.existsSync(filePath)) {
console.log(`Removing existing file: ${file.name}`);
console.log(JSON.stringify({
status: 'running',
operation: `Removing existing file: ${file.name}`,
current: i,
total: FILES.length,
percentage: ((i / FILES.length) * 100).toFixed(1)
}));
fs.unlinkSync(filePath);
}
// Download new file
console.log(`Downloading ${file.name}...`);
console.log(JSON.stringify({
status: 'running',
operation: `Starting download: ${file.name}`,
current: i,
total: FILES.length,
percentage: ((i / FILES.length) * 100).toFixed(1)
}));
await downloadFile(file.url, filePath);
console.log(`Successfully updated ${file.name}`);
console.log(JSON.stringify({
status: 'running',
operation: `Successfully updated ${file.name}`,
current: i + 1,
total: FILES.length,
percentage: (((i + 1) / FILES.length) * 100).toFixed(1)
}));
} catch (error) {
console.error(`Error updating ${file.name}:`, error.message);
console.error(JSON.stringify({
status: 'error',
operation: `Error updating ${file.name}`,
error: error.message
}));
throw error;
}
}
console.log('CSV file update complete!');
console.log(JSON.stringify({
status: 'complete',
operation: 'CSV file update complete',
current: FILES.length,
total: FILES.length,
percentage: '100'
}));
}
// Run the update
updateFiles().catch(error => {
console.error('Update failed:', error);
console.error(JSON.stringify({
error: `Update failed: ${error.message}`
}));
process.exit(1);
});