Fix incorrect error reporting during import; improve import logging
This commit is contained in:
@@ -46,12 +46,13 @@ function logError(error, context = '') {
|
||||
}
|
||||
|
||||
// Helper function to log import progress
|
||||
function logImport(message) {
|
||||
const timestamp = new Date().toISOString();
|
||||
const logMessage = `[${timestamp}] ${message}\n`;
|
||||
|
||||
// Log to import file
|
||||
fs.appendFileSync(IMPORT_LOG, logMessage);
|
||||
// Append a timestamped entry to the import log file on disk.
// Routine progress chatter is dropped; only events flagged as
// significant are persisted, keeping the log file small.
function logImport(message, isSignificant = false) {
  // Skip disk writes for non-significant events.
  if (!isSignificant) {
    return;
  }
  const stamp = new Date().toISOString();
  fs.appendFileSync(IMPORT_LOG, `[${stamp}] ${message}\n`);
}
|
||||
|
||||
// Helper function to format duration
|
||||
@@ -70,7 +71,26 @@ function formatDuration(seconds) {
|
||||
|
||||
// Helper function to output progress
|
||||
// Emit a progress event. Every event is streamed to stdout as a JSON
// line for the frontend; events that represent milestones are also
// mirrored into the on-disk import log via logImport.
function outputProgress(data) {
  process.stdout.write(JSON.stringify(data) + '\n');

  // Milestones worth persisting: an operation starting (operation set,
  // no per-row counter yet), terminal statuses, test-limit notices,
  // schema creation, and the parallel-import kickoff message.
  const startsOperation = Boolean(data.operation) && !data.current;
  const isTerminal = data.status === 'complete' || data.status === 'error';
  const hitTestLimit = Boolean(data.message?.includes('test limit'));
  const isSchemaChange = Boolean(data.operation?.includes('Creating database schema'));
  const isParallelStart = Boolean(data.message?.includes('Processing orders and purchase orders simultaneously'));

  if (startsOperation || isTerminal || hitTestLimit || isSchemaChange || isParallelStart) {
    const summary = `${data.operation || 'Operation'}${data.message ? ': ' + data.message : ''}${data.error ? ' Error: ' + data.error : ''}${data.status ? ' Status: ' + data.status : ''}`;
    logImport(summary, true);
  }
}
|
||||
|
||||
// Helper function to count total rows in a CSV file
|
||||
@@ -930,15 +950,16 @@ async function importPurchaseOrders(pool, filePath) {
|
||||
}
|
||||
|
||||
async function main() {
|
||||
outputProgress({
|
||||
operation: 'Starting import process',
|
||||
message: 'Creating connection pool...'
|
||||
});
|
||||
|
||||
const startTime = Date.now();
|
||||
let pool;
|
||||
let importInProgress = false;
|
||||
|
||||
try {
|
||||
outputProgress({
|
||||
operation: 'Starting import process',
|
||||
message: 'Creating connection pool...'
|
||||
});
|
||||
|
||||
pool = mysql.createPool(dbConfig);
|
||||
|
||||
// Check if tables exist, if not create them
|
||||
@@ -970,6 +991,8 @@ async function main() {
|
||||
|
||||
// Import all data
|
||||
try {
|
||||
importInProgress = true;
|
||||
|
||||
// Import products first since they're referenced by other tables
|
||||
await importProducts(pool, path.join(__dirname, '../csv/39f2x83-products.csv'));
|
||||
|
||||
@@ -984,20 +1007,31 @@ async function main() {
|
||||
importPurchaseOrders(pool, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'))
|
||||
]);
|
||||
|
||||
outputProgress({
|
||||
status: 'complete',
|
||||
operation: 'Import process completed',
|
||||
duration: formatDuration((Date.now() - startTime) / 1000)
|
||||
});
|
||||
// Only output completion if we haven't encountered an error
|
||||
if (importInProgress) {
|
||||
outputProgress({
|
||||
status: 'complete',
|
||||
operation: 'Import process completed',
|
||||
duration: formatDuration((Date.now() - startTime) / 1000)
|
||||
});
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
importInProgress = false;
|
||||
logError(error, 'Error during import');
|
||||
outputProgress({
|
||||
status: 'error',
|
||||
operation: 'Import process',
|
||||
error: error.message
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
importInProgress = false;
|
||||
logError(error, 'Fatal error during import process');
|
||||
outputProgress({
|
||||
status: 'error',
|
||||
operation: 'Import process',
|
||||
error: error.message
|
||||
});
|
||||
process.exit(1);
|
||||
|
||||
Reference in New Issue
Block a user