More import optimizations + reset optimizations
@@ -124,7 +124,14 @@ async function importProducts(pool, filePath) {
   });
 
   function convertDate(dateStr) {
-    if (!dateStr) return null;
+    if (!dateStr) {
+      // Default to current date for missing dates
+      const now = new Date();
+      const year = now.getFullYear();
+      const month = String(now.getMonth() + 1).padStart(2, '0');
+      const day = String(now.getDate()).padStart(2, '0');
+      return `${year}-${month}-${day}`;
+    }
     const [day, month, year] = dateStr.split('-');
     return `${year}-${month}-${day}`;
   }
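Note: the helper above takes DD-MM-YYYY strings and returns MySQL-friendly YYYY-MM-DD; with this change an empty or missing date now falls back to today's date instead of NULL. Expected behaviour, for illustration (the dates are made up):

convertDate('25-12-2023')  // '2023-12-25'
convertDate('')            // today's date, e.g. '2024-06-01' if run on 1 June 2024
convertDate(undefined)     // same fallback as the empty string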
@@ -268,8 +275,16 @@ async function importProducts(pool, filePath) {
 
       // Update stats
       if (result.affectedRows > 0) {
-        updated += result.affectedRows - result.insertId;
-        added += result.insertId;
+        // For INSERT ... ON DUPLICATE KEY UPDATE:
+        // - If a row is inserted, affectedRows = 1
+        // - If a row is updated, affectedRows = 2
+        // So we can calculate:
+        // - Number of inserts = number of rows where affectedRows = 1
+        // - Number of updates = number of rows where affectedRows = 2
+        const insertCount = result.affectedRows - result.changedRows;
+        const updateCount = result.changedRows;
+        added += insertCount;
+        updated += updateCount;
       }
 
       // Process categories within the same transaction
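For reference, the new counting relies on how MySQL reports rows for INSERT ... ON DUPLICATE KEY UPDATE: affectedRows is 1 for each inserted row, 2 for each row whose values actually change, and 0 for duplicates left untouched. A minimal standalone sketch of a batch upsert, with one way to recover the insert/update split that does not rely on result.changedRows; the pool, the products table and its columns are assumptions for illustration, not taken from the script:

async function upsertBatch(pool, rows) {
  // rows: [[sku, name, price], ...] against a table with a UNIQUE key on sku
  const [result] = await pool.query(
    'INSERT INTO products (sku, name, price) VALUES ? ' +
    'ON DUPLICATE KEY UPDATE name = VALUES(name), price = VALUES(price)',
    [rows]
  );
  // Assuming every row either inserts or changes an existing row,
  // affectedRows = inserts + 2 * updates, so:
  const updates = result.affectedRows - rows.length;
  const inserts = rows.length - updates;
  return { inserts, updates };
}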
@@ -304,7 +319,14 @@ async function importOrders(pool, filePath) {
   });
 
   function convertDate(dateStr) {
-    if (!dateStr) return null;
+    if (!dateStr) {
+      // Default to current date for missing dates
+      const now = new Date();
+      const year = now.getFullYear();
+      const month = String(now.getMonth() + 1).padStart(2, '0');
+      const day = String(now.getDate()).padStart(2, '0');
+      return `${year}-${month}-${day}`;
+    }
     const [day, month, year] = dateStr.split('-');
     return `${year}-${month}-${day}`;
   }
@@ -430,8 +452,16 @@ async function importOrders(pool, filePath) {
 
       // Update stats
       if (result.affectedRows > 0) {
-        updated += result.affectedRows - result.insertId;
-        added += result.insertId;
+        // For INSERT ... ON DUPLICATE KEY UPDATE:
+        // - If a row is inserted, affectedRows = 1
+        // - If a row is updated, affectedRows = 2
+        // So we can calculate:
+        // - Number of inserts = number of rows where affectedRows = 1
+        // - Number of updates = number of rows where affectedRows = 2
+        const insertCount = result.affectedRows - result.changedRows;
+        const updateCount = result.changedRows;
+        added += insertCount;
+        updated += updateCount;
       }
     } catch (error) {
       console.error(`\nError processing batch:`, error.message);
@@ -456,7 +486,14 @@ async function importPurchaseOrders(pool, filePath) {
   });
 
   function convertDate(dateStr) {
-    if (!dateStr) return null;
+    if (!dateStr) {
+      // Default to current date for missing dates
+      const now = new Date();
+      const year = now.getFullYear();
+      const month = String(now.getMonth() + 1).padStart(2, '0');
+      const day = String(now.getDate()).padStart(2, '0');
+      return `${year}-${month}-${day}`;
+    }
     const [day, month, year] = dateStr.split('-');
     return `${year}-${month}-${day}`;
   }
@@ -583,8 +620,16 @@ async function importPurchaseOrders(pool, filePath) {
 
       // Update stats
       if (result.affectedRows > 0) {
-        updated += result.affectedRows - result.insertId;
-        added += result.insertId;
+        // For INSERT ... ON DUPLICATE KEY UPDATE:
+        // - If a row is inserted, affectedRows = 1
+        // - If a row is updated, affectedRows = 2
+        // So we can calculate:
+        // - Number of inserts = number of rows where affectedRows = 1
+        // - Number of updates = number of rows where affectedRows = 2
+        const insertCount = result.affectedRows - result.changedRows;
+        const updateCount = result.changedRows;
+        added += insertCount;
+        updated += updateCount;
       }
     } catch (error) {
       console.error(`\nError processing batch:`, error.message);
@@ -617,8 +662,17 @@ async function main() {
 
   // Import products first since they're referenced by other tables
   await importProducts(pool, path.join(__dirname, '../csv/39f2x83-products.csv'));
-  await importOrders(pool, path.join(__dirname, '../csv/39f2x83-orders.csv'));
-  await importPurchaseOrders(pool, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
 
+  // Process orders and purchase orders in parallel
+  outputProgress({
+    operation: 'Starting parallel import',
+    message: 'Processing orders and purchase orders simultaneously...'
+  });
+
+  await Promise.all([
+    importOrders(pool, path.join(__dirname, '../csv/39f2x83-orders.csv')),
+    importPurchaseOrders(pool, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'))
+  ]);
+
   outputProgress({
     status: 'complete',
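A note on the parallel step: Promise.all rejects as soon as either importer throws, while the other call keeps running against the pool. If both outcomes should be reported, a Promise.allSettled variant inside main() would look roughly like this (a sketch; the path constants are placeholders):

const results = await Promise.allSettled([
  importOrders(pool, ordersCsvPath),
  importPurchaseOrders(pool, purchaseOrdersCsvPath)
]);
const failures = results.filter((r) => r.status === 'rejected');
if (failures.length > 0) {
  throw new Error(failures.map((f) => f.reason && f.reason.message).join('; '));
}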
@@ -1,7 +1,7 @@
 const mysql = require('mysql2/promise');
 const path = require('path');
 const dotenv = require('dotenv');
-const { spawn } = require('child_process');
+const fs = require('fs');
 
 dotenv.config({ path: path.join(__dirname, '../.env') });
 
@@ -33,77 +33,48 @@ async function resetDatabase() {
   const connection = await mysql.createConnection(dbConfig);
 
   try {
-    // Get list of all tables
+    // Get list of all tables efficiently
     outputProgress({
       operation: 'Getting table list',
       message: 'Retrieving all table names...'
     });
 
-    const [tables] = await connection.query(
-      'SELECT table_name FROM information_schema.tables WHERE table_schema = ?',
-      [dbConfig.database]
+    // More efficient query to get table names
+    const [tables] = await connection.query(`
+      SELECT GROUP_CONCAT(table_name) as tables
+      FROM information_schema.tables
+      WHERE table_schema = DATABASE()`
     );
 
-    if (tables.length === 0) {
+    if (!tables[0].tables) {
       outputProgress({
         operation: 'No tables found',
         message: 'Database is already empty'
       });
     } else {
-      // Disable foreign key checks to allow dropping tables with dependencies
+      // Disable foreign key checks and drop all tables in one query
       outputProgress({
         operation: 'Dropping tables',
         message: 'Dropping all tables...'
       });
 
       await connection.query('SET FOREIGN_KEY_CHECKS = 0');
 
-      // Drop each table
-      for (let i = 0; i < tables.length; i++) {
-        const tableName = tables[i].TABLE_NAME;
-        outputProgress({
-          operation: 'Dropping tables',
-          message: `Dropping table: ${tableName}`,
-          current: i + 1,
-          total: tables.length,
-          percentage: (((i + 1) / tables.length) * 100).toFixed(1)
-        });
-        await connection.query(`DROP TABLE IF EXISTS \`${tableName}\``);
-      }
-
-      // Re-enable foreign key checks
+      // Create DROP TABLE statements for all tables at once
+      const dropQuery = `DROP TABLE IF EXISTS ${tables[0].tables.split(',').map(table => '`' + table + '`').join(', ')}`;
+      await connection.query(dropQuery);
+
       await connection.query('SET FOREIGN_KEY_CHECKS = 1');
     }
 
-    // Run setup-db.js
+    // Read and execute schema directly instead of spawning a process
     outputProgress({
       operation: 'Running database setup',
       message: 'Creating new tables...'
     });
 
-    const setupScript = path.join(__dirname, 'setup-db.js');
-    const setupProcess = spawn('node', [setupScript]);
-
-    setupProcess.stdout.on('data', (data) => {
-      const output = data.toString().trim();
-      outputProgress({
-        operation: 'Database setup',
-        message: output
-      });
-    });
-
-    setupProcess.stderr.on('data', (data) => {
-      const error = data.toString().trim();
-      outputProgress({
-        status: 'error',
-        error
-      });
-    });
-
-    await new Promise((resolve, reject) => {
-      setupProcess.on('close', (code) => {
-        if (code === 0) {
-          resolve();
-        } else {
-          reject(new Error(`Setup process exited with code ${code}`));
-        }
-      });
-    });
+    const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
+    await connection.query(schemaSQL);
 
     outputProgress({
       status: 'complete',
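Two assumptions the rewritten reset path depends on are worth making explicit: connection.query() will only execute a multi-statement schema.sql if the connection was opened with multipleStatements: true, and GROUP_CONCAT output is capped by group_concat_max_len (1024 by default), which can silently truncate the table list on schemas with many tables. A condensed sketch of the same flow with both handled, written as it would sit inside resetDatabase() and reusing the script's dbConfig and schema path:

const connection = await mysql.createConnection({ ...dbConfig, multipleStatements: true });

await connection.query('SET SESSION group_concat_max_len = 1048576');
const [[{ tables }]] = await connection.query(
  'SELECT GROUP_CONCAT(table_name) AS tables ' +
  'FROM information_schema.tables WHERE table_schema = DATABASE()'
);

if (tables) {
  await connection.query('SET FOREIGN_KEY_CHECKS = 0');
  await connection.query(
    `DROP TABLE IF EXISTS ${tables.split(',').map((t) => '`' + t + '`').join(', ')}`
  );
  await connection.query('SET FOREIGN_KEY_CHECKS = 1');
}

await connection.query(fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8'));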