Clean up from simplifying
@@ -102,16 +102,6 @@ async function main() {
 
   if (isImportCancelled) throw new Error("Import cancelled");
 
-  // Initialize product_metric_status table for any missing products
-  await localConnection.query(`
-    INSERT IGNORE INTO product_metric_status (pid)
-    SELECT pid FROM products p
-    WHERE NOT EXISTS (
-      SELECT 1 FROM product_metric_status pms
-      WHERE pms.pid = p.pid
-    )
-  `);
-
   // Clean up any previously running imports that weren't completed
   await localConnection.query(`
     UPDATE import_history
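
Note: separate from the removal itself, the deleted seeding query doubled its guard. INSERT IGNORE already skips rows whose key exists, so the WHERE NOT EXISTS subquery was redundant, assuming pid is the primary (or unique) key of product_metric_status, which the ON DUPLICATE KEY updates elsewhere in this diff suggest. Had the block stayed, a minimal equivalent sketch:

    // Hypothetical simplification of the removed seeding query;
    // assumes pid is the table's primary or unique key.
    await localConnection.query(`
      INSERT IGNORE INTO product_metric_status (pid)
      SELECT pid FROM products
    `);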

@@ -136,21 +136,6 @@ async function importCategories(prodConnection, localConnection) {
     total: totalInserted,
     elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
   });
 
-  // Mark all products in these categories for recalculation
-  if (categoriesToInsert.length > 0) {
-    const affectedCatIds = categoriesToInsert.map(c => c.cat_id);
-
-    await localConnection.query(`
-      INSERT INTO product_metric_status (pid, needs_recalculation)
-      SELECT DISTINCT pc.pid, TRUE
-      FROM product_categories pc
-      WHERE pc.cat_id IN (?)
-      ON DUPLICATE KEY UPDATE
-        needs_recalculation = TRUE,
-        updated_at = CURRENT_TIMESTAMP
-    `, [affectedCatIds]);
-  }
-}
 
   // After all imports, if we skipped any categories, throw an error
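
Note: unlike the VALUES-based deletions below, this block marked products set-side with INSERT ... SELECT and bound the whole affectedCatIds array to the single placeholder in IN (?). That relies on the driver expanding an array parameter into a comma-separated list, which is how mysql2's query() behaves (an assumption; the driver isn't visible in this diff):

    // Assumed mysql2 semantics: an array bound to one placeholder in IN (?)
    // is expanded into a list before the query is sent.
    const catIds = [3, 7, 9]; // illustrative values
    const [rows] = await localConnection.query(
      'SELECT DISTINCT pid FROM product_categories WHERE cat_id IN (?)',
      [catIds]
    ); // sent as: ... WHERE cat_id IN (3, 7, 9)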

@@ -473,18 +473,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       recordsAdded += inserts;
       recordsUpdated += updates;
       importedCount += processedOrderItems.size;
-
-      // Mark affected products for recalculation
-      const affectedPids = [...new Set(validOrders.map(o => o.pid))];
-      if (affectedPids.length > 0) {
-        await localConnection.query(`
-          INSERT INTO product_metric_status (pid, needs_recalculation)
-          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
-          ON DUPLICATE KEY UPDATE
-            needs_recalculation = TRUE,
-            updated_at = CURRENT_TIMESTAMP
-        `, affectedPids);
-      }
     }
 
     // Update progress based on unique orders processed
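
Note: this mark-for-recalculation block, differing only in where the pids come from, is deleted here and at four more sites below (the retry path, importProducts, and twice in importPurchaseOrders). Had the flag survived the simplification, one shared helper would have collapsed the duplication; a sketch assembled from the deleted code (function name hypothetical):

    // Hypothetical helper equivalent to the five deleted blocks:
    // dedupe the pids, then upsert the recalculation flag.
    async function markForRecalculation(connection, pids) {
      const unique = [...new Set(pids)];
      if (unique.length === 0) return; // nothing to mark
      await connection.query(`
        INSERT INTO product_metric_status (pid, needs_recalculation)
        VALUES ${unique.map(() => '(?, TRUE)').join(',')}
        ON DUPLICATE KEY UPDATE
          needs_recalculation = TRUE,
          updated_at = CURRENT_TIMESTAMP
      `, unique);
    }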

@@ -597,18 +585,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       recordsAdded += inserts;
       recordsUpdated += updates;
       importedCount += retryOrderItems.size;
-
-      // Mark affected products for recalculation
-      const affectedPids = [...new Set(validOrders.map(o => o.pid))];
-      if (affectedPids.length > 0) {
-        await localConnection.query(`
-          INSERT INTO product_metric_status (pid, needs_recalculation)
-          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
-          ON DUPLICATE KEY UPDATE
-            needs_recalculation = TRUE,
-            updated_at = CURRENT_TIMESTAMP
-        `, affectedPids);
-      }
     }
   } catch (error) {
     console.warn('Warning: Failed to retry skipped orders:', error.message);
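
Note: in this retry path the deleted block derived affectedPids from validOrders while the surrounding counters track retryOrderItems. If validOrders still referred to the first-pass array at this point, the retry was re-marking first-pass pids; that cannot be confirmed from this hunk alone, and the deletion makes it moot.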

@@ -203,8 +203,8 @@ async function materializeCalculations(prodConnection, localConnection, incremen
   });
 
   // Insert all product data into temp table in batches
-  for (let i = 0; i < prodData.length; i += 1000) {
-    const batch = prodData.slice(i, i + 1000);
+  for (let i = 0; i < prodData.length; i += 5000) {
+    const batch = prodData.slice(i, i + 5000);
     const values = batch.map(row => [
       row.pid,
       row.title,

@@ -315,7 +315,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen
     outputProgress({
       status: "running",
       operation: "Products import",
-      message: `Processed ${Math.min(i + 1000, prodData.length)} of ${prodData.length} product records`,
+      message: `Processed ${Math.min(i + 5000, prodData.length)} of ${prodData.length} product records`,
       current: i + batch.length,
       total: prodData.length
     });
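
Note on the 1000 to 5000 change: the batch size lived in three literals (the loop step, the slice bound, and the progress message), and the two hunks above had to move in lockstep; a missed spot would make the progress message silently misreport. A named constant would pin them together; a sketch of that refactor (constant name hypothetical):

    const PRODUCT_BATCH_SIZE = 5000; // single source of truth for the batch size

    for (let i = 0; i < prodData.length; i += PRODUCT_BATCH_SIZE) {
      const batch = prodData.slice(i, i + PRODUCT_BATCH_SIZE);
      // ... insert `batch` into the temp table as before ...
      outputProgress({
        status: "running",
        operation: "Products import",
        message: `Processed ${Math.min(i + PRODUCT_BATCH_SIZE, prodData.length)} of ${prodData.length} product records`,
        current: i + batch.length,
        total: prodData.length
      });
    }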

@@ -473,14 +473,6 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
       ...insertsAndUpdates.updates.map(p => p.pid),
       ...insertsAndUpdates.inserts.map(p => p.pid)
     ];
-
-    await localConnection.query(`
-      INSERT INTO product_metric_status (pid, needs_recalculation)
-      VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
-      ON DUPLICATE KEY UPDATE
-        needs_recalculation = TRUE,
-        updated_at = CURRENT_TIMESTAMP
-    `, affectedPids);
   }
 
   // Process category relationships

@@ -474,18 +474,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
       recordsAdded += inserts;
       recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
      processed += batchProcessed;
-
-      // Mark affected products for recalculation
-      const affectedPids = [...new Set(productBatch.map(p => p.pid))];
-      if (affectedPids.length > 0) {
-        await localConnection.query(`
-          INSERT INTO product_metric_status (pid, needs_recalculation)
-          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
-          ON DUPLICATE KEY UPDATE
-            needs_recalculation = TRUE,
-            updated_at = CURRENT_TIMESTAMP
-        `, affectedPids);
-      }
     }
 
     // Handle updates - now we know these actually have changes

@@ -511,18 +499,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
 
       recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
       processed += batchProcessed;
-
-      // Mark affected products for recalculation
-      const affectedPids = [...new Set(productBatch.map(p => p.pid))];
-      if (affectedPids.length > 0) {
-        await localConnection.query(`
-          INSERT INTO product_metric_status (pid, needs_recalculation)
-          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
-          ON DUPLICATE KEY UPDATE
-            needs_recalculation = TRUE,
-            updated_at = CURRENT_TIMESTAMP
-        `, affectedPids);
-      }
     }
 
     // Update progress based on time interval

@@ -43,8 +43,7 @@ const CONFIG_TABLES = [
   'sales_seasonality',
   'turnover_config',
   'sync_status',
-  'metric_calculation_config',
-  'product_metric_status'
+  'metric_calculation_config'
 ];
 
 // Split SQL into individual statements

@@ -501,29 +500,6 @@ async function resetDatabase() {
     }
   }
 
-  // Verify triggers exist
-  const [triggers] = await connection.query('SHOW TRIGGERS');
-  const expectedTriggers = [
-    'orders_after_insert_update',
-    'purchase_orders_after_insert_update',
-    'products_after_insert_update'
-  ];
-
-  const missingTriggers = expectedTriggers.filter(
-    triggerName => !triggers.some(t => t.Trigger === triggerName)
-  );
-
-  if (missingTriggers.length > 0) {
-    throw new Error(
-      `Missing required triggers: ${missingTriggers.join(', ')}`
-    );
-  }
-
-  outputProgress({
-    operation: 'Triggers verified',
-    message: `Successfully verified triggers: ${expectedTriggers.join(', ')}`
-  });
-
   outputProgress({
     status: 'complete',
     operation: 'Database reset complete',
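
Note: removing this verification is consistent with the rest of the commit if the three triggers maintained product_metric_status, which their naming pattern suggests but this diff does not show. For reference, an equivalent check can be done in one round trip against information_schema instead of SHOW TRIGGERS plus client-side filtering; a sketch, assuming a mysql2-style connection:

    // Hypothetical alternative to the deleted SHOW TRIGGERS check.
    const expectedTriggers = [
      'orders_after_insert_update',
      'purchase_orders_after_insert_update',
      'products_after_insert_update'
    ];
    const [rows] = await connection.query(
      `SELECT TRIGGER_NAME FROM information_schema.TRIGGERS
       WHERE TRIGGER_SCHEMA = DATABASE() AND TRIGGER_NAME IN (?)`,
      [expectedTriggers]
    );
    const found = new Set(rows.map(r => r.TRIGGER_NAME));
    const missing = expectedTriggers.filter(name => !found.has(name));
    if (missing.length > 0) {
      throw new Error(`Missing required triggers: ${missing.join(', ')}`);
    }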

@@ -34,8 +34,8 @@ const METRICS_TABLES = [
   'sales_forecasts',
   'temp_purchase_metrics',
   'temp_sales_metrics',
-  'vendor_metrics', //before vendor_details for foreign key
-  'vendor_time_metrics', //before vendor_details for foreign key
+  'vendor_metrics',
+  'vendor_time_metrics',
   'vendor_details'
 ];
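
Note: the two deleted comments were the only record that vendor_metrics and vendor_time_metrics must precede vendor_details because of foreign keys. If those constraints still exist, the ordering requirement outlives its documentation. One way to remove the order sensitivity entirely, assuming METRICS_TABLES drives a table-clearing step and that briefly disabling FK checks is acceptable there:

    // Hypothetical order-insensitive clear of the metrics tables.
    await connection.query('SET FOREIGN_KEY_CHECKS = 0');
    try {
      for (const table of METRICS_TABLES) {
        await connection.query(`TRUNCATE TABLE \`${table}\``);
      }
    } finally {
      await connection.query('SET FOREIGN_KEY_CHECKS = 1');
    }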