From 9c34e2490988de42bfca2883c69cf543d2e343f6 Mon Sep 17 00:00:00 2001 From: Matt Date: Tue, 28 Jan 2025 20:54:05 -0500 Subject: [PATCH 01/33] Enhance metrics calculation scripts with improved progress tracking and cancellation support --- inventory-server/scripts/calculate-metrics.js | 59 +++- inventory-server/scripts/import-csv.js | 24 +- inventory-server/scripts/import-from-prod.js | 80 ++--- .../scripts/metrics/brand-metrics.js | 63 +++- .../scripts/metrics/category-metrics.js | 129 +++++--- .../scripts/metrics/financial-metrics.js | 63 +++- .../scripts/metrics/product-metrics.js | 71 ++++- .../scripts/metrics/sales-forecasts.js | 119 ++++++- .../scripts/metrics/time-aggregates.js | 59 +++- .../scripts/metrics/vendor-metrics.js | 62 +++- inventory-server/scripts/reset-metrics.js | 216 ++++++++++++- inventory-server/scripts/update-csv.js | 297 +++++++++--------- 12 files changed, 915 insertions(+), 327 deletions(-) diff --git a/inventory-server/scripts/calculate-metrics.js b/inventory-server/scripts/calculate-metrics.js index 57b1394..1a12d57 100644 --- a/inventory-server/scripts/calculate-metrics.js +++ b/inventory-server/scripts/calculate-metrics.js @@ -186,6 +186,19 @@ async function calculateMetrics() { } // Calculate ABC classification + outputProgress({ + status: 'running', + operation: 'Starting ABC classification', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1'); const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 }; @@ -202,6 +215,19 @@ async function calculateMetrics() { ) ENGINE=MEMORY `); + outputProgress({ + status: 'running', + operation: 'Creating revenue rankings', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + await connection.query(` INSERT INTO temp_revenue_ranks SELECT @@ -222,11 +248,26 @@ async function calculateMetrics() { const [rankingCount] = await connection.query('SELECT MAX(rank_num) as total_count FROM temp_revenue_ranks'); const totalCount = rankingCount[0].total_count || 1; + outputProgress({ + status: 'running', + operation: 'Updating ABC classifications', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Process updates in batches let abcProcessedCount = 0; const batchSize = 5000; while (true) { + if (isCancelled) return processedCount; + // First get a batch of PIDs that need updating const [pids] = await connection.query(` SELECT pm.pid @@ -267,6 +308,18 @@ async function calculateMetrics() { pids.map(row => row.pid)]); abcProcessedCount += result.affectedRows; + processedCount = Math.floor(totalProducts * (0.99 + (abcProcessedCount / totalCount) * 0.01)); + + 
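+            // Map the ABC batching pass onto the final 1% of the overall run:
+            // processedCount climbs from 99% to 100% of totalProducts as
+            // abcProcessedCount approaches totalCount, so the progress bar
+            // keeps moving while the remaining batches commit.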
outputProgress({ + status: 'running', + operation: 'ABC classification progress', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); // Small delay between batches to allow other transactions await new Promise(resolve => setTimeout(resolve, 100)); @@ -276,14 +329,14 @@ async function calculateMetrics() { await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks'); // Final success message - global.outputProgress({ + outputProgress({ status: 'complete', operation: 'Metrics calculation complete', current: totalProducts, total: totalProducts, - elapsed: global.formatElapsedTime(startTime), + elapsed: formatElapsedTime(startTime), remaining: '0s', - rate: global.calculateRate(startTime, totalProducts), + rate: calculateRate(startTime, totalProducts), percentage: '100' }); diff --git a/inventory-server/scripts/import-csv.js b/inventory-server/scripts/import-csv.js index 15201f1..04ab8ef 100644 --- a/inventory-server/scripts/import-csv.js +++ b/inventory-server/scripts/import-csv.js @@ -3,6 +3,7 @@ const path = require('path'); const csv = require('csv-parse'); const mysql = require('mysql2/promise'); const dotenv = require('dotenv'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./metrics/utils/progress'); // Get test limits from environment variables const PRODUCTS_TEST_LIMIT = parseInt(process.env.PRODUCTS_TEST_LIMIT || '0'); @@ -106,20 +107,19 @@ async function countRows(filePath) { } // Helper function to update progress with time estimate -function updateProgress(current, total, operation, startTime) { - const elapsed = (Date.now() - startTime) / 1000; - const rate = current / elapsed; // rows per second - const remaining = (total - current) / rate; - +function updateProgress(current, total, operation, startTime, added = 0, updated = 0, skipped = 0) { outputProgress({ status: 'running', operation, current, total, - rate, - elapsed: formatDuration(elapsed), - remaining: formatDuration(remaining), - percentage: ((current / total) * 100).toFixed(1) + rate: calculateRate(startTime, current), + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, current, total), + percentage: ((current / total) * 100).toFixed(1), + added, + updated, + skipped }); } @@ -474,7 +474,7 @@ async function importProducts(pool, filePath) { // Update progress every 100ms to avoid console flooding const now = Date.now(); if (now - lastUpdate > 100) { - updateProgress(rowCount, totalRows, 'Products import', startTime); + updateProgress(rowCount, totalRows, 'Products import', startTime, added, updated, 0); lastUpdate = now; } @@ -678,7 +678,7 @@ async function importOrders(pool, filePath) { // Update progress every 100ms const now = Date.now(); if (now - lastUpdate > 100) { - updateProgress(rowCount, totalRows, 'Orders import', startTime); + updateProgress(rowCount, totalRows, 'Orders import', startTime, added, updated, skipped); lastUpdate = now; } @@ -845,7 +845,7 @@ async function importPurchaseOrders(pool, filePath) { // Update progress every 100ms const now = Date.now(); if (now - lastUpdate > 100) { - updateProgress(rowCount, totalRows, 'Purchase orders import', startTime); + updateProgress(rowCount, totalRows, 'Purchase orders import', startTime, added, updated, skipped); lastUpdate = now; } diff 
--git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js
index b413956..a4226d3 100644
--- a/inventory-server/scripts/import-from-prod.js
+++ b/inventory-server/scripts/import-from-prod.js
@@ -2,6 +2,7 @@ const mysql = require("mysql2/promise");
 const { Client } = require("ssh2");
 const dotenv = require("dotenv");
 const path = require("path");
+const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./metrics/utils/progress');
 
 dotenv.config({ path: path.join(__dirname, "../.env") });
 
@@ -43,52 +44,38 @@ const localDbConfig = {
   namedPlaceholders: true,
 };
 
-// Helper function to output progress
-function outputProgress(data) {
-  process.stdout.write(JSON.stringify(data) + "\n");
-}
-
-// Helper function to format duration
-function formatDuration(seconds) {
-  const hours = Math.floor(seconds / 3600);
-  const minutes = Math.floor((seconds % 3600) / 60);
-  seconds = Math.floor(seconds % 60);
-
-  const parts = [];
-  if (hours > 0) parts.push(`${hours}h`);
-  if (minutes > 0) parts.push(`${minutes}m`);
-  if (seconds > 0 || parts.length === 0) parts.push(`${seconds}s`);
-
-  return parts.join(" ");
-}
-
-// Helper function to update progress with time estimate
-function updateProgress(current, total, operation, startTime) {
-  const elapsed = (Date.now() - startTime) / 1000;
-  const rate = current / elapsed;
-  const remaining = (total - current) / rate;
-
-  outputProgress({
-    status: "running",
-    operation,
-    current,
-    total,
-    rate,
-    elapsed: formatDuration(elapsed),
-    remaining: formatDuration(remaining),
-    percentage: ((current / total) * 100).toFixed(1),
-  });
-}
+// Constants
+const BATCH_SIZE = 1000;
+const PROGRESS_INTERVAL = 1000; // Update progress every second
 
 let isImportCancelled = false;
 
 // Add cancel function
 function cancelImport() {
-  isImportCancelled = true;
-  outputProgress({
-    status: "cancelled",
-    operation: "Import cancelled",
-  });
+  isImportCancelled = true;
+  outputProgress({
+    status: 'cancelled',
+    operation: 'Import cancelled',
+    current: 0,
+    total: 0,
+    elapsed: null,
+    remaining: null,
+    rate: 0
+  });
+}
+
+// Helper function to update progress with time estimate
+function updateProgress(current, total, operation, startTime) {
+  outputProgress({
+    status: 'running',
+    operation,
+    current,
+    total,
+    rate: calculateRate(startTime, current),
+    elapsed: formatElapsedTime(startTime),
+    remaining: estimateRemaining(startTime, current, total),
+    percentage: ((current / total) * 100).toFixed(1)
+  });
 }
 
 async function setupSshTunnel() {
@@ -276,7 +263,7 @@ async function importCategories(prodConnection, localConnection) {
       operation: "Categories import completed",
       current: totalInserted,
       total: totalInserted,
-      duration: formatDuration((Date.now() - startTime) / 1000),
+      duration: formatElapsedTime(startTime),
     });
   } catch (error) {
     console.error("Error importing categories:", error);
@@ -510,7 +497,6 @@ async function importProducts(prodConnection, localConnection) {
     const total = rows.length;
 
     // Process products in batches
-    const BATCH_SIZE = 100;
    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
       let batch = rows.slice(i, i + BATCH_SIZE);
 
@@ -641,7 +627,7 @@ async function importProducts(prodConnection, localConnection) {
       operation: "Products import completed",
       current: total,
       total,
-      duration: formatDuration((Date.now() - startTime) / 1000),
+      duration: formatElapsedTime(startTime),
     });
   } catch (error) {
     console.error("Error importing products:",
error);
@@ -1384,7 +1370,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
       timing: {
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
-        elapsed_time: formatDuration((endTime - startTime) / 1000),
+        elapsed_time: formatElapsedTime(startTime),
         elapsed_seconds: Math.round((endTime - startTime) / 1000)
       }
     });
@@ -1459,7 +1445,7 @@ async function main() {
       timing: {
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
-        elapsed_time: formatDuration((endTime - startTime) / 1000),
+        elapsed_time: formatElapsedTime(startTime),
         elapsed_seconds: Math.round((endTime - startTime) / 1000)
       }
     });
@@ -1473,7 +1459,7 @@ async function main() {
       timing: {
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
-        elapsed_time: formatDuration((endTime - startTime) / 1000),
+        elapsed_time: formatElapsedTime(startTime),
         elapsed_seconds: Math.round((endTime - startTime) / 1000)
       }
     });
diff --git a/inventory-server/scripts/metrics/brand-metrics.js b/inventory-server/scripts/metrics/brand-metrics.js
index 5b9a698..5b90765 100644
--- a/inventory-server/scripts/metrics/brand-metrics.js
+++ b/inventory-server/scripts/metrics/brand-metrics.js
@@ -1,18 +1,32 @@
-const { outputProgress } = require('./utils/progress');
+const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
 const { getConnection } = require('./utils/db');
 
-async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
+async function calculateBrandMetrics(startTime, totalProducts, processedCount, isCancelled = false) {
   const connection = await getConnection();
   try {
+    if (isCancelled) {
+      outputProgress({
+        status: 'cancelled',
+        operation: 'Brand metrics calculation cancelled',
+        current: processedCount,
+        total: totalProducts,
+        elapsed: formatElapsedTime(startTime),
+        remaining: null,
+        rate: calculateRate(startTime, processedCount),
+        percentage: ((processedCount / totalProducts) * 100).toFixed(1)
+      });
+      return processedCount;
+    }
+
     outputProgress({
       status: 'running',
-      operation: 'Calculating brand metrics',
-      current: Math.floor(totalProducts * 0.95),
+      operation: 'Starting brand metrics calculation',
+      current: processedCount,
       total: totalProducts,
       elapsed: formatElapsedTime(startTime),
-      remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.95), totalProducts),
-      rate: calculateRate(startTime, Math.floor(totalProducts * 0.95)),
-      percentage: '95'
+      remaining: estimateRemaining(startTime, processedCount, totalProducts),
+      rate: calculateRate(startTime, processedCount),
+      percentage: ((processedCount / totalProducts) * 100).toFixed(1)
     });
 
     // Calculate brand metrics with optimized queries
@@ -111,6 +125,20 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
         last_calculated_at = CURRENT_TIMESTAMP
     `);
 
+    processedCount = Math.floor(totalProducts * 0.97);
+    outputProgress({
+      status: 'running',
+      operation: 'Brand metrics calculated, starting time-based metrics',
+      current: processedCount,
+      total: totalProducts,
+      elapsed: formatElapsedTime(startTime),
+      remaining: estimateRemaining(startTime, processedCount, totalProducts),
+      rate: calculateRate(startTime, processedCount),
+      percentage: ((processedCount / totalProducts) * 100).toFixed(1)
+    });
+
+    if (isCancelled) return processedCount;
+
     // Calculate brand time-based metrics with
optimized query await connection.query(` INSERT INTO brand_time_metrics ( @@ -170,9 +198,26 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) { avg_margin = VALUES(avg_margin) `); - return Math.floor(totalProducts * 0.98); + processedCount = Math.floor(totalProducts * 0.99); + outputProgress({ + status: 'running', + operation: 'Brand time-based metrics calculated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + return processedCount; + } catch (error) { + logError(error, 'Error calculating brand metrics'); + throw error; } finally { - connection.release(); + if (connection) { + connection.release(); + } } } diff --git a/inventory-server/scripts/metrics/category-metrics.js b/inventory-server/scripts/metrics/category-metrics.js index 9dd92f3..9a658bb 100644 --- a/inventory-server/scripts/metrics/category-metrics.js +++ b/inventory-server/scripts/metrics/category-metrics.js @@ -1,18 +1,32 @@ -const { outputProgress } = require('./utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress'); const { getConnection } = require('./utils/db'); -async function calculateCategoryMetrics(startTime, totalProducts, processedCount) { +async function calculateCategoryMetrics(startTime, totalProducts, processedCount, isCancelled = false) { const connection = await getConnection(); try { + if (isCancelled) { + outputProgress({ + status: 'cancelled', + operation: 'Category metrics calculation cancelled', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + return processedCount; + } + outputProgress({ status: 'running', - operation: 'Calculating category metrics', - current: Math.floor(totalProducts * 0.85), + operation: 'Starting category metrics calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.85), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.85)), - percentage: '85' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); // First, calculate base category metrics @@ -44,6 +58,20 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount last_calculated_at = VALUES(last_calculated_at) `); + processedCount = Math.floor(totalProducts * 0.90); + outputProgress({ + status: 'running', + operation: 'Base category metrics calculated, updating with margin data', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Then update with margin and turnover data await connection.query(` WITH category_sales AS ( @@ -68,6 +96,20 @@ async function calculateCategoryMetrics(startTime, totalProducts, 
processedCount cm.last_calculated_at = NOW() `); + processedCount = Math.floor(totalProducts * 0.95); + outputProgress({ + status: 'running', + operation: 'Margin data updated, calculating growth rates', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Finally update growth rates await connection.query(` WITH current_period AS ( @@ -112,6 +154,20 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount WHERE cp.cat_id IS NOT NULL OR pp.cat_id IS NOT NULL `); + processedCount = Math.floor(totalProducts * 0.97); + outputProgress({ + status: 'running', + operation: 'Growth rates calculated, updating time-based metrics', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Calculate time-based metrics await connection.query(` INSERT INTO category_time_metrics ( @@ -157,49 +213,26 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount turnover_rate = VALUES(turnover_rate) `); - // Calculate sales metrics for different time periods - const periods = [30, 90, 180, 365]; - for (const days of periods) { - await connection.query(` - INSERT INTO category_sales_metrics ( - category_id, - brand, - period_start, - period_end, - avg_daily_sales, - total_sold, - num_products, - avg_price, - last_calculated_at - ) - SELECT - pc.cat_id as category_id, - COALESCE(p.brand, 'Unbranded') as brand, - DATE_SUB(CURDATE(), INTERVAL ? DAY) as period_start, - CURDATE() as period_end, - COALESCE(SUM(o.quantity), 0) / ? as avg_daily_sales, - COALESCE(SUM(o.quantity), 0) as total_sold, - COUNT(DISTINCT p.pid) as num_products, - COALESCE(AVG(o.price), 0) as avg_price, - NOW() as last_calculated_at - FROM product_categories pc - JOIN products p ON pc.pid = p.pid - LEFT JOIN orders o ON p.pid = o.pid - AND o.date >= DATE_SUB(CURDATE(), INTERVAL ? 
DAY) - AND o.canceled = false - GROUP BY pc.cat_id, p.brand - ON DUPLICATE KEY UPDATE - avg_daily_sales = VALUES(avg_daily_sales), - total_sold = VALUES(total_sold), - num_products = VALUES(num_products), - avg_price = VALUES(avg_price), - last_calculated_at = NOW() - `, [days, days, days]); - } + processedCount = Math.floor(totalProducts * 0.99); + outputProgress({ + status: 'running', + operation: 'Time-based metrics calculated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); - return Math.floor(totalProducts * 0.9); + return processedCount; + } catch (error) { + logError(error, 'Error calculating category metrics'); + throw error; } finally { - connection.release(); + if (connection) { + connection.release(); + } } } diff --git a/inventory-server/scripts/metrics/financial-metrics.js b/inventory-server/scripts/metrics/financial-metrics.js index 30d94bc..3c85871 100644 --- a/inventory-server/scripts/metrics/financial-metrics.js +++ b/inventory-server/scripts/metrics/financial-metrics.js @@ -1,18 +1,32 @@ -const { outputProgress } = require('./utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress'); const { getConnection } = require('./utils/db'); -async function calculateFinancialMetrics(startTime, totalProducts, processedCount) { +async function calculateFinancialMetrics(startTime, totalProducts, processedCount, isCancelled = false) { const connection = await getConnection(); try { + if (isCancelled) { + outputProgress({ + status: 'cancelled', + operation: 'Financial metrics calculation cancelled', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + return processedCount; + } + outputProgress({ status: 'running', - operation: 'Calculating financial metrics', - current: Math.floor(totalProducts * 0.6), + operation: 'Starting financial metrics calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.6), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.6)), - percentage: '60' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); // Calculate financial metrics with optimized query @@ -48,6 +62,20 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun END `); + processedCount = Math.floor(totalProducts * 0.65); + outputProgress({ + status: 'running', + operation: 'Base financial metrics calculated, updating time aggregates', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Update time-based aggregates with optimized query await connection.query(` WITH monthly_financials AS ( @@ -78,9 +106,26 @@ async 
function calculateFinancialMetrics(startTime, totalProducts, processedCoun END `); - return Math.floor(totalProducts * 0.7); + processedCount = Math.floor(totalProducts * 0.70); + outputProgress({ + status: 'running', + operation: 'Time-based aggregates updated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + return processedCount; + } catch (error) { + logError(error, 'Error calculating financial metrics'); + throw error; } finally { - connection.release(); + if (connection) { + connection.release(); + } } } diff --git a/inventory-server/scripts/metrics/product-metrics.js b/inventory-server/scripts/metrics/product-metrics.js index 0747fa3..ed177e6 100644 --- a/inventory-server/scripts/metrics/product-metrics.js +++ b/inventory-server/scripts/metrics/product-metrics.js @@ -1,4 +1,4 @@ -const { outputProgress, logError } = require('./utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress'); const { getConnection } = require('./utils/db'); // Helper function to handle NaN and undefined values @@ -9,24 +9,38 @@ function sanitizeValue(value) { return value; } -async function calculateProductMetrics(startTime, totalProducts, processedCount = 0) { +async function calculateProductMetrics(startTime, totalProducts, processedCount = 0, isCancelled = false) { const connection = await getConnection(); try { // Skip flags are inherited from the parent scope const SKIP_PRODUCT_BASE_METRICS = 0; - const SKIP_PRODUCT_TIME_AGGREGATES =0; + const SKIP_PRODUCT_TIME_AGGREGATES = 0; + + if (isCancelled) { + outputProgress({ + status: 'cancelled', + operation: 'Product metrics calculation cancelled', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + return processedCount; + } // Calculate base product metrics if (!SKIP_PRODUCT_BASE_METRICS) { outputProgress({ status: 'running', - operation: 'Calculating base product metrics', - current: Math.floor(totalProducts * 0.2), + operation: 'Starting base product metrics calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.2), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.2)), - percentage: '20' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); // Calculate base metrics @@ -72,8 +86,17 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount `); processedCount = Math.floor(totalProducts * 0.4); + outputProgress({ + status: 'running', + operation: 'Base product metrics calculated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); } else { - console.log('Skipping base product metrics calculation'); processedCount = 
Math.floor(totalProducts * 0.4); outputProgress({ status: 'running', @@ -83,21 +106,23 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount elapsed: formatElapsedTime(startTime), remaining: estimateRemaining(startTime, processedCount, totalProducts), rate: calculateRate(startTime, processedCount), - percentage: '40' + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); } + if (isCancelled) return processedCount; + // Calculate product time aggregates if (!SKIP_PRODUCT_TIME_AGGREGATES) { outputProgress({ status: 'running', - operation: 'Calculating product time aggregates', - current: Math.floor(totalProducts * 0.4), + operation: 'Starting product time aggregates calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.4), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.4)), - percentage: '40' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); // Calculate time-based aggregates @@ -151,8 +176,17 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount `); processedCount = Math.floor(totalProducts * 0.6); + outputProgress({ + status: 'running', + operation: 'Product time aggregates calculated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); } else { - console.log('Skipping product time aggregates calculation'); processedCount = Math.floor(totalProducts * 0.6); outputProgress({ status: 'running', @@ -162,11 +196,14 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount elapsed: formatElapsedTime(startTime), remaining: estimateRemaining(startTime, processedCount, totalProducts), rate: calculateRate(startTime, processedCount), - percentage: '60' + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); } return processedCount; + } catch (error) { + logError(error, 'Error calculating product metrics'); + throw error; } finally { if (connection) { connection.release(); diff --git a/inventory-server/scripts/metrics/sales-forecasts.js b/inventory-server/scripts/metrics/sales-forecasts.js index 4930803..f02ddb0 100644 --- a/inventory-server/scripts/metrics/sales-forecasts.js +++ b/inventory-server/scripts/metrics/sales-forecasts.js @@ -1,18 +1,32 @@ -const { outputProgress } = require('./utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress'); const { getConnection } = require('./utils/db'); -async function calculateSalesForecasts(startTime, totalProducts, processedCount) { +async function calculateSalesForecasts(startTime, totalProducts, processedCount, isCancelled = false) { const connection = await getConnection(); try { + if (isCancelled) { + outputProgress({ + status: 'cancelled', + operation: 'Sales forecasts calculation cancelled', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + return 
processedCount; + } + outputProgress({ status: 'running', - operation: 'Calculating sales forecasts', - current: Math.floor(totalProducts * 0.98), + operation: 'Starting sales forecasts calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.98), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.98)), - percentage: '98' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); // First, create a temporary table for forecast dates @@ -42,6 +56,20 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount) ) numbers `); + processedCount = Math.floor(totalProducts * 0.92); + outputProgress({ + status: 'running', + operation: 'Forecast dates prepared, calculating daily sales stats', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Create temporary table for daily sales stats await connection.query(` CREATE TEMPORARY TABLE IF NOT EXISTS temp_daily_sales AS @@ -57,6 +85,20 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount) GROUP BY o.pid, DAYOFWEEK(o.date) `); + processedCount = Math.floor(totalProducts * 0.94); + outputProgress({ + status: 'running', + operation: 'Daily sales stats calculated, preparing product stats', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Create temporary table for product stats await connection.query(` CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_stats AS @@ -68,6 +110,20 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount) GROUP BY pid `); + processedCount = Math.floor(totalProducts * 0.96); + outputProgress({ + status: 'running', + operation: 'Product stats prepared, calculating product-level forecasts', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Calculate product-level forecasts await connection.query(` INSERT INTO sales_forecasts ( @@ -116,6 +172,20 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount) last_calculated_at = NOW() `); + processedCount = Math.floor(totalProducts * 0.98); + outputProgress({ + status: 'running', + operation: 'Product forecasts calculated, preparing category stats', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return 
processedCount; + // Create temporary table for category stats await connection.query(` CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_sales AS @@ -142,6 +212,20 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount) GROUP BY cat_id `); + processedCount = Math.floor(totalProducts * 0.99); + outputProgress({ + status: 'running', + operation: 'Category stats prepared, calculating category-level forecasts', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Calculate category-level forecasts await connection.query(` INSERT INTO category_forecasts ( @@ -199,9 +283,26 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount) DROP TEMPORARY TABLE IF EXISTS temp_category_stats; `); - return Math.floor(totalProducts * 1.0); + processedCount = Math.floor(totalProducts * 1.0); + outputProgress({ + status: 'running', + operation: 'Category forecasts calculated and temporary tables cleaned up', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + return processedCount; + } catch (error) { + logError(error, 'Error calculating sales forecasts'); + throw error; } finally { - connection.release(); + if (connection) { + connection.release(); + } } } diff --git a/inventory-server/scripts/metrics/time-aggregates.js b/inventory-server/scripts/metrics/time-aggregates.js index f068441..7c8e436 100644 --- a/inventory-server/scripts/metrics/time-aggregates.js +++ b/inventory-server/scripts/metrics/time-aggregates.js @@ -1,18 +1,32 @@ -const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress'); const { getConnection } = require('./utils/db'); -async function calculateTimeAggregates(startTime, totalProducts, processedCount) { +async function calculateTimeAggregates(startTime, totalProducts, processedCount, isCancelled = false) { const connection = await getConnection(); try { + if (isCancelled) { + outputProgress({ + status: 'cancelled', + operation: 'Time aggregates calculation cancelled', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + return processedCount; + } + outputProgress({ status: 'running', - operation: 'Calculating time aggregates', - current: Math.floor(totalProducts * 0.95), + operation: 'Starting time aggregates calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.95), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.95)), - percentage: '95' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); 
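+    // NOTE: every metrics module in this patch now follows the same contract:
+    // it accepts (startTime, totalProducts, processedCount, isCancelled),
+    // reports structured JSON progress through outputProgress(), bails out
+    // early whenever isCancelled is set, and returns the updated
+    // processedCount so the next stage can extend the overall estimate.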
// Initial insert of time-based aggregates @@ -109,6 +123,20 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount) profit_margin = VALUES(profit_margin) `); + processedCount = Math.floor(totalProducts * 0.60); + outputProgress({ + status: 'running', + operation: 'Base time aggregates calculated, updating financial metrics', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + if (isCancelled) return processedCount; + // Update with financial metrics await connection.query(` UPDATE product_time_aggregates pta @@ -136,7 +164,22 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount) END `); - return Math.floor(totalProducts * 0.65); + processedCount = Math.floor(totalProducts * 0.65); + outputProgress({ + status: 'running', + operation: 'Financial metrics updated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + return processedCount; + } catch (error) { + logError(error, 'Error calculating time aggregates'); + throw error; } finally { if (connection) { connection.release(); diff --git a/inventory-server/scripts/metrics/vendor-metrics.js b/inventory-server/scripts/metrics/vendor-metrics.js index 7f5493e..e8be0b0 100644 --- a/inventory-server/scripts/metrics/vendor-metrics.js +++ b/inventory-server/scripts/metrics/vendor-metrics.js @@ -1,18 +1,32 @@ -const { outputProgress } = require('./utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress'); const { getConnection } = require('./utils/db'); -async function calculateVendorMetrics(startTime, totalProducts, processedCount) { +async function calculateVendorMetrics(startTime, totalProducts, processedCount, isCancelled = false) { const connection = await getConnection(); try { + if (isCancelled) { + outputProgress({ + status: 'cancelled', + operation: 'Vendor metrics calculation cancelled', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + return processedCount; + } + outputProgress({ status: 'running', - operation: 'Ensuring vendors exist in vendor_details', - current: Math.floor(totalProducts * 0.7), + operation: 'Starting vendor metrics calculation', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.7), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.7)), - percentage: '70' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); // First ensure all vendors exist in vendor_details @@ -27,17 +41,20 @@ async function calculateVendorMetrics(startTime, totalProducts, processedCount) WHERE vendor IS NOT NULL `); + processedCount = Math.floor(totalProducts * 0.8); outputProgress({ status: 
'running', - operation: 'Calculating vendor metrics', - current: Math.floor(totalProducts * 0.8), + operation: 'Vendor details updated, calculating metrics', + current: processedCount, total: totalProducts, elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.8), totalProducts), - rate: calculateRate(startTime, Math.floor(totalProducts * 0.8)), - percentage: '80' + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) }); + if (isCancelled) return processedCount; + // Now calculate vendor metrics await connection.query(` INSERT INTO vendor_metrics ( @@ -130,9 +147,26 @@ async function calculateVendorMetrics(startTime, totalProducts, processedCount) last_calculated_at = VALUES(last_calculated_at) `); - return Math.floor(totalProducts * 0.9); + processedCount = Math.floor(totalProducts * 0.9); + outputProgress({ + status: 'running', + operation: 'Vendor metrics calculated', + current: processedCount, + total: totalProducts, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processedCount, totalProducts), + rate: calculateRate(startTime, processedCount), + percentage: ((processedCount / totalProducts) * 100).toFixed(1) + }); + + return processedCount; + } catch (error) { + logError(error, 'Error calculating vendor metrics'); + throw error; } finally { - connection.release(); + if (connection) { + connection.release(); + } } } diff --git a/inventory-server/scripts/reset-metrics.js b/inventory-server/scripts/reset-metrics.js index 0d796ff..f05fd64 100644 --- a/inventory-server/scripts/reset-metrics.js +++ b/inventory-server/scripts/reset-metrics.js @@ -12,6 +12,12 @@ const dbConfig = { }; function outputProgress(data) { + if (!data.status) { + data = { + status: 'running', + ...data + }; + } console.log(JSON.stringify(data)); } @@ -51,36 +57,228 @@ const REQUIRED_CORE_TABLES = [ 'purchase_orders' ]; +// Split SQL into individual statements +function splitSQLStatements(sql) { + sql = sql.replace(/\r\n/g, '\n'); + let statements = []; + let currentStatement = ''; + let inString = false; + let stringChar = ''; + + for (let i = 0; i < sql.length; i++) { + const char = sql[i]; + const nextChar = sql[i + 1] || ''; + + if ((char === "'" || char === '"') && sql[i - 1] !== '\\') { + if (!inString) { + inString = true; + stringChar = char; + } else if (char === stringChar) { + inString = false; + } + } + + if (!inString && char === '-' && nextChar === '-') { + while (i < sql.length && sql[i] !== '\n') i++; + continue; + } + + if (!inString && char === '/' && nextChar === '*') { + i += 2; + while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++; + i++; + continue; + } + + if (!inString && char === ';') { + if (currentStatement.trim()) { + statements.push(currentStatement.trim()); + } + currentStatement = ''; + } else { + currentStatement += char; + } + } + + if (currentStatement.trim()) { + statements.push(currentStatement.trim()); + } + + return statements; +} + async function resetMetrics() { let connection; try { + outputProgress({ + operation: 'Starting metrics reset', + message: 'Connecting to database...' + }); + connection = await mysql.createConnection(dbConfig); await connection.beginTransaction(); + // Verify required core tables exist + outputProgress({ + operation: 'Verifying core tables', + message: 'Checking required tables exist...' 
+ }); + + const [tables] = await connection.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name IN (?) + `, [REQUIRED_CORE_TABLES]); + + const existingCoreTables = tables.map(t => t.table_name); + const missingCoreTables = REQUIRED_CORE_TABLES.filter(t => !existingCoreTables.includes(t)); + + if (missingCoreTables.length > 0) { + throw new Error(`Required core tables missing: ${missingCoreTables.join(', ')}`); + } + + // Verify config tables exist + outputProgress({ + operation: 'Verifying config tables', + message: 'Checking configuration tables exist...' + }); + + const [configTables] = await connection.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name IN (?) + `, [CONFIG_TABLES]); + + const existingConfigTables = configTables.map(t => t.table_name); + const missingConfigTables = CONFIG_TABLES.filter(t => !existingConfigTables.includes(t)); + + if (missingConfigTables.length > 0) { + throw new Error(`Required config tables missing: ${missingConfigTables.join(', ')}`); + } + // Drop all metrics tables + outputProgress({ + operation: 'Dropping metrics tables', + message: 'Removing existing metrics tables...' + }); + for (const table of METRICS_TABLES) { - console.log(`Dropping table: ${table}`); try { await connection.query(`DROP TABLE IF EXISTS ${table}`); - console.log(`Successfully dropped: ${table}`); + outputProgress({ + operation: 'Table dropped', + message: `Successfully dropped table: ${table}` + }); } catch (err) { - console.error(`Error dropping ${table}:`, err.message); + outputProgress({ + status: 'error', + operation: 'Drop table error', + message: `Error dropping table ${table}: ${err.message}` + }); throw err; } } - // Recreate all metrics tables from schema - const schemaSQL = fs.readFileSync(path.resolve(__dirname, '../db/metrics-schema.sql'), 'utf8'); - await connection.query(schemaSQL); - console.log('All metrics tables recreated successfully'); + // Read metrics schema + outputProgress({ + operation: 'Reading schema', + message: 'Loading metrics schema file...' + }); + + const schemaPath = path.resolve(__dirname, '../db/metrics-schema.sql'); + if (!fs.existsSync(schemaPath)) { + throw new Error(`Schema file not found at: ${schemaPath}`); + } + + const schemaSQL = fs.readFileSync(schemaPath, 'utf8'); + const statements = splitSQLStatements(schemaSQL); + + outputProgress({ + operation: 'Schema loaded', + message: `Found ${statements.length} SQL statements to execute` + }); + + // Execute schema statements + for (let i = 0; i < statements.length; i++) { + const stmt = statements[i]; + try { + const [result] = await connection.query(stmt); + + // Check for warnings + const [warnings] = await connection.query('SHOW WARNINGS'); + if (warnings && warnings.length > 0) { + outputProgress({ + status: 'warning', + operation: 'SQL Warning', + message: warnings + }); + } + + outputProgress({ + operation: 'SQL Progress', + message: { + statement: i + 1, + total: statements.length, + preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' 
: ''), + affectedRows: result.affectedRows + } + }); + } catch (sqlError) { + outputProgress({ + status: 'error', + operation: 'SQL Error', + message: { + error: sqlError.message, + sqlState: sqlError.sqlState, + errno: sqlError.errno, + statement: stmt, + statementNumber: i + 1 + } + }); + throw sqlError; + } + } + + // Verify metrics tables were created + outputProgress({ + operation: 'Verifying metrics tables', + message: 'Checking all metrics tables were created...' + }); + + const [metricsTablesResult] = await connection.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name IN (?) + `, [METRICS_TABLES]); + + const existingMetricsTables = metricsTablesResult.map(t => t.table_name); + const missingMetricsTables = METRICS_TABLES.filter(t => !existingMetricsTables.includes(t)); + + if (missingMetricsTables.length > 0) { + throw new Error(`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`); + } await connection.commit(); - console.log('All metrics tables reset successfully'); + + outputProgress({ + status: 'complete', + operation: 'Reset complete', + message: 'All metrics tables have been reset successfully' + }); } catch (error) { + outputProgress({ + status: 'error', + operation: 'Reset failed', + message: error.message, + stack: error.stack + }); + if (connection) { await connection.rollback(); } - console.error('Error resetting metrics:', error); throw error; } finally { if (connection) { diff --git a/inventory-server/scripts/update-csv.js b/inventory-server/scripts/update-csv.js index fc038cd..26e5556 100644 --- a/inventory-server/scripts/update-csv.js +++ b/inventory-server/scripts/update-csv.js @@ -1,167 +1,180 @@ -const fs = require('fs'); const path = require('path'); -const https = require('https'); +const fs = require('fs'); +const axios = require('axios'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./metrics/utils/progress'); + +// Change working directory to script directory +process.chdir(path.dirname(__filename)); + +require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') }); -// Configuration const FILES = [ - { - name: '39f2x83-products.csv', - url: 'https://feeds.acherryontop.com/39f2x83-products.csv' - }, - { - name: '39f2x83-orders.csv', - url: 'https://feeds.acherryontop.com/39f2x83-orders.csv' - }, - { - name: '39f2x83-purchase_orders.csv', - url: 'https://feeds.acherryontop.com/39f2x83-purchase_orders.csv' - } + { + name: '39f2x83-products.csv', + url: process.env.PRODUCTS_CSV_URL + }, + { + name: '39f2x83-orders.csv', + url: process.env.ORDERS_CSV_URL + }, + { + name: '39f2x83-purchase_orders.csv', + url: process.env.PURCHASE_ORDERS_CSV_URL + } ]; -const CSV_DIR = path.join(__dirname, '..', 'csv'); +let isCancelled = false; -// Ensure CSV directory exists -if (!fs.existsSync(CSV_DIR)) { - fs.mkdirSync(CSV_DIR, { recursive: true }); +function cancelUpdate() { + isCancelled = true; + outputProgress({ + status: 'cancelled', + operation: 'CSV update cancelled', + current: 0, + total: FILES.length, + elapsed: null, + remaining: null, + rate: 0 + }); } -// Function to download a file -function downloadFile(url, filePath) { - return new Promise((resolve, reject) => { - const file = fs.createWriteStream(filePath); +async function downloadFile(file, index, startTime) { + if (isCancelled) return; + + const csvDir = path.join(__dirname, '../csv'); + if (!fs.existsSync(csvDir)) { + fs.mkdirSync(csvDir, { recursive: true }); + } + + const 
writer = fs.createWriteStream(path.join(csvDir, file.name)); - https.get(url, response => { - if (response.statusCode !== 200) { - reject(new Error(`Failed to download: ${response.statusCode} ${response.statusMessage}`)); - return; - } + try { + const response = await axios({ + url: file.url, + method: 'GET', + responseType: 'stream' + }); - const totalSize = parseInt(response.headers['content-length'], 10); - let downloadedSize = 0; - let lastProgressUpdate = Date.now(); - const startTime = Date.now(); + const totalLength = response.headers['content-length']; + let downloadedLength = 0; + let lastProgressUpdate = Date.now(); + const PROGRESS_INTERVAL = 1000; // Update progress every second - response.on('data', chunk => { - downloadedSize += chunk.length; - const now = Date.now(); - // Update progress at most every 100ms to avoid console flooding - if (now - lastProgressUpdate > 100) { - const elapsed = (now - startTime) / 1000; - const rate = downloadedSize / elapsed; - const remaining = (totalSize - downloadedSize) / rate; - - console.log(JSON.stringify({ - status: 'running', - operation: `Downloading ${path.basename(filePath)}`, - current: downloadedSize, - total: totalSize, - rate: (rate / 1024 / 1024).toFixed(2), // MB/s - elapsed: formatDuration(elapsed), - remaining: formatDuration(remaining), - percentage: ((downloadedSize / totalSize) * 100).toFixed(1) - })); - lastProgressUpdate = now; - } - }); + response.data.on('data', (chunk) => { + if (isCancelled) { + writer.end(); + return; + } - response.pipe(file); + downloadedLength += chunk.length; + + // Update progress based on time interval + const now = Date.now(); + if (now - lastProgressUpdate >= PROGRESS_INTERVAL) { + const progress = (downloadedLength / totalLength) * 100; + outputProgress({ + status: 'running', + operation: `Downloading ${file.name}`, + current: index + (downloadedLength / totalLength), + total: FILES.length, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, index + (downloadedLength / totalLength), FILES.length), + rate: calculateRate(startTime, index + (downloadedLength / totalLength)), + percentage: progress.toFixed(1), + file_progress: { + name: file.name, + downloaded: downloadedLength, + total: totalLength, + percentage: progress.toFixed(1) + } + }); + lastProgressUpdate = now; + } + }); - file.on('finish', () => { - console.log(JSON.stringify({ - status: 'running', - operation: `Completed ${path.basename(filePath)}`, - current: totalSize, - total: totalSize, - percentage: '100' - })); - file.close(); - resolve(); - }); - }).on('error', error => { - fs.unlink(filePath, () => {}); // Delete the file if download failed - reject(error); - }); + response.data.pipe(writer); - file.on('error', error => { - fs.unlink(filePath, () => {}); // Delete the file if there was an error - reject(error); - }); - }); -} - -// Helper function to format duration -function formatDuration(seconds) { - if (seconds < 60) return `${Math.round(seconds)}s`; - const minutes = Math.floor(seconds / 60); - seconds = Math.round(seconds % 60); - return `${minutes}m ${seconds}s`; + return new Promise((resolve, reject) => { + writer.on('finish', resolve); + writer.on('error', reject); + }); + } catch (error) { + fs.unlinkSync(path.join(csvDir, file.name)); + throw error; + } } // Main function to update all files async function updateFiles() { - console.log(JSON.stringify({ - status: 'running', - operation: 'Starting CSV file updates', - total: FILES.length, - current: 0 - })); - - for (let i = 0; i < 
FILES.length; i++) { - const file = FILES[i]; - const filePath = path.join(CSV_DIR, file.name); + const startTime = Date.now(); + outputProgress({ + status: 'running', + operation: 'Starting CSV update', + current: 0, + total: FILES.length, + elapsed: '0s', + remaining: null, + rate: 0, + percentage: '0' + }); + try { - // Delete existing file if it exists - if (fs.existsSync(filePath)) { - console.log(JSON.stringify({ - status: 'running', - operation: `Removing existing file: ${file.name}`, - current: i, - total: FILES.length, - percentage: ((i / FILES.length) * 100).toFixed(1) - })); - fs.unlinkSync(filePath); - } + for (let i = 0; i < FILES.length; i++) { + if (isCancelled) { + return; + } - // Download new file - console.log(JSON.stringify({ - status: 'running', - operation: `Starting download: ${file.name}`, - current: i, - total: FILES.length, - percentage: ((i / FILES.length) * 100).toFixed(1) - })); - await downloadFile(file.url, filePath); - console.log(JSON.stringify({ - status: 'running', - operation: `Successfully updated ${file.name}`, - current: i + 1, - total: FILES.length, - percentage: (((i + 1) / FILES.length) * 100).toFixed(1) - })); + const file = FILES[i]; + await downloadFile(file, i, startTime); + + outputProgress({ + status: 'running', + operation: 'CSV update in progress', + current: i + 1, + total: FILES.length, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, i + 1, FILES.length), + rate: calculateRate(startTime, i + 1), + percentage: (((i + 1) / FILES.length) * 100).toFixed(1) + }); + } + + outputProgress({ + status: 'complete', + operation: 'CSV update complete', + current: FILES.length, + total: FILES.length, + elapsed: formatElapsedTime(startTime), + remaining: '0s', + rate: calculateRate(startTime, FILES.length), + percentage: '100' + }); } catch (error) { - console.error(JSON.stringify({ - status: 'error', - operation: `Error updating ${file.name}`, - error: error.message - })); - throw error; + outputProgress({ + status: 'error', + operation: 'CSV update failed', + error: error.message, + current: 0, + total: FILES.length, + elapsed: formatElapsedTime(startTime), + remaining: null, + rate: 0 + }); + throw error; } - } - - console.log(JSON.stringify({ - status: 'complete', - operation: 'CSV file update complete', - current: FILES.length, - total: FILES.length, - percentage: '100' - })); } -// Run the update -updateFiles().catch(error => { - console.error(JSON.stringify({ - error: `Update failed: ${error.message}` - })); - process.exit(1); -}); \ No newline at end of file +// Run the update only if this is the main module +if (require.main === module) { + updateFiles().catch((error) => { + console.error('Error updating CSV files:', error); + process.exit(1); + }); +} + +// Export the functions needed by the route +module.exports = { + updateFiles, + cancelUpdate +}; \ No newline at end of file From ebebd37f11f61edeafc0181d813b74c53a67411f Mon Sep 17 00:00:00 2001 From: Matt Date: Tue, 28 Jan 2025 22:12:24 -0500 Subject: [PATCH 02/33] Improve metrics reset script with robust table management and error handling --- inventory-server/scripts/reset-metrics.js | 124 +++++++++++++--------- 1 file changed, 71 insertions(+), 53 deletions(-) diff --git a/inventory-server/scripts/reset-metrics.js b/inventory-server/scripts/reset-metrics.js index f05fd64..10499d0 100644 --- a/inventory-server/scripts/reset-metrics.js +++ b/inventory-server/scripts/reset-metrics.js @@ -21,7 +21,7 @@ function outputProgress(data) { 
console.log(JSON.stringify(data)); } -// Explicitly define all metrics-related tables +// Explicitly define all metrics-related tables in dependency order const METRICS_TABLES = [ 'brand_metrics', 'brand_time_metrics', @@ -40,23 +40,6 @@ const METRICS_TABLES = [ 'vendor_details' ]; -// Config tables that must exist -const CONFIG_TABLES = [ - 'stock_thresholds', - 'lead_time_thresholds', - 'sales_velocity_config', - 'abc_classification_config', - 'safety_stock_config', - 'turnover_config' -]; - -// Core tables that must exist -const REQUIRED_CORE_TABLES = [ - 'products', - 'orders', - 'purchase_orders' -]; - // Split SQL into individual statements function splitSQLStatements(sql) { sql = sql.replace(/\r\n/g, '\n'); @@ -118,55 +101,44 @@ async function resetMetrics() { connection = await mysql.createConnection(dbConfig); await connection.beginTransaction(); - // Verify required core tables exist - outputProgress({ - operation: 'Verifying core tables', - message: 'Checking required tables exist...' - }); - - const [tables] = await connection.query(` + // First verify current state + const [initialTables] = await connection.query(` SELECT table_name FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name IN (?) - `, [REQUIRED_CORE_TABLES]); + `, [METRICS_TABLES]); - const existingCoreTables = tables.map(t => t.table_name); - const missingCoreTables = REQUIRED_CORE_TABLES.filter(t => !existingCoreTables.includes(t)); - - if (missingCoreTables.length > 0) { - throw new Error(`Required core tables missing: ${missingCoreTables.join(', ')}`); - } - - // Verify config tables exist outputProgress({ - operation: 'Verifying config tables', - message: 'Checking configuration tables exist...' + operation: 'Initial state', + message: `Found ${initialTables.length} existing metrics tables: ${initialTables.map(t => t.table_name).join(', ')}` }); - const [configTables] = await connection.query(` - SELECT table_name - FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name IN (?) - `, [CONFIG_TABLES]); + // Disable foreign key checks at the start + await connection.query('SET FOREIGN_KEY_CHECKS = 0'); - const existingConfigTables = configTables.map(t => t.table_name); - const missingConfigTables = CONFIG_TABLES.filter(t => !existingConfigTables.includes(t)); - - if (missingConfigTables.length > 0) { - throw new Error(`Required config tables missing: ${missingConfigTables.join(', ')}`); - } - - // Drop all metrics tables + // Drop all metrics tables in reverse order to handle dependencies outputProgress({ operation: 'Dropping metrics tables', message: 'Removing existing metrics tables...' }); - for (const table of METRICS_TABLES) { + for (const table of [...METRICS_TABLES].reverse()) { try { await connection.query(`DROP TABLE IF EXISTS ${table}`); + + // Verify the table was actually dropped + const [checkDrop] = await connection.query(` + SELECT COUNT(*) as count + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name = ? + `, [table]); + + if (checkDrop[0].count > 0) { + throw new Error(`Failed to drop table ${table} - table still exists`); + } + outputProgress({ operation: 'Table dropped', message: `Successfully dropped table: ${table}` @@ -181,6 +153,18 @@ async function resetMetrics() { } } + // Verify all tables were dropped + const [afterDrop] = await connection.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name IN (?) 
+ `, [METRICS_TABLES]); + + if (afterDrop.length > 0) { + throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.map(t => t.table_name).join(', ')}`); + } + // Read metrics schema outputProgress({ operation: 'Reading schema', @@ -212,9 +196,29 @@ async function resetMetrics() { outputProgress({ status: 'warning', operation: 'SQL Warning', - message: warnings + message: { + statement: i + 1, + warnings: warnings + } }); } + + // If this is a CREATE TABLE statement, verify the table was created + if (stmt.trim().toLowerCase().startsWith('create table')) { + const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1]; + if (tableName) { + const [checkCreate] = await connection.query(` + SELECT COUNT(*) as count + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name = ? + `, [tableName]); + + if (checkCreate[0].count === 0) { + throw new Error(`Failed to create table ${tableName} - table does not exist after CREATE statement`); + } + } + } outputProgress({ operation: 'SQL Progress', @@ -241,6 +245,9 @@ async function resetMetrics() { } } + // Re-enable foreign key checks after all tables are created + await connection.query('SET FOREIGN_KEY_CHECKS = 1'); + // Verify metrics tables were created outputProgress({ operation: 'Verifying metrics tables', @@ -248,12 +255,19 @@ async function resetMetrics() { }); const [metricsTablesResult] = await connection.query(` - SELECT table_name + SELECT table_name, table_rows, create_time FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name IN (?) `, [METRICS_TABLES]); + outputProgress({ + operation: 'Tables found', + message: `Found ${metricsTablesResult.length} tables: ${metricsTablesResult.map(t => + `${t.table_name} (created: ${t.create_time})` + ).join(', ')}` + }); + const existingMetricsTables = metricsTablesResult.map(t => t.table_name); const missingMetricsTables = METRICS_TABLES.filter(t => !existingMetricsTables.includes(t)); @@ -278,10 +292,14 @@ async function resetMetrics() { if (connection) { await connection.rollback(); + // Make sure to re-enable foreign key checks even if there's an error + await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {}); } throw error; } finally { if (connection) { + // One final attempt to ensure foreign key checks are enabled + await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {}); await connection.end(); } } From d56f1e143761efa08b7ab220b3083b907b4c1b43 Mon Sep 17 00:00:00 2001 From: Matt Date: Tue, 28 Jan 2025 23:57:09 -0500 Subject: [PATCH 03/33] Move sales seasonality table to config schema and finish up standardizing scripts --- inventory-server/db/config-schema.sql | 17 +++++++ inventory-server/db/metrics-schema.sql | 42 ++++++++-------- inventory-server/scripts/reset-db.js | 1 + inventory-server/scripts/reset-metrics.js | 58 ++++++++++++++--------- 4 files changed, 74 insertions(+), 44 deletions(-) diff --git a/inventory-server/db/config-schema.sql b/inventory-server/db/config-schema.sql index a095763..a2f7639 100644 --- a/inventory-server/db/config-schema.sql +++ b/inventory-server/db/config-schema.sql @@ -88,6 +88,16 @@ CREATE TABLE IF NOT EXISTS turnover_config ( UNIQUE KEY unique_category_vendor (category_id, vendor) ); +-- Create table for sales seasonality factors +CREATE TABLE IF NOT EXISTS sales_seasonality ( + month INT NOT NULL, + seasonality_factor DECIMAL(5,3) DEFAULT 0, + last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (month), + CHECK 
(month BETWEEN 1 AND 12), + CHECK (seasonality_factor BETWEEN -1.0 AND 1.0) +); + -- Insert default global thresholds if not exists INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days) VALUES (1, NULL, NULL, 7, 14, 90) @@ -129,6 +139,13 @@ ON DUPLICATE KEY UPDATE calculation_period_days = VALUES(calculation_period_days), target_rate = VALUES(target_rate); +-- Insert default seasonality factors (neutral) +INSERT INTO sales_seasonality (month, seasonality_factor) +VALUES + (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), + (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0) +ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP; + -- View to show thresholds with category names CREATE OR REPLACE VIEW stock_thresholds_view AS SELECT diff --git a/inventory-server/db/metrics-schema.sql b/inventory-server/db/metrics-schema.sql index 818980d..ffa9984 100644 --- a/inventory-server/db/metrics-schema.sql +++ b/inventory-server/db/metrics-schema.sql @@ -287,26 +287,6 @@ CREATE TABLE IF NOT EXISTS category_forecasts ( INDEX idx_category_forecast_last_calculated (last_calculated_at) ); --- Create table for sales seasonality factors -CREATE TABLE IF NOT EXISTS sales_seasonality ( - month INT NOT NULL, - seasonality_factor DECIMAL(5,3) DEFAULT 0, - last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (month), - CHECK (month BETWEEN 1 AND 12), - CHECK (seasonality_factor BETWEEN -1.0 AND 1.0) -); - --- Insert default seasonality factors (neutral) -INSERT INTO sales_seasonality (month, seasonality_factor) -VALUES - (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), - (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0) -ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP; - --- Re-enable foreign key checks -SET FOREIGN_KEY_CHECKS = 1; - -- Create view for inventory health CREATE OR REPLACE VIEW inventory_health AS WITH product_thresholds AS ( @@ -427,4 +407,24 @@ FROM LEFT JOIN categories p ON c.parent_id = p.cat_id LEFT JOIN - category_metrics cm ON c.cat_id = cm.category_id; \ No newline at end of file + category_metrics cm ON c.cat_id = cm.category_id; + +-- Re-enable foreign key checks +SET FOREIGN_KEY_CHECKS = 1; + +-- Create table for sales seasonality factors +CREATE TABLE IF NOT EXISTS sales_seasonality ( + month INT NOT NULL, + seasonality_factor DECIMAL(5,3) DEFAULT 0, + last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (month), + CHECK (month BETWEEN 1 AND 12), + CHECK (seasonality_factor BETWEEN -1.0 AND 1.0) +); + +-- Insert default seasonality factors (neutral) +INSERT INTO sales_seasonality (month, seasonality_factor) +VALUES + (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), + (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0) +ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP; \ No newline at end of file diff --git a/inventory-server/scripts/reset-db.js b/inventory-server/scripts/reset-db.js index abe285b..297284b 100644 --- a/inventory-server/scripts/reset-db.js +++ b/inventory-server/scripts/reset-db.js @@ -40,6 +40,7 @@ const CONFIG_TABLES = [ 'sales_velocity_config', 'abc_classification_config', 'safety_stock_config', + 'sales_seasonality', 'turnover_config' ]; diff --git a/inventory-server/scripts/reset-metrics.js b/inventory-server/scripts/reset-metrics.js index 10499d0..fcbb9a9 100644 --- a/inventory-server/scripts/reset-metrics.js +++ b/inventory-server/scripts/reset-metrics.js @@ -32,7 +32,6 @@ const METRICS_TABLES = [ 'product_metrics', 'product_time_aggregates', 'sales_forecasts', - 
'sales_seasonality', 'temp_purchase_metrics', 'temp_sales_metrics', 'vendor_metrics', //before vendor_details for foreign key @@ -103,15 +102,15 @@ async function resetMetrics() { // First verify current state const [initialTables] = await connection.query(` - SELECT table_name + SELECT TABLE_NAME as name FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name IN (?) + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME IN (?) `, [METRICS_TABLES]); outputProgress({ operation: 'Initial state', - message: `Found ${initialTables.length} existing metrics tables: ${initialTables.map(t => t.table_name).join(', ')}` + message: `Found ${initialTables.length} existing metrics tables: ${initialTables.map(t => t.name).join(', ')}` }); // Disable foreign key checks at the start @@ -131,8 +130,8 @@ async function resetMetrics() { const [checkDrop] = await connection.query(` SELECT COUNT(*) as count FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name = ? + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = ? `, [table]); if (checkDrop[0].count > 0) { @@ -155,14 +154,14 @@ async function resetMetrics() { // Verify all tables were dropped const [afterDrop] = await connection.query(` - SELECT table_name + SELECT TABLE_NAME as name FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name IN (?) + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME IN (?) `, [METRICS_TABLES]); if (afterDrop.length > 0) { - throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.map(t => t.table_name).join(', ')}`); + throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.map(t => t.name).join(', ')}`); } // Read metrics schema @@ -188,7 +187,7 @@ async function resetMetrics() { for (let i = 0; i < statements.length; i++) { const stmt = statements[i]; try { - const [result] = await connection.query(stmt); + await connection.query(stmt); // Check for warnings const [warnings] = await connection.query('SHOW WARNINGS'); @@ -208,15 +207,20 @@ async function resetMetrics() { const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1]; if (tableName) { const [checkCreate] = await connection.query(` - SELECT COUNT(*) as count + SELECT TABLE_NAME as name, CREATE_TIME as created FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name = ? + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = ? `, [tableName]); - if (checkCreate[0].count === 0) { + if (checkCreate.length === 0) { throw new Error(`Failed to create table ${tableName} - table does not exist after CREATE statement`); } + + outputProgress({ + operation: 'Table created', + message: `Successfully created table: ${tableName} at ${checkCreate[0].created}` + }); } } @@ -225,8 +229,7 @@ async function resetMetrics() { message: { statement: i + 1, total: statements.length, - preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''), - affectedRows: result.affectedRows + preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '') } }); } catch (sqlError) { @@ -255,23 +258,32 @@ async function resetMetrics() { }); const [metricsTablesResult] = await connection.query(` - SELECT table_name, table_rows, create_time + SELECT + TABLE_NAME as name, + TABLE_ROWS as \`rows\`, + CREATE_TIME as created FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name IN (?) + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME IN (?) 
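        -- TABLE_NAME is aliased because MySQL 8 returns information_schema
        -- column names upper-case, so the previous t.table_name reads were undefined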
`, [METRICS_TABLES]); outputProgress({ operation: 'Tables found', message: `Found ${metricsTablesResult.length} tables: ${metricsTablesResult.map(t => - `${t.table_name} (created: ${t.create_time})` + `${t.name} (created: ${t.created})` ).join(', ')}` }); - const existingMetricsTables = metricsTablesResult.map(t => t.table_name); + const existingMetricsTables = metricsTablesResult.map(t => t.name); const missingMetricsTables = METRICS_TABLES.filter(t => !existingMetricsTables.includes(t)); if (missingMetricsTables.length > 0) { + // Do one final check of the actual tables + const [finalCheck] = await connection.query('SHOW TABLES'); + outputProgress({ + operation: 'Final table check', + message: `All database tables: ${finalCheck.map(t => Object.values(t)[0]).join(', ')}` + }); throw new Error(`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`); } From b57854976385dc032d22830f9dcb58c4fcfdd49f Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 00:04:47 -0500 Subject: [PATCH 04/33] Remove CSV scripts from frontend --- .../components/settings/DataManagement.tsx | 374 +++++------------- 1 file changed, 109 insertions(+), 265 deletions(-) diff --git a/inventory/src/components/settings/DataManagement.tsx b/inventory/src/components/settings/DataManagement.tsx index a76ccd6..4ba827b 100644 --- a/inventory/src/components/settings/DataManagement.tsx +++ b/inventory/src/components/settings/DataManagement.tsx @@ -43,27 +43,18 @@ interface ImportLimits { } export function DataManagement() { - const [isUpdating, setIsUpdating] = useState(false); - const [isImportingCSV, setIsImportingCSV] = useState(false); const [isImportingProd, setIsImportingProd] = useState(false); const [isResetting, setIsResetting] = useState(false); - const [updateProgress, setUpdateProgress] = useState(null); const [importProgress, setImportProgress] = useState(null); const [purchaseOrdersProgress, setPurchaseOrdersProgress] = useState(null); const [resetProgress, setResetProgress] = useState(null); const [eventSource, setEventSource] = useState(null); - const [] = useState({ - products: 0, - orders: 0, - purchaseOrders: 0 - }); const [isResettingMetrics, setIsResettingMetrics] = useState(false); const [resetMetricsProgress, setResetMetricsProgress] = useState(null); const [isCalculatingMetrics, setIsCalculatingMetrics] = useState(false); const [metricsProgress, setMetricsProgress] = useState(null); // Add states for completed operations - const [lastUpdateStatus, setLastUpdateStatus] = useState(null); const [lastImportStatus, setLastImportStatus] = useState(null); const [lastResetStatus, setLastResetStatus] = useState(null); const [lastMetricsStatus, setLastMetricsStatus] = useState(null); @@ -77,7 +68,7 @@ export function DataManagement() { // Helper to check if any operation is running const isAnyOperationRunning = () => { - return isUpdating || isImportingCSV || isImportingProd || isTestingConnection || isResetting || isCalculatingMetrics; + return isImportingProd || isTestingConnection || isResetting || isCalculatingMetrics || isResettingMetrics; }; // Helper function to get progress bar color based on status @@ -132,7 +123,7 @@ export function DataManagement() { }; // Helper function to render progress - const renderProgress = (progress: any, operationType: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => { + const renderProgress = (progress: any, operationType: 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => { if (!progress) return null; const status = 
progress.status?.toLowerCase(); @@ -218,7 +209,7 @@ export function DataManagement() { }; // Helper to connect to event source - const connectToEventSource = (type: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => { + const connectToEventSource = (type: 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => { console.log(`Setting up EventSource for ${type}...`); // Clean up existing connection first @@ -257,8 +248,7 @@ export function DataManagement() { // Try to reconnect via status check if the operation might still be running if ( (type === 'calculate-metrics' && isCalculatingMetrics) || - (type === 'import' && isImportingCSV) || - (type === 'update' && isUpdating) || + (type === 'import' && isImportingProd) || (type === 'reset' && isResetting) || (type === 'reset-metrics' && isResettingMetrics) ) { @@ -295,7 +285,7 @@ export function DataManagement() { }; const handleProgressUpdate = ( - type: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics', + type: 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics', progressData: any, source: EventSource ) => { @@ -342,7 +332,6 @@ export function DataManagement() { if (!otherProgress || otherProgress.status === 'complete' || otherProgress.status === 'error' || otherProgress.status === 'cancelled') { source.close(); setEventSource(null); - setIsImportingCSV(false); setIsImportingProd(false); // Show appropriate toast based on final status @@ -374,12 +363,6 @@ export function DataManagement() { let operationName; switch (type) { - case 'update': - setProgress = setUpdateProgress; - setLastStatus = setLastUpdateStatus; - setIsRunning = setIsUpdating; - operationName = 'Update'; - break; case 'reset': setProgress = setResetProgress; setLastStatus = setLastResetStatus; @@ -435,7 +418,7 @@ export function DataManagement() { } }; - const handleCancel = async (operation: 'update' | 'import' | 'reset' | 'calculate-metrics') => { + const handleCancel = async (operation: 'import' | 'reset' | 'calculate-metrics') => { try { const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${operation}`, { method: 'POST', @@ -448,13 +431,9 @@ export function DataManagement() { // Reset the appropriate state if (operation === 'import') { - setIsImportingCSV(false); setIsImportingProd(false); setImportProgress(null); setPurchaseOrdersProgress(null); - } else if (operation === 'update') { - setIsUpdating(false); - setUpdateProgress(null); } // ... other operation states ... 
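      // Server side, this endpoint is expected to flip the matching script's
      // cancellation flag (e.g. cancelImport() in import-from-prod.js, or the
      // cancelUpdate exported by update-csv.js) so the running loop exits at
      // its next isCancelled check. A plausible route sketch — Express and the
      // handler wiring are assumptions; only the script exports appear in
      // these patches:
      //
      //   const { cancelImport } = require('../scripts/import-from-prod');
      //   router.post('/csv/cancel', (req, res) => {
      //     const handlers = { import: cancelImport /* , reset: ..., 'calculate-metrics': ... */ };
      //     const cancel = handlers[req.query.operation];
      //     if (!cancel) return res.status(400).json({ error: 'Unknown operation' });
      //     cancel();
      //     res.json({ status: 'cancelling' });
      //   });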
} catch (error) { @@ -511,7 +490,7 @@ export function DataManagement() { if (operation.includes('import')) { console.log('Import is running'); - setIsImportingCSV(true); + setIsImportingProd(true); if (operation.includes('purchase orders')) { setPurchaseOrdersProgress(importData.progress || importData); } else { @@ -520,13 +499,6 @@ export function DataManagement() { if (!eventSource) { connectToEventSource('import'); } - } else if (operation.includes('update')) { - console.log('Update is running'); - setIsUpdating(true); - setUpdateProgress(importData.progress || importData); - if (!eventSource) { - connectToEventSource('update'); - } } else if (operation.includes('reset')) { if (operation.includes('metrics')) { console.log('Reset metrics is running'); @@ -549,8 +521,6 @@ export function DataManagement() { const operation = (importData.lastStatus?.operation || '').toLowerCase(); if (operation.includes('import')) { setLastImportStatus(importData.lastStatus); - } else if (operation.includes('update')) { - setLastUpdateStatus(importData.lastStatus); } else if (operation.includes('reset')) { if (operation.includes('metrics')) { setLastResetMetricsStatus(importData.lastStatus); @@ -569,39 +539,30 @@ export function DataManagement() { checkStatus(); }, []); - const handleUpdateCSV = async () => { - setIsUpdating(true); - setUpdateProgress({ status: 'running', operation: 'Starting CSV update' }); - + const handleTestConnection = async () => { + setIsTestingConnection(true); try { - connectToEventSource('update'); - - const response = await fetch(`${config.apiUrl}/csv/update`, { - method: 'POST', + const response = await fetch(`${config.apiUrl}/test-prod-connection`, { credentials: 'include' }); - if (!response.ok) { - const data = await response.json().catch(() => ({})); - if (data.error === 'Import already in progress') { - return; - } - throw new Error(data.error || `Failed to update CSV files: ${response.status} ${response.statusText}`); + const data = await response.json(); + + if (response.ok) { + toast.success(`Successfully connected to production database. Found ${data.productCount.toLocaleString()} products.`); + } else { + throw new Error(data.error || 'Failed to connect to production database'); } } catch (error) { - if (eventSource) { - eventSource.close(); - setEventSource(null); - } - setIsUpdating(false); - setUpdateProgress(null); - toast.error(`CSV update failed: ${error instanceof Error ? error.message : 'Unknown error'}`); + toast.error(`Connection test failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`); + } finally { + setIsTestingConnection(false); } }; - const handleImportCSV = async () => { - setIsImportingCSV(true); - setImportProgress({ status: 'running', operation: 'Starting CSV import' }); + const handleImportFromProd = async () => { + setIsImportingProd(true); + setImportProgress({ status: 'running', operation: 'Starting import from production' }); try { connectToEventSource('import'); @@ -620,20 +581,93 @@ export function DataManagement() { } // Start new import - const response = await fetch(`${config.apiUrl}/csv/import`, { + const response = await fetch(`${config.apiUrl}/csv/import-from-prod`, { method: 'POST', credentials: 'include' + }).catch(error => { + console.log('Import request error (may be timeout):', error); + return null; }); - const data = await response.json(); - if (!response.ok) { - throw new Error(data.error || 'Failed to start CSV import'); + // If we got no response but have progress, assume it's still running + if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) { + console.log('No response but import appears to be running, continuing...'); + return; + } + + // If we got a response, check if it indicates an actual error + if (response) { + const data = await response.json().catch(() => null); + if (!response.ok && data?.error && !data.error.includes('already in progress')) { + throw new Error(data.error || 'Failed to start production import'); + } } } catch (error) { - toast.error(`CSV import failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - setIsImportingCSV(false); - setImportProgress(null); - setPurchaseOrdersProgress(null); + // Only handle actual errors, not timeouts or connection issues + if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) { + toast.error(`Production import failed: ${error.message}`); + setIsImportingProd(false); + setImportProgress(null); + setPurchaseOrdersProgress(null); + } else { + console.log('Ignoring network error, import may still be running:', error); + } + } + }; + + const handleCalculateMetrics = async () => { + setIsCalculatingMetrics(true); + setMetricsProgress({ status: 'running', operation: 'Starting metrics calculation' }); + + try { + connectToEventSource('calculate-metrics'); + + // First check if metrics calculation is already running + const statusResponse = await fetch(`${config.apiUrl}/csv/calculate-metrics/status`, { + credentials: 'include' + }).catch(() => null); + + if (statusResponse) { + const statusData = await statusResponse.json().catch(() => null); + if (statusData?.active && statusData?.progress) { + console.log('Metrics calculation already running, connecting to existing process'); + setMetricsProgress(statusData.progress); + return; + } + } + + // Start new metrics calculation + const response = await fetch(`${config.apiUrl}/csv/calculate-metrics`, { + method: 'POST', + credentials: 'include' + }).catch(error => { + // Ignore network errors as the calculation might still be running + console.log('Metrics calculation request error (may be timeout):', error); + return null; + }); + + // If we got no response but have progress, assume it's still running + if (!response && metricsProgress?.current) { + console.log('No response but metrics calculation appears to be running, continuing...'); + return; + } + + // If we got a response, check if it indicates an actual error + if (response) { + const data = await response.json().catch(() => null); + if 
(!response.ok && data?.error && !data.error.includes('already in progress')) { + throw new Error(data.error || 'Failed to calculate metrics'); + } + } + } catch (error) { + // Only handle actual errors, not timeouts or connection issues + if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) { + toast.error(`Metrics calculation failed: ${error.message}`); + setIsCalculatingMetrics(false); + setMetricsProgress(null); + } else { + console.log('Ignoring network error, metrics calculation may still be running:', error); + } } }; @@ -726,138 +760,6 @@ export function DataManagement() { } }; - const handleCalculateMetrics = async () => { - setIsCalculatingMetrics(true); - setMetricsProgress({ status: 'running', operation: 'Starting metrics calculation' }); - - try { - connectToEventSource('calculate-metrics'); - - // First check if metrics calculation is already running - const statusResponse = await fetch(`${config.apiUrl}/csv/calculate-metrics/status`, { - credentials: 'include' - }).catch(() => null); - - if (statusResponse) { - const statusData = await statusResponse.json().catch(() => null); - if (statusData?.active && statusData?.progress) { - console.log('Metrics calculation already running, connecting to existing process'); - setMetricsProgress(statusData.progress); - return; - } - } - - // Start new metrics calculation - const response = await fetch(`${config.apiUrl}/csv/calculate-metrics`, { - method: 'POST', - credentials: 'include' - }).catch(error => { - // Ignore network errors as the calculation might still be running - console.log('Metrics calculation request error (may be timeout):', error); - return null; - }); - - // If we got no response but have progress, assume it's still running - if (!response && metricsProgress?.current) { - console.log('No response but metrics calculation appears to be running, continuing...'); - return; - } - - // If we got a response, check if it indicates an actual error - if (response) { - const data = await response.json().catch(() => null); - if (!response.ok && data?.error && !data.error.includes('already in progress')) { - throw new Error(data.error || 'Failed to calculate metrics'); - } - } - } catch (error) { - // Only handle actual errors, not timeouts or connection issues - if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) { - toast.error(`Metrics calculation failed: ${error.message}`); - setIsCalculatingMetrics(false); - setMetricsProgress(null); - } else { - console.log('Ignoring network error, metrics calculation may still be running:', error); - } - } - }; - - const handleTestConnection = async () => { - setIsTestingConnection(true); - try { - const response = await fetch(`${config.apiUrl}/test-prod-connection`, { - credentials: 'include' - }); - - const data = await response.json(); - - if (response.ok) { - toast.success(`Successfully connected to production database. Found ${data.productCount.toLocaleString()} products.`); - } else { - throw new Error(data.error || 'Failed to connect to production database'); - } - } catch (error) { - toast.error(`Connection test failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`); - } finally { - setIsTestingConnection(false); - } - }; - - const handleImportFromProd = async () => { - setIsImportingProd(true); - setImportProgress({ status: 'running', operation: 'Starting import from production' }); - - try { - connectToEventSource('import'); - - // First check if import is already running - const statusResponse = await fetch(`${config.apiUrl}/csv/status`, { - credentials: 'include' - }).catch(() => null); - - if (statusResponse) { - const statusData = await statusResponse.json().catch(() => null); - if (statusData?.active && statusData?.progress) { - console.log('Import already running, connecting to existing process'); - return; - } - } - - // Start new import - const response = await fetch(`${config.apiUrl}/csv/import-from-prod`, { - method: 'POST', - credentials: 'include' - }).catch(error => { - console.log('Import request error (may be timeout):', error); - return null; - }); - - // If we got no response but have progress, assume it's still running - if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) { - console.log('No response but import appears to be running, continuing...'); - return; - } - - // If we got a response, check if it indicates an actual error - if (response) { - const data = await response.json().catch(() => null); - if (!response.ok && data?.error && !data.error.includes('already in progress')) { - throw new Error(data.error || 'Failed to start production import'); - } - } - } catch (error) { - // Only handle actual errors, not timeouts or connection issues - if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) { - toast.error(`Production import failed: ${error.message}`); - setIsImportingProd(false); - setImportProgress(null); - setPurchaseOrdersProgress(null); - } else { - console.log('Ignoring network error, import may still be running:', error); - } - } - }; - return (
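    // One card per remaining operation (connection test, production import,
    // and the reset/metrics operations); each renders its progress through
    // renderProgress(...).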
       {/* Test Production Connection Card */}
       [... unchanged card markup stripped ...]

-      {/* Update CSV Card */}
-      [... removed card markup stripped: title "Update CSV Files",
-           description "Download the latest CSV data files", update button ...]
-      {isUpdating && (
-        [... cancel button ...]
-      )}
-      {(isUpdating || lastUpdateStatus) && renderProgress(updateProgress || lastUpdateStatus, 'update')}

       {/* Import Data Card */}
       [... card markup stripped ...]
       Import Data
-      Import data from CSV files or production database
+      Import data from production database
-      {(isImportingCSV || isImportingProd) && (
+      {isImportingProd && (
         [... cancel button ...]
-      {(isImportingCSV || isImportingProd || lastImportStatus) && (
+      {(isImportingProd || lastImportStatus) && (
{renderProgress(importProgress || lastImportStatus, 'import')} {renderProgress(purchaseOrdersProgress, 'import')} From 814d5d1a841e49af3a87ee9435435aaf2cdda260 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 00:18:04 -0500 Subject: [PATCH 05/33] Break up prod import script into pieces and move csv scripts into folder --- inventory-server/scripts/import-from-prod.js | 1442 +---------------- inventory-server/scripts/import/categories.js | 168 ++ inventory-server/scripts/import/orders.js | 235 +++ inventory-server/scripts/import/products.js | 561 +++++++ .../scripts/import/purchase-orders.js | 290 ++++ inventory-server/scripts/import/utils.js | 102 ++ .../scripts/{ => old_csv}/import-csv.js | 2 +- .../scripts/{ => old_csv}/update-csv.js | 2 +- 8 files changed, 1436 insertions(+), 1366 deletions(-) create mode 100644 inventory-server/scripts/import/categories.js create mode 100644 inventory-server/scripts/import/orders.js create mode 100644 inventory-server/scripts/import/products.js create mode 100644 inventory-server/scripts/import/purchase-orders.js create mode 100644 inventory-server/scripts/import/utils.js rename inventory-server/scripts/{ => old_csv}/import-csv.js (99%) rename inventory-server/scripts/{ => old_csv}/update-csv.js (98%) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index a4226d3..94bb506 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -1,8 +1,11 @@ const mysql = require("mysql2/promise"); -const { Client } = require("ssh2"); const dotenv = require("dotenv"); const path = require("path"); -const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./metrics/utils/progress'); +const { setupSshTunnel, outputProgress, formatElapsedTime, prodDbConfig, localDbConfig } = require('./import/utils'); +const importCategories = require('./import/categories'); +const { importProducts } = require('./import/products'); +const importOrders = require('./import/orders'); +const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); @@ -12,42 +15,6 @@ const IMPORT_PRODUCTS = true; const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; -// SSH configuration -const sshConfig = { - host: process.env.PROD_SSH_HOST, - port: process.env.PROD_SSH_PORT || 22, - username: process.env.PROD_SSH_USER, - privateKey: process.env.PROD_SSH_KEY_PATH - ? 
require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH) - : undefined, -}; - -// Production database configuration -const prodDbConfig = { - host: process.env.PROD_DB_HOST || "localhost", - user: process.env.PROD_DB_USER, - password: process.env.PROD_DB_PASSWORD, - database: process.env.PROD_DB_NAME, - port: process.env.PROD_DB_PORT || 3306, -}; - -// Local database configuration -const localDbConfig = { - host: process.env.DB_HOST, - user: process.env.DB_USER, - password: process.env.DB_PASSWORD, - database: process.env.DB_NAME, - multipleStatements: true, - waitForConnections: true, - connectionLimit: 10, - queueLimit: 0, - namedPlaceholders: true, -}; - -// Constants -const BATCH_SIZE = 1000; -const PROGRESS_INTERVAL = 1000; // Update progress every second - let isImportCancelled = false; // Add cancel function @@ -55,7 +22,8 @@ function cancelImport() { isImportCancelled = true; outputProgress({ status: 'cancelled', - operation: 'Import cancelled', + operation: 'Import process', + message: 'Import cancelled by user', current: 0, total: 0, elapsed: null, @@ -64,1329 +32,7 @@ function cancelImport() { }); } -// Helper function to update progress with time estimate -function updateProgress(current, total, operation, startTime) { - outputProgress({ - status: 'running', - operation, - current, - total, - rate: calculateRate(startTime, current), - elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, current, total), - percentage: ((current / total) * 100).toFixed(1) - }); -} - -async function setupSshTunnel() { - return new Promise((resolve, reject) => { - const ssh = new Client(); - - ssh.on('error', (err) => { - console.error('SSH connection error:', err); - // Don't reject here, just log the error - }); - - ssh.on('end', () => { - console.log('SSH connection ended normally'); - }); - - ssh.on('close', () => { - console.log('SSH connection closed'); - }); - - ssh - .on("ready", () => { - ssh.forwardOut( - "127.0.0.1", - 0, - prodDbConfig.host, - prodDbConfig.port, - async (err, stream) => { - if (err) reject(err); - resolve({ ssh, stream }); - } - ); - }) - .connect(sshConfig); - }); -} - -async function importCategories(prodConnection, localConnection) { - outputProgress({ - operation: "Starting categories import", - status: "running", - }); - - const startTime = Date.now(); - const typeOrder = [10, 20, 11, 21, 12, 13]; - let totalInserted = 0; - let skippedCategories = []; - - try { - // Process each type in order with its own query - for (const type of typeOrder) { - const [categories] = await prodConnection.query( - ` - SELECT - pc.cat_id, - pc.name, - pc.type, - CASE - WHEN pc.type IN (10, 20) THEN NULL -- Top level categories should have no parent - WHEN pc.master_cat_id IS NULL THEN NULL - ELSE pc.master_cat_id - END as parent_id, - pc.combined_name as description - FROM product_categories pc - WHERE pc.type = ? 
- ORDER BY pc.cat_id - `, - [type] - ); - - if (categories.length === 0) continue; - - console.log(`\nProcessing ${categories.length} type ${type} categories`); - if (type === 10) { - console.log("Type 10 categories:", JSON.stringify(categories, null, 2)); - } - - // For types that can have parents (11, 21, 12, 13), verify parent existence - let categoriesToInsert = categories; - if (![10, 20].includes(type)) { - // Get all parent IDs - const parentIds = [ - ...new Set( - categories.map((c) => c.parent_id).filter((id) => id !== null) - ), - ]; - - // Check which parents exist - const [existingParents] = await localConnection.query( - "SELECT cat_id FROM categories WHERE cat_id IN (?)", - [parentIds] - ); - const existingParentIds = new Set(existingParents.map((p) => p.cat_id)); - - // Filter categories and track skipped ones - categoriesToInsert = categories.filter( - (cat) => - cat.parent_id === null || existingParentIds.has(cat.parent_id) - ); - const invalidCategories = categories.filter( - (cat) => - cat.parent_id !== null && !existingParentIds.has(cat.parent_id) - ); - - if (invalidCategories.length > 0) { - const skippedInfo = invalidCategories.map((c) => ({ - id: c.cat_id, - name: c.name, - type: c.type, - missing_parent: c.parent_id, - })); - skippedCategories.push(...skippedInfo); - - console.log( - "\nSkipping categories with missing parents:", - invalidCategories - .map( - (c) => - `${c.cat_id} - ${c.name} (missing parent: ${c.parent_id})` - ) - .join("\n") - ); - } - - if (categoriesToInsert.length === 0) { - console.log( - `No valid categories of type ${type} to insert - all had missing parents` - ); - continue; - } - } - - console.log( - `Inserting ${categoriesToInsert.length} type ${type} categories` - ); - - const placeholders = categoriesToInsert - .map(() => "(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)") - .join(","); - - const values = categoriesToInsert.flatMap((cat) => [ - cat.cat_id, - cat.name, - cat.type, - cat.parent_id, - cat.description, - "active", - ]); - - // Insert categories and create relationships in one query to avoid race conditions - await localConnection.query( - ` - INSERT INTO categories (cat_id, name, type, parent_id, description, status, created_at, updated_at) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - name = VALUES(name), - type = VALUES(type), - parent_id = VALUES(parent_id), - description = VALUES(description), - status = VALUES(status), - updated_at = CURRENT_TIMESTAMP - `, - values - ); - - totalInserted += categoriesToInsert.length; - updateProgress( - totalInserted, - totalInserted, - "Categories import", - startTime - ); - } - - // After all imports, if we skipped any categories, throw an error - if (skippedCategories.length > 0) { - const error = new Error( - "Categories import completed with errors - some categories were skipped due to missing parents" - ); - error.skippedCategories = skippedCategories; - throw error; - } - - outputProgress({ - status: "complete", - operation: "Categories import completed", - current: totalInserted, - total: totalInserted, - duration: formatElapsedTime((Date.now() - startTime) / 1000), - }); - } catch (error) { - console.error("Error importing categories:", error); - if (error.skippedCategories) { - console.error( - "Skipped categories:", - JSON.stringify(error.skippedCategories, null, 2) - ); - } - throw error; - } -} - -async function importProducts(prodConnection, localConnection) { - outputProgress({ - operation: "Starting products import - Getting schema", - status: "running", 
- }); - - const startTime = Date.now(); - - try { - // First get the column names from the table structure - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'products' - ORDER BY ORDINAL_POSITION - `); - - const columnNames = columns.map((col) => col.COLUMN_NAME); - - // Get total count first for progress indication - outputProgress({ - operation: "Starting products import - Getting total count", - status: "running", - }); - - const [countResult] = await prodConnection.query(` - SELECT COUNT(*) as total - FROM products p - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - WHERE pls.date_sold >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.datein >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR pls.date_sold IS NULL - `); - const totalProducts = countResult[0].total; - - outputProgress({ - operation: `Starting products import - Fetching ${totalProducts} products from production`, - status: "running", - }); - - // Get products from production with optimized query - const [rows] = await prodConnection.query(` - SELECT - p.pid, - p.description AS title, - p.notes AS description, - p.itemnumber AS SKU, - p.date_created, - p.datein AS first_received, - p.location, - COALESCE(si.available_local, 0) - COALESCE( - (SELECT SUM(oi.qty_ordered - oi.qty_placed) - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid = p.pid - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0), 0) AS stock_quantity, - ci.onpreorder AS preorder_count, - pnb.inventory AS notions_inv_count, - COALESCE(pcp.price_each, 0) as price, - COALESCE(p.sellingprice, 0) AS regular_price, - COALESCE((SELECT ROUND(AVG(costeach), 5) - FROM product_inventory - WHERE pid = p.pid - AND COUNT > 0), 0) AS cost_price, - NULL AS landing_cost_price, - p.upc AS barcode, - p.harmonized_tariff_code, - p.stamp AS updated_at, - CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, - CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, - s.companyname AS vendor, - CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, - sid.notions_itemnumber AS notions_reference, - CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-t-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-175x175-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-o-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full, - pc1.name AS brand, - pc2.name AS line, - pc3.name AS subline, - pc4.name AS artist, - NULL AS options, - NULL AS tags, - COALESCE(CASE - WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit - ELSE sid.supplier_qty_per_unit - END, sid.notions_qty_per_unit) AS moq, - NULL AS uom, - p.rating, - 
p.rating_votes AS reviews, - p.weight, - p.length, - p.width, - p.height, - (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, - (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, - p.totalsold AS total_sold, - p.country_of_origin, - pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids - FROM products p - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN supplier_item_data sid ON p.pid = sid.pid - LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid - LEFT JOIN product_category_index pci ON p.pid = pci.pid - LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id - AND pc.type IN (10, 20, 11, 21, 12, 13) - AND pci.cat_id NOT IN (16, 17) - LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id - LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id - LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id - LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE (pls.date_sold >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.datein >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR pls.date_sold IS NULL) - GROUP BY p.pid - `); - - // Debug log to check for specific product - const debugProduct = rows.find((row) => row.pid === 620972); - if (debugProduct) { - console.log("Found product 620972:", debugProduct); - } else { - console.log("Product 620972 not found in query results"); - - // Debug query to check why it's missing - const [debugResult] = await prodConnection.query( - ` - SELECT - p.pid, - p.itemnumber, - p.date_created, - p.datein, - pls.date_sold, - si.show, - si.buyable, - pcp.price_each - FROM products p - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE p.pid = ? - `, - [620972] - ); - - console.log("Debug query result:", debugResult); - } - - // Also check for the other missing products - const missingPids = [ - 208348, 317600, 370009, 429494, 466233, 471156, 474582, 476214, 484394, - 484755, 484756, 493549, 620972, - ]; - const [missingProducts] = await prodConnection.query( - ` - SELECT - p.pid, - p.itemnumber, - p.date_created, - p.datein, - pls.date_sold, - si.show, - si.buyable, - pcp.price_each - FROM products p - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE p.pid IN (?) 
- `, - [missingPids] - ); - - console.log("Debug results for missing products:", missingProducts); - - let current = 0; - const total = rows.length; - - // Process products in batches - for (let i = 0; i < rows.length; i += BATCH_SIZE) { - let batch = rows.slice(i, i + BATCH_SIZE); - - // Prepare product values and category relationships in parallel - const productValues = []; - const categoryRelationships = []; - - batch.forEach((row) => { - // Map values in the same order as columns - const rowValues = columnNames.map((col) => { - const val = row[col] ?? null; - if (col === "managing_stock") return 1; - if (typeof val === "number") return val || 0; - return val; - }); - productValues.push(...rowValues); - - // Add category relationships - if (row.category_ids) { - const catIds = row.category_ids - .split(",") - .map((id) => id.trim()) - .filter((id) => id) - .map(Number); - catIds.forEach((catId) => { - if (catId) categoryRelationships.push([catId, row.pid]); - }); - } - }); - - // Generate placeholders based on column count - const placeholderGroup = `(${Array(columnNames.length) - .fill("?") - .join(",")})`; - const productPlaceholders = Array(batch.length) - .fill(placeholderGroup) - .join(","); - - // Build the query dynamically - const insertQuery = ` - INSERT INTO products (${columnNames.join(",")}) - VALUES ${productPlaceholders} - ON DUPLICATE KEY UPDATE ${columnNames - .filter((col) => col !== "pid") - .map((col) => `${col} = VALUES(${col})`) - .join(",")} - `; - - // First insert the products and wait for it to complete - await localConnection.query(insertQuery, productValues); - - // Now that products are inserted, handle category relationships - if (categoryRelationships.length > 0) { - // Get unique category IDs to verify they exist - const uniqueCatIds = [ - ...new Set(categoryRelationships.map(([catId]) => catId)), - ]; - - console.log("Checking categories:", uniqueCatIds); - - // Check which categories exist - const [existingCats] = await localConnection.query( - "SELECT cat_id FROM categories WHERE cat_id IN (?)", - [uniqueCatIds] - ); - const existingCatIds = new Set(existingCats.map((c) => c.cat_id)); - - // Log missing categories - const missingCatIds = uniqueCatIds.filter( - (id) => !existingCatIds.has(id) - ); - if (missingCatIds.length > 0) { - console.error("Missing categories:", missingCatIds); - - // Query production to see what these categories are - const [missingCats] = await prodConnection.query( - ` - SELECT cat_id, name, type, master_cat_id, hidden - FROM product_categories - WHERE cat_id IN (?) 
- `, - [missingCatIds] - ); - - console.error("Missing category details:", missingCats); - console.warn( - "Skipping invalid category relationships - continuing with import" - ); - continue; - } - - // Verify products exist before inserting relationships - const productIds = [ - ...new Set(categoryRelationships.map(([_, pid]) => pid)), - ]; - const [existingProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [productIds] - ); - const existingProductIds = new Set(existingProducts.map((p) => p.pid)); - - // Filter relationships to only include existing products - const validRelationships = categoryRelationships.filter(([_, pid]) => - existingProductIds.has(pid) - ); - - if (validRelationships.length > 0) { - const catPlaceholders = validRelationships - .map(() => "(?, ?)") - .join(","); - await localConnection.query( - ` - INSERT INTO product_categories (cat_id, pid) - VALUES ${catPlaceholders} - ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) - `, - validRelationships.flat() - ); - } - } - - current += batch.length; - updateProgress(current, total, "Products import", startTime); - } - - outputProgress({ - status: "complete", - operation: "Products import completed", - current: total, - total, - duration: formatElapsedTime((Date.now() - startTime) / 1000), - }); - } catch (error) { - console.error("Error importing products:", error); - throw error; - } -} - -// Helper function to get date ranges for chunked queries -async function getDateRanges( - prodConnection, - table, - dateField, - startYearsAgo = 2, - chunkMonths = 3 -) { - const ranges = []; - const [result] = await prodConnection.query( - ` - SELECT - DATE_SUB(CURRENT_DATE, INTERVAL ? YEAR) as start_date, - CURRENT_DATE as end_date - `, - [startYearsAgo] - ); - - let currentDate = new Date(result[0].end_date); - const startDate = new Date(result[0].start_date); - - while (currentDate > startDate) { - const rangeEnd = new Date(currentDate); - currentDate.setMonth(currentDate.getMonth() - chunkMonths); - const rangeStart = new Date(Math.max(currentDate, startDate)); - - ranges.push({ - start: rangeStart.toISOString().split("T")[0], - end: rangeEnd.toISOString().split("T")[0], - }); - } - - return ranges; -} - -async function importMissingProducts(prodConnection, localConnection, missingPids) { - // First get the column names from the table structure - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'products' - ORDER BY ORDINAL_POSITION - `); - - const columnNames = columns.map((col) => col.COLUMN_NAME); - - // Get the missing products from production - const [products] = await prodConnection.query(` - SELECT - p.pid, - p.description AS title, - p.notes AS description, - p.itemnumber AS SKU, - p.date_created, - p.datein AS first_received, - p.location, - COALESCE(si.available_local, 0) - COALESCE( - (SELECT SUM(oi.qty_ordered - oi.qty_placed) - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid = p.pid - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0), 0) AS stock_quantity, - ci.onpreorder AS preorder_count, - pnb.inventory AS notions_inv_count, - COALESCE(pcp.price_each, 0) as price, - COALESCE(p.sellingprice, 0) AS regular_price, - COALESCE((SELECT ROUND(AVG(costeach), 5) - FROM 
product_inventory - WHERE pid = p.pid - AND COUNT > 0), 0) AS cost_price, - NULL AS landing_cost_price, - p.upc AS barcode, - p.harmonized_tariff_code, - p.stamp AS updated_at, - CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, - CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, - s.companyname AS vendor, - CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, - sid.notions_itemnumber AS notions_reference, - CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-t-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-175x175-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-o-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full, - pc1.name AS brand, - pc2.name AS line, - pc3.name AS subline, - pc4.name AS artist, - NULL AS options, - NULL AS tags, - COALESCE(CASE - WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit - ELSE sid.supplier_qty_per_unit - END, sid.notions_qty_per_unit) AS moq, - NULL AS uom, - p.rating, - p.rating_votes AS reviews, - p.weight, - p.length, - p.width, - p.height, - (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, - (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, - p.totalsold AS total_sold, - p.country_of_origin, - pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids - FROM products p - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN supplier_item_data sid ON p.pid = sid.pid - LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid - LEFT JOIN product_category_index pci ON p.pid = pci.pid - LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id - AND pc.type IN (10, 20, 11, 21, 12, 13) - AND pci.cat_id NOT IN (16, 17) - LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id - LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id - LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id - LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE p.pid IN (?) - GROUP BY p.pid - `, [missingPids]); - - if (products.length > 0) { - // Map values in the same order as columns - const productValues = products.flatMap(product => - columnNames.map(col => { - const val = product[col] ?? 
null; - if (col === "managing_stock") return 1; - if (typeof val === "number") return val || 0; - return val; - }) - ); - - // Generate placeholders for all products - const placeholders = products - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - - // Build and execute the query - const query = ` - INSERT INTO products (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${columnNames - .filter((col) => col !== "pid") - .map((col) => `${col} = VALUES(${col})`) - .join(",")} - `; - - await localConnection.query(query, productValues); - - // Verify products were inserted before proceeding with categories - const [insertedProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [products.map(p => p.pid)] - ); - const insertedPids = new Set(insertedProducts.map(p => p.pid)); - - // Handle category relationships if any - const categoryRelationships = []; - products.forEach(product => { - // Only add category relationships for products that were successfully inserted - if (insertedPids.has(product.pid) && product.category_ids) { - const catIds = product.category_ids - .split(",") - .map(id => id.trim()) - .filter(id => id) - .map(Number); - catIds.forEach(catId => { - if (catId) categoryRelationships.push([catId, product.pid]); - }); - } - }); - - if (categoryRelationships.length > 0) { - // Verify categories exist before inserting relationships - const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))]; - const [existingCats] = await localConnection.query( - "SELECT cat_id FROM categories WHERE cat_id IN (?)", - [uniqueCatIds] - ); - const existingCatIds = new Set(existingCats.map(c => c.cat_id)); - - // Filter relationships to only include existing categories - const validRelationships = categoryRelationships.filter(([catId]) => - existingCatIds.has(catId) - ); - - if (validRelationships.length > 0) { - const catPlaceholders = validRelationships - .map(() => "(?, ?)") - .join(","); - await localConnection.query( - ` - INSERT INTO product_categories (cat_id, pid) - VALUES ${catPlaceholders} - ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) - `, - validRelationships.flat() - ); - } - } - } -} - -async function importOrders(prodConnection, localConnection) { - outputProgress({ - operation: "Starting orders import - Getting total count", - status: "running", - }); - - const startTime = Date.now(); - const skippedOrders = new Set(); // Store orders that need to be retried - const missingProducts = new Set(); // Store products that need to be imported - - try { - // First get the column names from the table structure - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'orders' - ORDER BY ORDINAL_POSITION - `); - - const columnNames = columns - .map((col) => col.COLUMN_NAME) - .filter((name) => name !== "id"); // Skip auto-increment ID - - // Get total count first for progress indication - outputProgress({ - operation: "Starting orders import - Getting total count", - status: "running", - }); - - const [countResult] = await prodConnection.query(` - SELECT COUNT(*) as total - FROM order_items oi FORCE INDEX (PRIMARY) - JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id - WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - `); - const totalOrders = countResult[0].total; - - outputProgress({ - operation: `Starting orders import - Fetching ${totalOrders} 
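The insert built above, a dynamic column list, one placeholder group per row, and ON DUPLICATE KEY UPDATE col = VALUES(col), is the same shape this patch uses for products, orders, and purchase orders. A sketch of it as a reusable helper; the keyColumn default of 'pid' is an assumption for illustration, and note that MySQL 8.0.20 and later deprecate VALUES() in this position in favor of row aliases, so this form works but may log a warning there:

// Hedged sketch of the bulk-upsert pattern above as a standalone helper.
async function bulkUpsert(connection, table, columnNames, rows, keyColumn = 'pid') {
  if (rows.length === 0) return;
  // One "(?,?,...)" group per row, matching the column order exactly.
  const placeholders = rows
    .map(() => `(${Array(columnNames.length).fill('?').join(',')})`)
    .join(',');
  const updates = columnNames
    .filter((col) => col !== keyColumn) // never rewrite the key itself
    .map((col) => `${col} = VALUES(${col})`)
    .join(',');
  const values = rows.flatMap((row) => columnNames.map((col) => row[col] ?? null));
  await connection.query(
    `INSERT INTO ${table} (${columnNames.join(',')})
     VALUES ${placeholders}
     ON DUPLICATE KEY UPDATE ${updates}`,
    values
  );
}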
orders from production`, - status: "running", - }); - - const total = countResult[0].total; - let processed = 0; - - // Process in batches - const batchSize = 1000; - let offset = 0; - - while (offset < total) { - const [orders] = await prodConnection.query(` - SELECT - oi.order_id as order_number, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, - o.date_placed_onlydate as date, - oi.prod_price_reg as price, - oi.qty_ordered as quantity, - (oi.prod_price_reg - oi.prod_price) as discount, - ( - SELECT - otp.item_taxes_to_collect - FROM - order_tax_info oti - JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id - WHERE - oti.order_id = o.order_id - AND otp.pid = oi.prod_pid - ORDER BY - oti.stamp DESC - LIMIT 1 - ) as tax, - 0 as tax_included, - ROUND( - ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * - (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2 - ) as shipping, - o.order_cid as customer, - CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name, - 'pending' as status, - CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - LIMIT ? OFFSET ? - `, [batchSize, offset]); - - // Check if all products exist before inserting orders - const orderProductPids = [...new Set(orders.map((o) => o.pid))]; - const [existingProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [orderProductPids] - ); - const existingPids = new Set(existingProducts.map((p) => p.pid)); - - // Filter out orders with missing products and track them - const validOrders = orders.filter((order) => { - if (!existingPids.has(order.pid)) { - missingProducts.add(order.pid); - skippedOrders.add(order.order_number); - return false; - } - return true; - }); - - if (validOrders.length > 0) { - const placeholders = validOrders - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - const updateClauses = columnNames - .filter((col) => col !== "order_number") // Don't update primary key - .map((col) => `${col} = VALUES(${col})`) - .join(","); - - const query = ` - INSERT INTO orders (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${updateClauses} - `; - - await localConnection.query( - query, - validOrders.flatMap(order => columnNames.map(col => order[col])) - ); - } - - processed += orders.length; - offset += batchSize; - - updateProgress( - processed, - total, - "Orders import", - startTime - ); - } - - // Now handle missing products and retry skipped orders - if (missingProducts.size > 0) { - outputProgress({ - operation: `Found ${missingProducts.size} missing products, importing them now`, - status: "running", - }); - - await importMissingProducts(prodConnection, localConnection, [ - ...missingProducts, - ]); - - // Retry skipped orders - if (skippedOrders.size > 0) { - outputProgress({ - operation: `Retrying ${skippedOrders.size} skipped orders`, - status: "running", - }); - - const [retryOrders] = await prodConnection.query(` - SELECT - oi.order_id as order_number, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, - o.date_placed_onlydate as date, - oi.prod_price_reg as price, - oi.qty_ordered as quantity, - (oi.prod_price_reg - oi.prod_price) as discount, - ( - SELECT - otp.item_taxes_to_collect - FROM - order_tax_info oti - JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id - WHERE - 
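One caveat on the LIMIT ? OFFSET ? loop above: every batch re-scans the rows the offset skips, so the run slows as it progresses, and rows committed mid-run can shift page boundaries. A hedged keyset alternative, assuming a hypothetical indexed oi.item_id auto-increment column (the real key name may differ):

// Keyset pagination sketch: resume from the last seen key instead of OFFSET.
async function fetchOrdersKeyset(prodConnection, handleBatch) {
  let lastId = 0;
  for (;;) {
    const [batch] = await prodConnection.query(`
      SELECT oi.item_id, oi.order_id, oi.prod_pid
      FROM order_items oi
      JOIN _order o ON oi.order_id = o.order_id
      WHERE oi.item_id > ?
        AND o.order_status >= 15
        AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
      ORDER BY oi.item_id
      LIMIT 1000
    `, [lastId]);
    if (batch.length === 0) break;
    await handleBatch(batch);                 // same filtering/upsert as above
    lastId = batch[batch.length - 1].item_id; // resume point, no OFFSET rescans
  }
}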
oti.order_id = o.order_id - AND otp.pid = oi.prod_pid - ORDER BY - oti.stamp DESC - LIMIT 1 - ) as tax, - 0 as tax_included, - ROUND( - ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * - (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2 - ) as shipping, - o.order_cid as customer, - CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name, - 'pending' as status, - CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.order_id IN (?) - `, [[...skippedOrders]]); - - const placeholders = retryOrders - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - const updateClauses = columnNames - .filter((col) => col !== "order_number") // Don't update primary key - .map((col) => `${col} = VALUES(${col})`) - .join(","); - - const query = ` - INSERT INTO orders (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${updateClauses} - `; - - await localConnection.query( - query, - retryOrders.flatMap(order => columnNames.map(col => order[col])) - ); - } - } - - const endTime = Date.now(); - outputProgress({ - operation: `Orders import complete in ${Math.round( - (endTime - startTime) / 1000 - )}s`, - status: "complete", - }); - } catch (error) { - outputProgress({ - operation: "Orders import failed", - status: "error", - error: error.message, - }); - throw error; - } -} - -async function importPurchaseOrders(prodConnection, localConnection) { - outputProgress({ - operation: "Starting purchase orders import - Initializing", - status: "running", - }); - - const startTime = Date.now(); - - try { - // Get column names for the insert - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'purchase_orders' - ORDER BY ORDINAL_POSITION - `); - const columnNames = columns - .map((col) => col.COLUMN_NAME) - .filter((name) => name !== "id"); - - // First get all relevant PO IDs with basic info - this is much faster than the full join - const [[{ total }]] = await prodConnection.query(` - SELECT COUNT(*) as total - FROM ( - SELECT DISTINCT pop.po_id, pop.pid - FROM po p - FORCE INDEX (idx_date_created) - JOIN po_products pop ON p.po_id = pop.po_id - JOIN suppliers s ON p.supplier_id = s.supplierid - WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - UNION - SELECT DISTINCT r.receiving_id as po_id, rp.pid - FROM receivings_products rp - LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id - WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - ) all_items - `); - - const [poList] = await prodConnection.query(` - SELECT DISTINCT - COALESCE(p.po_id, r.receiving_id) as po_id, - CASE - WHEN p.po_id IS NOT NULL THEN s1.companyname - WHEN r.supplier_id IS NOT NULL THEN s2.companyname - ELSE 'No Supplier' - END as vendor, - CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_ordered) END as date, - CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_estin) END as expected_date, - COALESCE(p.status, 50) as status, - COALESCE(p.short_note, '') as notes, - COALESCE(p.notes, '') as long_note - FROM ( - SELECT po_id FROM po - WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - UNION - SELECT DISTINCT r.receiving_id as po_id - FROM receivings r - JOIN receivings_products rp ON r.receiving_id = rp.receiving_id - WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - ) ids - LEFT JOIN po p ON ids.po_id = p.po_id - LEFT JOIN suppliers s1 
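Stepping back to the orders flow just above: parking rows whose product is missing, bulk-importing those products, then retrying the parked rows once is a reusable shape. A generic sketch of that two-pass pattern with the dependencies injected; all names here are illustrative:

// Two-pass import sketch: insert what can be inserted, import missing
// parents in one bulk call, then retry the parked children exactly once.
async function insertWithParentRetry(rows, deps) {
  const { existingIds, importMissing, insertBatch } = deps;
  const missing = new Set();
  const valid = [];
  const parked = [];
  for (const row of rows) {
    if (existingIds.has(row.pid)) valid.push(row);
    else {
      missing.add(row.pid); // parent to import
      parked.push(row);     // child to retry
    }
  }
  if (valid.length > 0) await insertBatch(valid);
  if (missing.size > 0) {
    await importMissing([...missing]); // one bulk parent import, not per-row
    await insertBatch(parked);         // single retry pass, parents now exist
  }
}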
ON p.supplier_id = s1.supplierid - LEFT JOIN receivings r ON ids.po_id = r.receiving_id - LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid - ORDER BY po_id - `); - - const totalItems = total; - let processed = 0; - - const BATCH_SIZE = 5000; - const PROGRESS_INTERVAL = 500; - let lastProgressUpdate = Date.now(); - - outputProgress({ - operation: `Starting purchase orders import - Processing ${totalItems} purchase order items`, - status: "running", - }); - - for (let i = 0; i < poList.length; i += BATCH_SIZE) { - const batch = poList.slice(i, Math.min(i + BATCH_SIZE, poList.length)); - const poIds = batch.map(po => po.po_id); - - // Get all products for these POs in one query - const [poProducts] = await prodConnection.query(` - SELECT - pop.po_id, - pop.pid, - pr.itemnumber as sku, - pop.cost_each as cost_price, - pop.qty_each as ordered - FROM po_products pop - FORCE INDEX (PRIMARY) - JOIN products pr ON pop.pid = pr.pid - WHERE pop.po_id IN (?) - `, [poIds]); - - // Process PO products in smaller sub-batches to avoid packet size issues - const SUB_BATCH_SIZE = 5000; - for (let j = 0; j < poProducts.length; j += SUB_BATCH_SIZE) { - const productBatch = poProducts.slice(j, j + SUB_BATCH_SIZE); - const productPids = [...new Set(productBatch.map(p => p.pid))]; - const batchPoIds = [...new Set(productBatch.map(p => p.po_id))]; - - // Get receivings for this batch - const [receivings] = await prodConnection.query(` - SELECT - r.po_id, - rp.pid, - rp.receiving_id, - rp.qty_each, - rp.cost_each, - DATE(NULLIF(rp.received_date, '0000-00-00 00:00:00')) as received_date, - rp.received_by, - CASE - WHEN r.po_id IS NULL THEN 2 -- No PO - WHEN r.po_id IN (?) THEN 0 -- Original PO - ELSE 1 -- Different PO - END as is_alt_po - FROM receivings_products rp - LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id - WHERE rp.pid IN (?) 
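The nested BATCH_SIZE/SUB_BATCH_SIZE slicing above keeps each multi-row INSERT safely under the server's max_allowed_packet. The same idea as a small helper; the 5000-row figure mirrors the constants above and is an empirical guess rather than a computed bound:

// Chunking sketch in the spirit of the sub-batch loop above.
function* chunk(items, size) {
  for (let i = 0; i < items.length; i += size) {
    yield items.slice(i, i + size);
  }
}

// Usage: for (const productBatch of chunk(poProducts, 5000)) { ...insert... }
// If a batch still trips ER_NET_PACKET_TOO_LARGE, halve `size` rather than
// raising max_allowed_packet on a shared server.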
- AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - ORDER BY r.po_id, rp.pid, rp.received_date - `, [batchPoIds, productPids]); - - // Create maps for this sub-batch - const poProductMap = new Map(); - productBatch.forEach(product => { - const key = `${product.po_id}-${product.pid}`; - poProductMap.set(key, product); - }); - - const receivingMap = new Map(); - const altReceivingMap = new Map(); - const noPOReceivingMap = new Map(); - - receivings.forEach(receiving => { - const key = `${receiving.po_id}-${receiving.pid}`; - if (receiving.is_alt_po === 2) { - // No PO - if (!noPOReceivingMap.has(receiving.pid)) { - noPOReceivingMap.set(receiving.pid, []); - } - noPOReceivingMap.get(receiving.pid).push(receiving); - } else if (receiving.is_alt_po === 1) { - // Different PO - if (!altReceivingMap.has(receiving.pid)) { - altReceivingMap.set(receiving.pid, []); - } - altReceivingMap.get(receiving.pid).push(receiving); - } else { - // Original PO - if (!receivingMap.has(key)) { - receivingMap.set(key, []); - } - receivingMap.get(key).push(receiving); - } - }); - - // Verify PIDs exist - const [existingPids] = await localConnection.query( - 'SELECT pid FROM products WHERE pid IN (?)', - [productPids] - ); - const validPids = new Set(existingPids.map(p => p.pid)); - - // Prepare values for this sub-batch - const values = []; - let batchProcessed = 0; - - for (const po of batch) { - const poProducts = Array.from(poProductMap.values()) - .filter(p => p.po_id === po.po_id && validPids.has(p.pid)); - - for (const product of poProducts) { - const key = `${po.po_id}-${product.pid}`; - const receivingHistory = receivingMap.get(key) || []; - const altReceivingHistory = altReceivingMap.get(product.pid) || []; - const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || []; - - const received = receivingHistory.reduce((sum, r) => sum + r.qty_each, 0); - const altReceived = altReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); - const noPOReceived = noPOReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); - const totalReceived = received + altReceived + noPOReceived; - - const receiving_status = !totalReceived ? 1 : // created - totalReceived < product.ordered ? 
30 : // partial - 40; // full - - const allReceivings = [...receivingHistory]; - if (altReceivingHistory.length > 0) { - allReceivings.push(...altReceivingHistory); - } - if (noPOReceivingHistory.length > 0) { - allReceivings.push(...noPOReceivingHistory); - } - allReceivings.sort((a, b) => new Date(a.received_date) - new Date(b.received_date)); - - const firstReceiving = allReceivings[0] || {}; - const lastReceiving = allReceivings[allReceivings.length - 1] || {}; - - values.push(columnNames.map(col => { - switch (col) { - case 'po_id': return po.po_id; - case 'vendor': return po.vendor; - case 'date': return po.date; - case 'expected_date': return po.expected_date; - case 'pid': return product.pid; - case 'sku': return product.sku; - case 'cost_price': return product.cost_price; - case 'status': return po.status; - case 'notes': return po.notes; - case 'long_note': return po.long_note; - case 'ordered': return product.ordered; - case 'received': return totalReceived; - case 'received_date': return firstReceiving.received_date || null; - case 'last_received_date': return lastReceiving.received_date || null; - case 'received_by': return firstReceiving.received_by || null; - case 'receiving_status': return receiving_status; - case 'receiving_history': return JSON.stringify(allReceivings.map(r => ({ - receiving_id: r.receiving_id, - qty: r.qty_each, - cost: r.cost_each, - date: r.received_date, - received_by: r.received_by, - alt_po: r.is_alt_po - }))); - default: return null; - } - })); - batchProcessed++; - } - } - - if (values.length > 0) { - const placeholders = values.map(() => - `(${Array(columnNames.length).fill("?").join(",")})` - ).join(","); - - const query = ` - INSERT INTO purchase_orders (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${columnNames - .filter((col) => col !== "po_id" && col !== "pid") - .map((col) => `${col} = VALUES(${col})`) - .join(",")}; - `; - - await localConnection.query(query, values.flat()); - } - - processed += batchProcessed; - - // Update progress based on time interval - const now = Date.now(); - if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) { - updateProgress(processed, totalItems, "Purchase orders import", startTime); - lastProgressUpdate = now; - } - } - } - - const endTime = Date.now(); - outputProgress({ - operation: `Purchase orders import complete`, - status: "complete", - processed_records: processed, - total_records: totalItems, - timing: { - start_time: new Date(startTime).toISOString(), - end_time: new Date(endTime).toISOString(), - elapsed_time: formatElapsedTime((endTime - startTime) / 1000), - elapsed_seconds: Math.round((endTime - startTime) / 1000) - } - }); - - } catch (error) { - outputProgress({ - operation: "Purchase orders import failed", - status: "error", - error: error.message, - }); - throw error; - } -} - // Modify main function to handle cancellation and avoid process.exit - async function main() { let ssh; let prodConnection; @@ -1394,20 +40,42 @@ async function main() { const startTime = Date.now(); try { + // Initial progress update outputProgress({ status: "running", - operation: "Starting import process", - message: "Setting up connections...", + operation: "Import process", + message: "Initializing SSH tunnel...", + current: 0, + total: 4, // Total number of major steps + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) }); const tunnel = await setupSshTunnel(); ssh = tunnel.ssh; + outputProgress({ + status: "running", + operation: "Import 
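The chained ternary above encodes the three receiving states. The same logic as a named helper makes the over-receipt case explicit (receiving more than ordered still maps to full):

// 1 = created (nothing received), 30 = partial, 40 = full or over-received.
function receivingStatus(totalReceived, ordered) {
  if (!totalReceived) return 1;
  return totalReceived < ordered ? 30 : 40;
}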
process", + message: "Connecting to production database...", + current: 0, + total: 4, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + }); + prodConnection = await mysql.createConnection({ ...prodDbConfig, stream: tunnel.stream, }); + outputProgress({ + status: "running", + operation: "Import process", + message: "Connecting to local database...", + current: 0, + total: 4, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + }); + localConnection = await mysql.createPool({ ...localDbConfig, waitForConnections: true, @@ -1417,31 +85,73 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); + let currentStep = 0; + // Run each import based on constants if (IMPORT_CATEGORIES) { + outputProgress({ + status: "running", + operation: "Import process", + message: "Starting categories import...", + current: currentStep, + total: 4, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + }); await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); + currentStep++; } if (IMPORT_PRODUCTS) { + outputProgress({ + status: "running", + operation: "Import process", + message: "Starting products import...", + current: currentStep, + total: 4, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + }); await importProducts(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); + currentStep++; } if (IMPORT_ORDERS) { + outputProgress({ + status: "running", + operation: "Import process", + message: "Starting orders import...", + current: currentStep, + total: 4, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + }); await importOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); + currentStep++; } if (IMPORT_PURCHASE_ORDERS) { + outputProgress({ + status: "running", + operation: "Import process", + message: "Starting purchase orders import...", + current: currentStep, + total: 4, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + }); await importPurchaseOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); + currentStep++; } const endTime = Date.now(); outputProgress({ status: "complete", - operation: "Import process completed", + operation: "Import process", + message: "All imports completed successfully", + current: 4, + total: 4, + elapsed: formatElapsedTime((endTime - startTime) / 1000), timing: { start_time: new Date(startTime).toISOString(), end_time: new Date(endTime).toISOString(), @@ -1455,7 +165,11 @@ async function main() { outputProgress({ status: error.message === "Import cancelled" ? "cancelled" : "error", operation: "Import process", + message: error.message === "Import cancelled" ? 
"Import cancelled by user" : "Import failed", error: error.message, + current: 0, + total: 4, + elapsed: formatElapsedTime((endTime - startTime) / 1000), timing: { start_time: new Date(startTime).toISOString(), end_time: new Date(endTime).toISOString(), diff --git a/inventory-server/scripts/import/categories.js b/inventory-server/scripts/import/categories.js new file mode 100644 index 0000000..34f7c61 --- /dev/null +++ b/inventory-server/scripts/import/categories.js @@ -0,0 +1,168 @@ +const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); + +async function importCategories(prodConnection, localConnection) { + outputProgress({ + operation: "Starting categories import", + status: "running", + }); + + const startTime = Date.now(); + const typeOrder = [10, 20, 11, 21, 12, 13]; + let totalInserted = 0; + let skippedCategories = []; + + try { + // Process each type in order with its own query + for (const type of typeOrder) { + const [categories] = await prodConnection.query( + ` + SELECT + pc.cat_id, + pc.name, + pc.type, + CASE + WHEN pc.type IN (10, 20) THEN NULL -- Top level categories should have no parent + WHEN pc.master_cat_id IS NULL THEN NULL + ELSE pc.master_cat_id + END as parent_id, + pc.combined_name as description + FROM product_categories pc + WHERE pc.type = ? + ORDER BY pc.cat_id + `, + [type] + ); + + if (categories.length === 0) continue; + + console.log(`\nProcessing ${categories.length} type ${type} categories`); + if (type === 10) { + console.log("Type 10 categories:", JSON.stringify(categories, null, 2)); + } + + // For types that can have parents (11, 21, 12, 13), verify parent existence + let categoriesToInsert = categories; + if (![10, 20].includes(type)) { + // Get all parent IDs + const parentIds = [ + ...new Set( + categories.map((c) => c.parent_id).filter((id) => id !== null) + ), + ]; + + // Check which parents exist + const [existingParents] = await localConnection.query( + "SELECT cat_id FROM categories WHERE cat_id IN (?)", + [parentIds] + ); + const existingParentIds = new Set(existingParents.map((p) => p.cat_id)); + + // Filter categories and track skipped ones + categoriesToInsert = categories.filter( + (cat) => + cat.parent_id === null || existingParentIds.has(cat.parent_id) + ); + const invalidCategories = categories.filter( + (cat) => + cat.parent_id !== null && !existingParentIds.has(cat.parent_id) + ); + + if (invalidCategories.length > 0) { + const skippedInfo = invalidCategories.map((c) => ({ + id: c.cat_id, + name: c.name, + type: c.type, + missing_parent: c.parent_id, + })); + skippedCategories.push(...skippedInfo); + + console.log( + "\nSkipping categories with missing parents:", + invalidCategories + .map( + (c) => + `${c.cat_id} - ${c.name} (missing parent: ${c.parent_id})` + ) + .join("\n") + ); + } + + if (categoriesToInsert.length === 0) { + console.log( + `No valid categories of type ${type} to insert - all had missing parents` + ); + continue; + } + } + + console.log( + `Inserting ${categoriesToInsert.length} type ${type} categories` + ); + + const placeholders = categoriesToInsert + .map(() => "(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)") + .join(","); + + const values = categoriesToInsert.flatMap((cat) => [ + cat.cat_id, + cat.name, + cat.type, + cat.parent_id, + cat.description, + "active", + ]); + + // Insert categories and create relationships in one query to avoid race conditions + await localConnection.query( + ` + INSERT INTO categories (cat_id, name, type, parent_id, description, status, 
created_at, updated_at) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE + name = VALUES(name), + type = VALUES(type), + parent_id = VALUES(parent_id), + description = VALUES(description), + status = VALUES(status), + updated_at = CURRENT_TIMESTAMP + `, + values + ); + + totalInserted += categoriesToInsert.length; + updateProgress( + totalInserted, + totalInserted, + "Categories import", + startTime + ); + } + + // After all imports, if we skipped any categories, throw an error + if (skippedCategories.length > 0) { + const error = new Error( + "Categories import completed with errors - some categories were skipped due to missing parents" + ); + error.skippedCategories = skippedCategories; + throw error; + } + + outputProgress({ + status: "complete", + operation: "Categories import completed", + current: totalInserted, + total: totalInserted, + duration: formatElapsedTime((Date.now() - startTime) / 1000), + }); + } catch (error) { + console.error("Error importing categories:", error); + if (error.skippedCategories) { + console.error( + "Skipped categories:", + JSON.stringify(error.skippedCategories, null, 2) + ); + } + throw error; + } +} + +module.exports = importCategories; \ No newline at end of file diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js new file mode 100644 index 0000000..1427d0a --- /dev/null +++ b/inventory-server/scripts/import/orders.js @@ -0,0 +1,235 @@ +const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); +const { importMissingProducts } = require('./products'); + +async function importOrders(prodConnection, localConnection) { + outputProgress({ + operation: "Starting orders import - Getting total count", + status: "running", + }); + + const startTime = Date.now(); + const skippedOrders = new Set(); // Store orders that need to be retried + const missingProducts = new Set(); // Store products that need to be imported + + try { + // First get the column names from the table structure + const [columns] = await localConnection.query(` + SELECT COLUMN_NAME + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'orders' + ORDER BY ORDINAL_POSITION + `); + + const columnNames = columns + .map((col) => col.COLUMN_NAME) + .filter((name) => name !== "id"); // Skip auto-increment ID + + // Get total count first for progress indication + outputProgress({ + operation: "Starting orders import - Getting total count", + status: "running", + }); + + const [countResult] = await prodConnection.query(` + SELECT COUNT(*) as total + FROM order_items oi FORCE INDEX (PRIMARY) + JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id + WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + `); + const totalOrders = countResult[0].total; + + outputProgress({ + operation: `Starting orders import - Fetching ${totalOrders} orders from production`, + status: "running", + }); + + const total = countResult[0].total; + let processed = 0; + + // Process in batches + const batchSize = 1000; + let offset = 0; + + while (offset < total) { + const [orders] = await prodConnection.query(` + SELECT + oi.order_id as order_number, + oi.prod_pid as pid, + oi.prod_itemnumber as SKU, + o.date_placed_onlydate as date, + oi.prod_price_reg as price, + oi.qty_ordered as quantity, + (oi.prod_price_reg - oi.prod_price) as discount, + ( + SELECT + otp.item_taxes_to_collect + FROM + order_tax_info oti + JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id + WHERE + 
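Attaching skippedCategories directly to the thrown Error, as the categories importer above does, works, but a dedicated error class makes that contract visible to callers. A sketch under that assumption; ImportError is not a name this patch defines:

// Hypothetical error type carrying structured import diagnostics.
class ImportError extends Error {
  constructor(message, details = {}) {
    super(message);
    this.name = 'ImportError';
    this.details = details; // e.g. { skippedCategories: [...] }
  }
}

// throw new ImportError(
//   'Categories import completed with errors - some categories were skipped',
//   { skippedCategories }
// );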
oti.order_id = o.order_id + AND otp.pid = oi.prod_pid + ORDER BY + oti.stamp DESC + LIMIT 1 + ) as tax, + 0 as tax_included, + ROUND( + ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * + (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2 + ) as shipping, + o.order_cid as customer, + CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name, + 'pending' as status, + CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + LIMIT ? OFFSET ? + `, [batchSize, offset]); + + // Check if all products exist before inserting orders + const orderProductPids = [...new Set(orders.map((o) => o.pid))]; + const [existingProducts] = await localConnection.query( + "SELECT pid FROM products WHERE pid IN (?)", + [orderProductPids] + ); + const existingPids = new Set(existingProducts.map((p) => p.pid)); + + // Filter out orders with missing products and track them + const validOrders = orders.filter((order) => { + if (!existingPids.has(order.pid)) { + missingProducts.add(order.pid); + skippedOrders.add(order.order_number); + return false; + } + return true; + }); + + if (validOrders.length > 0) { + const placeholders = validOrders + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) + .join(","); + const updateClauses = columnNames + .filter((col) => col !== "order_number") // Don't update primary key + .map((col) => `${col} = VALUES(${col})`) + .join(","); + + const query = ` + INSERT INTO orders (${columnNames.join(",")}) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE ${updateClauses} + `; + + await localConnection.query( + query, + validOrders.flatMap(order => columnNames.map(col => order[col])) + ); + } + + processed += orders.length; + offset += batchSize; + + updateProgress( + processed, + total, + "Orders import", + startTime + ); + } + + // Now handle missing products and retry skipped orders + if (missingProducts.size > 0) { + outputProgress({ + operation: `Found ${missingProducts.size} missing products, importing them now`, + status: "running", + }); + + await importMissingProducts(prodConnection, localConnection, [ + ...missingProducts, + ]); + + // Retry skipped orders + if (skippedOrders.size > 0) { + outputProgress({ + operation: `Retrying ${skippedOrders.size} skipped orders`, + status: "running", + }); + + const [retryOrders] = await prodConnection.query(` + SELECT + oi.order_id as order_number, + oi.prod_pid as pid, + oi.prod_itemnumber as SKU, + o.date_placed_onlydate as date, + oi.prod_price_reg as price, + oi.qty_ordered as quantity, + (oi.prod_price_reg - oi.prod_price) as discount, + ( + SELECT + otp.item_taxes_to_collect + FROM + order_tax_info oti + JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id + WHERE + oti.order_id = o.order_id + AND otp.pid = oi.prod_pid + ORDER BY + oti.stamp DESC + LIMIT 1 + ) as tax, + 0 as tax_included, + ROUND( + ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * + (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2 + ) as shipping, + o.order_cid as customer, + CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name, + 'pending' as status, + CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.order_id IN (?) 
+ `, [[...skippedOrders]]); + + const placeholders = retryOrders + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) + .join(","); + const updateClauses = columnNames + .filter((col) => col !== "order_number") // Don't update primary key + .map((col) => `${col} = VALUES(${col})`) + .join(","); + + const query = ` + INSERT INTO orders (${columnNames.join(",")}) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE ${updateClauses} + `; + + await localConnection.query( + query, + retryOrders.flatMap(order => columnNames.map(col => order[col])) + ); + } + } + + const endTime = Date.now(); + outputProgress({ + operation: `Orders import complete in ${Math.round( + (endTime - startTime) / 1000 + )}s`, + status: "complete", + }); + } catch (error) { + outputProgress({ + operation: "Orders import failed", + status: "error", + error: error.message, + }); + throw error; + } +} + +module.exports = importOrders; \ No newline at end of file diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js new file mode 100644 index 0000000..70be5c5 --- /dev/null +++ b/inventory-server/scripts/import/products.js @@ -0,0 +1,561 @@ +const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); + +async function importMissingProducts(prodConnection, localConnection, missingPids) { + // First get the column names from the table structure + const [columns] = await localConnection.query(` + SELECT COLUMN_NAME + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'products' + ORDER BY ORDINAL_POSITION + `); + + const columnNames = columns.map((col) => col.COLUMN_NAME); + + // Get the missing products from production + const [products] = await prodConnection.query(` + SELECT + p.pid, + p.description AS title, + p.notes AS description, + p.itemnumber AS SKU, + p.date_created, + p.datein AS first_received, + p.location, + COALESCE(si.available_local, 0) - COALESCE( + (SELECT SUM(oi.qty_ordered - oi.qty_placed) + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.prod_pid = p.pid + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0), 0) AS stock_quantity, + ci.onpreorder AS preorder_count, + pnb.inventory AS notions_inv_count, + COALESCE(pcp.price_each, 0) as price, + COALESCE(p.sellingprice, 0) AS regular_price, + COALESCE((SELECT ROUND(AVG(costeach), 5) + FROM product_inventory + WHERE pid = p.pid + AND COUNT > 0), 0) AS cost_price, + NULL AS landing_cost_price, + p.upc AS barcode, + p.harmonized_tariff_code, + p.stamp AS updated_at, + CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, + CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, + s.companyname AS vendor, + CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, + sid.notions_itemnumber AS notions_reference, + CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, + (SELECT CONCAT('https://sbing.com/i/products/0000/', + SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', + p.pid, '-t-', MIN(PI.iid), '.jpg') + FROM product_images PI + WHERE PI.pid = p.pid AND PI.hidden = 0) AS image, + (SELECT CONCAT('https://sbing.com/i/products/0000/', + SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', + p.pid, '-175x175-', MIN(PI.iid), '.jpg') + FROM product_images PI + WHERE PI.pid = 
p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175, + (SELECT CONCAT('https://sbing.com/i/products/0000/', + SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', + p.pid, '-o-', MIN(PI.iid), '.jpg') + FROM product_images PI + WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full, + pc1.name AS brand, + pc2.name AS line, + pc3.name AS subline, + pc4.name AS artist, + NULL AS options, + NULL AS tags, + COALESCE(CASE + WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit + ELSE sid.supplier_qty_per_unit + END, sid.notions_qty_per_unit) AS moq, + NULL AS uom, + p.rating, + p.rating_votes AS reviews, + p.weight, + p.length, + p.width, + p.height, + (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, + (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, + p.totalsold AS total_sold, + p.country_of_origin, + pls.date_sold as date_last_sold, + GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids + FROM products p + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN supplier_item_data sid ON p.pid = sid.pid + LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid + LEFT JOIN product_category_index pci ON p.pid = pci.pid + LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id + AND pc.type IN (10, 20, 11, 21, 12, 13) + AND pci.cat_id NOT IN (16, 17) + LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id + LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id + LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id + LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + LEFT JOIN ( + SELECT pid, MIN(price_each) as price_each + FROM product_current_prices + WHERE active = 1 + GROUP BY pid + ) pcp ON p.pid = pcp.pid + WHERE p.pid IN (?) + GROUP BY p.pid + `, [missingPids]); + + if (products.length > 0) { + // Map values in the same order as columns + const productValues = products.flatMap(product => + columnNames.map(col => { + const val = product[col] ?? 
null; + if (col === "managing_stock") return 1; + if (typeof val === "number") return val || 0; + return val; + }) + ); + + // Generate placeholders for all products + const placeholders = products + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) + .join(","); + + // Build and execute the query + const query = ` + INSERT INTO products (${columnNames.join(",")}) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE ${columnNames + .filter((col) => col !== "pid") + .map((col) => `${col} = VALUES(${col})`) + .join(",")} + `; + + await localConnection.query(query, productValues); + + // Verify products were inserted before proceeding with categories + const [insertedProducts] = await localConnection.query( + "SELECT pid FROM products WHERE pid IN (?)", + [products.map(p => p.pid)] + ); + const insertedPids = new Set(insertedProducts.map(p => p.pid)); + + // Handle category relationships if any + const categoryRelationships = []; + products.forEach(product => { + // Only add category relationships for products that were successfully inserted + if (insertedPids.has(product.pid) && product.category_ids) { + const catIds = product.category_ids + .split(",") + .map(id => id.trim()) + .filter(id => id) + .map(Number); + catIds.forEach(catId => { + if (catId) categoryRelationships.push([catId, product.pid]); + }); + } + }); + + if (categoryRelationships.length > 0) { + // Verify categories exist before inserting relationships + const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))]; + const [existingCats] = await localConnection.query( + "SELECT cat_id FROM categories WHERE cat_id IN (?)", + [uniqueCatIds] + ); + const existingCatIds = new Set(existingCats.map(c => c.cat_id)); + + // Filter relationships to only include existing categories + const validRelationships = categoryRelationships.filter(([catId]) => + existingCatIds.has(catId) + ); + + if (validRelationships.length > 0) { + const catPlaceholders = validRelationships + .map(() => "(?, ?)") + .join(","); + await localConnection.query( + ` + INSERT INTO product_categories (cat_id, pid) + VALUES ${catPlaceholders} + ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) + `, + validRelationships.flat() + ); + } + } + } +} + +async function importProducts(prodConnection, localConnection) { + outputProgress({ + operation: "Starting products import - Getting schema", + status: "running", + }); + + const startTime = Date.now(); + + try { + // First get the column names from the table structure + const [columns] = await localConnection.query(` + SELECT COLUMN_NAME + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'products' + ORDER BY ORDINAL_POSITION + `); + + const columnNames = columns.map((col) => col.COLUMN_NAME); + + // Get total count first for progress indication + outputProgress({ + operation: "Starting products import - Getting total count", + status: "running", + }); + + const [countResult] = await prodConnection.query(` + SELECT COUNT(*) as total + FROM products p + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + WHERE pls.date_sold >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + OR p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + OR p.datein >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + OR pls.date_sold IS NULL + `); + const totalProducts = countResult[0].total; + + outputProgress({ + operation: `Starting products import - Fetching ${totalProducts} products from production`, + status: "running", + }); + + // Get products from production with optimized query + const [rows] = await 
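The ?? null in the value mapping above is load-bearing: mysql2's prepared execute() rejects undefined bind values outright, and an explicit null keeps the intent clear even under query(). The same coercion as a standalone function, with the managing_stock special case called out:

// Hedged sketch of the per-column bind-value coercion used above.
function toBindValue(row, col) {
  if (col === 'managing_stock') return 1;       // local-only flag, defaulted on import
  const val = row[col] ?? null;                 // undefined is not a valid bind value
  if (typeof val === 'number') return val || 0; // normalize NaN (failed parses) to 0
  return val;
}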
prodConnection.query(` + SELECT + p.pid, + p.description AS title, + p.notes AS description, + p.itemnumber AS SKU, + p.date_created, + p.datein AS first_received, + p.location, + COALESCE(si.available_local, 0) - COALESCE( + (SELECT SUM(oi.qty_ordered - oi.qty_placed) + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.prod_pid = p.pid + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0), 0) AS stock_quantity, + ci.onpreorder AS preorder_count, + pnb.inventory AS notions_inv_count, + COALESCE(pcp.price_each, 0) as price, + COALESCE(p.sellingprice, 0) AS regular_price, + COALESCE((SELECT ROUND(AVG(costeach), 5) + FROM product_inventory + WHERE pid = p.pid + AND COUNT > 0), 0) AS cost_price, + NULL AS landing_cost_price, + p.upc AS barcode, + p.harmonized_tariff_code, + p.stamp AS updated_at, + CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, + CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, + s.companyname AS vendor, + CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, + sid.notions_itemnumber AS notions_reference, + CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, + (SELECT CONCAT('https://sbing.com/i/products/0000/', + SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', + p.pid, '-t-', MIN(PI.iid), '.jpg') + FROM product_images PI + WHERE PI.pid = p.pid AND PI.hidden = 0) AS image, + (SELECT CONCAT('https://sbing.com/i/products/0000/', + SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', + p.pid, '-175x175-', MIN(PI.iid), '.jpg') + FROM product_images PI + WHERE PI.pid = p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175, + (SELECT CONCAT('https://sbing.com/i/products/0000/', + SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', + p.pid, '-o-', MIN(PI.iid), '.jpg') + FROM product_images PI + WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full, + pc1.name AS brand, + pc2.name AS line, + pc3.name AS subline, + pc4.name AS artist, + NULL AS options, + NULL AS tags, + COALESCE(CASE + WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit + ELSE sid.supplier_qty_per_unit + END, sid.notions_qty_per_unit) AS moq, + NULL AS uom, + p.rating, + p.rating_votes AS reviews, + p.weight, + p.length, + p.width, + p.height, + (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, + (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, + p.totalsold AS total_sold, + p.country_of_origin, + pls.date_sold as date_last_sold, + GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids + FROM products p + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN supplier_item_data sid ON p.pid = sid.pid + LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid + LEFT JOIN product_category_index pci ON p.pid = pci.pid + LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id + AND pc.type IN (10, 20, 11, 21, 12, 13) + AND pci.cat_id NOT IN (16, 17) + LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id + LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id + LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id + LEFT JOIN product_categories pc4 ON p.artist = 
pc4.cat_id + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + LEFT JOIN ( + SELECT pid, MIN(price_each) as price_each + FROM product_current_prices + WHERE active = 1 + GROUP BY pid + ) pcp ON p.pid = pcp.pid + WHERE (pls.date_sold >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + OR p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + OR p.datein >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + OR pls.date_sold IS NULL) + GROUP BY p.pid + `); + + // Debug log to check for specific product + const debugProduct = rows.find((row) => row.pid === 620972); + if (debugProduct) { + console.log("Found product 620972:", debugProduct); + } else { + console.log("Product 620972 not found in query results"); + + // Debug query to check why it's missing + const [debugResult] = await prodConnection.query( + ` + SELECT + p.pid, + p.itemnumber, + p.date_created, + p.datein, + pls.date_sold, + si.show, + si.buyable, + pcp.price_each + FROM products p + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN ( + SELECT pid, MIN(price_each) as price_each + FROM product_current_prices + WHERE active = 1 + GROUP BY pid + ) pcp ON p.pid = pcp.pid + WHERE p.pid = ? + `, + [620972] + ); + + console.log("Debug query result:", debugResult); + } + + // Also check for the other missing products + const missingPids = [ + 208348, 317600, 370009, 429494, 466233, 471156, 474582, 476214, 484394, + 484755, 484756, 493549, 620972, + ]; + const [missingProducts] = await prodConnection.query( + ` + SELECT + p.pid, + p.itemnumber, + p.date_created, + p.datein, + pls.date_sold, + si.show, + si.buyable, + pcp.price_each + FROM products p + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN ( + SELECT pid, MIN(price_each) as price_each + FROM product_current_prices + WHERE active = 1 + GROUP BY pid + ) pcp ON p.pid = pcp.pid + WHERE p.pid IN (?) + `, + [missingPids] + ); + + console.log("Debug results for missing products:", missingProducts); + + let current = 0; + const total = rows.length; + + // Process products in batches + for (let i = 0; i < rows.length; i += BATCH_SIZE) { + let batch = rows.slice(i, i + BATCH_SIZE); + + // Prepare product values and category relationships in parallel + const productValues = []; + const categoryRelationships = []; + + batch.forEach((row) => { + // Map values in the same order as columns + const rowValues = columnNames.map((col) => { + const val = row[col] ?? 
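The probes for pid 620972 and the hardcoded missingPids list above are explicit debug queries (see the "Debug log" comments). One way to keep them available without running them on every import is an env-gated list; DEBUG_PIDS is a hypothetical variable, not one this patch defines:

// Hypothetical gate: DEBUG_PIDS="620972,208348" node import-from-prod.js
// This sketch assumes it runs inside the async import function above.
const DEBUG_PIDS = (process.env.DEBUG_PIDS || '')
  .split(',')
  .map((s) => Number(s.trim()))
  .filter(Boolean);

if (DEBUG_PIDS.length > 0) {
  const [debugRows] = await prodConnection.query(
    'SELECT pid, itemnumber, date_created, datein FROM products WHERE pid IN (?)',
    [DEBUG_PIDS]
  );
  console.log('Debug rows:', debugRows);
}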
null; + if (col === "managing_stock") return 1; + if (typeof val === "number") return val || 0; + return val; + }); + productValues.push(...rowValues); + + // Add category relationships + if (row.category_ids) { + const catIds = row.category_ids + .split(",") + .map((id) => id.trim()) + .filter((id) => id) + .map(Number); + catIds.forEach((catId) => { + if (catId) categoryRelationships.push([catId, row.pid]); + }); + } + }); + + // Generate placeholders based on column count + const placeholderGroup = `(${Array(columnNames.length) + .fill("?") + .join(",")})`; + const productPlaceholders = Array(batch.length) + .fill(placeholderGroup) + .join(","); + + // Build the query dynamically + const insertQuery = ` + INSERT INTO products (${columnNames.join(",")}) + VALUES ${productPlaceholders} + ON DUPLICATE KEY UPDATE ${columnNames + .filter((col) => col !== "pid") + .map((col) => `${col} = VALUES(${col})`) + .join(",")} + `; + + // First insert the products and wait for it to complete + await localConnection.query(insertQuery, productValues); + + // Now that products are inserted, handle category relationships + if (categoryRelationships.length > 0) { + // Get unique category IDs to verify they exist + const uniqueCatIds = [ + ...new Set(categoryRelationships.map(([catId]) => catId)), + ]; + + console.log("Checking categories:", uniqueCatIds); + + // Check which categories exist + const [existingCats] = await localConnection.query( + "SELECT cat_id FROM categories WHERE cat_id IN (?)", + [uniqueCatIds] + ); + const existingCatIds = new Set(existingCats.map((c) => c.cat_id)); + + // Log missing categories + const missingCatIds = uniqueCatIds.filter( + (id) => !existingCatIds.has(id) + ); + if (missingCatIds.length > 0) { + console.error("Missing categories:", missingCatIds); + + // Query production to see what these categories are + const [missingCats] = await prodConnection.query( + ` + SELECT cat_id, name, type, master_cat_id, hidden + FROM product_categories + WHERE cat_id IN (?) 
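The existence check above (select the distinct ids from the target table, build a Set, filter the relationships) recurs for categories here and for products just below. A generic sketch of that step; the table and column names are caller-supplied and assumed to come from trusted code, never user input:

// Returns the subset of `ids` that exist in `table`.`idColumn` as a Set.
async function filterByExisting(connection, table, idColumn, ids) {
  if (ids.length === 0) return new Set();
  const [rows] = await connection.query(
    `SELECT ${idColumn} FROM ${table} WHERE ${idColumn} IN (?)`,
    [ids]
  );
  return new Set(rows.map((r) => r[idColumn]));
}

// const existingCatIds = await filterByExisting(localConnection, 'categories', 'cat_id', uniqueCatIds);
// const valid = categoryRelationships.filter(([catId]) => existingCatIds.has(catId));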
+ `, + [missingCatIds] + ); + + console.error("Missing category details:", missingCats); + console.warn( + "Skipping invalid category relationships - continuing with import" + ); + continue; + } + + // Verify products exist before inserting relationships + const productIds = [ + ...new Set(categoryRelationships.map(([_, pid]) => pid)), + ]; + const [existingProducts] = await localConnection.query( + "SELECT pid FROM products WHERE pid IN (?)", + [productIds] + ); + const existingProductIds = new Set(existingProducts.map((p) => p.pid)); + + // Filter relationships to only include existing products + const validRelationships = categoryRelationships.filter(([_, pid]) => + existingProductIds.has(pid) + ); + + if (validRelationships.length > 0) { + const catPlaceholders = validRelationships + .map(() => "(?, ?)") + .join(","); + await localConnection.query( + ` + INSERT INTO product_categories (cat_id, pid) + VALUES ${catPlaceholders} + ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) + `, + validRelationships.flat() + ); + } + } + + current += batch.length; + updateProgress(current, total, "Products import", startTime); + } + + outputProgress({ + status: "complete", + operation: "Products import completed", + current: total, + total, + duration: formatElapsedTime((Date.now() - startTime) / 1000), + }); + } catch (error) { + console.error("Error importing products:", error); + throw error; + } +} + +module.exports = { + importProducts, + importMissingProducts +}; \ No newline at end of file diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js new file mode 100644 index 0000000..323e894 --- /dev/null +++ b/inventory-server/scripts/import/purchase-orders.js @@ -0,0 +1,290 @@ +const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); + +async function importPurchaseOrders(prodConnection, localConnection) { + outputProgress({ + operation: "Starting purchase orders import - Initializing", + status: "running", + }); + + const startTime = Date.now(); + + try { + // Get column names for the insert + const [columns] = await localConnection.query(` + SELECT COLUMN_NAME + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'purchase_orders' + ORDER BY ORDINAL_POSITION + `); + const columnNames = columns + .map((col) => col.COLUMN_NAME) + .filter((name) => name !== "id"); + + // First get all relevant PO IDs with basic info - this is much faster than the full join + const [[{ total }]] = await prodConnection.query(` + SELECT COUNT(*) as total + FROM ( + SELECT DISTINCT pop.po_id, pop.pid + FROM po p + FORCE INDEX (idx_date_created) + JOIN po_products pop ON p.po_id = pop.po_id + JOIN suppliers s ON p.supplier_id = s.supplierid + WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + UNION + SELECT DISTINCT r.receiving_id as po_id, rp.pid + FROM receivings_products rp + LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id + WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + ) all_items + `); + + const [poList] = await prodConnection.query(` + SELECT DISTINCT + COALESCE(p.po_id, r.receiving_id) as po_id, + CASE + WHEN p.po_id IS NOT NULL THEN s1.companyname + WHEN r.supplier_id IS NOT NULL THEN s2.companyname + ELSE 'No Supplier' + END as vendor, + CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_ordered) END as date, + CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_estin) END as expected_date, + COALESCE(p.status, 50) as status, + COALESCE(p.short_note, '') as notes, + COALESCE(p.notes, 
'') as long_note + FROM ( + SELECT po_id FROM po + WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + UNION + SELECT DISTINCT r.receiving_id as po_id + FROM receivings r + JOIN receivings_products rp ON r.receiving_id = rp.receiving_id + WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + ) ids + LEFT JOIN po p ON ids.po_id = p.po_id + LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid + LEFT JOIN receivings r ON ids.po_id = r.receiving_id + LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid + ORDER BY po_id + `); + + const totalItems = total; + let processed = 0; + + const BATCH_SIZE = 5000; + const PROGRESS_INTERVAL = 500; + let lastProgressUpdate = Date.now(); + + outputProgress({ + operation: `Starting purchase orders import - Processing ${totalItems} purchase order items`, + status: "running", + }); + + for (let i = 0; i < poList.length; i += BATCH_SIZE) { + const batch = poList.slice(i, Math.min(i + BATCH_SIZE, poList.length)); + const poIds = batch.map(po => po.po_id); + + // Get all products for these POs in one query + const [poProducts] = await prodConnection.query(` + SELECT + pop.po_id, + pop.pid, + pr.itemnumber as sku, + pop.cost_each as cost_price, + pop.qty_each as ordered + FROM po_products pop + FORCE INDEX (PRIMARY) + JOIN products pr ON pop.pid = pr.pid + WHERE pop.po_id IN (?) + `, [poIds]); + + // Process PO products in smaller sub-batches to avoid packet size issues + const SUB_BATCH_SIZE = 5000; + for (let j = 0; j < poProducts.length; j += SUB_BATCH_SIZE) { + const productBatch = poProducts.slice(j, j + SUB_BATCH_SIZE); + const productPids = [...new Set(productBatch.map(p => p.pid))]; + const batchPoIds = [...new Set(productBatch.map(p => p.po_id))]; + + // Get receivings for this batch + const [receivings] = await prodConnection.query(` + SELECT + r.po_id, + rp.pid, + rp.receiving_id, + rp.qty_each, + rp.cost_each, + DATE(NULLIF(rp.received_date, '0000-00-00 00:00:00')) as received_date, + rp.received_by, + CASE + WHEN r.po_id IS NULL THEN 2 -- No PO + WHEN r.po_id IN (?) THEN 0 -- Original PO + ELSE 1 -- Different PO + END as is_alt_po + FROM receivings_products rp + LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id + WHERE rp.pid IN (?) 
+ AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + ORDER BY r.po_id, rp.pid, rp.received_date + `, [batchPoIds, productPids]); + + // Create maps for this sub-batch + const poProductMap = new Map(); + productBatch.forEach(product => { + const key = `${product.po_id}-${product.pid}`; + poProductMap.set(key, product); + }); + + const receivingMap = new Map(); + const altReceivingMap = new Map(); + const noPOReceivingMap = new Map(); + + receivings.forEach(receiving => { + const key = `${receiving.po_id}-${receiving.pid}`; + if (receiving.is_alt_po === 2) { + // No PO + if (!noPOReceivingMap.has(receiving.pid)) { + noPOReceivingMap.set(receiving.pid, []); + } + noPOReceivingMap.get(receiving.pid).push(receiving); + } else if (receiving.is_alt_po === 1) { + // Different PO + if (!altReceivingMap.has(receiving.pid)) { + altReceivingMap.set(receiving.pid, []); + } + altReceivingMap.get(receiving.pid).push(receiving); + } else { + // Original PO + if (!receivingMap.has(key)) { + receivingMap.set(key, []); + } + receivingMap.get(key).push(receiving); + } + }); + + // Verify PIDs exist + const [existingPids] = await localConnection.query( + 'SELECT pid FROM products WHERE pid IN (?)', + [productPids] + ); + const validPids = new Set(existingPids.map(p => p.pid)); + + // Prepare values for this sub-batch + const values = []; + let batchProcessed = 0; + + for (const po of batch) { + const poProducts = Array.from(poProductMap.values()) + .filter(p => p.po_id === po.po_id && validPids.has(p.pid)); + + for (const product of poProducts) { + const key = `${po.po_id}-${product.pid}`; + const receivingHistory = receivingMap.get(key) || []; + const altReceivingHistory = altReceivingMap.get(product.pid) || []; + const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || []; + + const received = receivingHistory.reduce((sum, r) => sum + r.qty_each, 0); + const altReceived = altReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); + const noPOReceived = noPOReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); + const totalReceived = received + altReceived + noPOReceived; + + const receiving_status = !totalReceived ? 1 : // created + totalReceived < product.ordered ? 
30 : // partial + 40; // full + + const allReceivings = [...receivingHistory]; + if (altReceivingHistory.length > 0) { + allReceivings.push(...altReceivingHistory); + } + if (noPOReceivingHistory.length > 0) { + allReceivings.push(...noPOReceivingHistory); + } + allReceivings.sort((a, b) => new Date(a.received_date) - new Date(b.received_date)); + + const firstReceiving = allReceivings[0] || {}; + const lastReceiving = allReceivings[allReceivings.length - 1] || {}; + + values.push(columnNames.map(col => { + switch (col) { + case 'po_id': return po.po_id; + case 'vendor': return po.vendor; + case 'date': return po.date; + case 'expected_date': return po.expected_date; + case 'pid': return product.pid; + case 'sku': return product.sku; + case 'cost_price': return product.cost_price; + case 'status': return po.status; + case 'notes': return po.notes; + case 'long_note': return po.long_note; + case 'ordered': return product.ordered; + case 'received': return totalReceived; + case 'received_date': return firstReceiving.received_date || null; + case 'last_received_date': return lastReceiving.received_date || null; + case 'received_by': return firstReceiving.received_by || null; + case 'receiving_status': return receiving_status; + case 'receiving_history': return JSON.stringify(allReceivings.map(r => ({ + receiving_id: r.receiving_id, + qty: r.qty_each, + cost: r.cost_each, + date: r.received_date, + received_by: r.received_by, + alt_po: r.is_alt_po + }))); + default: return null; + } + })); + batchProcessed++; + } + } + + if (values.length > 0) { + const placeholders = values.map(() => + `(${Array(columnNames.length).fill("?").join(",")})` + ).join(","); + + const query = ` + INSERT INTO purchase_orders (${columnNames.join(",")}) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE ${columnNames + .filter((col) => col !== "po_id" && col !== "pid") + .map((col) => `${col} = VALUES(${col})`) + .join(",")}; + `; + + await localConnection.query(query, values.flat()); + } + + processed += batchProcessed; + + // Update progress based on time interval + const now = Date.now(); + if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) { + updateProgress(processed, totalItems, "Purchase orders import", startTime); + lastProgressUpdate = now; + } + } + } + + const endTime = Date.now(); + outputProgress({ + operation: `Purchase orders import complete`, + status: "complete", + processed_records: processed, + total_records: totalItems, + timing: { + start_time: new Date(startTime).toISOString(), + end_time: new Date(endTime).toISOString(), + elapsed_time: formatElapsedTime((endTime - startTime) / 1000), + elapsed_seconds: Math.round((endTime - startTime) / 1000) + } + }); + + } catch (error) { + outputProgress({ + operation: "Purchase orders import failed", + status: "error", + error: error.message, + }); + throw error; + } +} + +module.exports = importPurchaseOrders; \ No newline at end of file diff --git a/inventory-server/scripts/import/utils.js b/inventory-server/scripts/import/utils.js new file mode 100644 index 0000000..3f71b9c --- /dev/null +++ b/inventory-server/scripts/import/utils.js @@ -0,0 +1,102 @@ +const mysql = require("mysql2/promise"); +const { Client } = require("ssh2"); +const dotenv = require("dotenv"); +const path = require("path"); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); + +dotenv.config({ path: path.join(__dirname, "../../.env") }); + +// SSH configuration +const sshConfig = { + host: 
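The purchase_orders writer above generates its upsert from the live column list, excluding the composite key (po_id, pid) from the UPDATE clause. Isolated as a helper, with illustrative table and column names; VALUES() is the MySQL 5.7-era syntax used throughout these scripts:

    // Build a multi-row INSERT ... ON DUPLICATE KEY UPDATE statement.
    function buildUpsert(table, columns, keyColumns, rowCount) {
      const row = `(${columns.map(() => '?').join(',')})`;
      const updates = columns
        .filter((col) => !keyColumns.includes(col))
        .map((col) => `${col} = VALUES(${col})`)
        .join(',');
      return `INSERT INTO ${table} (${columns.join(',')}) ` +
        `VALUES ${Array(rowCount).fill(row).join(',')} ` +
        `ON DUPLICATE KEY UPDATE ${updates}`;
    }

    // buildUpsert('purchase_orders', ['po_id', 'pid', 'received'], ['po_id', 'pid'], 2)
    // -> INSERT INTO purchase_orders (po_id,pid,received) VALUES (?,?,?),(?,?,?)
    //    ON DUPLICATE KEY UPDATE received = VALUES(received)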
process.env.PROD_SSH_HOST, + port: process.env.PROD_SSH_PORT || 22, + username: process.env.PROD_SSH_USER, + privateKey: process.env.PROD_SSH_KEY_PATH + ? require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH) + : undefined, +}; + +// Production database configuration +const prodDbConfig = { + host: process.env.PROD_DB_HOST || "localhost", + user: process.env.PROD_DB_USER, + password: process.env.PROD_DB_PASSWORD, + database: process.env.PROD_DB_NAME, + port: process.env.PROD_DB_PORT || 3306, +}; + +// Local database configuration +const localDbConfig = { + host: process.env.DB_HOST, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + multipleStatements: true, + waitForConnections: true, + connectionLimit: 10, + queueLimit: 0, + namedPlaceholders: true, +}; + +// Constants +const BATCH_SIZE = 1000; +const PROGRESS_INTERVAL = 1000; // Update progress every second + +async function setupSshTunnel() { + return new Promise((resolve, reject) => { + const ssh = new Client(); + + ssh.on('error', (err) => { + console.error('SSH connection error:', err); + // Don't reject here, just log the error + }); + + ssh.on('end', () => { + console.log('SSH connection ended normally'); + }); + + ssh.on('close', () => { + console.log('SSH connection closed'); + }); + + ssh + .on("ready", () => { + ssh.forwardOut( + "127.0.0.1", + 0, + prodDbConfig.host, + prodDbConfig.port, + async (err, stream) => { + if (err) reject(err); + resolve({ ssh, stream }); + } + ); + }) + .connect(sshConfig); + }); +} + +// Helper function to update progress with time estimate +function updateProgress(current, total, operation, startTime) { + outputProgress({ + status: 'running', + operation, + current, + total, + rate: calculateRate(startTime, current), + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, current, total), + percentage: ((current / total) * 100).toFixed(1) + }); +} + +module.exports = { + setupSshTunnel, + updateProgress, + prodDbConfig, + localDbConfig, + BATCH_SIZE, + PROGRESS_INTERVAL, + outputProgress, + formatElapsedTime +}; \ No newline at end of file diff --git a/inventory-server/scripts/import-csv.js b/inventory-server/scripts/old_csv/import-csv.js similarity index 99% rename from inventory-server/scripts/import-csv.js rename to inventory-server/scripts/old_csv/import-csv.js index 04ab8ef..33fe6fa 100644 --- a/inventory-server/scripts/import-csv.js +++ b/inventory-server/scripts/old_csv/import-csv.js @@ -3,7 +3,7 @@ const path = require('path'); const csv = require('csv-parse'); const mysql = require('mysql2/promise'); const dotenv = require('dotenv'); -const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./metrics/utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); // Get test limits from environment variables const PRODUCTS_TEST_LIMIT = parseInt(process.env.PRODUCTS_TEST_LIMIT || '0'); diff --git a/inventory-server/scripts/update-csv.js b/inventory-server/scripts/old_csv/update-csv.js similarity index 98% rename from inventory-server/scripts/update-csv.js rename to inventory-server/scripts/old_csv/update-csv.js index 26e5556..4f49fcb 100644 --- a/inventory-server/scripts/update-csv.js +++ b/inventory-server/scripts/old_csv/update-csv.js @@ -1,7 +1,7 @@ const path = require('path'); const fs = require('fs'); const axios = require('axios'); -const { outputProgress, formatElapsedTime, estimateRemaining, 
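setupSshTunnel() resolves with the ssh2 client and the duplex stream returned by forwardOut(); mysql2 accepts that stream in place of opening its own TCP socket, which is how these scripts reach the production database without exposing its port. A usage sketch against the exports above:

    const mysql = require('mysql2/promise');
    const { setupSshTunnel, prodDbConfig } = require('./import/utils');

    async function connectToProd() {
      const { ssh, stream } = await setupSshTunnel();
      // mysql2 speaks the MySQL protocol over the forwarded stream
      const connection = await mysql.createConnection({ ...prodDbConfig, stream });
      return { ssh, connection }; // keep `ssh` so it can be ended after `connection`
    }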
calculateRate } = require('./metrics/utils/progress'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); // Change working directory to script directory process.chdir(path.dirname(__filename)); From 84baa7e7d366c65c618705ebbc8dff3f65cf9a74 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 13:23:32 -0500 Subject: [PATCH 06/33] Break up prod import script into pieces and move csv scripts into folder --- inventory-server/scripts/import-from-prod.js | 147 +++++++----------- inventory-server/scripts/import/categories.js | 30 +++- inventory-server/scripts/import/orders.js | 35 +++-- inventory-server/scripts/import/products.js | 97 +++--------- .../scripts/import/purchase-orders.js | 30 ++-- inventory-server/scripts/import/utils.js | 114 ++++++-------- 6 files changed, 187 insertions(+), 266 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 94bb506..2c926dd 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -1,7 +1,7 @@ -const mysql = require("mysql2/promise"); const dotenv = require("dotenv"); const path = require("path"); -const { setupSshTunnel, outputProgress, formatElapsedTime, prodDbConfig, localDbConfig } = require('./import/utils'); +const { outputProgress, formatElapsedTime } = require('./metrics/utils/progress'); +const { setupConnections, closeConnections } = require('./import/utils'); const importCategories = require('./import/categories'); const { importProducts } = require('./import/products'); const importOrders = require('./import/orders'); @@ -15,6 +15,38 @@ const IMPORT_PRODUCTS = true; const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; +// SSH configuration +const sshConfig = { + ssh: { + host: process.env.PROD_SSH_HOST, + port: process.env.PROD_SSH_PORT || 22, + username: process.env.PROD_SSH_USER, + privateKey: process.env.PROD_SSH_KEY_PATH + ? 
require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH) + : undefined, + }, + // Production database configuration + prodDbConfig: { + host: process.env.PROD_DB_HOST || "localhost", + user: process.env.PROD_DB_USER, + password: process.env.PROD_DB_PASSWORD, + database: process.env.PROD_DB_NAME, + port: process.env.PROD_DB_PORT || 3306, + }, + // Local database configuration + localDbConfig: { + host: process.env.DB_HOST, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + multipleStatements: true, + waitForConnections: true, + connectionLimit: 10, + queueLimit: 0, + namedPlaceholders: true, + } +}; + let isImportCancelled = false; // Add cancel function @@ -32,12 +64,9 @@ function cancelImport() { }); } -// Modify main function to handle cancellation and avoid process.exit async function main() { - let ssh; - let prodConnection; - let localConnection; const startTime = Date.now(); + let connections; try { // Initial progress update @@ -50,96 +79,39 @@ async function main() { elapsed: formatElapsedTime((Date.now() - startTime) / 1000) }); - const tunnel = await setupSshTunnel(); - ssh = tunnel.ssh; - - outputProgress({ - status: "running", - operation: "Import process", - message: "Connecting to production database...", - current: 0, - total: 4, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) - }); - - prodConnection = await mysql.createConnection({ - ...prodDbConfig, - stream: tunnel.stream, - }); - - outputProgress({ - status: "running", - operation: "Import process", - message: "Connecting to local database...", - current: 0, - total: 4, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) - }); - - localConnection = await mysql.createPool({ - ...localDbConfig, - waitForConnections: true, - connectionLimit: 10, - queueLimit: 0 - }); + connections = await setupConnections(sshConfig); + const { prodConnection, localConnection } = connections; if (isImportCancelled) throw new Error("Import cancelled"); - let currentStep = 0; + const results = { + categories: null, + products: null, + orders: null, + purchaseOrders: null + }; // Run each import based on constants if (IMPORT_CATEGORIES) { - outputProgress({ - status: "running", - operation: "Import process", - message: "Starting categories import...", - current: currentStep, - total: 4, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) - }); - await importCategories(prodConnection, localConnection); + results.categories = await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); currentStep++; } if (IMPORT_PRODUCTS) { - outputProgress({ - status: "running", - operation: "Import process", - message: "Starting products import...", - current: currentStep, - total: 4, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) - }); - await importProducts(prodConnection, localConnection); + results.products = await importProducts(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); currentStep++; } if (IMPORT_ORDERS) { - outputProgress({ - status: "running", - operation: "Import process", - message: "Starting orders import...", - current: currentStep, - total: 4, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) - }); - await importOrders(prodConnection, localConnection); + results.orders = await importOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); currentStep++; } if (IMPORT_PURCHASE_ORDERS) { - 
outputProgress({ - status: "running", - operation: "Import process", - message: "Starting purchase orders import...", - current: currentStep, - total: 4, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) - }); - await importPurchaseOrders(prodConnection, localConnection); + results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); currentStep++; } @@ -157,8 +129,11 @@ async function main() { end_time: new Date(endTime).toISOString(), elapsed_time: formatElapsedTime((endTime - startTime) / 1000), elapsed_seconds: Math.round((endTime - startTime) / 1000) - } + }, + results }); + + return results; } catch (error) { const endTime = Date.now(); console.error("Error during import process:", error); @@ -179,23 +154,8 @@ async function main() { }); throw error; } finally { - try { - // Close connections in order - if (prodConnection) await prodConnection.end(); - if (localConnection) await localConnection.end(); - - // Wait a bit for any pending data to be written before closing SSH - await new Promise(resolve => setTimeout(resolve, 100)); - - if (ssh) { - // Properly close the SSH connection - ssh.on('close', () => { - console.log('SSH connection closed cleanly'); - }); - ssh.end(); - } - } catch (err) { - console.error('Error during cleanup:', err); + if (connections) { + await closeConnections(connections); } } } @@ -211,6 +171,5 @@ if (require.main === module) { // Export the functions needed by the route module.exports = { main, - outputProgress, cancelImport, }; diff --git a/inventory-server/scripts/import/categories.js b/inventory-server/scripts/import/categories.js index 34f7c61..b099acc 100644 --- a/inventory-server/scripts/import/categories.js +++ b/inventory-server/scripts/import/categories.js @@ -1,4 +1,4 @@ -const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); +const { outputProgress, formatElapsedTime } = require('../metrics/utils/progress'); async function importCategories(prodConnection, localConnection) { outputProgress({ @@ -129,12 +129,13 @@ async function importCategories(prodConnection, localConnection) { ); totalInserted += categoriesToInsert.length; - updateProgress( - totalInserted, - totalInserted, - "Categories import", - startTime - ); + outputProgress({ + status: "running", + operation: "Categories import", + current: totalInserted, + total: totalInserted, + elapsed: formatElapsedTime(startTime), + }); } // After all imports, if we skipped any categories, throw an error @@ -151,8 +152,13 @@ async function importCategories(prodConnection, localConnection) { operation: "Categories import completed", current: totalInserted, total: totalInserted, - duration: formatElapsedTime((Date.now() - startTime) / 1000), + duration: formatElapsedTime(Date.now() - startTime), }); + + return { + status: "complete", + totalImported: totalInserted + }; } catch (error) { console.error("Error importing categories:", error); if (error.skippedCategories) { @@ -161,6 +167,14 @@ async function importCategories(prodConnection, localConnection) { JSON.stringify(error.skippedCategories, null, 2) ); } + + outputProgress({ + status: "error", + operation: "Categories import failed", + error: error.message, + skippedCategories: error.skippedCategories + }); + throw error; } } diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 1427d0a..d692696 100644 --- a/inventory-server/scripts/import/orders.js +++ 
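The categories catch block above reads error.skippedCategories, which only works because the throwing side attaches the array to the Error before it propagates. A sketch of that convention:

    // Attach structured context to an Error so outer handlers can report it.
    function throwWithSkipped(skippedCategories) {
      const error = new Error(
        `Failed to import ${skippedCategories.length} categories`
      );
      error.skippedCategories = skippedCategories; // plain property assignment
      throw error;
    }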
b/inventory-server/scripts/import/orders.js @@ -1,4 +1,4 @@ -const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); const { importMissingProducts } = require('./products'); async function importOrders(prodConnection, localConnection) { @@ -25,11 +25,6 @@ async function importOrders(prodConnection, localConnection) { .filter((name) => name !== "id"); // Skip auto-increment ID // Get total count first for progress indication - outputProgress({ - operation: "Starting orders import - Getting total count", - status: "running", - }); - const [countResult] = await prodConnection.query(` SELECT COUNT(*) as total FROM order_items oi FORCE INDEX (PRIMARY) @@ -132,12 +127,15 @@ async function importOrders(prodConnection, localConnection) { processed += orders.length; offset += batchSize; - updateProgress( - processed, + outputProgress({ + status: "running", + operation: "Orders import", + current: processed, total, - "Orders import", - startTime - ); + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processed, total), + rate: calculateRate(startTime, processed) + }); } // Now handle missing products and retry skipped orders @@ -215,13 +213,20 @@ async function importOrders(prodConnection, localConnection) { } } - const endTime = Date.now(); outputProgress({ - operation: `Orders import complete in ${Math.round( - (endTime - startTime) / 1000 - )}s`, status: "complete", + operation: "Orders import completed", + current: total, + total, + duration: formatElapsedTime(Date.now() - startTime), }); + + return { + status: "complete", + totalImported: total, + missingProducts: missingProducts.size, + retriedOrders: skippedOrders.size + }; } catch (error) { outputProgress({ operation: "Orders import failed", diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 70be5c5..f597207 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -1,4 +1,4 @@ -const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); async function importMissingProducts(prodConnection, localConnection, missingPids) { // First get the column names from the table structure @@ -345,76 +345,9 @@ async function importProducts(prodConnection, localConnection) { GROUP BY p.pid `); - // Debug log to check for specific product - const debugProduct = rows.find((row) => row.pid === 620972); - if (debugProduct) { - console.log("Found product 620972:", debugProduct); - } else { - console.log("Product 620972 not found in query results"); - - // Debug query to check why it's missing - const [debugResult] = await prodConnection.query( - ` - SELECT - p.pid, - p.itemnumber, - p.date_created, - p.datein, - pls.date_sold, - si.show, - si.buyable, - pcp.price_each - FROM products p - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE p.pid = ? 
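The orders importer defers rows whose pid has no local product, backfills those products via importMissingProducts, then retries the deferred rows once. Condensed to a sketch, where insertOrders is a hypothetical stand-in for the batched insert above:

    // Defer rows with missing foreign keys, backfill, then retry once.
    async function importWithRetry(orders, knownPids, deps) {
      const ready = [];
      const skipped = [];
      for (const order of orders) {
        (knownPids.has(order.pid) ? ready : skipped).push(order);
      }
      await deps.insertOrders(ready);

      if (skipped.length > 0) {
        const missingPids = [...new Set(skipped.map((o) => o.pid))];
        await deps.importMissingProducts(missingPids); // backfill products first
        await deps.insertOrders(skipped);              // then a single retry pass
      }
    }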
- `, - [620972] - ); - - console.log("Debug query result:", debugResult); - } - - // Also check for the other missing products - const missingPids = [ - 208348, 317600, 370009, 429494, 466233, 471156, 474582, 476214, 484394, - 484755, 484756, 493549, 620972, - ]; - const [missingProducts] = await prodConnection.query( - ` - SELECT - p.pid, - p.itemnumber, - p.date_created, - p.datein, - pls.date_sold, - si.show, - si.buyable, - pcp.price_each - FROM products p - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE p.pid IN (?) - `, - [missingPids] - ); - - console.log("Debug results for missing products:", missingProducts); - let current = 0; const total = rows.length; + const BATCH_SIZE = 1000; // Process products in batches for (let i = 0; i < rows.length; i += BATCH_SIZE) { @@ -475,8 +408,6 @@ async function importProducts(prodConnection, localConnection) { ...new Set(categoryRelationships.map(([catId]) => catId)), ]; - console.log("Checking categories:", uniqueCatIds); - // Check which categories exist const [existingCats] = await localConnection.query( "SELECT cat_id FROM categories WHERE cat_id IN (?)", @@ -539,7 +470,15 @@ async function importProducts(prodConnection, localConnection) { } current += batch.length; - updateProgress(current, total, "Products import", startTime); + outputProgress({ + status: "running", + operation: "Products import", + current, + total, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, current, total), + rate: calculateRate(startTime, current) + }); } outputProgress({ @@ -547,10 +486,22 @@ async function importProducts(prodConnection, localConnection) { operation: "Products import completed", current: total, total, - duration: formatElapsedTime((Date.now() - startTime) / 1000), + duration: formatElapsedTime(Date.now() - startTime), }); + + return { + status: "complete", + totalImported: total + }; } catch (error) { console.error("Error importing products:", error); + + outputProgress({ + status: "error", + operation: "Products import failed", + error: error.message + }); + throw error; } } diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 323e894..2c9ac69 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -1,4 +1,4 @@ -const { updateProgress, outputProgress, formatElapsedTime } = require('./utils'); +const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); async function importPurchaseOrders(prodConnection, localConnection) { outputProgress({ @@ -257,26 +257,32 @@ async function importPurchaseOrders(prodConnection, localConnection) { // Update progress based on time interval const now = Date.now(); if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) { - updateProgress(processed, totalItems, "Purchase orders import", startTime); + outputProgress({ + status: "running", + operation: "Purchase orders import", + current: processed, + total: totalItems, + elapsed: formatElapsedTime(startTime), + remaining: estimateRemaining(startTime, processed, totalItems), + rate: calculateRate(startTime, processed) + }); lastProgressUpdate = now; } } } - const endTime = Date.now(); 
outputProgress({ - operation: `Purchase orders import complete`, status: "complete", - processed_records: processed, - total_records: totalItems, - timing: { - start_time: new Date(startTime).toISOString(), - end_time: new Date(endTime).toISOString(), - elapsed_time: formatElapsedTime((endTime - startTime) / 1000), - elapsed_seconds: Math.round((endTime - startTime) / 1000) - } + operation: "Purchase orders import completed", + current: totalItems, + total: totalItems, + duration: formatElapsedTime(Date.now() - startTime), }); + return { + status: "complete", + totalImported: totalItems + }; } catch (error) { outputProgress({ operation: "Purchase orders import failed", diff --git a/inventory-server/scripts/import/utils.js b/inventory-server/scripts/import/utils.js index 3f71b9c..12d8a21 100644 --- a/inventory-server/scripts/import/utils.js +++ b/inventory-server/scripts/import/utils.js @@ -2,53 +2,14 @@ const mysql = require("mysql2/promise"); const { Client } = require("ssh2"); const dotenv = require("dotenv"); const path = require("path"); -const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); -dotenv.config({ path: path.join(__dirname, "../../.env") }); - -// SSH configuration -const sshConfig = { - host: process.env.PROD_SSH_HOST, - port: process.env.PROD_SSH_PORT || 22, - username: process.env.PROD_SSH_USER, - privateKey: process.env.PROD_SSH_KEY_PATH - ? require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH) - : undefined, -}; - -// Production database configuration -const prodDbConfig = { - host: process.env.PROD_DB_HOST || "localhost", - user: process.env.PROD_DB_USER, - password: process.env.PROD_DB_PASSWORD, - database: process.env.PROD_DB_NAME, - port: process.env.PROD_DB_PORT || 3306, -}; - -// Local database configuration -const localDbConfig = { - host: process.env.DB_HOST, - user: process.env.DB_USER, - password: process.env.DB_PASSWORD, - database: process.env.DB_NAME, - multipleStatements: true, - waitForConnections: true, - connectionLimit: 10, - queueLimit: 0, - namedPlaceholders: true, -}; - -// Constants -const BATCH_SIZE = 1000; -const PROGRESS_INTERVAL = 1000; // Update progress every second - -async function setupSshTunnel() { +// Helper function to setup SSH tunnel +async function setupSshTunnel(sshConfig) { return new Promise((resolve, reject) => { const ssh = new Client(); ssh.on('error', (err) => { console.error('SSH connection error:', err); - // Don't reject here, just log the error }); ssh.on('end', () => { @@ -64,39 +25,64 @@ async function setupSshTunnel() { ssh.forwardOut( "127.0.0.1", 0, - prodDbConfig.host, - prodDbConfig.port, + sshConfig.prodDbConfig.host, + sshConfig.prodDbConfig.port, async (err, stream) => { if (err) reject(err); resolve({ ssh, stream }); } ); }) - .connect(sshConfig); + .connect(sshConfig.ssh); }); } -// Helper function to update progress with time estimate -function updateProgress(current, total, operation, startTime) { - outputProgress({ - status: 'running', - operation, - current, - total, - rate: calculateRate(startTime, current), - elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, current, total), - percentage: ((current / total) * 100).toFixed(1) - }); +// Helper function to setup database connections +async function setupConnections(sshConfig) { + const tunnel = await setupSshTunnel(sshConfig); + + const prodConnection = await mysql.createConnection({ + ...sshConfig.prodDbConfig, + stream: tunnel.stream, + }); + + const 
localConnection = await mysql.createPool({ + ...sshConfig.localDbConfig, + waitForConnections: true, + connectionLimit: 10, + queueLimit: 0 + }); + + return { + ssh: tunnel.ssh, + prodConnection, + localConnection + }; +} + +// Helper function to close connections +async function closeConnections(connections) { + const { ssh, prodConnection, localConnection } = connections; + + try { + if (prodConnection) await prodConnection.end(); + if (localConnection) await localConnection.end(); + + // Wait a bit for any pending data to be written before closing SSH + await new Promise(resolve => setTimeout(resolve, 100)); + + if (ssh) { + ssh.on('close', () => { + console.log('SSH connection closed cleanly'); + }); + ssh.end(); + } + } catch (err) { + console.error('Error during cleanup:', err); + } } module.exports = { - setupSshTunnel, - updateProgress, - prodDbConfig, - localDbConfig, - BATCH_SIZE, - PROGRESS_INTERVAL, - outputProgress, - formatElapsedTime + setupConnections, + closeConnections }; \ No newline at end of file From 81a724db9d452290c6de001e5cd3fc37c61c3f56 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 13:40:01 -0500 Subject: [PATCH 07/33] Fix elapsed time calculation in import scripts --- inventory-server/scripts/import/categories.js | 4 ++-- inventory-server/scripts/import/orders.js | 4 ++-- inventory-server/scripts/import/purchase-orders.js | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/inventory-server/scripts/import/categories.js b/inventory-server/scripts/import/categories.js index b099acc..7dffc3c 100644 --- a/inventory-server/scripts/import/categories.js +++ b/inventory-server/scripts/import/categories.js @@ -134,7 +134,7 @@ async function importCategories(prodConnection, localConnection) { operation: "Categories import", current: totalInserted, total: totalInserted, - elapsed: formatElapsedTime(startTime), + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), }); } @@ -152,7 +152,7 @@ async function importCategories(prodConnection, localConnection) { operation: "Categories import completed", current: totalInserted, total: totalInserted, - duration: formatElapsedTime(Date.now() - startTime), + duration: formatElapsedTime((Date.now() - startTime) / 1000), }); return { diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index d692696..5347dce 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -132,7 +132,7 @@ async function importOrders(prodConnection, localConnection) { operation: "Orders import", current: processed, total, - elapsed: formatElapsedTime(startTime), + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), remaining: estimateRemaining(startTime, processed, total), rate: calculateRate(startTime, processed) }); @@ -218,7 +218,7 @@ async function importOrders(prodConnection, localConnection) { operation: "Orders import completed", current: total, total, - duration: formatElapsedTime(Date.now() - startTime), + duration: formatElapsedTime((Date.now() - startTime) / 1000), }); return { diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 2c9ac69..7165c75 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -262,7 +262,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { operation: "Purchase orders import", current: processed, total: totalItems, - elapsed: 
formatElapsedTime(startTime), + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), remaining: estimateRemaining(startTime, processed, totalItems), rate: calculateRate(startTime, processed) }); @@ -276,7 +276,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { operation: "Purchase orders import completed", current: totalItems, total: totalItems, - duration: formatElapsedTime(Date.now() - startTime), + duration: formatElapsedTime((Date.now() - startTime) / 1000), }); return { From d60b2d4fae3ab3a439d86d3a8edddecfdd801d1e Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 13:47:33 -0500 Subject: [PATCH 08/33] Refactor import scripts with improved progress tracking and time formatting --- inventory-server/scripts/import-from-prod.js | 57 ++++++++++++------- .../scripts/metrics/utils/progress.js | 11 +++- 2 files changed, 44 insertions(+), 24 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 2c926dd..c3a90dc 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = true; -const IMPORT_PRODUCTS = true; -const IMPORT_ORDERS = true; +const IMPORT_CATEGORIES = false; +const IMPORT_PRODUCTS = false; +const IMPORT_ORDERS = false; const IMPORT_PURCHASE_ORDERS = true; // SSH configuration @@ -67,6 +67,13 @@ function cancelImport() { async function main() { const startTime = Date.now(); let connections; + let completedSteps = 0; + const totalSteps = [ + IMPORT_CATEGORIES, + IMPORT_PRODUCTS, + IMPORT_ORDERS, + IMPORT_PURCHASE_ORDERS + ].filter(Boolean).length; try { // Initial progress update @@ -74,9 +81,9 @@ async function main() { status: "running", operation: "Import process", message: "Initializing SSH tunnel...", - current: 0, - total: 4, // Total number of major steps - elapsed: formatElapsedTime((Date.now() - startTime) / 1000) + current: completedSteps, + total: totalSteps, + elapsed: formatElapsedTime(startTime) }); connections = await setupConnections(sshConfig); @@ -95,40 +102,42 @@ async function main() { if (IMPORT_CATEGORIES) { results.categories = await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); - currentStep++; + completedSteps++; } if (IMPORT_PRODUCTS) { results.products = await importProducts(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); - currentStep++; + completedSteps++; } if (IMPORT_ORDERS) { results.orders = await importOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); - currentStep++; + completedSteps++; } if (IMPORT_PURCHASE_ORDERS) { results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); - currentStep++; + completedSteps++; } const endTime = Date.now(); + const totalElapsedSeconds = Math.round((endTime - startTime) / 1000); outputProgress({ status: "complete", operation: "Import process", - message: "All imports completed successfully", - current: 4, - total: 4, - elapsed: formatElapsedTime((endTime - startTime) / 1000), + message: `All imports completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`, + current: completedSteps, + total: 
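Deriving totalSteps from the enable flags keeps the progress denominator honest when a step is switched off, as IMPORT_CATEGORIES through IMPORT_ORDERS are in this commit. The trick in isolation, with illustrative flag values:

    const flags = [false, false, false, true]; // only purchase orders enabled
    const totalSteps = flags.filter(Boolean).length; // 1; `false` entries drop out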
totalSteps, + elapsed: formatElapsedTime(startTime), timing: { start_time: new Date(startTime).toISOString(), end_time: new Date(endTime).toISOString(), - elapsed_time: formatElapsedTime((endTime - startTime) / 1000), - elapsed_seconds: Math.round((endTime - startTime) / 1000) + elapsed_time: formatElapsedTime(startTime), + elapsed_seconds: totalElapsedSeconds, + total_duration: formatElapsedTime(totalElapsedSeconds) }, results }); @@ -136,20 +145,24 @@ async function main() { return results; } catch (error) { const endTime = Date.now(); + const totalElapsedSeconds = Math.round((endTime - startTime) / 1000); console.error("Error during import process:", error); outputProgress({ status: error.message === "Import cancelled" ? "cancelled" : "error", operation: "Import process", - message: error.message === "Import cancelled" ? "Import cancelled by user" : "Import failed", + message: error.message === "Import cancelled" + ? `Import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}` + : `Import failed after ${formatElapsedTime(totalElapsedSeconds)}`, error: error.message, - current: 0, - total: 4, - elapsed: formatElapsedTime((endTime - startTime) / 1000), + current: completedSteps, + total: totalSteps, + elapsed: formatElapsedTime(startTime), timing: { start_time: new Date(startTime).toISOString(), end_time: new Date(endTime).toISOString(), - elapsed_time: formatElapsedTime((endTime - startTime) / 1000), - elapsed_seconds: Math.round((endTime - startTime) / 1000) + elapsed_time: formatElapsedTime(startTime), + elapsed_seconds: totalElapsedSeconds, + total_duration: formatElapsedTime(totalElapsedSeconds) } }); throw error; diff --git a/inventory-server/scripts/metrics/utils/progress.js b/inventory-server/scripts/metrics/utils/progress.js index 8e81bde..6b66ceb 100644 --- a/inventory-server/scripts/metrics/utils/progress.js +++ b/inventory-server/scripts/metrics/utils/progress.js @@ -2,8 +2,15 @@ const fs = require('fs'); const path = require('path'); // Helper function to format elapsed time -function formatElapsedTime(startTime) { - const elapsed = Date.now() - startTime; +function formatElapsedTime(elapsed) { + // If elapsed is a timestamp, convert to elapsed milliseconds + if (elapsed instanceof Date || elapsed > 1000000000000) { + elapsed = Date.now() - elapsed; + } else { + // If elapsed is in seconds, convert to milliseconds + elapsed = elapsed * 1000; + } + const seconds = Math.floor(elapsed / 1000); const minutes = Math.floor(seconds / 60); const hours = Math.floor(minutes / 60); From d2a2dbc812f6237d7ac5d73ad74284be5887b084 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 16:22:00 -0500 Subject: [PATCH 09/33] Add incremental import support and tracking for database synchronization --- inventory-server/db/config-schema.sql | 25 +- inventory-server/scripts/import-from-prod.js | 124 ++- inventory-server/scripts/import/orders.js | 30 +- inventory-server/scripts/import/products.js | 839 +++++++++++------- .../scripts/import/purchase-orders.js | 112 ++- inventory-server/scripts/reset-db.js | 2 +- .../components/settings/DataManagement.tsx | 7 +- 7 files changed, 760 insertions(+), 379 deletions(-) diff --git a/inventory-server/db/config-schema.sql b/inventory-server/db/config-schema.sql index a2f7639..1dfb3a7 100644 --- a/inventory-server/db/config-schema.sql +++ b/inventory-server/db/config-schema.sql @@ -169,4 +169,27 @@ ORDER BY ELSE 4 END, c.name, - st.vendor; \ No newline at end of file + st.vendor; + +CREATE TABLE IF NOT EXISTS sync_status ( + table_name 
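After this change formatElapsedTime() accepts either a start timestamp in milliseconds or an already-computed duration in seconds, telling them apart with the > 1e12 heuristic (any millisecond epoch value after 2001 clears it). Both call styles now appear in these scripts:

    const { formatElapsedTime } = require('./metrics/utils/progress');

    const startTime = Date.now();
    // ... do some work ...
    formatElapsedTime(startTime);                       // timestamp branch (> 1e12)
    formatElapsedTime((Date.now() - startTime) / 1000); // seconds branch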
VARCHAR(50) PRIMARY KEY, + last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + last_sync_id BIGINT, + INDEX idx_last_sync (last_sync_timestamp) +); + +CREATE TABLE IF NOT EXISTS import_history ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + table_name VARCHAR(50) NOT NULL, + start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + end_time TIMESTAMP NULL, + duration_seconds INT, + records_added INT DEFAULT 0, + records_updated INT DEFAULT 0, + is_incremental BOOLEAN DEFAULT FALSE, + status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running', + error_message TEXT, + additional_info JSON, + INDEX idx_table_time (table_name, start_time), + INDEX idx_status (status) +); \ No newline at end of file diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index c3a90dc..ede5aa2 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,12 +10,16 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = false; -const IMPORT_PRODUCTS = false; -const IMPORT_ORDERS = false; +const IMPORT_CATEGORIES = true; +const IMPORT_PRODUCTS = true; +const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; +// Add flag for incremental updates +const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE === 'true'; + // SSH configuration +// In import-from-prod.js const sshConfig = { ssh: { host: process.env.PROD_SSH_HOST, @@ -24,16 +28,16 @@ const sshConfig = { privateKey: process.env.PROD_SSH_KEY_PATH ? require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH) : undefined, + compress: true, // Enable SSH compression }, - // Production database configuration prodDbConfig: { host: process.env.PROD_DB_HOST || "localhost", user: process.env.PROD_DB_USER, password: process.env.PROD_DB_PASSWORD, database: process.env.PROD_DB_NAME, port: process.env.PROD_DB_PORT || 3306, + timezone: 'Z', }, - // Local database configuration localDbConfig: { host: process.env.DB_HOST, user: process.env.DB_USER, @@ -44,6 +48,13 @@ const sshConfig = { connectionLimit: 10, queueLimit: 0, namedPlaceholders: true, + maxAllowedPacket: 64 * 1024 * 1024, // 64MB + connectTimeout: 60000, + enableKeepAlive: true, + keepAliveInitialDelay: 10000, + compress: true, + timezone: 'Z', + stringifyObjects: false, } }; @@ -68,6 +79,7 @@ async function main() { const startTime = Date.now(); let connections; let completedSteps = 0; + let importHistoryId; const totalSteps = [ IMPORT_CATEGORIES, IMPORT_PRODUCTS, @@ -80,7 +92,7 @@ async function main() { outputProgress({ status: "running", operation: "Import process", - message: "Initializing SSH tunnel...", + message: `Initializing SSH tunnel for ${INCREMENTAL_UPDATE ? 
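sync_status holds one watermark row per table. An incremental pass reads the watermark, pulls only rows stamped after it, and advances it once the write succeeds, so a failed run simply retries from the old watermark. The shape of that loop as a sketch, assuming mysql2 pools and a stamp column on the source table:

    async function incrementalSync(local, prod) {
      const [rows] = await local.query(
        "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
      );
      const since = rows[0]?.last_sync_timestamp || '1970-01-01';

      const [changed] = await prod.query(
        'SELECT * FROM _order WHERE stamp > ?', [since]
      );
      // ... upsert `changed` into the local table here ...

      // Advance the watermark only after the write succeeded
      await local.query(
        `INSERT INTO sync_status (table_name, last_sync_timestamp)
         VALUES ('orders', NOW())
         ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()`
      );
    }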
'incremental' : 'full'} import...`, current: completedSteps, total: totalSteps, elapsed: formatElapsedTime(startTime) @@ -91,6 +103,39 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); + // Initialize sync_status table if it doesn't exist + await localConnection.query(` + CREATE TABLE IF NOT EXISTS sync_status ( + table_name VARCHAR(50) PRIMARY KEY, + last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + last_sync_id BIGINT, + INDEX idx_last_sync (last_sync_timestamp) + ); + `); + + // Create import history record for the overall session + const [historyResult] = await localConnection.query(` + INSERT INTO import_history ( + table_name, + start_time, + is_incremental, + status, + additional_info + ) VALUES ( + 'all_tables', + NOW(), + ?, + 'running', + JSON_OBJECT( + 'categories_enabled', ?, + 'products_enabled', ?, + 'orders_enabled', ?, + 'purchase_orders_enabled', ? + ) + ) + `, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]); + importHistoryId = historyResult.insertId; + const results = { categories: null, products: null, @@ -98,37 +143,84 @@ async function main() { purchaseOrders: null }; + let totalRecordsAdded = 0; + let totalRecordsUpdated = 0; + // Run each import based on constants if (IMPORT_CATEGORIES) { results.categories = await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + if (results.categories.recordsAdded) totalRecordsAdded += results.categories.recordsAdded; + if (results.categories.recordsUpdated) totalRecordsUpdated += results.categories.recordsUpdated; } if (IMPORT_PRODUCTS) { results.products = await importProducts(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + if (results.products.recordsAdded) totalRecordsAdded += results.products.recordsAdded; + if (results.products.recordsUpdated) totalRecordsUpdated += results.products.recordsUpdated; } if (IMPORT_ORDERS) { results.orders = await importOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + if (results.orders.recordsAdded) totalRecordsAdded += results.orders.recordsAdded; + if (results.orders.recordsUpdated) totalRecordsUpdated += results.orders.recordsUpdated; } if (IMPORT_PURCHASE_ORDERS) { results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + if (results.purchaseOrders.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded; + if (results.purchaseOrders.recordsUpdated) totalRecordsUpdated += results.purchaseOrders.recordsUpdated; } const endTime = Date.now(); const totalElapsedSeconds = Math.round((endTime - startTime) / 1000); + + // Update import history with final stats + await localConnection.query(` + UPDATE import_history + SET + end_time = NOW(), + duration_seconds = ?, + records_added = ?, + records_updated = ?, + status = 'completed', + additional_info = JSON_OBJECT( + 'categories_enabled', ?, + 'products_enabled', ?, + 'orders_enabled', ?, + 'purchase_orders_enabled', ?, + 'categories_result', CAST(? AS JSON), + 'products_result', CAST(? AS JSON), + 'orders_result', CAST(? AS JSON), + 'purchase_orders_result', CAST(? AS JSON) + ) + WHERE id = ? 
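import_history rows follow an open/close protocol: insert with status 'running', then finalize with duration and counts, or with the error message on failure. A sketch of the same bookkeeping wrapped around a single step (the real script inlines it around the whole session):

    // Record an import run in import_history around an async step.
    async function withHistory(db, tableName, isIncremental, step) {
      const [res] = await db.query(
        `INSERT INTO import_history (table_name, is_incremental, status)
         VALUES (?, ?, 'running')`,
        [tableName, isIncremental]
      );
      const id = res.insertId;
      const start = Date.now();
      try {
        const { added = 0, updated = 0 } = (await step()) || {};
        await db.query(
          `UPDATE import_history
           SET end_time = NOW(), duration_seconds = ?, records_added = ?,
               records_updated = ?, status = 'completed'
           WHERE id = ?`,
          [Math.round((Date.now() - start) / 1000), added, updated, id]
        );
      } catch (err) {
        await db.query(
          `UPDATE import_history
           SET end_time = NOW(), duration_seconds = ?, status = 'failed',
               error_message = ?
           WHERE id = ?`,
          [Math.round((Date.now() - start) / 1000), err.message, id]
        );
        throw err;
      }
    }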
+ `, [ + totalElapsedSeconds, + totalRecordsAdded, + totalRecordsUpdated, + IMPORT_CATEGORIES, + IMPORT_PRODUCTS, + IMPORT_ORDERS, + IMPORT_PURCHASE_ORDERS, + JSON.stringify(results.categories), + JSON.stringify(results.products), + JSON.stringify(results.orders), + JSON.stringify(results.purchaseOrders), + importHistoryId + ]); + outputProgress({ status: "complete", operation: "Import process", - message: `All imports completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`, + message: `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`, current: completedSteps, total: totalSteps, elapsed: formatElapsedTime(startTime), @@ -146,13 +238,27 @@ async function main() { } catch (error) { const endTime = Date.now(); const totalElapsedSeconds = Math.round((endTime - startTime) / 1000); + + // Update import history with error + if (importHistoryId) { + await connections?.localConnection?.query(` + UPDATE import_history + SET + end_time = NOW(), + duration_seconds = ?, + status = ?, + error_message = ? + WHERE id = ? + `, [totalElapsedSeconds, error.message === "Import cancelled" ? 'cancelled' : 'failed', error.message, importHistoryId]); + } + console.error("Error during import process:", error); outputProgress({ status: error.message === "Import cancelled" ? "cancelled" : "error", operation: "Import process", message: error.message === "Import cancelled" - ? `Import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}` - : `Import failed after ${formatElapsedTime(totalElapsedSeconds)}`, + ? `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}` + : `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import failed after ${formatElapsedTime(totalElapsedSeconds)}`, error: error.message, current: completedSteps, total: totalSteps, diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 5347dce..768153c 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -12,6 +12,12 @@ async function importOrders(prodConnection, localConnection) { const missingProducts = new Set(); // Store products that need to be imported try { + // Get last sync info + const [syncInfo] = await localConnection.query( + "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'" + ); + const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + // First get the column names from the table structure const [columns] = await localConnection.query(` SELECT COLUMN_NAME @@ -24,14 +30,16 @@ async function importOrders(prodConnection, localConnection) { .map((col) => col.COLUMN_NAME) .filter((name) => name !== "id"); // Skip auto-increment ID - // Get total count first for progress indication + // Get total count first for progress indication - modified for incremental const [countResult] = await prodConnection.query(` SELECT COUNT(*) as total FROM order_items oi FORCE INDEX (PRIMARY) JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - `); + AND (o.date_placed_onlydate > ? + OR o.stamp > ?) 
+ `, [lastSyncTime, lastSyncTime]); + const totalOrders = countResult[0].total; outputProgress({ @@ -81,9 +89,10 @@ async function importOrders(prodConnection, localConnection) { FROM order_items oi JOIN _order o ON oi.order_id = o.order_id WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) + AND (o.date_placed_onlydate > ? + OR o.stamp > ?) LIMIT ? OFFSET ? - `, [batchSize, offset]); + `, [lastSyncTime, lastSyncTime, batchSize, offset]); // Check if all products exist before inserting orders const orderProductPids = [...new Set(orders.map((o) => o.pid))]; @@ -213,6 +222,13 @@ async function importOrders(prodConnection, localConnection) { } } + // After successful import, update the sync status + await localConnection.query(` + INSERT INTO sync_status (table_name, last_sync_timestamp) + VALUES ('orders', NOW()) + ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() + `); + outputProgress({ status: "complete", operation: "Orders import completed", @@ -225,7 +241,9 @@ async function importOrders(prodConnection, localConnection) { status: "complete", totalImported: total, missingProducts: missingProducts.size, - retriedOrders: skippedOrders.size + retriedOrders: skippedOrders.size, + incrementalUpdate: true, + lastSyncTime }; } catch (error) { outputProgress({ diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index f597207..da058f7 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -1,211 +1,473 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); -async function importMissingProducts(prodConnection, localConnection, missingPids) { - // First get the column names from the table structure - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'products' - ORDER BY ORDINAL_POSITION +// Utility functions +const imageUrlBase = 'https://sbing.com/i/products/0000/'; +const getImageUrls = (pid) => { + const paddedPid = pid.toString().padStart(6, '0'); + const basePath = `${imageUrlBase}${paddedPid.slice(0, 3)}/${pid}`; + return { + image: `${basePath}-t-`, + image_175: `${basePath}-175x175-`, + image_full: `${basePath}-o-` + }; +}; + +async function setupTemporaryTables(connection) { + await connection.query(` + CREATE TEMPORARY TABLE IF NOT EXISTS temp_categories ( + cat_id INT PRIMARY KEY, + name VARCHAR(255) + ) ENGINE=InnoDB; + + CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_images ( + pid INT, + iid INT, + image_type ENUM('thumbnail', '175', 'full'), + url VARCHAR(255), + PRIMARY KEY (pid, image_type) + ) ENGINE=InnoDB; + + CREATE TEMPORARY TABLE IF NOT EXISTS temp_inventory_status ( + pid INT PRIMARY KEY, + stock_quantity INT, + pending_qty INT, + preorder_count INT, + notions_inv_count INT + ) ENGINE=InnoDB; + + CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_prices ( + pid INT PRIMARY KEY, + price DECIMAL(10,2), + regular_price DECIMAL(10,2), + cost_price DECIMAL(10,5) + ) ENGINE=InnoDB; + + INSERT INTO temp_categories + SELECT cat_id, name FROM categories; + + CREATE INDEX idx_temp_cat_id ON temp_categories(cat_id); `); +} - const columnNames = columns.map((col) => col.COLUMN_NAME); +async function cleanupTemporaryTables(connection) { + await connection.query(` + DROP TEMPORARY TABLE IF EXISTS temp_categories; + DROP TEMPORARY TABLE IF EXISTS temp_product_images; + DROP TEMPORARY TABLE IF 
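getImageUrls() above reproduces in JavaScript what the retired SQL CONCAT expressions built: the CDN path keyed by the first three digits of the zero-padded pid. Worked through for one pid:

    const urls = getImageUrls(620972);
    // padStart(6, '0') -> "620972"; slice(0, 3) -> "620"
    // urls.image      === "https://sbing.com/i/products/0000/620/620972-t-"
    // urls.image_175  === "https://sbing.com/i/products/0000/620/620972-175x175-"
    // urls.image_full === "https://sbing.com/i/products/0000/620/620972-o-"
    // An image id and ".jpg" are presumably appended downstream, as the SQL version did.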
EXISTS temp_inventory_status; + DROP TEMPORARY TABLE IF EXISTS temp_product_prices; + `); +} - // Get the missing products from production - const [products] = await prodConnection.query(` +async function materializeCalculations(prodConnection, localConnection) { + outputProgress({ + status: "running", + operation: "Products import", + message: "Fetching inventory and order data from production" + }); + + // Get all inventory and order data from production in one query + const [prodInventory] = await prodConnection.query(` + SELECT + p.pid, + COALESCE(si.available_local, 0) as stock_quantity, + COALESCE(ci.onpreorder, 0) as preorder_count, + COALESCE(pnb.inventory, 0) as notions_inv_count, + COALESCE( + ( + SELECT SUM(oi.qty_ordered - oi.qty_placed) + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.prod_pid = p.pid + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0 + ), 0 + ) as pending_qty + FROM products p + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + `); + + outputProgress({ + status: "running", + operation: "Products import", + message: `Processing ${prodInventory.length} inventory records` + }); + + // Insert inventory data into local temp table in batches + for (let i = 0; i < prodInventory.length; i += 1000) { + const batch = prodInventory.slice(i, i + 1000); + const values = batch.map(row => [ + row.pid, + Math.max(0, row.stock_quantity - row.pending_qty), // Calculate final stock quantity + row.pending_qty, + row.preorder_count, + row.notions_inv_count + ]); + + if (values.length > 0) { + await localConnection.query(` + INSERT INTO temp_inventory_status (pid, stock_quantity, pending_qty, preorder_count, notions_inv_count) + VALUES ? 
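The staging insert above stores Math.max(0, stock_quantity - pending_qty), i.e. on-hand stock net of ordered-but-unshipped allocations, floored at zero. As a pure function:

    // Sellable quantity: on-hand stock minus pending allocations, never negative.
    function sellableQty(stockQuantity, pendingQty) {
      return Math.max(0, stockQuantity - pendingQty);
    }
    // sellableQty(10, 4) === 6; sellableQty(3, 7) === 0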
+ ON DUPLICATE KEY UPDATE + stock_quantity = VALUES(stock_quantity), + pending_qty = VALUES(pending_qty), + preorder_count = VALUES(preorder_count), + notions_inv_count = VALUES(notions_inv_count) + `, [values]); + } + + outputProgress({ + status: "running", + operation: "Products import", + message: `Processed ${Math.min(i + 1000, prodInventory.length)} of ${prodInventory.length} inventory records`, + current: i + batch.length, + total: prodInventory.length + }); + } + + outputProgress({ + status: "running", + operation: "Products import", + message: "Fetching pricing data from production" + }); + + // Get prices from production + const [prodPrices] = await prodConnection.query(` SELECT p.pid, - p.description AS title, - p.notes AS description, - p.itemnumber AS SKU, - p.date_created, - p.datein AS first_received, - p.location, - COALESCE(si.available_local, 0) - COALESCE( - (SELECT SUM(oi.qty_ordered - oi.qty_placed) - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid = p.pid - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0), 0) AS stock_quantity, - ci.onpreorder AS preorder_count, - pnb.inventory AS notions_inv_count, COALESCE(pcp.price_each, 0) as price, COALESCE(p.sellingprice, 0) AS regular_price, - COALESCE((SELECT ROUND(AVG(costeach), 5) - FROM product_inventory - WHERE pid = p.pid - AND COUNT > 0), 0) AS cost_price, - NULL AS landing_cost_price, - p.upc AS barcode, - p.harmonized_tariff_code, - p.stamp AS updated_at, - CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, - CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, - s.companyname AS vendor, - CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, - sid.notions_itemnumber AS notions_reference, - CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-t-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-175x175-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-o-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full, - pc1.name AS brand, - pc2.name AS line, - pc3.name AS subline, - pc4.name AS artist, - NULL AS options, - NULL AS tags, - COALESCE(CASE - WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit - ELSE sid.supplier_qty_per_unit - END, sid.notions_qty_per_unit) AS moq, - NULL AS uom, - p.rating, - p.rating_votes AS reviews, - p.weight, - p.length, - p.width, - p.height, - (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, - (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, - p.totalsold AS total_sold, - p.country_of_origin, - pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids + COALESCE( + (SELECT ROUND(AVG(costeach), 5) + FROM product_inventory + 
WHERE pid = p.pid + AND COUNT > 0), 0 + ) AS cost_price FROM products p - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN supplier_item_data sid ON p.pid = sid.pid - LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid - LEFT JOIN product_category_index pci ON p.pid = pci.pid - LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id - AND pc.type IN (10, 20, 11, 21, 12, 13) - AND pci.cat_id NOT IN (16, 17) - LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id - LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id - LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id - LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid - WHERE p.pid IN (?) - GROUP BY p.pid - `, [missingPids]); + LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid + WHERE pcp.active = 1 + `); - if (products.length > 0) { - // Map values in the same order as columns - const productValues = products.flatMap(product => - columnNames.map(col => { - const val = product[col] ?? null; - if (col === "managing_stock") return 1; - if (typeof val === "number") return val || 0; - return val; - }) - ); + outputProgress({ + status: "running", + operation: "Products import", + message: `Processing ${prodPrices.length} price records` + }); - // Generate placeholders for all products - const placeholders = products - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); + // Insert prices into local temp table in batches + for (let i = 0; i < prodPrices.length; i += 1000) { + const batch = prodPrices.slice(i, i + 1000); + const values = batch.map(row => [ + row.pid, + row.price, + row.regular_price, + row.cost_price + ]); - // Build and execute the query - const query = ` - INSERT INTO products (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${columnNames - .filter((col) => col !== "pid") - .map((col) => `${col} = VALUES(${col})`) - .join(",")} - `; - - await localConnection.query(query, productValues); - - // Verify products were inserted before proceeding with categories - const [insertedProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [products.map(p => p.pid)] - ); - const insertedPids = new Set(insertedProducts.map(p => p.pid)); - - // Handle category relationships if any - const categoryRelationships = []; - products.forEach(product => { - // Only add category relationships for products that were successfully inserted - if (insertedPids.has(product.pid) && product.category_ids) { - const catIds = product.category_ids - .split(",") - .map(id => id.trim()) - .filter(id => id) - .map(Number); - catIds.forEach(catId => { - if (catId) categoryRelationships.push([catId, product.pid]); - }); - } - }); - - if (categoryRelationships.length > 0) { - // Verify categories exist before inserting relationships - const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))]; - const [existingCats] = await localConnection.query( - "SELECT cat_id FROM categories WHERE cat_id IN (?)", - [uniqueCatIds] - ); - const existingCatIds = new Set(existingCats.map(c => c.cat_id)); - - // Filter relationships to only include existing categories - const validRelationships = 
categoryRelationships.filter(([catId]) => - existingCatIds.has(catId) - ); - - if (validRelationships.length > 0) { - const catPlaceholders = validRelationships - .map(() => "(?, ?)") - .join(","); - await localConnection.query( - ` - INSERT INTO product_categories (cat_id, pid) - VALUES ${catPlaceholders} - ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) - `, - validRelationships.flat() - ); - } + if (values.length > 0) { + await localConnection.query(` + INSERT INTO temp_product_prices (pid, price, regular_price, cost_price) + VALUES ? + ON DUPLICATE KEY UPDATE + price = VALUES(price), + regular_price = VALUES(regular_price), + cost_price = VALUES(cost_price) + `, [values]); } + + outputProgress({ + status: "running", + operation: "Products import", + message: `Processed ${Math.min(i + 1000, prodPrices.length)} of ${prodPrices.length} price records`, + current: i + batch.length, + total: prodPrices.length + }); } + + outputProgress({ + status: "running", + operation: "Products import", + message: "Finished materializing calculations" + }); } async function importProducts(prodConnection, localConnection) { - outputProgress({ - operation: "Starting products import - Getting schema", - status: "running", - }); - const startTime = Date.now(); try { + // Get column names first + const [columns] = await localConnection.query(` + SELECT COLUMN_NAME + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'products' + ORDER BY ORDINAL_POSITION + `); + const columnNames = columns.map(col => col.COLUMN_NAME); + + // Get last sync info + const [syncInfo] = await localConnection.query( + "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'products'" + ); + const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + + // Setup temporary tables + await setupTemporaryTables(localConnection); + + // Materialize calculations + await materializeCalculations(prodConnection, localConnection); + + // Optimized count query for changes since last sync + const [countResult] = await prodConnection.query(` + SELECT COUNT(*) as total + FROM products p + WHERE p.stamp > ? + OR EXISTS ( + SELECT 1 FROM product_last_sold pls + WHERE p.pid = pls.pid + AND pls.date_sold > ? + ) + OR p.date_created > ? + OR p.datein > ? 
+ `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + + const totalProducts = countResult[0].total; + + // Main product query using materialized data - modified for incremental + outputProgress({ + status: "running", + operation: "Products import", + message: "Fetching product data from production" + }); + + // Create temporary table for production data + await localConnection.query(` + CREATE TEMPORARY TABLE temp_prod_data ( + pid BIGINT NOT NULL, + title VARCHAR(255), + description TEXT, + SKU VARCHAR(50), + date_created TIMESTAMP NULL, + first_received TIMESTAMP NULL, + location VARCHAR(50), + barcode VARCHAR(50), + harmonized_tariff_code VARCHAR(20), + updated_at TIMESTAMP, + visible BOOLEAN, + replenishable BOOLEAN, + vendor VARCHAR(100), + vendor_reference VARCHAR(100), + notions_reference VARCHAR(100), + brand VARCHAR(100), + line VARCHAR(100), + subline VARCHAR(100), + artist VARCHAR(100), + moq INT, + rating TINYINT UNSIGNED, + reviews INT UNSIGNED, + weight DECIMAL(10,3), + length DECIMAL(10,3), + width DECIMAL(10,3), + height DECIMAL(10,3), + total_sold INT UNSIGNED, + country_of_origin VARCHAR(5), + date_last_sold DATE, + category_ids TEXT, + PRIMARY KEY (pid) + ) ENGINE=InnoDB + `); + + // Get data from production and insert into temp table + const [prodData] = await prodConnection.query(` + SELECT + p.pid, + p.description AS title, + p.notes AS description, + p.itemnumber AS SKU, + p.date_created, + p.datein AS first_received, + p.location, + p.upc AS barcode, + p.harmonized_tariff_code, + p.stamp AS updated_at, + CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, + CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, + s.companyname AS vendor, + CASE WHEN s.companyname = 'Notions' + THEN sid.notions_itemnumber + ELSE sid.supplier_itemnumber + END AS vendor_reference, + sid.notions_itemnumber AS notions_reference, + pc1.name AS brand, + pc2.name AS line, + pc3.name AS subline, + pc4.name AS artist, + COALESCE(CASE + WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit + ELSE sid.supplier_qty_per_unit + END, sid.notions_qty_per_unit) AS moq, + p.rating, + p.rating_votes AS reviews, + p.weight, + p.length, + p.width, + p.height, + p.totalsold AS total_sold, + p.country_of_origin, + pls.date_sold as date_last_sold, + GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids + FROM products p + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN supplier_item_data sid ON p.pid = sid.pid + LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid + LEFT JOIN product_category_index pci ON p.pid = pci.pid + LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id + LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id + LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id + LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + WHERE p.stamp > ? + OR pls.date_sold > ? + OR p.date_created > ? + OR p.datein > ? 
+ GROUP BY p.pid + `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + + // Insert production data in batches + for (let i = 0; i < prodData.length; i += 1000) { + const batch = prodData.slice(i, i + 1000); + const placeholders = batch.map(() => "(?)").join(","); + + await localConnection.query(` + INSERT INTO temp_prod_data VALUES ${placeholders} + `, batch.map(row => Object.values(row))); + + outputProgress({ + status: "running", + operation: "Products import", + message: `Loaded ${Math.min(i + 1000, prodData.length)} of ${prodData.length} products from production`, + current: i + batch.length, + total: prodData.length + }); + } + + // Now join with local temp tables + const [rows] = await localConnection.query(` + SELECT + p.*, + COALESCE(tis.stock_quantity, 0) as stock_quantity, + COALESCE(tis.preorder_count, 0) as preorder_count, + COALESCE(tis.notions_inv_count, 0) as notions_inv_count, + COALESCE(tpp.price, 0) as price, + COALESCE(tpp.regular_price, 0) as regular_price, + COALESCE(tpp.cost_price, 0) as cost_price + FROM temp_prod_data p + LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid + LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid + `); + + // Drop the temporary production data table + await localConnection.query("DROP TEMPORARY TABLE IF EXISTS temp_prod_data"); + + // Process products in batches + const BATCH_SIZE = 1000; + let processed = 0; + for (let i = 0; i < rows.length; i += BATCH_SIZE) { + const batch = rows.slice(i, i + BATCH_SIZE); + + // Add image URLs + batch.forEach(row => { + const urls = getImageUrls(row.pid); + row.image = urls.image; + row.image_175 = urls.image_175; + row.image_full = urls.image_full; + }); + + // Prepare product values - now using columnNames from above + const productValues = batch.flatMap(row => + columnNames.map(col => { + const val = row[col] ?? 
null;
+        if (col === "managing_stock") return 1;
+        if (typeof val === "number") return val || 0;
+        return val;
+      })
+    );
+
+      // MySQL 8.0 optimized insert
+      const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`;
+      const productPlaceholders = Array(batch.length).fill(placeholderGroup).join(",");
+
+      const insertQuery = `
+        INSERT INTO products (${columnNames.join(",")})
+        VALUES ${productPlaceholders}
+        AS new_products
+        ON DUPLICATE KEY UPDATE
+        ${columnNames
+          .filter(col => col !== "pid")
+          .map(col => `${col} = new_products.${col}`)
+          .join(",")};
+      `;
+
+      await localConnection.query(insertQuery, productValues);
+
+      processed += batch.length;
+      outputProgress({
+        status: "running",
+        operation: "Products import",
+        message: `Processed ${processed} of ${rows.length} products`,
+        current: processed,
+        total: rows.length
+      });
+    }
+
+    // After successful import, update the sync status
+    await localConnection.query(`
+      INSERT INTO sync_status (table_name, last_sync_timestamp)
+      VALUES ('products', NOW())
+      ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
+    `);
+
+    return {
+      status: "complete",
+      totalImported: rows.length,
+      incrementalUpdate: true,
+      lastSyncTime
+    };
+  } catch (error) {
+    throw error;
+  } finally {
+    // Cleanup temporary tables
+    await cleanupTemporaryTables(localConnection);
+  }
+}
+
+async function importMissingProducts(prodConnection, localConnection, missingPids) {
+  try {
+    // Setup temporary tables
+    await setupTemporaryTables(localConnection);
+
+    // Materialize calculations for missing products
+    await localConnection.query(`
+      INSERT INTO temp_inventory_status
+      WITH product_stock AS (
+        SELECT oi.prod_pid,
+          SUM(oi.qty_ordered - oi.qty_placed) as pending_qty
+        FROM order_items oi
+        JOIN _order o ON oi.order_id = o.order_id
+        WHERE oi.prod_pid IN (?)
+          -- Open-order conditions (reconstructed to mirror the pending-qty
+          -- filter used elsewhere in this file):
+          AND o.date_placed != '0000-00-00 00:00:00'
+          AND o.date_shipped = '0000-00-00 00:00:00'
+          AND oi.pick_finished = 0
+          AND oi.qty_back = 0
+          AND o.order_status != 15
+          AND o.order_status < 90
+          AND oi.qty_ordered >= oi.qty_placed
+          AND oi.qty_ordered > 0
+        GROUP BY oi.prod_pid
+      )
+      -- Select list reconstructed to match the columns temp_inventory_status
+      -- is populated with above
+      SELECT
+        p.pid,
+        COALESCE(si.available_local, 0) - COALESCE(ps.pending_qty, 0) AS stock_quantity,
+        COALESCE(ps.pending_qty, 0) AS pending_qty,
+        ci.onpreorder AS preorder_count,
+        pnb.inventory AS notions_inv_count
+      FROM products p
+      LEFT JOIN product_stock ps ON p.pid = ps.prod_pid
+      LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
+      LEFT JOIN current_inventory ci ON p.pid = ci.pid
+      LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
+      WHERE p.pid IN (?)
+ `, [missingPids, missingPids]); + // First get the column names from the table structure const [columns] = await localConnection.query(` SELECT COLUMN_NAME @@ -216,30 +478,8 @@ async function importProducts(prodConnection, localConnection) { const columnNames = columns.map((col) => col.COLUMN_NAME); - // Get total count first for progress indication - outputProgress({ - operation: "Starting products import - Getting total count", - status: "running", - }); - - const [countResult] = await prodConnection.query(` - SELECT COUNT(*) as total - FROM products p - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - WHERE pls.date_sold >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.datein >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR pls.date_sold IS NULL - `); - const totalProducts = countResult[0].total; - - outputProgress({ - operation: `Starting products import - Fetching ${totalProducts} products from production`, - status: "running", - }); - - // Get products from production with optimized query - const [rows] = await prodConnection.query(` + // Get the missing products from production + const [products] = await prodConnection.query(` SELECT p.pid, p.description AS title, @@ -338,120 +578,73 @@ async function importProducts(prodConnection, localConnection) { WHERE active = 1 GROUP BY pid ) pcp ON p.pid = pcp.pid - WHERE (pls.date_sold >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR p.datein >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR) - OR pls.date_sold IS NULL) + WHERE p.pid IN (?) GROUP BY p.pid - `); + `, [missingPids]); - let current = 0; - const total = rows.length; - const BATCH_SIZE = 1000; - - // Process products in batches - for (let i = 0; i < rows.length; i += BATCH_SIZE) { - let batch = rows.slice(i, i + BATCH_SIZE); - - // Prepare product values and category relationships in parallel - const productValues = []; - const categoryRelationships = []; - - batch.forEach((row) => { - // Map values in the same order as columns - const rowValues = columnNames.map((col) => { - const val = row[col] ?? null; + if (products.length > 0) { + // Map values in the same order as columns + const productValues = products.flatMap(product => + columnNames.map(col => { + const val = product[col] ?? 
null; if (col === "managing_stock") return 1; if (typeof val === "number") return val || 0; return val; - }); - productValues.push(...rowValues); + }) + ); - // Add category relationships - if (row.category_ids) { - const catIds = row.category_ids - .split(",") - .map((id) => id.trim()) - .filter((id) => id) - .map(Number); - catIds.forEach((catId) => { - if (catId) categoryRelationships.push([catId, row.pid]); - }); - } - }); - - // Generate placeholders based on column count - const placeholderGroup = `(${Array(columnNames.length) - .fill("?") - .join(",")})`; - const productPlaceholders = Array(batch.length) - .fill(placeholderGroup) + // Generate placeholders for all products + const placeholders = products + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) .join(","); - // Build the query dynamically - const insertQuery = ` + // Build and execute the query + const query = ` INSERT INTO products (${columnNames.join(",")}) - VALUES ${productPlaceholders} + VALUES ${placeholders} ON DUPLICATE KEY UPDATE ${columnNames .filter((col) => col !== "pid") .map((col) => `${col} = VALUES(${col})`) .join(",")} `; - // First insert the products and wait for it to complete - await localConnection.query(insertQuery, productValues); + await localConnection.query(query, productValues); + + // Verify products were inserted before proceeding with categories + const [insertedProducts] = await localConnection.query( + "SELECT pid FROM products WHERE pid IN (?)", + [products.map(p => p.pid)] + ); + const insertedPids = new Set(insertedProducts.map(p => p.pid)); + + // Handle category relationships if any + const categoryRelationships = []; + products.forEach(product => { + // Only add category relationships for products that were successfully inserted + if (insertedPids.has(product.pid) && product.category_ids) { + const catIds = product.category_ids + .split(",") + .map(id => id.trim()) + .filter(id => id) + .map(Number); + catIds.forEach(catId => { + if (catId) categoryRelationships.push([catId, product.pid]); + }); + } + }); - // Now that products are inserted, handle category relationships if (categoryRelationships.length > 0) { - // Get unique category IDs to verify they exist - const uniqueCatIds = [ - ...new Set(categoryRelationships.map(([catId]) => catId)), - ]; - - // Check which categories exist + // Verify categories exist before inserting relationships + const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))]; const [existingCats] = await localConnection.query( "SELECT cat_id FROM categories WHERE cat_id IN (?)", [uniqueCatIds] ); - const existingCatIds = new Set(existingCats.map((c) => c.cat_id)); + const existingCatIds = new Set(existingCats.map(c => c.cat_id)); - // Log missing categories - const missingCatIds = uniqueCatIds.filter( - (id) => !existingCatIds.has(id) - ); - if (missingCatIds.length > 0) { - console.error("Missing categories:", missingCatIds); - - // Query production to see what these categories are - const [missingCats] = await prodConnection.query( - ` - SELECT cat_id, name, type, master_cat_id, hidden - FROM product_categories - WHERE cat_id IN (?) 
- `, - [missingCatIds] - ); - - console.error("Missing category details:", missingCats); - console.warn( - "Skipping invalid category relationships - continuing with import" - ); - continue; - } - - // Verify products exist before inserting relationships - const productIds = [ - ...new Set(categoryRelationships.map(([_, pid]) => pid)), - ]; - const [existingProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [productIds] - ); - const existingProductIds = new Set(existingProducts.map((p) => p.pid)); - - // Filter relationships to only include existing products - const validRelationships = categoryRelationships.filter(([_, pid]) => - existingProductIds.has(pid) + // Filter relationships to only include existing categories + const validRelationships = categoryRelationships.filter(([catId]) => + existingCatIds.has(catId) ); if (validRelationships.length > 0) { @@ -460,49 +653,25 @@ async function importProducts(prodConnection, localConnection) { .join(","); await localConnection.query( ` - INSERT INTO product_categories (cat_id, pid) - VALUES ${catPlaceholders} - ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) - `, + INSERT INTO product_categories (cat_id, pid) + VALUES ${catPlaceholders} + ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) + `, validRelationships.flat() ); } } - - current += batch.length; - outputProgress({ - status: "running", - operation: "Products import", - current, - total, - elapsed: formatElapsedTime(startTime), - remaining: estimateRemaining(startTime, current, total), - rate: calculateRate(startTime, current) - }); } - outputProgress({ - status: "complete", - operation: "Products import completed", - current: total, - total, - duration: formatElapsedTime(Date.now() - startTime), - }); - return { status: "complete", - totalImported: total + totalImported: products.length }; } catch (error) { - console.error("Error importing products:", error); - - outputProgress({ - status: "error", - operation: "Products import failed", - error: error.message - }); - throw error; + } finally { + // Cleanup temporary tables + await cleanupTemporaryTables(localConnection); } } diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 7165c75..e2dd1ee 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -1,14 +1,37 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); async function importPurchaseOrders(prodConnection, localConnection) { - outputProgress({ - operation: "Starting purchase orders import - Initializing", - status: "running", - }); - const startTime = Date.now(); + let importHistoryId; try { + // Get last sync info + const [syncInfo] = await localConnection.query( + "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'" + ); + const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + + // Create import history record + const [historyResult] = await localConnection.query(` + INSERT INTO import_history ( + table_name, + start_time, + is_incremental, + status + ) VALUES ( + 'purchase_orders', + NOW(), + ?, + 'running' + ) + `, [!!syncInfo?.[0]]); + importHistoryId = historyResult.insertId; + + outputProgress({ + operation: "Starting purchase orders import - Initializing", + status: "running", + }); + // Get column names for the insert const [columns] = await localConnection.query(` SELECT 
COLUMN_NAME
@@ -20,7 +43,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
       .map((col) => col.COLUMN_NAME)
       .filter((name) => name !== "id");
 
-    // First get all relevant PO IDs with basic info - this is much faster than the full join
+    // First get all relevant PO IDs with basic info - modified for incremental
     const [[{ total }]] = await prodConnection.query(`
       SELECT COUNT(*) as total
       FROM (
@@ -29,14 +52,17 @@
         FORCE INDEX (idx_date_created)
         JOIN po_products pop ON p.po_id = pop.po_id
         JOIN suppliers s ON p.supplier_id = s.supplierid
-        WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        WHERE p.date_ordered > ?
+          OR p.stamp > ?
+          OR p.date_modified > ?
         UNION
         SELECT DISTINCT r.receiving_id as po_id, rp.pid
         FROM receivings_products rp
         LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
-        WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        WHERE rp.received_date > ?
+          OR rp.stamp > ?
       ) all_items
-    `);
+    `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]);
 
     const [poList] = await prodConnection.query(`
       SELECT DISTINCT
@@ -53,22 +79,27 @@
         COALESCE(p.notes, '') as long_note
       FROM (
         SELECT po_id FROM po
-        WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        WHERE date_ordered > ?
+          OR stamp > ?
+          OR date_modified > ?
         UNION
         SELECT DISTINCT r.receiving_id as po_id
         FROM receivings r
         JOIN receivings_products rp ON r.receiving_id = rp.receiving_id
-        WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        WHERE rp.received_date > ?
+          OR rp.stamp > ?
       ) ids
       LEFT JOIN po p ON ids.po_id = p.po_id
       LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid
       LEFT JOIN receivings r ON ids.po_id = r.receiving_id
       LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid
       ORDER BY po_id
-    `);
+    `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]);
 
     const totalItems = total;
     let processed = 0;
+    let recordsAdded = 0;
+    let recordsUpdated = 0;
     const BATCH_SIZE = 5000;
     const PROGRESS_INTERVAL = 500;
 
@@ -249,7 +280,9 @@ async function importPurchaseOrders(prodConnection, localConnection) {
           .join(",")};
       `;
 
-      await localConnection.query(query, values.flat());
+      // mysql2/promise resolves to [result, fields]; destructure for the result
+      const [result] = await localConnection.query(query, values.flat());
+      recordsAdded += result.affectedRows - result.changedRows;
+      recordsUpdated += result.changedRows;
       }
 
       processed += batchProcessed;
@@ -271,19 +304,56 @@ async function importPurchaseOrders(prodConnection, localConnection) {
     }
 
-    outputProgress({
-      status: "complete",
-      operation: "Purchase orders import completed",
-      current: totalItems,
-      total: totalItems,
-      duration: formatElapsedTime((Date.now() - startTime) / 1000),
-    });
+    // After successful import, update sync status
+    await localConnection.query(`
+      INSERT INTO sync_status (table_name, last_sync_timestamp)
+      VALUES ('purchase_orders', NOW())
+      ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
+    `);
+
+    // Update import history with final stats
+    const endTime = Date.now();
+    const durationSeconds = Math.round((endTime - startTime) / 1000);
+
+    await localConnection.query(`
+      UPDATE import_history
+      SET
+        end_time = NOW(),
+        duration_seconds = ?,
+        records_added = ?,
+        records_updated = ?,
+        status = 'completed',
+        additional_info = JSON_OBJECT(
+          'total_processed', ?,
+          'last_sync_time', ?,
+          'next_sync_time', NOW()
+        )
+      WHERE id = ?
+ `, [durationSeconds, recordsAdded, recordsUpdated, totalItems, lastSyncTime, importHistoryId]); return { status: "complete", - totalImported: totalItems + totalImported: totalItems, + recordsAdded, + recordsUpdated, + durationSeconds, + incrementalUpdate: !!syncInfo?.[0] }; + } catch (error) { + // Update import history with error + if (importHistoryId) { + await localConnection.query(` + UPDATE import_history + SET + end_time = NOW(), + duration_seconds = ?, + status = 'failed', + error_message = ? + WHERE id = ? + `, [Math.round((Date.now() - startTime) / 1000), error.message, importHistoryId]); + } + outputProgress({ operation: "Purchase orders import failed", status: "error", diff --git a/inventory-server/scripts/reset-db.js b/inventory-server/scripts/reset-db.js index 297284b..8cc437f 100644 --- a/inventory-server/scripts/reset-db.js +++ b/inventory-server/scripts/reset-db.js @@ -156,7 +156,7 @@ async function resetDatabase() { SELECT GROUP_CONCAT(table_name) as tables FROM information_schema.tables WHERE table_schema = DATABASE() - AND table_name != 'users' + AND table_name NOT IN ('users', 'import_history') `); if (!tables[0].tables) { diff --git a/inventory/src/components/settings/DataManagement.tsx b/inventory/src/components/settings/DataManagement.tsx index 4ba827b..c0b4f31 100644 --- a/inventory/src/components/settings/DataManagement.tsx +++ b/inventory/src/components/settings/DataManagement.tsx @@ -13,7 +13,7 @@ import { AlertDialogTitle, AlertDialogTrigger, } from "@/components/ui/alert-dialog"; -import { Loader2, RefreshCw, Upload, X, Database } from "lucide-react"; +import { Loader2, RefreshCw, X, Database } from "lucide-react"; import config from '../../config'; import { toast } from "sonner"; @@ -36,11 +36,6 @@ interface ImportProgress { duration?: string; } -interface ImportLimits { - products: number; - orders: number; - purchaseOrders: number; -} export function DataManagement() { const [isImportingProd, setIsImportingProd] = useState(false); From 655c071960ebad6c00d440e8585c08961da49057 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 16:25:24 -0500 Subject: [PATCH 10/33] Limit order and purchase order imports to last 5 years --- inventory-server/scripts/import/orders.js | 2 ++ .../scripts/import/purchase-orders.js | 20 +++++++++++-------- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 768153c..65906fd 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -36,6 +36,7 @@ async function importOrders(prodConnection, localConnection) { FROM order_items oi FORCE INDEX (PRIMARY) JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (o.date_placed_onlydate > ? OR o.stamp > ?) `, [lastSyncTime, lastSyncTime]); @@ -89,6 +90,7 @@ async function importOrders(prodConnection, localConnection) { FROM order_items oi JOIN _order o ON oi.order_id = o.order_id WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (o.date_placed_onlydate > ? OR o.stamp > ?) LIMIT ? OFFSET ? 
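
The queries above compose a hard five-year window (index-friendly and identical on every run) with the last-sync watermark, so the first import scans the window and later runs touch only rows changed since the watermark. A minimal sketch of that fetch as a standalone helper, assuming a mysql2/promise connection and the sync_status watermark used throughout these scripts (the helper name is illustrative, not part of the patch):

    // Sketch: windowed + watermarked page fetch (assumes mysql2/promise).
    async function fetchChangedOrderItems(conn, lastSyncTime, limit, offset) {
      const [rows] = await conn.query(
        `SELECT oi.order_id, oi.prod_pid
           FROM order_items oi
           JOIN _order o ON oi.order_id = o.order_id
          WHERE o.order_status >= 15
            AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
            AND (o.date_placed_onlydate > ? OR o.stamp > ?)
          LIMIT ? OFFSET ?`,
        [lastSyncTime, lastSyncTime, limit, offset]
      );
      return rows;
    }
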
diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index e2dd1ee..dfd357a 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -52,15 +52,17 @@ async function importPurchaseOrders(prodConnection, localConnection) { FORCE INDEX (idx_date_created) JOIN po_products pop ON p.po_id = pop.po_id JOIN suppliers s ON p.supplier_id = s.supplierid - WHERE p.date_ordered > ? + WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND (p.date_ordered > ? OR p.stamp > ? - OR p.date_modified > ? + OR p.date_modified > ?) UNION SELECT DISTINCT r.receiving_id as po_id, rp.pid FROM receivings_products rp LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id - WHERE rp.received_date > ? - OR rp.stamp > ? + WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND (rp.received_date > ? + OR rp.stamp > ?) ) all_items `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); @@ -79,15 +81,17 @@ async function importPurchaseOrders(prodConnection, localConnection) { COALESCE(p.notes, '') as long_note FROM ( SELECT po_id FROM po - WHERE date_ordered > ? + WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND (date_ordered > ? OR stamp > ? - OR date_modified > ? + OR date_modified > ?) UNION SELECT DISTINCT r.receiving_id as po_id FROM receivings r JOIN receivings_products rp ON r.receiving_id = rp.receiving_id - WHERE rp.received_date > ? - OR rp.stamp > ? + WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND (rp.received_date > ? + OR rp.stamp > ?) ) ids LEFT JOIN po p ON ids.po_id = p.po_id LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid From 4d8a677c5baaaa87e4d1de8551afe7e03fb88a89 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 16:33:37 -0500 Subject: [PATCH 11/33] Remove import history tracking from purchase orders import script --- inventory-server/scripts/import/orders.js | 3 ++ .../scripts/import/purchase-orders.js | 51 ------------------- 2 files changed, 3 insertions(+), 51 deletions(-) diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 65906fd..2fe6a05 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -231,6 +231,9 @@ async function importOrders(prodConnection, localConnection) { ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() `); + const endTime = Date.now(); + const durationSeconds = Math.round((endTime - startTime) / 1000); + outputProgress({ status: "complete", operation: "Orders import completed", diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index dfd357a..f9257e0 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -2,7 +2,6 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = async function importPurchaseOrders(prodConnection, localConnection) { const startTime = Date.now(); - let importHistoryId; try { // Get last sync info @@ -11,22 +10,6 @@ async function importPurchaseOrders(prodConnection, localConnection) { ); const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - // Create import history record - const [historyResult] = await localConnection.query(` - INSERT INTO import_history ( - table_name, - start_time, - is_incremental, - status - ) VALUES ( - 
'purchase_orders', - NOW(), - ?, - 'running' - ) - `, [!!syncInfo?.[0]]); - importHistoryId = historyResult.insertId; - outputProgress({ operation: "Starting purchase orders import - Initializing", status: "running", @@ -315,49 +298,15 @@ async function importPurchaseOrders(prodConnection, localConnection) { ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() `); - // Update import history with final stats - const endTime = Date.now(); - const durationSeconds = Math.round((endTime - startTime) / 1000); - - await localConnection.query(` - UPDATE import_history - SET - end_time = NOW(), - duration_seconds = ?, - records_added = ?, - records_updated = ?, - status = 'completed', - additional_info = JSON_OBJECT( - 'total_processed', ?, - 'last_sync_time', ?, - 'next_sync_time', NOW() - ) - WHERE id = ? - `, [durationSeconds, recordsAdded, recordsUpdated, totalItems, lastSyncTime, importHistoryId]); - return { status: "complete", totalImported: totalItems, recordsAdded, recordsUpdated, - durationSeconds, incrementalUpdate: !!syncInfo?.[0] }; } catch (error) { - // Update import history with error - if (importHistoryId) { - await localConnection.query(` - UPDATE import_history - SET - end_time = NOW(), - duration_seconds = ?, - status = 'failed', - error_message = ? - WHERE id = ? - `, [Math.round((Date.now() - startTime) / 1000), error.message, importHistoryId]); - } - outputProgress({ operation: "Purchase orders import failed", status: "error", From fb5bf4a144a67073855d43a36dd16addff1f2437 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 18:21:31 -0500 Subject: [PATCH 12/33] Optimize order import with improved tax info retrieval and performance --- inventory-server/scripts/import-from-prod.js | 4 +- inventory-server/scripts/import/orders.js | 96 ++++++++++++++------ 2 files changed, 70 insertions(+), 30 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index ede5aa2..1bce520 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,8 +10,8 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = true; -const IMPORT_PRODUCTS = true; +const IMPORT_CATEGORIES = false; +const IMPORT_PRODUCTS = false; const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 2fe6a05..18746e1 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -52,10 +52,11 @@ async function importOrders(prodConnection, localConnection) { let processed = 0; // Process in batches - const batchSize = 1000; + const batchSize = 20000; // Increased from 1000 since order records are small let offset = 0; while (offset < total) { + // First get orders without tax info const [orders] = await prodConnection.query(` SELECT oi.order_id as order_number, @@ -65,19 +66,7 @@ async function importOrders(prodConnection, localConnection) { oi.prod_price_reg as price, oi.qty_ordered as quantity, (oi.prod_price_reg - oi.prod_price) as discount, - ( - SELECT - otp.item_taxes_to_collect - FROM - order_tax_info oti - JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id - WHERE - oti.order_id = o.order_id - AND otp.pid = oi.prod_pid - ORDER BY - oti.stamp DESC - LIMIT 1 - ) as tax, + 0 as tax, 0 as 
tax_included, ROUND( ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * @@ -88,7 +77,9 @@ async function importOrders(prodConnection, localConnection) { 'pending' as status, CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id + FORCE INDEX (PRIMARY) + JOIN _order o USE INDEX (date_placed_onlydate, idx_status) + ON oi.order_id = o.order_id WHERE o.order_status >= 15 AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (o.date_placed_onlydate > ? @@ -96,6 +87,37 @@ async function importOrders(prodConnection, localConnection) { LIMIT ? OFFSET ? `, [lastSyncTime, lastSyncTime, batchSize, offset]); + // Then get tax info for these orders + if (orders.length > 0) { + const orderIds = [...new Set(orders.map(o => o.order_number))]; + const [taxInfo] = await prodConnection.query(` + SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect + FROM ( + SELECT order_id, MAX(stamp) as latest_stamp + FROM order_tax_info USE INDEX (order_id, stamp) + WHERE order_id IN (?) + GROUP BY order_id + ) latest + JOIN order_tax_info oti USE INDEX (order_id, stamp) + ON oti.order_id = latest.order_id + AND oti.stamp = latest.latest_stamp + JOIN order_tax_info_products otp FORCE INDEX (PRIMARY) + ON oti.taxinfo_id = otp.taxinfo_id + `, [orderIds]); + + // Create a map for quick tax lookup + const taxMap = new Map(); + taxInfo.forEach(t => { + taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect); + }); + + // Add tax info to orders + orders.forEach(order => { + const taxKey = `${order.order_number}-${order.pid}`; + order.tax = taxMap.get(taxKey) || 0; + }); + } + // Check if all products exist before inserting orders const orderProductPids = [...new Set(orders.map((o) => o.pid))]; const [existingProducts] = await localConnection.query( @@ -176,19 +198,7 @@ async function importOrders(prodConnection, localConnection) { oi.prod_price_reg as price, oi.qty_ordered as quantity, (oi.prod_price_reg - oi.prod_price) as discount, - ( - SELECT - otp.item_taxes_to_collect - FROM - order_tax_info oti - JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id - WHERE - oti.order_id = o.order_id - AND otp.pid = oi.prod_pid - ORDER BY - oti.stamp DESC - LIMIT 1 - ) as tax, + 0 as tax, 0 as tax_included, ROUND( ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * @@ -203,6 +213,36 @@ async function importOrders(prodConnection, localConnection) { WHERE oi.order_id IN (?) `, [[...skippedOrders]]); + if (retryOrders.length > 0) { + const retryOrderIds = [...new Set(retryOrders.map(o => o.order_number))]; + const [retryTaxInfo] = await prodConnection.query(` + SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect + FROM ( + SELECT order_id, MAX(stamp) as latest_stamp + FROM order_tax_info USE INDEX (order_id, stamp) + WHERE order_id IN (?) 
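+            -- Same latest-snapshot-per-order pattern as the main batch above:
+            -- keep only MAX(stamp) per order_id, then join back to expand it
+            -- into per-product tax amounts for that snapshot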
+ GROUP BY order_id + ) latest + JOIN order_tax_info oti USE INDEX (order_id, stamp) + ON oti.order_id = latest.order_id + AND oti.stamp = latest.latest_stamp + JOIN order_tax_info_products otp FORCE INDEX (PRIMARY) + ON oti.taxinfo_id = otp.taxinfo_id + `, [retryOrderIds]); + + // Create a map for quick tax lookup + const taxMap = new Map(); + retryTaxInfo.forEach(t => { + taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect); + }); + + // Add tax info to orders + retryOrders.forEach(order => { + const taxKey = `${order.order_number}-${order.pid}`; + order.tax = taxMap.get(taxKey) || 0; + }); + } + const placeholders = retryOrders .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) .join(","); From 0d377466aa92b57d22f813708b5bda18e11b23d5 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 18:42:29 -0500 Subject: [PATCH 13/33] Optimize database import queries with improved index selection --- inventory-server/scripts/import/orders.js | 2 +- .../scripts/import/purchase-orders.js | 9 +- inventory-server/scripts/scripts.js | 180 ++++++++++++++++++ 3 files changed, 187 insertions(+), 4 deletions(-) create mode 100644 inventory-server/scripts/scripts.js diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 18746e1..45d16aa 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -52,7 +52,7 @@ async function importOrders(prodConnection, localConnection) { let processed = 0; // Process in batches - const batchSize = 20000; // Increased from 1000 since order records are small + const batchSize = 10000; // Increased from 1000 since order records are small let offset = 0; while (offset < total) { diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index f9257e0..4dff5f0 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -32,7 +32,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { FROM ( SELECT DISTINCT pop.po_id, pop.pid FROM po p - FORCE INDEX (idx_date_created) + USE INDEX (idx_date_created) JOIN po_products pop ON p.po_id = pop.po_id JOIN suppliers s ON p.supplier_id = s.supplierid WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) @@ -42,6 +42,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { UNION SELECT DISTINCT r.receiving_id as po_id, rp.pid FROM receivings_products rp + USE INDEX (received_date) LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (rp.received_date > ? @@ -64,6 +65,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { COALESCE(p.notes, '') as long_note FROM ( SELECT po_id FROM po + USE INDEX (idx_date_created) WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (date_ordered > ? OR stamp > ? @@ -71,7 +73,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { UNION SELECT DISTINCT r.receiving_id as po_id FROM receivings r - JOIN receivings_products rp ON r.receiving_id = rp.receiving_id + JOIN receivings_products rp USE INDEX (received_date) ON r.receiving_id = rp.receiving_id WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (rp.received_date > ? OR rp.stamp > ?) 
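
This commit relaxes several FORCE INDEX hints to USE INDEX: FORCE INDEX tells the optimizer that a table scan is a last resort, while USE INDEX only narrows the candidate indexes and still allows a scan when that is cheaper. A quick way to confirm what the optimizer actually picks (a sketch, assuming a mysql2/promise connection; the helper name is illustrative):

    // Print the access plan for a query to verify an index hint is honored.
    async function explainQuery(conn, sql, params) {
      const [plan] = await conn.query(`EXPLAIN ${sql}`, params);
      plan.forEach(row => {
        console.log(`${row.table}: type=${row.type} key=${row.key} rows=${row.rows}`);
      });
    }
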
@@ -110,7 +112,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
             pop.cost_each as cost_price,
             pop.qty_each as ordered
           FROM po_products pop
-          FORCE INDEX (PRIMARY)
+          USE INDEX (PRIMARY)
           JOIN products pr ON pop.pid = pr.pid
           WHERE pop.po_id IN (?)
         `, [poIds]);
@@ -138,6 +140,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
             ELSE 1 -- Different PO
           END as is_alt_po
         FROM receivings_products rp
+        USE INDEX (received_date)
         LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
         WHERE rp.pid IN (?)
         AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
diff --git a/inventory-server/scripts/scripts.js b/inventory-server/scripts/scripts.js
new file mode 100644
index 0000000..fe188cd
--- /dev/null
+++ b/inventory-server/scripts/scripts.js
@@ -0,0 +1,180 @@
+const readline = require('readline');
+
+const rl = readline.createInterface({
+  input: process.stdin,
+  output: process.stdout
+});
+
+const question = (query) => new Promise((resolve) => rl.question(query, resolve));
+
+async function loadScript(name) {
+  try {
+    return await require(name);
+  } catch (error) {
+    console.error(`Failed to load script ${name}:`, error);
+    return null;
+  }
+}
+
+async function runWithTimeout(scriptPath) {
+  return new Promise((resolve, reject) => {
+    // Fork the script as a child process; scriptPath is an already-resolved
+    // module path (note: no timeout is currently enforced despite the name)
+    const child = require('child_process').fork(scriptPath, [], {
+      stdio: 'inherit'
+    });
+
+    child.on('exit', (code) => {
+      if (code === 0) {
+        resolve();
+      } else {
+        reject(new Error(`Script exited with code ${code}`));
+      }
+    });
+
+    child.on('error', (err) => {
+      reject(err);
+    });
+  });
+}
+
+function clearScreen() {
+  process.stdout.write('\x1Bc');
+}
+
+const scripts = {
+  'Import Scripts': {
+    '1': { name: 'Full Import From Production', path: './import-from-prod' },
+    '2': { name: 'Individual Import Scripts ▸', submenu: {
+      '1': { name: 'Import Orders', path: './import/orders', key: 'importOrders' },
+      '2': { name: 'Import Products', path: './import/products', key: 'importProducts' },
+      '3': { name: 'Import Purchase Orders', path: './import/purchase-orders' },
+      '4': { name: 'Import Categories', path: './import/categories' },
+      'b': { name: 'Back to Main Menu' }
+    }}
+  },
+  'Metrics': {
+    '3': { name: 'Calculate All Metrics', path: './calculate-metrics' },
+    '4': { name: 'Individual Metric Scripts ▸', submenu: {
+      '1': { name: 'Brand Metrics', path: './metrics/brand-metrics' },
+      '2': { name: 'Category Metrics', path: './metrics/category-metrics' },
+      '3': { name: 'Financial Metrics', path: './metrics/financial-metrics' },
+      '4': { name: 'Product Metrics', path: './metrics/product-metrics' },
+      '5': { name: 'Sales Forecasts', path: './metrics/sales-forecasts' },
+      '6': { name: 'Time Aggregates', path: './metrics/time-aggregates' },
+      '7': { name: 'Vendor Metrics', path: './metrics/vendor-metrics' },
+      'b': { name: 'Back to Main Menu' }
+    }}
+  },
+  'Database Management': {
+    '5': { name: 'Test Production Connection', path: './test-prod-connection' }
+  },
+  'Reset Scripts': {
+    '6': { name: 'Reset Database', path: './reset-db' },
+    '7': { name: 'Reset Metrics', path: './reset-metrics' }
+  }
+};
+
+let lastRun = null;
+
+async function displayMenu(menuItems, title = 'Inventory Management Script Runner') {
+  clearScreen();
+  console.log(`\n${title}\n`);
+
+  for (const [category, items] of Object.entries(menuItems)) {
+    console.log(`\n${category}:`);
+    Object.entries(items).forEach(([key, script]) => {
+      console.log(`${key}. 
${script.name}`); + }); + } + + if (lastRun) { + console.log('\nQuick Access:'); + console.log(`r. Repeat Last Script (${lastRun.name})`); + } + + console.log('\nq. Quit\n'); +} + +async function handleSubmenu(submenu, title) { + while (true) { + await displayMenu({"Individual Scripts": submenu}, title); + const choice = await question('Select an option (or b to go back): '); + + if (choice.toLowerCase() === 'b') { + return null; + } + + if (submenu[choice]) { + return submenu[choice]; + } + + console.log('Invalid selection. Please try again.'); + await new Promise(resolve => setTimeout(resolve, 1000)); + } +} + +async function runScript(script) { + console.log(`\nRunning: ${script.name}`); + try { + const scriptPath = require.resolve(script.path); + await runWithTimeout(scriptPath); + console.log('\nScript completed successfully'); + lastRun = script; + } catch (error) { + console.error('\nError running script:', error); + } + await question('\nPress Enter to continue...'); +} + +async function main() { + while (true) { + await displayMenu(scripts); + + const choice = await question('Select an option: '); + + if (choice.toLowerCase() === 'q') { + break; + } + + if (choice.toLowerCase() === 'r' && lastRun) { + await runScript(lastRun); + continue; + } + + let selectedScript = null; + for (const category of Object.values(scripts)) { + if (category[choice]) { + selectedScript = category[choice]; + break; + } + } + + if (!selectedScript) { + console.log('Invalid selection. Please try again.'); + await new Promise(resolve => setTimeout(resolve, 1000)); + continue; + } + + if (selectedScript.submenu) { + const submenuChoice = await handleSubmenu( + selectedScript.submenu, + selectedScript.name + ); + if (submenuChoice && submenuChoice.path) { + await runScript(submenuChoice); + } + } else if (selectedScript.path) { + await runScript(selectedScript); + } + } + + rl.close(); + process.exit(0); +} + +if (require.main === module) { + main().catch(error => { + console.error('Fatal error:', error); + process.exit(1); + }); +} From 2b329a55a4bd2fef94d2f9226da2be3f35c05ee5 Mon Sep 17 00:00:00 2001 From: Matt Date: Wed, 29 Jan 2025 18:51:55 -0500 Subject: [PATCH 14/33] Increase product import batch size --- inventory-server/scripts/import-from-prod.js | 4 ++-- inventory-server/scripts/import/products.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 1bce520..ede5aa2 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,8 +10,8 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = false; -const IMPORT_PRODUCTS = false; +const IMPORT_CATEGORIES = true; +const IMPORT_PRODUCTS = true; const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index da058f7..c7e5bd8 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -376,7 +376,7 @@ async function importProducts(prodConnection, localConnection) { await localConnection.query("DROP TEMPORARY TABLE IF EXISTS temp_prod_data"); // Process products in batches - const BATCH_SIZE = 1000; + const BATCH_SIZE = 10000; let processed = 0; for (let i = 0; i < rows.length; i += BATCH_SIZE) 
{
      const batch = rows.slice(i, i + BATCH_SIZE);
 
From 3c5fb9e435075d2ad3d57c22af09e979160107c7 Mon Sep 17 00:00:00 2001
From: Matt
Date: Wed, 29 Jan 2025 19:14:58 -0500
Subject: [PATCH 15/33] Optimize product import with dynamic batching and
 memory management

---
 inventory-server/scripts/import/products.js | 57 +++++++++++----------
 1 file changed, 29 insertions(+), 28 deletions(-)

diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js
index c7e5bd8..2ff588c 100644
--- a/inventory-server/scripts/import/products.js
+++ b/inventory-server/scripts/import/products.js
@@ -357,29 +357,25 @@ async function importProducts(prodConnection, localConnection) {
     });
   }
 
-    // Now join with local temp tables
-    const [rows] = await localConnection.query(`
-      SELECT
-        p.*,
-        COALESCE(tis.stock_quantity, 0) as stock_quantity,
-        COALESCE(tis.preorder_count, 0) as preorder_count,
-        COALESCE(tis.notions_inv_count, 0) as notions_inv_count,
-        COALESCE(tpp.price, 0) as price,
-        COALESCE(tpp.regular_price, 0) as regular_price,
-        COALESCE(tpp.cost_price, 0) as cost_price
-      FROM temp_prod_data p
-      LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
-      LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
-    `);
-
-    // Drop the temporary production data table
-    await localConnection.query("DROP TEMPORARY TABLE IF EXISTS temp_prod_data");
-
-    // Process products in batches
-    const BATCH_SIZE = 10000;
+    // Now join with local temp tables and process in batches
+    const BATCH_SIZE = 2500;
     let processed = 0;
-    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
-      const batch = rows.slice(i, i + BATCH_SIZE);
+
+    while (processed < totalProducts) {
+      const [batch] = await localConnection.query(`
+        SELECT
+          p.*,
+          COALESCE(tis.stock_quantity, 0) as stock_quantity,
+          COALESCE(tis.preorder_count, 0) as preorder_count,
+          COALESCE(tis.notions_inv_count, 0) as notions_inv_count,
+          COALESCE(tpp.price, 0) as price,
+          COALESCE(tpp.regular_price, 0) as regular_price,
+          COALESCE(tpp.cost_price, 0) as cost_price
+        FROM temp_prod_data p
+        LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
+        LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
+        ORDER BY p.pid -- deterministic order so LIMIT/OFFSET paging never skips or repeats rows
+        LIMIT ? OFFSET ?
+      `, [BATCH_SIZE, processed]);
+
+      // Guard against looping forever if the temp table holds fewer rows
+      // than the production-side count suggested
+      if (batch.length === 0) break;
 
       // Add image URLs
       batch.forEach(row => {
         const urls = getImageUrls(row.pid);
         row.image = urls.image;
         row.image_175 = urls.image_175;
         row.image_full = urls.image_full;
       });
@@ -420,12 +416,20 @@ async function importProducts(prodConnection, localConnection) {
       outputProgress({
         status: "running",
         operation: "Products import",
-        message: `Processed ${processed} of ${rows.length} products`,
+        message: `Processed ${processed} of ${totalProducts} products`,
         current: processed,
-        total: rows.length
+        total: totalProducts
       });
+
+      // Force garbage collection between batches
+      if (global.gc) {
+        global.gc();
+      }
     }
 
+    // Drop temporary tables
+    await cleanupTemporaryTables(localConnection);
+
     // After successful import, update the sync status
     await localConnection.query(`
       INSERT INTO sync_status (table_name, last_sync_timestamp)
@@ -435,15 +439,12 @@ async function importProducts(prodConnection, localConnection) {
 
     return {
       status: "complete",
-      totalImported: rows.length,
+      totalImported: totalProducts,
       incrementalUpdate: true,
       lastSyncTime
     };
   } catch (error) {
     throw error;
-  } finally {
-    // Cleanup temporary tables
-    await cleanupTemporaryTables(localConnection);
   }
 }
 
From 6c5f119ee5e8977bfe57100681709836ac2cdd0c Mon Sep 17 00:00:00 2001
From: Matt
Date: Wed, 29 Jan 2025 21:48:56 -0500
Subject: [PATCH 16/33] Import fixes/optimizations

---
 inventory-server/scripts/import/orders.js   | 322 ++++++++++--------
 inventory-server/scripts/import/products.js |  24 +-
 .../scripts/import/purchase-orders.js       |  42 ++-
 3 files changed, 227 insertions(+), 161 deletions(-)

diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js
index 45d16aa..081e0d2 100644
--- a/inventory-server/scripts/import/orders.js
+++ b/inventory-server/scripts/import/orders.js
@@ -1,158 +1,188 @@
 const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
 const { importMissingProducts } = require('./products');
 
-async function importOrders(prodConnection, localConnection) {
-  outputProgress({
-    operation: "Starting orders import - Getting total count",
-    status: "running",
-  });
-
+/**
+ * Imports orders from a production MySQL database to a local MySQL database.
+ * It can run in two modes:
+ * 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
+ * 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
+ *
+ * @param {object} prodConnection - A MySQL connection to production DB (MySQL 5.7).
+ * @param {object} localConnection - A MySQL connection to local DB (MySQL 8.0).
+ * @param {boolean} incrementalUpdate - Set to false for a full sync; true for incremental.
+ *
+ * @returns {object} Information about the sync operation.
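+ *
+ * @example
+ * // Illustrative call sites (connection setup not shown):
+ * await importOrders(prodConn, localConn);        // incremental (default)
+ * await importOrders(prodConn, localConn, false); // full resync of the 5-year window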
+ */ +async function importOrders(prodConnection, localConnection, incrementalUpdate = true) { const startTime = Date.now(); - const skippedOrders = new Set(); // Store orders that need to be retried - const missingProducts = new Set(); // Store products that need to be imported + const skippedOrders = new Set(); + const missingProducts = new Set(); try { - // Get last sync info + // Get the last sync time const [syncInfo] = await localConnection.query( "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'" ); const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - // First get the column names from the table structure + // Retrieve column names for the 'orders' table, skip 'id' since it's auto-increment const [columns] = await localConnection.query(` SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'orders' ORDER BY ORDINAL_POSITION `); - const columnNames = columns - .map((col) => col.COLUMN_NAME) - .filter((name) => name !== "id"); // Skip auto-increment ID + .map(col => col.COLUMN_NAME) + .filter(name => name !== "id"); - // Get total count first for progress indication - modified for incremental - const [countResult] = await prodConnection.query(` - SELECT COUNT(*) as total - FROM order_items oi FORCE INDEX (PRIMARY) - JOIN _order o FORCE INDEX (PRIMARY) ON oi.order_id = o.order_id - WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - AND (o.date_placed_onlydate > ? - OR o.stamp > ?) - `, [lastSyncTime, lastSyncTime]); + // Build query clauses for incremental vs. full update + const incrementalWhereClause = incrementalUpdate + ? `AND ( + o.stamp > ? + OR o.date_modified > ? + OR o.date_placed > ? + OR o.date_shipped > ? + OR oi.stamp > ? + )` + : ""; + const incrementalParams = incrementalUpdate + ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] + : []; - const totalOrders = countResult[0].total; - - outputProgress({ - operation: `Starting orders import - Fetching ${totalOrders} orders from production`, - status: "running", - }); - - const total = countResult[0].total; - let processed = 0; - - // Process in batches - const batchSize = 10000; // Increased from 1000 since order records are small - let offset = 0; - - while (offset < total) { - // First get orders without tax info - const [orders] = await prodConnection.query(` - SELECT - oi.order_id as order_number, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, - o.date_placed_onlydate as date, - oi.prod_price_reg as price, - oi.qty_ordered as quantity, - (oi.prod_price_reg - oi.prod_price) as discount, - 0 as tax, - 0 as tax_included, - ROUND( - ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * - (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2 - ) as shipping, - o.order_cid as customer, - CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name, - 'pending' as status, - CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled - FROM order_items oi - FORCE INDEX (PRIMARY) - JOIN _order o USE INDEX (date_placed_onlydate, idx_status) + // Count how many orders we need to process + const [countResult] = await prodConnection.query( + ` + SELECT COUNT(*) AS total + FROM order_items oi USE INDEX (PRIMARY) + JOIN _order o USE INDEX (PRIMARY) ON oi.order_id = o.order_id WHERE o.order_status >= 15 AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - AND (o.date_placed_onlydate > ? - OR o.stamp > ?) - LIMIT ? OFFSET ? 
- `, [lastSyncTime, lastSyncTime, batchSize, offset]); + ${incrementalWhereClause} + `, + incrementalParams + ); - // Then get tax info for these orders + const total = countResult[0].total; + outputProgress({ + operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} orders import - Fetching ${total} orders`, + status: "running", + }); + + let processed = 0; + // Increase or decrease this if you find a more optimal size + const batchSize = 20000; + let offset = 0; + + // Process in batches for memory efficiency + while (offset < total) { + // Fetch orders (initially with tax set to 0, to be updated later) + const [orders] = await prodConnection.query( + ` + SELECT + oi.order_id AS order_number, + oi.prod_pid AS pid, + oi.prod_itemnumber AS SKU, + o.date_placed_onlydate AS date, + oi.prod_price_reg AS price, + oi.qty_ordered AS quantity, + (oi.prod_price_reg - oi.prod_price) AS discount, + 0 AS tax, + 0 AS tax_included, + ROUND( + ( + (o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) + * (oi.prod_price * oi.qty_ordered) + ) / NULLIF(o.summary_subtotal, 0), + 2 + ) AS shipping, + o.order_cid AS customer, + CONCAT(o.bill_firstname, ' ', o.bill_lastname) AS customer_name, + 'pending' AS status, + CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END AS canceled + FROM order_items oi + FORCE INDEX (PRIMARY) + JOIN _order o + ON oi.order_id = o.order_id + WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + ${incrementalWhereClause} + LIMIT ? OFFSET ? + `, + [...incrementalParams, batchSize, offset] + ); + + // Fetch the latest tax info for these orders if (orders.length > 0) { const orderIds = [...new Set(orders.map(o => o.order_number))]; const [taxInfo] = await prodConnection.query(` SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect FROM ( - SELECT order_id, MAX(stamp) as latest_stamp - FROM order_tax_info USE INDEX (order_id, stamp) + SELECT order_id, MAX(stamp) AS latest_stamp + FROM order_tax_info WHERE order_id IN (?) 
GROUP BY order_id - ) latest - JOIN order_tax_info oti USE INDEX (order_id, stamp) + ) latest + JOIN order_tax_info oti ON oti.order_id = latest.order_id AND oti.stamp = latest.latest_stamp - JOIN order_tax_info_products otp FORCE INDEX (PRIMARY) + JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id `, [orderIds]); - // Create a map for quick tax lookup + // Map (order_id-pid) -> tax amount const taxMap = new Map(); taxInfo.forEach(t => { taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect); }); - // Add tax info to orders + // Merge tax into the orders array orders.forEach(order => { - const taxKey = `${order.order_number}-${order.pid}`; - order.tax = taxMap.get(taxKey) || 0; + const key = `${order.order_number}-${order.pid}`; + if (taxMap.has(key)) { + order.tax = taxMap.get(key) || 0; + } }); } - // Check if all products exist before inserting orders - const orderProductPids = [...new Set(orders.map((o) => o.pid))]; + // Check local DB for existing products to ensure we don't insert orders for missing products + const orderProductPids = [...new Set(orders.map(o => o.pid))]; const [existingProducts] = await localConnection.query( "SELECT pid FROM products WHERE pid IN (?)", [orderProductPids] ); - const existingPids = new Set(existingProducts.map((p) => p.pid)); + const existingPids = new Set(existingProducts.map(p => p.pid)); - // Filter out orders with missing products and track them - const validOrders = orders.filter((order) => { + // Separate valid orders from those referencing missing products + const validOrders = []; + for (const order of orders) { if (!existingPids.has(order.pid)) { missingProducts.add(order.pid); skippedOrders.add(order.order_number); - return false; + } else { + validOrders.push(order); } - return true; - }); + } + // Bulk insert valid orders if (validOrders.length > 0) { const placeholders = validOrders .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) .join(","); const updateClauses = columnNames - .filter((col) => col !== "order_number") // Don't update primary key - .map((col) => `${col} = VALUES(${col})`) + .filter(col => col !== "order_number") // don't overwrite primary key + .map(col => `${col} = VALUES(${col})`) .join(","); - const query = ` + const upsertQuery = ` INSERT INTO orders (${columnNames.join(",")}) VALUES ${placeholders} ON DUPLICATE KEY UPDATE ${updateClauses} `; await localConnection.query( - query, + upsertQuery, validOrders.flatMap(order => columnNames.map(col => order[col])) ); } @@ -171,18 +201,17 @@ async function importOrders(prodConnection, localConnection) { }); } - // Now handle missing products and retry skipped orders + // If we found missing products, import them and retry the skipped orders if (missingProducts.size > 0) { outputProgress({ operation: `Found ${missingProducts.size} missing products, importing them now`, status: "running", }); - await importMissingProducts(prodConnection, localConnection, [ - ...missingProducts, - ]); + // Import missing products + await importMissingProducts(prodConnection, localConnection, [...missingProducts]); - // Retry skipped orders + // Retry orders that were skipped due to missing products if (skippedOrders.size > 0) { outputProgress({ operation: `Retrying ${skippedOrders.size} skipped orders`, @@ -191,95 +220,100 @@ async function importOrders(prodConnection, localConnection) { const [retryOrders] = await prodConnection.query(` SELECT - oi.order_id as order_number, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, - o.date_placed_onlydate as 
date, - oi.prod_price_reg as price, - oi.qty_ordered as quantity, - (oi.prod_price_reg - oi.prod_price) as discount, - 0 as tax, - 0 as tax_included, + oi.order_id AS order_number, + oi.prod_pid AS pid, + oi.prod_itemnumber AS SKU, + o.date_placed_onlydate AS date, + oi.prod_price_reg AS price, + oi.qty_ordered AS quantity, + (oi.prod_price_reg - oi.prod_price) AS discount, + 0 AS tax, + 0 AS tax_included, ROUND( - ((o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) * - (oi.prod_price * oi.qty_ordered) / NULLIF(o.summary_subtotal, 0)), 2 - ) as shipping, - o.order_cid as customer, - CONCAT(o.bill_firstname, ' ', o.bill_lastname) as customer_name, - 'pending' as status, - CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END as canceled + ( + (o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) + * (oi.prod_price * oi.qty_ordered) + ) / NULLIF(o.summary_subtotal, 0), + 2 + ) AS shipping, + o.order_cid AS customer, + CONCAT(o.bill_firstname, ' ', o.bill_lastname) AS customer_name, + 'pending' AS status, + CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END AS canceled FROM order_items oi JOIN _order o ON oi.order_id = o.order_id WHERE oi.order_id IN (?) `, [[...skippedOrders]]); if (retryOrders.length > 0) { + // Fetch tax data for these specific retry orders const retryOrderIds = [...new Set(retryOrders.map(o => o.order_number))]; const [retryTaxInfo] = await prodConnection.query(` SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect FROM ( - SELECT order_id, MAX(stamp) as latest_stamp - FROM order_tax_info USE INDEX (order_id, stamp) + SELECT order_id, MAX(stamp) AS latest_stamp + FROM order_tax_info WHERE order_id IN (?) GROUP BY order_id - ) latest - JOIN order_tax_info oti USE INDEX (order_id, stamp) + ) latest + JOIN order_tax_info oti ON oti.order_id = latest.order_id AND oti.stamp = latest.latest_stamp - JOIN order_tax_info_products otp FORCE INDEX (PRIMARY) + JOIN order_tax_info_products otp ON oti.taxinfo_id = otp.taxinfo_id `, [retryOrderIds]); - // Create a map for quick tax lookup const taxMap = new Map(); retryTaxInfo.forEach(t => { taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect); }); - // Add tax info to orders retryOrders.forEach(order => { - const taxKey = `${order.order_number}-${order.pid}`; - order.tax = taxMap.get(taxKey) || 0; + const key = `${order.order_number}-${order.pid}`; + if (taxMap.has(key)) { + order.tax = taxMap.get(key) || 0; + } }); + + const placeholders = retryOrders + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) + .join(","); + const updateClauses = columnNames + .filter(col => col !== "order_number") + .map(col => `${col} = VALUES(${col})`) + .join(","); + + const upsertQuery = ` + INSERT INTO orders (${columnNames.join(",")}) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE ${updateClauses} + `; + + await localConnection.query( + upsertQuery, + retryOrders.flatMap(order => columnNames.map(col => order[col])) + ); } - - const placeholders = retryOrders - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - const updateClauses = columnNames - .filter((col) => col !== "order_number") // Don't update primary key - .map((col) => `${col} = VALUES(${col})`) - .join(","); - - const query = ` - INSERT INTO orders (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${updateClauses} - `; - - await localConnection.query( - query, - retryOrders.flatMap(order => columnNames.map(col => order[col])) - ); } } - // After successful import, update the sync status + 
// Update the sync timestamp await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('orders', NOW()) - ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() + ON DUPLICATE KEY UPDATE + last_sync_timestamp = NOW(), + last_sync_id = LAST_INSERT_ID(last_sync_id) `); const endTime = Date.now(); - const durationSeconds = Math.round((endTime - startTime) / 1000); outputProgress({ status: "complete", - operation: "Orders import completed", + operation: `${incrementalUpdate ? 'Incremental' : 'Full'} orders import completed`, current: total, total, - duration: formatElapsedTime((Date.now() - startTime) / 1000), + duration: formatElapsedTime((endTime - startTime) / 1000), }); return { @@ -287,12 +321,12 @@ async function importOrders(prodConnection, localConnection) { totalImported: total, missingProducts: missingProducts.size, retriedOrders: skippedOrders.size, - incrementalUpdate: true, + incrementalUpdate, lastSyncTime }; } catch (error) { outputProgress({ - operation: "Orders import failed", + operation: `${incrementalUpdate ? 'Incremental' : 'Full'} orders import failed`, status: "error", error: error.message, }); @@ -300,4 +334,4 @@ async function importOrders(prodConnection, localConnection) { } } -module.exports = importOrders; \ No newline at end of file +module.exports = importOrders; diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 2ff588c..1a0b777 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -456,16 +456,32 @@ async function importMissingProducts(prodConnection, localConnection, missingPid // Materialize calculations for missing products await localConnection.query(` INSERT INTO temp_inventory_status - WITH product_stock AS ( + SELECT + p.pid, + COALESCE(si.available_local, 0) - COALESCE(ps.pending_qty, 0) as stock_quantity, + COALESCE(ps.pending_qty, 0) as pending_qty, + COALESCE(ci.onpreorder, 0) as preorder_count, + COALESCE(pnb.inventory, 0) as notions_inv_count + FROM products p + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN ( SELECT oi.prod_pid, SUM(oi.qty_ordered - oi.qty_placed) as pending_qty FROM order_items oi JOIN _order o ON oi.order_id = o.order_id WHERE oi.prod_pid IN (?) - AND [rest of conditions] + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0 GROUP BY oi.prod_pid - ) - SELECT [same as above] + ) ps ON p.pid = ps.prod_pid WHERE p.pid IN (?) 
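-- ps.pending_qty = units on open, unshipped, non-canceled orders; netted out of available_local above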
`, [missingPids, missingPids]); diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 4dff5f0..8019ee9 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -1,6 +1,6 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); -async function importPurchaseOrders(prodConnection, localConnection) { +async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) { const startTime = Date.now(); try { @@ -11,7 +11,7 @@ async function importPurchaseOrders(prodConnection, localConnection) { const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; outputProgress({ - operation: "Starting purchase orders import - Initializing", + operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} purchase orders import`, status: "running", }); @@ -26,7 +26,23 @@ async function importPurchaseOrders(prodConnection, localConnection) { .map((col) => col.COLUMN_NAME) .filter((name) => name !== "id"); - // First get all relevant PO IDs with basic info - modified for incremental + // Build incremental conditions + const incrementalWhereClause = incrementalUpdate + ? `AND ( + p.stamp > ? + OR p.date_modified > ? + OR p.date_ordered > ? + OR p.date_estin > ? + OR r.stamp > ? + OR rp.stamp > ? + OR rp.received_date > ? + )` + : ""; + const incrementalParams = incrementalUpdate + ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] + : []; + + // First get all relevant PO IDs with basic info const [[{ total }]] = await prodConnection.query(` SELECT COUNT(*) as total FROM ( @@ -36,19 +52,16 @@ async function importPurchaseOrders(prodConnection, localConnection) { JOIN po_products pop ON p.po_id = pop.po_id JOIN suppliers s ON p.supplier_id = s.supplierid WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - AND (p.date_ordered > ? - OR p.stamp > ? - OR p.date_modified > ?) + ${incrementalWhereClause} UNION SELECT DISTINCT r.receiving_id as po_id, rp.pid FROM receivings_products rp USE INDEX (received_date) LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - AND (rp.received_date > ? - OR rp.stamp > ?) + ${incrementalWhereClause} ) all_items - `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + `, [...incrementalParams, ...incrementalParams]); const [poList] = await prodConnection.query(` SELECT DISTINCT @@ -294,11 +307,13 @@ async function importPurchaseOrders(prodConnection, localConnection) { } } - // After successful import, update sync status + // Update sync status with proper incrementing of last_sync_id await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('purchase_orders', NOW()) - ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() + ON DUPLICATE KEY UPDATE + last_sync_timestamp = NOW(), + last_sync_id = LAST_INSERT_ID(last_sync_id) `); return { @@ -306,12 +321,13 @@ async function importPurchaseOrders(prodConnection, localConnection) { totalImported: totalItems, recordsAdded, recordsUpdated, - incrementalUpdate: !!syncInfo?.[0] + incrementalUpdate, + lastSyncTime }; } catch (error) { outputProgress({ - operation: "Purchase orders import failed", + operation: `${incrementalUpdate ? 
'Incremental' : 'Full'} purchase orders import failed`, status: "error", error: error.message, }); From 31d40119020a0f468659b78ac3ae13cfc28a8538 Mon Sep 17 00:00:00 2001 From: Matt Date: Thu, 30 Jan 2025 00:00:30 -0500 Subject: [PATCH 17/33] Add back product-category import and product time estimates --- inventory-server/scripts/import/products.js | 58 +++++++++++++++++++-- 1 file changed, 55 insertions(+), 3 deletions(-) diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 1a0b777..3046da5 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -412,13 +412,66 @@ async function importProducts(prodConnection, localConnection) { await localConnection.query(insertQuery, productValues); + // Insert category relationships + const categoryRelationships = []; + batch.forEach(row => { + if (row.category_ids) { + const catIds = row.category_ids + .split(",") + .map(id => id.trim()) + .filter(id => id) + .map(Number); + + catIds.forEach(catId => { + if (catId) categoryRelationships.push([row.pid, catId]); + }); + } + }); + + if (categoryRelationships.length > 0) { + // First verify categories exist + const uniqueCatIds = [...new Set(categoryRelationships.map(([_, catId]) => catId))]; + const [existingCats] = await localConnection.query( + "SELECT cat_id FROM categories WHERE cat_id IN (?)", + [uniqueCatIds] + ); + const existingCatIds = new Set(existingCats.map(c => c.cat_id)); + + // Filter relationships to only include existing categories + const validRelationships = categoryRelationships.filter(([_, catId]) => + existingCatIds.has(catId) + ); + + if (validRelationships.length > 0) { + // Delete existing relationships for these products first + await localConnection.query( + "DELETE FROM product_categories WHERE pid IN (?)", + [batch.map(p => p.pid)] + ); + + // Insert new relationships using INSERT IGNORE + const catPlaceholders = validRelationships + .map(() => "(?, ?)") + .join(","); + + await localConnection.query( + `INSERT IGNORE INTO product_categories (pid, cat_id) + VALUES ${catPlaceholders}`, + validRelationships.flat() + ); + } + } + processed += batch.length; outputProgress({ status: "running", operation: "Products import", message: `Processed ${processed} of ${totalProducts} products`, current: processed, - total: totalProducts + total: totalProducts, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), + remaining: estimateRemaining(startTime, processed, totalProducts), + rate: calculateRate(startTime, processed) }); // Force garbage collection between batches @@ -670,9 +723,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid .join(","); await localConnection.query( ` - INSERT INTO product_categories (cat_id, pid) + INSERT IGNORE INTO product_categories (cat_id, pid) VALUES ${catPlaceholders} - ON DUPLICATE KEY UPDATE cat_id = VALUES(cat_id) `, validRelationships.flat() ); From c433f1aae89ec90db20aa695e3431d6922bfe43c Mon Sep 17 00:00:00 2001 From: Matt Date: Thu, 30 Jan 2025 15:49:47 -0500 Subject: [PATCH 18/33] Enhance import scripts with incremental update support and improved error handling - Update import-from-prod.js to support granular incremental updates for different import types - Modify orders.js to handle complex order data retrieval with better performance and error tracking - Add support for incremental updates in products.js import function - Improve logging and progress tracking for import processes --- 
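Note: the orders, products, and purchase-orders importers now build their
incremental filters the same way: an optional `AND (col > ? OR ...)` tail
plus a matching parameter array, both gated on the incrementalUpdate flag.
A minimal sketch of that shared pattern (the helper name and call site are
illustrative only, not part of this series; lastSyncTime is read from the
local sync_status table as in the scripts themselves):

    // Build the WHERE tail and its bind parameters together so the
    // placeholder count can never drift from the parameter count.
    function incrementalFilter(incrementalUpdate, lastSyncTime, columns) {
      if (!incrementalUpdate) return { clause: "", params: [] };
      return {
        clause: `AND (${columns.map(col => `${col} > ?`).join(" OR ")})`,
        params: columns.map(() => lastSyncTime),
      };
    }

    // e.g. for orders:
    const { clause, params } = incrementalFilter(
      incrementalUpdate,
      lastSyncTime,
      ["o.stamp", "o.date_placed", "o.date_shipped", "oi.stamp"]
    );
    // `clause` is interpolated into the SQL text; `params` is spread
    // ahead of any LIMIT/OFFSET binds, mirroring orders.js above.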
inventory-server/scripts/import-from-prod.js | 12 +- inventory-server/scripts/import/orders.js | 529 ++++++++++--------- inventory-server/scripts/import/products.js | 14 +- 3 files changed, 287 insertions(+), 268 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index ede5aa2..84d8613 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,10 +10,10 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = true; -const IMPORT_PRODUCTS = true; +const IMPORT_CATEGORIES = false; +const IMPORT_PRODUCTS = false; const IMPORT_ORDERS = true; -const IMPORT_PURCHASE_ORDERS = true; +const IMPORT_PURCHASE_ORDERS = false; // Add flag for incremental updates const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE === 'true'; @@ -156,7 +156,7 @@ async function main() { } if (IMPORT_PRODUCTS) { - results.products = await importProducts(prodConnection, localConnection); + results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; if (results.products.recordsAdded) totalRecordsAdded += results.products.recordsAdded; @@ -164,7 +164,7 @@ async function main() { } if (IMPORT_ORDERS) { - results.orders = await importOrders(prodConnection, localConnection); + results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; if (results.orders.recordsAdded) totalRecordsAdded += results.orders.recordsAdded; @@ -172,7 +172,7 @@ async function main() { } if (IMPORT_PURCHASE_ORDERS) { - results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection); + results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; if (results.purchaseOrders.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded; diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 081e0d2..6c6478d 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -19,317 +19,334 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = const missingProducts = new Set(); try { - // Get the last sync time - const [syncInfo] = await localConnection.query( - "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'" - ); - const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - - // Retrieve column names for the 'orders' table, skip 'id' since it's auto-increment + // Get column names from the local table const [columns] = await localConnection.query(` SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'orders' ORDER BY ORDINAL_POSITION `); - const columnNames = columns - .map(col => col.COLUMN_NAME) - .filter(name => name !== "id"); + const columnNames = columns.map(col => col.COLUMN_NAME); - // Build query clauses for incremental vs. full update - const incrementalWhereClause = incrementalUpdate - ? `AND ( - o.stamp > ? - OR o.date_modified > ? - OR o.date_placed > ? - OR o.date_shipped > ? - OR oi.stamp > ? 
- )` - : ""; - const incrementalParams = incrementalUpdate - ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] - : []; - - // Count how many orders we need to process - const [countResult] = await prodConnection.query( - ` - SELECT COUNT(*) AS total - FROM order_items oi USE INDEX (PRIMARY) - JOIN _order o USE INDEX (PRIMARY) - ON oi.order_id = o.order_id - WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - ${incrementalWhereClause} - `, - incrementalParams + // Get last sync info + const [syncInfo] = await localConnection.query( + "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'" ); + const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - const total = countResult[0].total; - outputProgress({ - operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} orders import - Fetching ${total} orders`, - status: "running", + // Count the total number of orders to be imported + const [countResults] = await prodConnection.query(` + SELECT + COUNT(DISTINCT oi.order_id, oi.prod_pid) as total_all, + SUM(CASE + WHEN o.stamp > ? OR o.date_placed > ? OR o.date_shipped > ? OR oi.stamp > ? + THEN 1 ELSE 0 + END) as total_incremental + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND o.date_placed_onlydate IS NOT NULL + `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + + console.log('Count details:', { + total_all: countResults[0].total_all, + total_incremental: countResults[0].total_incremental, + lastSyncTime, + incrementalUpdate }); - let processed = 0; - // Increase or decrease this if you find a more optimal size - const batchSize = 20000; - let offset = 0; + const totalOrders = incrementalUpdate ? countResults[0].total_incremental : countResults[0].total_all; - // Process in batches for memory efficiency - while (offset < total) { - // Fetch orders (initially with tax set to 0, to be updated later) - const [orders] = await prodConnection.query( - ` - SELECT - oi.order_id AS order_number, - oi.prod_pid AS pid, - oi.prod_itemnumber AS SKU, - o.date_placed_onlydate AS date, - oi.prod_price_reg AS price, - oi.qty_ordered AS quantity, - (oi.prod_price_reg - oi.prod_price) AS discount, - 0 AS tax, - 0 AS tax_included, - ROUND( - ( - (o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) - * (oi.prod_price * oi.qty_ordered) - ) / NULLIF(o.summary_subtotal, 0), - 2 - ) AS shipping, - o.order_cid AS customer, - CONCAT(o.bill_firstname, ' ', o.bill_lastname) AS customer_name, - 'pending' AS status, - CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END AS canceled - FROM order_items oi - FORCE INDEX (PRIMARY) - JOIN _order o - ON oi.order_id = o.order_id - WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - ${incrementalWhereClause} - LIMIT ? OFFSET ? - `, - [...incrementalParams, batchSize, offset] + outputProgress({ + status: "running", + operation: "Orders import", + message: `Starting ${incrementalUpdate ? 
'incremental' : 'full'} import of ${totalOrders} orders`, + current: 0, + total: totalOrders + }); + + // Fetch orders in batches + const batchSize = 5000; + let offset = 0; + let importedCount = 0; + let lastProgressUpdate = Date.now(); + + while (offset < totalOrders) { + // First get the base order data + const [prodOrders] = await prodConnection.query(` + SELECT + oi.order_id as order_number, + oi.prod_pid as pid, + oi.prod_itemnumber as SKU, + o.date_placed_onlydate as date, + oi.prod_price as price, + oi.qty_ordered as quantity, + COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount, + o.order_cid as customer, + CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name, + o.order_status as status, + CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + LEFT JOIN users u ON o.order_cid = u.cid + WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND o.date_placed_onlydate IS NOT NULL + ${incrementalUpdate ? ` + AND ( + o.stamp > ? + OR o.date_placed > ? + OR o.date_shipped > ? + OR oi.stamp > ? + ) + ` : ''} + ORDER BY oi.order_id, oi.prod_pid + LIMIT ? OFFSET ? + `, incrementalUpdate ? + [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, batchSize, offset] : + [batchSize, offset] ); - // Fetch the latest tax info for these orders - if (orders.length > 0) { - const orderIds = [...new Set(orders.map(o => o.order_number))]; - const [taxInfo] = await prodConnection.query(` - SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect - FROM ( - SELECT order_id, MAX(stamp) AS latest_stamp + if (prodOrders.length === 0) break; + + // Get order numbers for this batch + const orderNumbers = [...new Set(prodOrders.map(o => o.order_number))]; + const orderPids = prodOrders.map(o => o.pid); + + // Get promotional discounts in a separate query + const [promoDiscounts] = await prodConnection.query(` + SELECT order_id, pid, amount + FROM order_discount_items + WHERE order_id IN (?) + `, [orderNumbers]); + + // Create a map for quick discount lookups + const discountMap = new Map(); + promoDiscounts.forEach(d => { + const key = `${d.order_id}-${d.pid}`; + discountMap.set(key, d.amount || 0); + }); + + // Get tax information in a separate query + const [taxInfo] = await prodConnection.query(` + SELECT oti.order_id, otip.pid, otip.item_taxes_to_collect + FROM order_tax_info oti + JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id + WHERE oti.order_id IN (?) + AND (oti.order_id, oti.stamp) IN ( + SELECT order_id, MAX(stamp) FROM order_tax_info WHERE order_id IN (?) 
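-- pair each order with its most recent tax record (MAX(stamp))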
GROUP BY order_id - ) latest - JOIN order_tax_info oti - ON oti.order_id = latest.order_id - AND oti.stamp = latest.latest_stamp - JOIN order_tax_info_products otp - ON oti.taxinfo_id = otp.taxinfo_id - `, [orderIds]); + ) + `, [orderNumbers, orderNumbers]); - // Map (order_id-pid) -> tax amount - const taxMap = new Map(); - taxInfo.forEach(t => { - taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect); - }); + // Create a map for quick tax lookups + const taxMap = new Map(); + taxInfo.forEach(t => { + const key = `${t.order_id}-${t.pid}`; + taxMap.set(key, t.item_taxes_to_collect || 0); + }); - // Merge tax into the orders array - orders.forEach(order => { - const key = `${order.order_number}-${order.pid}`; - if (taxMap.has(key)) { - order.tax = taxMap.get(key) || 0; - } - }); - } - - // Check local DB for existing products to ensure we don't insert orders for missing products - const orderProductPids = [...new Set(orders.map(o => o.pid))]; + // Check for missing products const [existingProducts] = await localConnection.query( "SELECT pid FROM products WHERE pid IN (?)", - [orderProductPids] + [orderPids] ); const existingPids = new Set(existingProducts.map(p => p.pid)); - // Separate valid orders from those referencing missing products - const validOrders = []; - for (const order of orders) { + // Track missing products and filter orders + const validOrders = prodOrders.filter(order => { + if (!order.date) return false; if (!existingPids.has(order.pid)) { missingProducts.add(order.pid); skippedOrders.add(order.order_number); - } else { - validOrders.push(order); + return false; } - } + return true; + }); - // Bulk insert valid orders - if (validOrders.length > 0) { - const placeholders = validOrders - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - const updateClauses = columnNames - .filter(col => col !== "order_number") // don't overwrite primary key - .map(col => `${col} = VALUES(${col})`) - .join(","); + // Prepare values for insertion + const orderValues = validOrders.map(order => { + const orderKey = `${order.order_number}-${order.pid}`; + const orderData = { + id: order.order_number, + order_number: order.order_number, + pid: order.pid, + SKU: order.SKU, + date: order.date, + price: order.price, + quantity: order.quantity, + discount: Number(order.base_discount || 0) + Number(discountMap.get(orderKey) || 0), + tax: Number(taxMap.get(orderKey) || 0), + tax_included: 0, + shipping: 0, + customer: order.customer, + customer_name: order.customer_name || '', + status: order.status, + canceled: order.canceled, + }; - const upsertQuery = ` - INSERT INTO orders (${columnNames.join(",")}) + return columnNames.map(colName => orderData[colName] !== undefined ? 
orderData[colName] : null); + }); + + // Execute the insert + if (orderValues.length > 0) { + const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); + const insertQuery = ` + INSERT INTO orders (${columnNames.join(", ")}) VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${updateClauses} + ON DUPLICATE KEY UPDATE + ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")} `; - await localConnection.query( - upsertQuery, - validOrders.flatMap(order => columnNames.map(col => order[col])) - ); + await localConnection.query(insertQuery, orderValues.flat()); } - processed += orders.length; + importedCount += validOrders.length; offset += batchSize; - outputProgress({ - status: "running", - operation: "Orders import", - current: processed, - total, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000), - remaining: estimateRemaining(startTime, processed, total), - rate: calculateRate(startTime, processed) - }); - } - - // If we found missing products, import them and retry the skipped orders - if (missingProducts.size > 0) { - outputProgress({ - operation: `Found ${missingProducts.size} missing products, importing them now`, - status: "running", - }); - - // Import missing products - await importMissingProducts(prodConnection, localConnection, [...missingProducts]); - - // Retry orders that were skipped due to missing products - if (skippedOrders.size > 0) { + // Update progress every second + const now = Date.now(); + if (now - lastProgressUpdate >= 1000) { outputProgress({ - operation: `Retrying ${skippedOrders.size} skipped orders`, status: "running", + operation: "Orders import", + message: `Imported ${importedCount} of ${totalOrders} orders`, + current: importedCount, + total: totalOrders, + elapsed: formatElapsedTime((now - startTime) / 1000), + remaining: estimateRemaining(startTime, importedCount, totalOrders), + rate: calculateRate(startTime, importedCount) }); - - const [retryOrders] = await prodConnection.query(` - SELECT - oi.order_id AS order_number, - oi.prod_pid AS pid, - oi.prod_itemnumber AS SKU, - o.date_placed_onlydate AS date, - oi.prod_price_reg AS price, - oi.qty_ordered AS quantity, - (oi.prod_price_reg - oi.prod_price) AS discount, - 0 AS tax, - 0 AS tax_included, - ROUND( - ( - (o.summary_shipping - COALESCE(o.summary_discount_shipping, 0)) - * (oi.prod_price * oi.qty_ordered) - ) / NULLIF(o.summary_subtotal, 0), - 2 - ) AS shipping, - o.order_cid AS customer, - CONCAT(o.bill_firstname, ' ', o.bill_lastname) AS customer_name, - 'pending' AS status, - CASE WHEN o.order_status = 15 THEN 1 ELSE 0 END AS canceled - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.order_id IN (?) - `, [[...skippedOrders]]); - - if (retryOrders.length > 0) { - // Fetch tax data for these specific retry orders - const retryOrderIds = [...new Set(retryOrders.map(o => o.order_number))]; - const [retryTaxInfo] = await prodConnection.query(` - SELECT oti.order_id, otp.pid, otp.item_taxes_to_collect - FROM ( - SELECT order_id, MAX(stamp) AS latest_stamp - FROM order_tax_info - WHERE order_id IN (?) 
- GROUP BY order_id - ) latest - JOIN order_tax_info oti - ON oti.order_id = latest.order_id - AND oti.stamp = latest.latest_stamp - JOIN order_tax_info_products otp - ON oti.taxinfo_id = otp.taxinfo_id - `, [retryOrderIds]); - - const taxMap = new Map(); - retryTaxInfo.forEach(t => { - taxMap.set(`${t.order_id}-${t.pid}`, t.item_taxes_to_collect); - }); - - retryOrders.forEach(order => { - const key = `${order.order_number}-${order.pid}`; - if (taxMap.has(key)) { - order.tax = taxMap.get(key) || 0; - } - }); - - const placeholders = retryOrders - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - const updateClauses = columnNames - .filter(col => col !== "order_number") - .map(col => `${col} = VALUES(${col})`) - .join(","); - - const upsertQuery = ` - INSERT INTO orders (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${updateClauses} - `; - - await localConnection.query( - upsertQuery, - retryOrders.flatMap(order => columnNames.map(col => order[col])) - ); - } + lastProgressUpdate = now; } } - // Update the sync timestamp + // Import missing products if any + if (missingProducts.size > 0) { + await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts)); + + // Retry skipped orders after importing products + if (skippedOrders.size > 0) { + outputProgress({ + status: "running", + operation: "Orders import", + message: `Retrying import of ${skippedOrders.size} orders with previously missing products` + }); + + const [skippedProdOrders] = await prodConnection.query(` + SELECT + o.order_id, + CASE + WHEN o.date_placed = '0000-00-00 00:00:00' OR o.date_placed IS NULL THEN o.stamp + ELSE o.date_placed + END as date, + o.order_cid, + o.bill_firstname, + o.bill_lastname, + o.order_email, + o.order_status, + o.date_shipped, + o.date_cancelled, + oi.prod_pid, + oi.prod_itemnumber, + oi.prod_price, + oi.qty_ordered, + oi.qty_back, + oi.qty_placed, + oi.qty_placed_2, + oi.discounted, + oi.summary_cogs, + oi.summary_profit, + oi.summary_orderdate, + oi.summary_paiddate, + oi.date_added, + oi.stamp + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE o.order_id IN (?) + `, [Array.from(skippedOrders)]); + + // Prepare values for insertion + const skippedOrderValues = skippedProdOrders.flatMap(order => { + if (!order.date) { + console.log(`Warning: Skipped order ${order.order_id} has null date:`, JSON.stringify(order, null, 2)); + return []; + } + + const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0; + const customerName = `${order.bill_firstname} ${order.bill_lastname}`; + + // Create an object with keys based on column names + const orderData = { + id: order.order_id, + order_number: order.order_id, + pid: order.prod_pid, + SKU: order.prod_itemnumber, + date: order.date ? ( + order.date instanceof Date ? + order.date.toJSON()?.slice(0,10) || null : + (typeof order.date === 'string' ? order.date.split(' ')[0] : null) + ) : null, + price: order.prod_price, + quantity: order.qty_ordered, + discount: order.discounted, + tax: 0, // Placeholder, will be calculated later + tax_included: 0, // Placeholder, will be calculated later + shipping: 0, // Placeholder, will be calculated later + customer: order.order_email, + customer_name: customerName, + status: order.order_status, + canceled: canceled, + }; + + // Map column names to values, handling missing columns + return [columnNames.map(colName => orderData[colName] !== undefined ? 
orderData[colName] : null)]; + }); + + // Construct the insert query dynamically + const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); + const skippedInsertQuery = ` + INSERT INTO orders (${columnNames.join(", ")}) + VALUES ${skippedPlaceholders} + ON DUPLICATE KEY UPDATE + ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")} + `; + + // Execute the insert query + if (skippedOrderValues.length > 0) { + await localConnection.query(skippedInsertQuery, skippedOrderValues.flat()); + } + + importedCount += skippedProdOrders.length; + + outputProgress({ + status: "running", + operation: "Orders import", + message: `Successfully imported ${skippedProdOrders.length} previously skipped orders`, + }); + } + } + + // Update sync status await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('orders', NOW()) - ON DUPLICATE KEY UPDATE - last_sync_timestamp = NOW(), - last_sync_id = LAST_INSERT_ID(last_sync_id) + ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() `); - const endTime = Date.now(); - - outputProgress({ - status: "complete", - operation: `${incrementalUpdate ? 'Incremental' : 'Full'} orders import completed`, - current: total, - total, - duration: formatElapsedTime((endTime - startTime) / 1000), - }); - return { status: "complete", - totalImported: total, + totalImported: importedCount, + totalSkipped: skippedOrders.size, missingProducts: missingProducts.size, - retriedOrders: skippedOrders.size, incrementalUpdate, lastSyncTime }; } catch (error) { - outputProgress({ - operation: `${incrementalUpdate ? 'Incremental' : 'Full'} orders import failed`, - status: "error", - error: error.message, - }); + console.error("Error during orders import:", error); throw error; } } diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 3046da5..d6c838f 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -198,7 +198,7 @@ async function materializeCalculations(prodConnection, localConnection) { }); } -async function importProducts(prodConnection, localConnection) { +async function importProducts(prodConnection, localConnection, incrementalUpdate = true) { const startTime = Date.now(); try { @@ -332,12 +332,14 @@ async function importProducts(prodConnection, localConnection) { LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id LEFT JOIN product_last_sold pls ON p.pid = pls.pid - WHERE p.stamp > ? - OR pls.date_sold > ? - OR p.date_created > ? - OR p.datein > ? + ${incrementalUpdate ? ` + WHERE p.stamp > ? + OR pls.date_sold > ? + OR p.date_created > ? + OR p.datein > ? + ` : ''} GROUP BY p.pid - `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + `, incrementalUpdate ? 
[lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); // Insert production data in batches for (let i = 0; i < prodData.length; i += 1000) { From b506f89dd740e999d6d8144a1d688ed9b5d63cee Mon Sep 17 00:00:00 2001 From: Matt Date: Thu, 30 Jan 2025 21:13:53 -0500 Subject: [PATCH 19/33] Optimize order and product import scripts with improved performance and incremental update handling - Refactor orders import to use temporary tables for more efficient data processing - Improve batch processing and memory management in order import script - Update product import to use temporary tables for inventory status - Modify purchase orders import to use updated timestamp for incremental updates - Enhance error handling and logging for import processes --- inventory-server/scripts/import-from-prod.js | 6 +- inventory-server/scripts/import/orders.js | 567 ++++++++++-------- inventory-server/scripts/import/products.js | 54 +- .../scripts/import/purchase-orders.js | 4 +- 4 files changed, 357 insertions(+), 274 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 84d8613..3148261 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,10 +10,10 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = false; -const IMPORT_PRODUCTS = false; +const IMPORT_CATEGORIES = true; +const IMPORT_PRODUCTS = true; const IMPORT_ORDERS = true; -const IMPORT_PURCHASE_ORDERS = false; +const IMPORT_PURCHASE_ORDERS = true; // Add flag for incremental updates const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE === 'true'; diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 6c6478d..b97e427 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -34,303 +34,384 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = ); const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - // Count the total number of orders to be imported - const [countResults] = await prodConnection.query(` + // Create temporary tables for staging data + await localConnection.query(` + CREATE TEMPORARY TABLE temp_order_items ( + order_id INT UNSIGNED, + pid INT UNSIGNED, + SKU VARCHAR(50), + price DECIMAL(10,3), + quantity INT, + base_discount DECIMAL(10,3), + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB; + + CREATE TEMPORARY TABLE temp_order_meta ( + order_id INT UNSIGNED PRIMARY KEY, + date DATE, + customer INT UNSIGNED, + customer_name VARCHAR(100), + status TINYINT UNSIGNED, + canceled TINYINT UNSIGNED + ) ENGINE=InnoDB; + + CREATE TEMPORARY TABLE temp_order_discounts ( + order_id INT UNSIGNED, + pid INT UNSIGNED, + discount DECIMAL(10,3), + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB; + + CREATE TEMPORARY TABLE temp_order_taxes ( + order_id INT UNSIGNED, + pid INT UNSIGNED, + tax DECIMAL(10,3), + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB; + `); + + // Get base order items first + const [orderItems] = await prodConnection.query(` SELECT - COUNT(DISTINCT oi.order_id, oi.prod_pid) as total_all, - SUM(CASE - WHEN o.stamp > ? OR o.date_placed > ? OR o.date_shipped > ? OR oi.stamp > ? 
- THEN 1 ELSE 0 - END) as total_incremental + oi.order_id, + oi.prod_pid as pid, + oi.prod_itemnumber as SKU, + oi.prod_price as price, + oi.qty_ordered as quantity, + COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount FROM order_items oi JOIN _order o ON oi.order_id = o.order_id WHERE o.order_status >= 15 AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND o.date_placed_onlydate IS NOT NULL - `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + ${incrementalUpdate ? ` + AND ( + o.stamp > ? + OR o.date_placed > ? + OR o.date_shipped > ? + OR oi.stamp > ? + ) + ` : ''} + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); - console.log('Count details:', { - total_all: countResults[0].total_all, - total_incremental: countResults[0].total_incremental, - lastSyncTime, - incrementalUpdate - }); + const totalOrders = orderItems.length; + let processed = 0; - const totalOrders = incrementalUpdate ? countResults[0].total_incremental : countResults[0].total_all; + // Insert order items in batches + for (let i = 0; i < orderItems.length; i += 5000) { + const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length)); + const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(","); + const values = batch.flatMap(item => [ + item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount + ]); - outputProgress({ - status: "running", - operation: "Orders import", - message: `Starting ${incrementalUpdate ? 'incremental' : 'full'} import of ${totalOrders} orders`, - current: 0, - total: totalOrders - }); + await localConnection.query(` + INSERT INTO temp_order_items VALUES ${placeholders} + `, values); - // Fetch orders in batches - const batchSize = 5000; - let offset = 0; - let importedCount = 0; - let lastProgressUpdate = Date.now(); + processed += batch.length; + outputProgress({ + status: "running", + operation: "Orders import", + message: `Loading order items: ${processed} of ${totalOrders}`, + current: processed, + total: totalOrders + }); + } - while (offset < totalOrders) { - // First get the base order data - const [prodOrders] = await prodConnection.query(` + // Get unique order IDs + const orderIds = [...new Set(orderItems.map(item => item.order_id))]; + + // Get order metadata in batches + for (let i = 0; i < orderIds.length; i += 5000) { + const batchIds = orderIds.slice(i, i + 5000); + const [orders] = await prodConnection.query(` SELECT - oi.order_id as order_number, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, + o.order_id, o.date_placed_onlydate as date, - oi.prod_price as price, - oi.qty_ordered as quantity, - COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount, o.order_cid as customer, CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name, o.order_status as status, CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id + FROM _order o LEFT JOIN users u ON o.order_cid = u.cid - WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) - AND o.date_placed_onlydate IS NOT NULL - ${incrementalUpdate ? ` - AND ( - o.stamp > ? - OR o.date_placed > ? - OR o.date_shipped > ? - OR oi.stamp > ? - ) - ` : ''} - ORDER BY oi.order_id, oi.prod_pid - LIMIT ? OFFSET ? - `, incrementalUpdate ? 
- [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, batchSize, offset] : - [batchSize, offset] - ); + WHERE o.order_id IN (?) + `, [batchIds]); - if (prodOrders.length === 0) break; + const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(","); + const values = orders.flatMap(order => [ + order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled + ]); - // Get order numbers for this batch - const orderNumbers = [...new Set(prodOrders.map(o => o.order_number))]; - const orderPids = prodOrders.map(o => o.pid); + await localConnection.query(` + INSERT INTO temp_order_meta VALUES ${placeholders} + `, values); - // Get promotional discounts in a separate query - const [promoDiscounts] = await prodConnection.query(` - SELECT order_id, pid, amount + outputProgress({ + status: "running", + operation: "Orders import", + message: `Loading order metadata: ${i + orders.length} of ${orderIds.length}`, + current: i + orders.length, + total: orderIds.length + }); + } + + // Get promotional discounts in batches + for (let i = 0; i < orderIds.length; i += 5000) { + const batchIds = orderIds.slice(i, i + 5000); + const [discounts] = await prodConnection.query(` + SELECT order_id, pid, SUM(amount) as discount FROM order_discount_items WHERE order_id IN (?) - `, [orderNumbers]); + GROUP BY order_id, pid + `, [batchIds]); - // Create a map for quick discount lookups - const discountMap = new Map(); - promoDiscounts.forEach(d => { - const key = `${d.order_id}-${d.pid}`; - discountMap.set(key, d.amount || 0); - }); + if (discounts.length > 0) { + const placeholders = discounts.map(() => "(?, ?, ?)").join(","); + const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]); - // Get tax information in a separate query - const [taxInfo] = await prodConnection.query(` - SELECT oti.order_id, otip.pid, otip.item_taxes_to_collect + await localConnection.query(` + INSERT INTO temp_order_discounts VALUES ${placeholders} + `, values); + } + } + + // Get tax information in batches + for (let i = 0; i < orderIds.length; i += 5000) { + const batchIds = orderIds.slice(i, i + 5000); + const [taxes] = await prodConnection.query(` + SELECT DISTINCT + oti.order_id, + otip.pid, + otip.item_taxes_to_collect as tax FROM order_tax_info oti + JOIN ( + SELECT order_id, MAX(stamp) as max_stamp + FROM order_tax_info + WHERE order_id IN (?) + GROUP BY order_id + ) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id - WHERE oti.order_id IN (?) - AND (oti.order_id, oti.stamp) IN ( - SELECT order_id, MAX(stamp) - FROM order_tax_info - WHERE order_id IN (?) 
- GROUP BY order_id - ) - `, [orderNumbers, orderNumbers]); + `, [batchIds]); - // Create a map for quick tax lookups - const taxMap = new Map(); - taxInfo.forEach(t => { - const key = `${t.order_id}-${t.pid}`; - taxMap.set(key, t.item_taxes_to_collect || 0); - }); + if (taxes.length > 0) { + // Remove any duplicates before inserting + const uniqueTaxes = new Map(); + taxes.forEach(t => { + const key = `${t.order_id}-${t.pid}`; + uniqueTaxes.set(key, t); + }); - // Check for missing products - const [existingProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [orderPids] - ); - const existingPids = new Set(existingProducts.map(p => p.pid)); + const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]); + if (values.length > 0) { + const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(","); + await localConnection.query(` + INSERT INTO temp_order_taxes VALUES ${placeholders} + `, values); + } + } + } - // Track missing products and filter orders - const validOrders = prodOrders.filter(order => { - if (!order.date) return false; + // Now combine all the data and insert into orders table + let importedCount = 0; + + // Pre-check all products at once instead of per batch + const allOrderPids = [...new Set(orderItems.map(item => item.pid))]; + const [existingProducts] = await localConnection.query( + "SELECT pid FROM products WHERE pid IN (?)", + [allOrderPids] + ); + const existingPids = new Set(existingProducts.map(p => p.pid)); + + // Process in larger batches + for (let i = 0; i < orderIds.length; i += 5000) { + const batchIds = orderIds.slice(i, i + 5000); + + // Get combined data for this batch + const [orders] = await localConnection.query(` + SELECT + oi.order_id as order_number, + oi.pid, + oi.SKU, + om.date, + oi.price, + oi.quantity, + oi.base_discount + COALESCE(od.discount, 0) as discount, + COALESCE(ot.tax, 0) as tax, + 0 as tax_included, + 0 as shipping, + om.customer, + om.customer_name, + om.status, + om.canceled + FROM temp_order_items oi + JOIN temp_order_meta om ON oi.order_id = om.order_id + LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid + LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid + WHERE oi.order_id IN (?) + `, [batchIds]); + + // Filter orders and track missing products - do this in a single pass + const validOrders = []; + const values = []; + + for (const order of orders) { if (!existingPids.has(order.pid)) { missingProducts.add(order.pid); skippedOrders.add(order.order_number); - return false; + continue; } - return true; - }); + validOrders.push(order); + values.push(...columnNames.map(col => order[col] ?? 
null)); + } - // Prepare values for insertion - const orderValues = validOrders.map(order => { - const orderKey = `${order.order_number}-${order.pid}`; - const orderData = { - id: order.order_number, - order_number: order.order_number, - pid: order.pid, - SKU: order.SKU, - date: order.date, - price: order.price, - quantity: order.quantity, - discount: Number(order.base_discount || 0) + Number(discountMap.get(orderKey) || 0), - tax: Number(taxMap.get(orderKey) || 0), - tax_included: 0, - shipping: 0, - customer: order.customer, - customer_name: order.customer_name || '', - status: order.status, - canceled: order.canceled, - }; + if (validOrders.length > 0) { + // Pre-compute the placeholders string once + const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`; + const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(","); - return columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null); - }); - - // Execute the insert - if (orderValues.length > 0) { - const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); - const insertQuery = ` - INSERT INTO orders (${columnNames.join(", ")}) + await localConnection.query(` + INSERT INTO orders (${columnNames.join(",")}) VALUES ${placeholders} ON DUPLICATE KEY UPDATE - ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")} - `; + ${columnNames.map(col => `${col} = VALUES(${col})`).join(",")} + `, values); - await localConnection.query(insertQuery, orderValues.flat()); + importedCount += validOrders.length; } - importedCount += validOrders.length; - offset += batchSize; - - // Update progress every second - const now = Date.now(); - if (now - lastProgressUpdate >= 1000) { - outputProgress({ - status: "running", - operation: "Orders import", - message: `Imported ${importedCount} of ${totalOrders} orders`, - current: importedCount, - total: totalOrders, - elapsed: formatElapsedTime((now - startTime) / 1000), - remaining: estimateRemaining(startTime, importedCount, totalOrders), - rate: calculateRate(startTime, importedCount) - }); - lastProgressUpdate = now; - } + outputProgress({ + status: "running", + operation: "Orders import", + message: `Imported ${importedCount} of ${totalOrders} orders`, + current: importedCount, + total: totalOrders, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), + remaining: estimateRemaining(startTime, importedCount, totalOrders), + rate: calculateRate(startTime, importedCount) + }); } + // Clean up temporary tables + await localConnection.query(` + DROP TEMPORARY TABLE IF EXISTS temp_order_items; + DROP TEMPORARY TABLE IF EXISTS temp_order_meta; + DROP TEMPORARY TABLE IF EXISTS temp_order_discounts; + DROP TEMPORARY TABLE IF EXISTS temp_order_taxes; + `); + // Import missing products if any if (missingProducts.size > 0) { - await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts)); - - // Retry skipped orders after importing products - if (skippedOrders.size > 0) { - outputProgress({ - status: "running", - operation: "Orders import", - message: `Retrying import of ${skippedOrders.size} orders with previously missing products` - }); + try { + await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts)); + + // Retry skipped orders after importing products + if (skippedOrders.size > 0) { + outputProgress({ + status: "running", + operation: "Orders import", + message: `Retrying import of ${skippedOrders.size} orders with previously 
missing products` + }); - const [skippedProdOrders] = await prodConnection.query(` - SELECT - o.order_id, - CASE - WHEN o.date_placed = '0000-00-00 00:00:00' OR o.date_placed IS NULL THEN o.stamp - ELSE o.date_placed - END as date, - o.order_cid, - o.bill_firstname, - o.bill_lastname, - o.order_email, - o.order_status, - o.date_shipped, - o.date_cancelled, - oi.prod_pid, - oi.prod_itemnumber, - oi.prod_price, - oi.qty_ordered, - oi.qty_back, - oi.qty_placed, - oi.qty_placed_2, - oi.discounted, - oi.summary_cogs, - oi.summary_profit, - oi.summary_orderdate, - oi.summary_paiddate, - oi.date_added, - oi.stamp - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE o.order_id IN (?) - `, [Array.from(skippedOrders)]); + const [skippedProdOrders] = await prodConnection.query(` + SELECT + o.order_id, + CASE + WHEN o.date_placed = '0000-00-00 00:00:00' OR o.date_placed IS NULL THEN o.stamp + ELSE o.date_placed + END as date, + o.order_cid, + o.bill_firstname, + o.bill_lastname, + o.order_email, + o.order_status, + o.date_shipped, + o.date_cancelled, + oi.prod_pid, + oi.prod_itemnumber, + oi.prod_price, + oi.qty_ordered, + oi.qty_back, + oi.qty_placed, + oi.qty_placed_2, + oi.discounted, + oi.summary_cogs, + oi.summary_profit, + oi.summary_orderdate, + oi.summary_paiddate, + oi.date_added, + oi.stamp + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE o.order_id IN (?) + `, [Array.from(skippedOrders)]); - // Prepare values for insertion - const skippedOrderValues = skippedProdOrders.flatMap(order => { - if (!order.date) { - console.log(`Warning: Skipped order ${order.order_id} has null date:`, JSON.stringify(order, null, 2)); - return []; + // Prepare values for insertion + const skippedOrderValues = skippedProdOrders.flatMap(order => { + if (!order.date) { + console.log(`Warning: Skipped order ${order.order_id} has null date:`, JSON.stringify(order, null, 2)); + return []; + } + + const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0; + const customerName = `${order.bill_firstname} ${order.bill_lastname}`; + + // Create an object with keys based on column names + const orderData = { + id: order.order_id, + order_number: order.order_id, + pid: order.prod_pid, + SKU: order.prod_itemnumber, + date: order.date ? ( + order.date instanceof Date ? + order.date.toJSON()?.slice(0,10) || null : + (typeof order.date === 'string' ? order.date.split(' ')[0] : null) + ) : null, + price: order.prod_price, + quantity: order.qty_ordered, + discount: order.discounted, + tax: 0, // Placeholder, will be calculated later + tax_included: 0, // Placeholder, will be calculated later + shipping: 0, // Placeholder, will be calculated later + customer: order.order_email, + customer_name: customerName, + status: order.order_status, + canceled: canceled, + }; + + // Map column names to values, handling missing columns + return [columnNames.map(colName => orderData[colName] !== undefined ? 
orderData[colName] : null)]; + }); + + // Construct the insert query dynamically + const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); + const skippedInsertQuery = ` + INSERT INTO orders (${columnNames.join(", ")}) + VALUES ${skippedPlaceholders} + ON DUPLICATE KEY UPDATE + ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")} + `; + + // Execute the insert query + if (skippedOrderValues.length > 0) { + await localConnection.query(skippedInsertQuery, skippedOrderValues.flat()); } - const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0; - const customerName = `${order.bill_firstname} ${order.bill_lastname}`; + importedCount += skippedProdOrders.length; - // Create an object with keys based on column names - const orderData = { - id: order.order_id, - order_number: order.order_id, - pid: order.prod_pid, - SKU: order.prod_itemnumber, - date: order.date ? ( - order.date instanceof Date ? - order.date.toJSON()?.slice(0,10) || null : - (typeof order.date === 'string' ? order.date.split(' ')[0] : null) - ) : null, - price: order.prod_price, - quantity: order.qty_ordered, - discount: order.discounted, - tax: 0, // Placeholder, will be calculated later - tax_included: 0, // Placeholder, will be calculated later - shipping: 0, // Placeholder, will be calculated later - customer: order.order_email, - customer_name: customerName, - status: order.order_status, - canceled: canceled, - }; - - // Map column names to values, handling missing columns - return [columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null)]; - }); - - // Construct the insert query dynamically - const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); - const skippedInsertQuery = ` - INSERT INTO orders (${columnNames.join(", ")}) - VALUES ${skippedPlaceholders} - ON DUPLICATE KEY UPDATE - ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")} - `; - - // Execute the insert query - if (skippedOrderValues.length > 0) { - await localConnection.query(skippedInsertQuery, skippedOrderValues.flat()); + outputProgress({ + status: "running", + operation: "Orders import", + message: `Successfully imported ${skippedProdOrders.length} previously skipped orders`, + }); } - - importedCount += skippedProdOrders.length; - - outputProgress({ - status: "running", - operation: "Orders import", - message: `Successfully imported ${skippedProdOrders.length} previously skipped orders`, - }); + } catch (error) { + console.warn('Warning: Failed to import missing products:', error.message); + console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`); } } - // Update sync status + // Update sync status - do this even if missing products import fails await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('orders', NOW()) diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index d6c838f..86dcffd 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -508,9 +508,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid // Setup temporary tables await setupTemporaryTables(localConnection); - // Materialize calculations for missing products - await localConnection.query(` - INSERT INTO temp_inventory_status + // Get inventory data from production first + const 
[prodInventory] = await prodConnection.query(` SELECT p.pid, COALESCE(si.available_local, 0) - COALESCE(ps.pending_qty, 0) as stock_quantity, @@ -540,6 +539,22 @@ async function importMissingProducts(prodConnection, localConnection, missingPid WHERE p.pid IN (?) `, [missingPids, missingPids]); + // Insert inventory data into temp table + if (prodInventory.length > 0) { + const placeholders = prodInventory.map(() => "(?, ?, ?, ?, ?)").join(","); + const values = prodInventory.flatMap(p => [ + p.pid, + p.stock_quantity, + p.pending_qty, + p.preorder_count, + p.notions_inv_count + ]); + + await localConnection.query(` + INSERT INTO temp_inventory_status VALUES ${placeholders} + `, values); + } + // First get the column names from the table structure const [columns] = await localConnection.query(` SELECT COLUMN_NAME @@ -560,21 +575,9 @@ async function importMissingProducts(prodConnection, localConnection, missingPid p.date_created, p.datein AS first_received, p.location, - COALESCE(si.available_local, 0) - COALESCE( - (SELECT SUM(oi.qty_ordered - oi.qty_placed) - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid = p.pid - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0), 0) AS stock_quantity, - ci.onpreorder AS preorder_count, - pnb.inventory AS notions_inv_count, + tis.stock_quantity, + tis.preorder_count, + tis.notions_inv_count, COALESCE(pcp.price_each, 0) as price, COALESCE(p.sellingprice, 0) AS regular_price, COALESCE((SELECT ROUND(AVG(costeach), 5) @@ -630,8 +633,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid pls.date_sold as date_last_sold, GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids FROM products p - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid @@ -644,12 +646,12 @@ async function importMissingProducts(prodConnection, localConnection, missingPid LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid + LEFT JOIN ( + SELECT pid, MIN(price_each) as price_each + FROM product_current_prices + WHERE active = 1 + GROUP BY pid + ) pcp ON p.pid = pcp.pid WHERE p.pid IN (?) GROUP BY p.pid `, [missingPids]); diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 8019ee9..3a3f8e0 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -29,7 +29,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental // Build incremental conditions const incrementalWhereClause = incrementalUpdate ? `AND ( - p.stamp > ? + p.date_updated > ? OR p.date_modified > ? OR p.date_ordered > ? OR p.date_estin > ? 
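
The hunks on either side of this point change which columns gate the incremental sync: each source table exposes a last-modified timestamp, and every candidate column is compared against the `last_sync_timestamp` recorded in `sync_status`, with one bound parameter per `?` placeholder. A minimal sketch of that pattern in plain Node.js (the helper and column names are illustrative, not the project's API):

    // Build an incremental WHERE fragment and its parameter list together,
    // so the placeholder count and the parameter count cannot drift apart.
    function buildIncrementalClause(columns, lastSyncTime) {
      if (columns.length === 0) return { clause: "", params: [] };
      return {
        clause: `AND (${columns.map((col) => `${col} > ?`).join(" OR ")})`,
        params: columns.map(() => lastSyncTime),
      };
    }

    const { clause, params } = buildIncrementalClause(
      ["p.date_updated", "p.date_ordered", "p.date_estin"],
      "2025-01-30 00:00:00"
    );
    // clause: "AND (p.date_updated > ? OR p.date_ordered > ? OR p.date_estin > ?)"
    // params: three copies of the sync timestamp
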
@@ -81,7 +81,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental USE INDEX (idx_date_created) WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) AND (date_ordered > ? - OR stamp > ? + OR date_updated > ? OR date_modified > ?) UNION SELECT DISTINCT r.receiving_id as po_id From 1be97d6610ff12674f2c373a76c1d6319f8e0021 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 01:25:48 -0500 Subject: [PATCH 20/33] Enhance purchase order import with advanced receiving tracking and fulfillment logic - Implement FIFO-based receiving fulfillment tracking - Add detailed receiving history with excess and partial fulfillment support - Improve vendor name resolution and fallback handling - Optimize incremental update queries by removing redundant conditions - Enhance receiving status calculation with more granular tracking --- inventory-server/scripts/import/orders.js | 3 +- .../scripts/import/purchase-orders.js | 99 ++++++++++++------- 2 files changed, 65 insertions(+), 37 deletions(-) diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index b97e427..ce33da4 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -80,9 +80,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = oi.qty_ordered as quantity, COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount FROM order_items oi + USE INDEX (PRIMARY) JOIN _order o ON oi.order_id = o.order_id WHERE o.order_status >= 15 - AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) AND o.date_placed_onlydate IS NOT NULL ${incrementalUpdate ? ` AND ( diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 3a3f8e0..54ce02a 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -30,7 +30,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const incrementalWhereClause = incrementalUpdate ? `AND ( p.date_updated > ? - OR p.date_modified > ? OR p.date_ordered > ? OR p.date_estin > ? OR r.stamp > ? @@ -39,7 +38,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental )` : ""; const incrementalParams = incrementalUpdate - ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] + ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []; // First get all relevant PO IDs with basic info @@ -51,14 +50,14 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental USE INDEX (idx_date_created) JOIN po_products pop ON p.po_id = pop.po_id JOIN suppliers s ON p.supplier_id = s.supplierid - WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) ${incrementalWhereClause} UNION SELECT DISTINCT r.receiving_id as po_id, rp.pid FROM receivings_products rp USE INDEX (received_date) LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id - WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? 
'1' : '5'} YEAR) ${incrementalWhereClause} ) all_items `, [...incrementalParams, ...incrementalParams]); @@ -66,11 +65,11 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const [poList] = await prodConnection.query(` SELECT DISTINCT COALESCE(p.po_id, r.receiving_id) as po_id, - CASE - WHEN p.po_id IS NOT NULL THEN s1.companyname - WHEN r.supplier_id IS NOT NULL THEN s2.companyname - ELSE 'No Supplier' - END as vendor, + COALESCE( + NULLIF(s1.companyname, ''), + NULLIF(s2.companyname, ''), + 'Unknown Vendor' + ) as vendor, CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_ordered) END as date, CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_estin) END as expected_date, COALESCE(p.status, 50) as status, @@ -79,15 +78,14 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental FROM ( SELECT po_id FROM po USE INDEX (idx_date_created) - WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) AND (date_ordered > ? - OR date_updated > ? - OR date_modified > ?) + OR date_updated > ?) UNION SELECT DISTINCT r.receiving_id as po_id FROM receivings r JOIN receivings_products rp USE INDEX (received_date) ON r.receiving_id = rp.receiving_id - WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR) + WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) AND (rp.received_date > ? OR rp.stamp > ?) ) ids @@ -96,7 +94,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental LEFT JOIN receivings r ON ids.po_id = r.receiving_id LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid ORDER BY po_id - `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); const totalItems = total; let processed = 0; @@ -215,24 +213,52 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const altReceivingHistory = altReceivingMap.get(product.pid) || []; const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || []; - const received = receivingHistory.reduce((sum, r) => sum + r.qty_each, 0); - const altReceived = altReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); - const noPOReceived = noPOReceivingHistory.reduce((sum, r) => sum + r.qty_each, 0); - const totalReceived = received + altReceived + noPOReceived; + // Combine all receivings and sort by date + const allReceivings = [ + ...receivingHistory.map(r => ({ ...r, type: 'original' })), + ...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })), + ...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' })) + ].sort((a, b) => new Date(a.received_date) - new Date(b.received_date)); + + // Track FIFO fulfillment + let remainingToFulfill = product.ordered; + const fulfillmentTracking = []; + let totalReceived = 0; + + for (const receiving of allReceivings) { + const qtyToApply = Math.min(remainingToFulfill, receiving.qty_each); + if (qtyToApply > 0) { + fulfillmentTracking.push({ + receiving_id: receiving.receiving_id, + qty_applied: qtyToApply, + qty_total: receiving.qty_each, + cost: receiving.cost_each, + date: receiving.received_date, + received_by: receiving.received_by, + type: receiving.type, + remaining_qty: receiving.qty_each - qtyToApply + }); + remainingToFulfill -= qtyToApply; + } else { + // Track excess receivings + fulfillmentTracking.push({ + receiving_id: receiving.receiving_id, + 
qty_applied: 0, + qty_total: receiving.qty_each, + cost: receiving.cost_each, + date: receiving.received_date, + received_by: receiving.received_by, + type: receiving.type, + is_excess: true + }); + } + totalReceived += receiving.qty_each; + } const receiving_status = !totalReceived ? 1 : // created - totalReceived < product.ordered ? 30 : // partial + remainingToFulfill > 0 ? 30 : // partial 40; // full - const allReceivings = [...receivingHistory]; - if (altReceivingHistory.length > 0) { - allReceivings.push(...altReceivingHistory); - } - if (noPOReceivingHistory.length > 0) { - allReceivings.push(...noPOReceivingHistory); - } - allReceivings.sort((a, b) => new Date(a.received_date) - new Date(b.received_date)); - const firstReceiving = allReceivings[0] || {}; const lastReceiving = allReceivings[allReceivings.length - 1] || {}; @@ -250,18 +276,19 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental case 'long_note': return po.long_note; case 'ordered': return product.ordered; case 'received': return totalReceived; + case 'unfulfilled': return remainingToFulfill; + case 'excess_received': return Math.max(0, totalReceived - product.ordered); case 'received_date': return firstReceiving.received_date || null; case 'last_received_date': return lastReceiving.received_date || null; case 'received_by': return firstReceiving.received_by || null; case 'receiving_status': return receiving_status; - case 'receiving_history': return JSON.stringify(allReceivings.map(r => ({ - receiving_id: r.receiving_id, - qty: r.qty_each, - cost: r.cost_each, - date: r.received_date, - received_by: r.received_by, - alt_po: r.is_alt_po - }))); + case 'receiving_history': return JSON.stringify({ + fulfillment: fulfillmentTracking, + ordered_qty: product.ordered, + total_received: totalReceived, + remaining_unfulfilled: remainingToFulfill, + excess_received: Math.max(0, totalReceived - product.ordered) + }); default: return null; } })); From 5e4d1c3bd8f62020b0545a71a3899d42d9a4be3c Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 01:39:48 -0500 Subject: [PATCH 21/33] Improve import scripts with enhanced incremental update tracking and performance - Add record tracking for added and updated records in import scripts - Modify products import to use a dynamic 'needs_update' flag for selective updates - Enhance order import with more comprehensive timestamp checks - Update import-from-prod.js to handle and clean up previously running imports - Improve error handling and connection management in import processes --- inventory-server/scripts/import-from-prod.js | 17 +++++-- inventory-server/scripts/import/orders.js | 18 +++++-- inventory-server/scripts/import/products.js | 49 ++++++++++++++----- .../scripts/import/purchase-orders.js | 13 +++-- 4 files changed, 72 insertions(+), 25 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 3148261..7395293 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -16,7 +16,7 @@ const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; // Add flag for incremental updates -const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE === 'true'; +const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false // SSH configuration // In import-from-prod.js @@ -103,6 +103,17 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); + 
// Clean up any previously running imports that weren't completed + await localConnection.query(` + UPDATE import_history + SET + status = 'cancelled', + end_time = NOW(), + duration_seconds = TIMESTAMPDIFF(SECOND, start_time, NOW()), + error_message = 'Previous import was not completed properly' + WHERE status = 'running' + `); + // Initialize sync_status table if it doesn't exist await localConnection.query(` CREATE TABLE IF NOT EXISTS sync_status ( @@ -240,8 +251,8 @@ async function main() { const totalElapsedSeconds = Math.round((endTime - startTime) / 1000); // Update import history with error - if (importHistoryId) { - await connections?.localConnection?.query(` + if (importHistoryId && connections?.localConnection) { + await connections.localConnection.query(` UPDATE import_history SET end_time = NOW(), diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index ce33da4..38b0eae 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -17,6 +17,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = const startTime = Date.now(); const skippedOrders = new Set(); const missingProducts = new Set(); + let recordsAdded = 0; + let recordsUpdated = 0; try { // Get column names from the local table @@ -88,12 +90,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = ${incrementalUpdate ? ` AND ( o.stamp > ? + OR oi.stamp > ? OR o.date_placed > ? OR o.date_shipped > ? - OR oi.stamp > ? + OR o.date_cancelled > ? + OR o.date_updated > ? ) ` : ''} - `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); const totalOrders = orderItems.length; let processed = 0; @@ -271,12 +275,16 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`; const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(","); - await localConnection.query(` + const query = ` INSERT INTO orders (${columnNames.join(",")}) VALUES ${placeholders} ON DUPLICATE KEY UPDATE ${columnNames.map(col => `${col} = VALUES(${col})`).join(",")} - `, values); + `; + + const result = await localConnection.query(query, values.flat()); + recordsAdded += result.affectedRows - result.changedRows; + recordsUpdated += result.changedRows; importedCount += validOrders.length; } @@ -422,6 +430,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = return { status: "complete", totalImported: importedCount, + recordsAdded, + recordsUpdated, totalSkipped: skippedOrders.size, missingProducts: missingProducts.size, incrementalUpdate, diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 86dcffd..41bbbaa 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -279,6 +279,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate country_of_origin VARCHAR(5), date_last_sold DATE, category_ids TEXT, + needs_update BOOLEAN DEFAULT FALSE, PRIMARY KEY (pid) ) ENGINE=InnoDB `); @@ -321,7 +322,19 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate p.totalsold AS total_sold, p.country_of_origin, pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT 
pci.cat_id) as category_ids + GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids, + CASE WHEN + ${incrementalUpdate ? ` + p.stamp > ? OR + ci.stamp > ? OR + pcp.date_deactive > ? OR + pcp.date_active > ? OR + sid.stamp > ? OR + pnb.date_updated > ? OR + pls.date_sold > ? OR + si.stamp > ? + ` : 'TRUE'} + THEN 1 ELSE 0 END as needs_update FROM products p LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid @@ -332,16 +345,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id LEFT JOIN product_last_sold pls ON p.pid = pls.pid - ${incrementalUpdate ? ` - WHERE p.stamp > ? - OR pls.date_sold > ? - OR p.date_created > ? - OR p.datein > ? - ` : ''} + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid GROUP BY p.pid - `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); - // Insert production data in batches + // Insert production data in batches, but only for products that need updates for (let i = 0; i < prodData.length; i += 1000) { const batch = prodData.slice(i, i + 1000); const placeholders = batch.map(() => "(?)").join(","); @@ -359,9 +369,11 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate }); } - // Now join with local temp tables and process in batches + // Now join with local temp tables and process in batches, but only for products that need updates const BATCH_SIZE = 2500; let processed = 0; + let recordsAdded = 0; + let recordsUpdated = 0; while (processed < totalProducts) { const [batch] = await localConnection.query(` @@ -376,6 +388,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate FROM temp_prod_data p LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid + WHERE p.needs_update = 1 LIMIT ? OFFSET ? 
`, [BATCH_SIZE, processed]); @@ -412,7 +425,9 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate .join(",")}; `; - await localConnection.query(insertQuery, productValues); + const result = await localConnection.query(insertQuery, productValues); + recordsAdded += result.affectedRows - result.changedRows; + recordsUpdated += result.changedRows; // Insert category relationships const categoryRelationships = []; @@ -495,6 +510,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate return { status: "complete", totalImported: totalProducts, + recordsAdded, + recordsUpdated, incrementalUpdate: true, lastSyncTime }; @@ -682,7 +699,9 @@ async function importMissingProducts(prodConnection, localConnection, missingPid .join(",")} `; - await localConnection.query(query, productValues); + const result = await localConnection.query(query, productValues); + recordsAdded += result.affectedRows - result.changedRows; + recordsUpdated += result.changedRows; // Verify products were inserted before proceeding with categories const [insertedProducts] = await localConnection.query( @@ -738,7 +757,11 @@ async function importMissingProducts(prodConnection, localConnection, missingPid return { status: "complete", - totalImported: products.length + totalImported: products.length, + recordsAdded, + recordsUpdated, + incrementalUpdate: true, + lastSyncTime }; } catch (error) { throw error; diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 54ce02a..d05c231 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -2,6 +2,8 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) { const startTime = Date.now(); + let recordsAdded = 0; + let recordsUpdated = 0; try { // Get last sync info @@ -29,16 +31,19 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental // Build incremental conditions const incrementalWhereClause = incrementalUpdate ? `AND ( - p.date_updated > ? + p.stamp > ? + OR p.date_updated > ? OR p.date_ordered > ? OR p.date_estin > ? - OR r.stamp > ? + OR r.date_updated > ? + OR r.date_created > ? + OR r.date_checked > ? OR rp.stamp > ? OR rp.received_date > ? )` : ""; const incrementalParams = incrementalUpdate - ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] + ? 
[lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []; // First get all relevant PO IDs with basic info @@ -98,8 +103,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const totalItems = total; let processed = 0; - let recordsAdded = 0; - let recordsUpdated = 0; const BATCH_SIZE = 5000; const PROGRESS_INTERVAL = 500; From d0abe9d9a2e3cd5267ab2b3662be41760df4b2f0 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 01:50:21 -0500 Subject: [PATCH 22/33] - Modify import scripts to handle edge cases with empty arrays and null conditions - Improve parameter handling in incremental update queries for purchase orders and products --- inventory-server/scripts/import-from-prod.js | 4 +-- inventory-server/scripts/import/orders.js | 18 ++++++++---- inventory-server/scripts/import/products.js | 7 ++--- .../scripts/import/purchase-orders.js | 29 ++++++++++++++----- 4 files changed, 40 insertions(+), 18 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 7395293..2f054da 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,8 +10,8 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = true; -const IMPORT_PRODUCTS = true; +const IMPORT_CATEGORIES = false; +const IMPORT_PRODUCTS = false; const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 38b0eae..c000555 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -222,10 +222,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = // Pre-check all products at once instead of per batch const allOrderPids = [...new Set(orderItems.map(item => item.pid))]; - const [existingProducts] = await localConnection.query( + const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query( "SELECT pid FROM products WHERE pid IN (?)", [allOrderPids] - ); + ) : [[]]; const existingPids = new Set(existingProducts.map(p => p.pid)); // Process in larger batches @@ -312,8 +312,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = // Import missing products if any if (missingProducts.size > 0) { try { + // Setup temporary tables again since they were dropped + await setupTemporaryTables(localConnection); + await materializeCalculations(prodConnection, localConnection); + await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts)); + // Clean up temporary tables after missing products import + await cleanupTemporaryTables(localConnection); + // Retry skipped orders after importing products if (skippedOrders.size > 0) { outputProgress({ @@ -322,7 +329,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = message: `Retrying import of ${skippedOrders.size} orders with previously missing products` }); - const [skippedProdOrders] = await prodConnection.query(` + const skippedOrdersArray = Array.from(skippedOrders); + const [skippedProdOrders] = skippedOrdersArray.length > 0 ? 
await prodConnection.query(` SELECT o.order_id, CASE @@ -353,7 +361,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = FROM order_items oi JOIN _order o ON oi.order_id = o.order_id WHERE o.order_id IN (?) - `, [Array.from(skippedOrders)]); + `, [skippedOrdersArray]) : [[]]; // Prepare values for insertion const skippedOrderValues = skippedProdOrders.flatMap(order => { @@ -420,7 +428,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = } } - // Update sync status - do this even if missing products import fails + // Only update sync status if we get here (no errors thrown) await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('orders', NOW()) diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 41bbbaa..cbd67a1 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -331,8 +331,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate pcp.date_active > ? OR sid.stamp > ? OR pnb.date_updated > ? OR - pls.date_sold > ? OR - si.stamp > ? + pls.date_sold > ? ` : 'TRUE'} THEN 1 ELSE 0 END as needs_update FROM products p @@ -349,7 +348,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid GROUP BY p.pid - `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); // Insert production data in batches, but only for products that need updates for (let i = 0; i < prodData.length; i += 1000) { @@ -500,7 +499,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate // Drop temporary tables await cleanupTemporaryTables(localConnection); - // After successful import, update the sync status + // Only update sync status if we get here (no errors thrown) await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('products', NOW()) diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index d05c231..369e369 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -31,8 +31,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental // Build incremental conditions const incrementalWhereClause = incrementalUpdate ? `AND ( - p.stamp > ? - OR p.date_updated > ? + p.date_updated > ? OR p.date_ordered > ? OR p.date_estin > ? OR r.date_updated > ? @@ -43,7 +42,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental )` : ""; const incrementalParams = incrementalUpdate - ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] + ? 
[lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []; // First get all relevant PO IDs with basic info @@ -56,16 +55,32 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental JOIN po_products pop ON p.po_id = pop.po_id JOIN suppliers s ON p.supplier_id = s.supplierid WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) - ${incrementalWhereClause} + ${incrementalUpdate ? ` + AND ( + p.date_updated > ? + OR p.date_ordered > ? + OR p.date_estin > ? + ) + ` : ''} UNION SELECT DISTINCT r.receiving_id as po_id, rp.pid FROM receivings_products rp USE INDEX (received_date) LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) - ${incrementalWhereClause} + ${incrementalUpdate ? ` + AND ( + r.date_created > ? + OR r.date_checked > ? + OR rp.stamp > ? + OR rp.received_date > ? + ) + ` : ''} ) all_items - `, [...incrementalParams, ...incrementalParams]); + `, incrementalUpdate ? [ + lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions + lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions + ] : []); const [poList] = await prodConnection.query(` SELECT DISTINCT @@ -337,7 +352,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental } } - // Update sync status with proper incrementing of last_sync_id + // Only update sync status if we get here (no errors thrown) await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) VALUES ('purchase_orders', NOW()) From 996d3d36af8b1d10aa8b06a2e4b32e5b39f50864 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 10:01:50 -0500 Subject: [PATCH 23/33] Streamline incremental imports --- inventory-server/scripts/import-from-prod.js | 21 +-- inventory-server/scripts/import/orders.js | 10 +- inventory-server/scripts/import/products.js | 187 +++++++------------ 3 files changed, 77 insertions(+), 141 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 2f054da..1fb63f1 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -10,8 +10,8 @@ const importPurchaseOrders = require('./import/purchase-orders'); dotenv.config({ path: path.join(__dirname, "../.env") }); // Constants to control which imports run -const IMPORT_CATEGORIES = false; -const IMPORT_PRODUCTS = false; +const IMPORT_CATEGORIES = true; +const IMPORT_PRODUCTS = true; const IMPORT_ORDERS = true; const IMPORT_PURCHASE_ORDERS = true; @@ -48,7 +48,6 @@ const sshConfig = { connectionLimit: 10, queueLimit: 0, namedPlaceholders: true, - maxAllowedPacket: 64 * 1024 * 1024, // 64MB connectTimeout: 60000, enableKeepAlive: true, keepAliveInitialDelay: 10000, @@ -162,32 +161,32 @@ async function main() { results.categories = await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.categories.recordsAdded) totalRecordsAdded += results.categories.recordsAdded; - if (results.categories.recordsUpdated) totalRecordsUpdated += results.categories.recordsUpdated; + if (results.categories?.recordsAdded) totalRecordsAdded += results.categories.recordsAdded; + if (results.categories?.recordsUpdated) totalRecordsUpdated += results.categories.recordsUpdated; } if (IMPORT_PRODUCTS) { 
results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.products.recordsAdded) totalRecordsAdded += results.products.recordsAdded; - if (results.products.recordsUpdated) totalRecordsUpdated += results.products.recordsUpdated; + if (results.products?.recordsAdded) totalRecordsAdded += results.products.recordsAdded; + if (results.products?.recordsUpdated) totalRecordsUpdated += results.products.recordsUpdated; } if (IMPORT_ORDERS) { results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.orders.recordsAdded) totalRecordsAdded += results.orders.recordsAdded; - if (results.orders.recordsUpdated) totalRecordsUpdated += results.orders.recordsUpdated; + if (results.orders?.recordsAdded) totalRecordsAdded += results.orders.recordsAdded; + if (results.orders?.recordsUpdated) totalRecordsUpdated += results.orders.recordsUpdated; } if (IMPORT_PURCHASE_ORDERS) { results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.purchaseOrders.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded; - if (results.purchaseOrders.recordsUpdated) totalRecordsUpdated += results.purchaseOrders.recordsUpdated; + if (results.purchaseOrders?.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded; + if (results.purchaseOrders?.recordsUpdated) totalRecordsUpdated += results.purchaseOrders.recordsUpdated; } const endTime = Date.now(); diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index c000555..1cfdea8 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -1,5 +1,5 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); -const { importMissingProducts } = require('./products'); +const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products'); /** * Imports orders from a production MySQL database to a local MySQL database. @@ -312,15 +312,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = // Import missing products if any if (missingProducts.size > 0) { try { - // Setup temporary tables again since they were dropped - await setupTemporaryTables(localConnection); - await materializeCalculations(prodConnection, localConnection); - + // Import missing products directly without materialization await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts)); - // Clean up temporary tables after missing products import - await cleanupTemporaryTables(localConnection); - // Retry skipped orders after importing products if (skippedOrders.size > 0) { outputProgress({ diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index cbd67a1..fa1f2ce 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -227,15 +227,21 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate const [countResult] = await prodConnection.query(` SELECT COUNT(*) as total FROM products p - WHERE p.stamp > ? 
- OR EXISTS ( - SELECT 1 FROM product_last_sold pls - WHERE p.pid = pls.pid - AND pls.date_sold > ? - ) - OR p.date_created > ? - OR p.datein > ? - `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]); + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 + LEFT JOIN supplier_item_data sid ON p.pid = sid.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + WHERE ${incrementalUpdate ? ` + p.stamp > ? OR + ci.stamp > ? OR + pcp.date_deactive > ? OR + pcp.date_active > ? OR + sid.stamp > ? OR + pnb.date_updated > ? OR + pls.date_sold > ? + ` : 'TRUE'} + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); const totalProducts = countResult[0].total; @@ -243,7 +249,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate outputProgress({ status: "running", operation: "Products import", - message: "Fetching product data from production" + message: `Fetching ${incrementalUpdate ? 'updated' : 'all'} product data from production` }); // Create temporary table for production data @@ -279,7 +285,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate country_of_origin VARCHAR(5), date_last_sold DATE, category_ids TEXT, - needs_update BOOLEAN DEFAULT FALSE, + needs_update BOOLEAN DEFAULT TRUE, PRIMARY KEY (pid) ) ENGINE=InnoDB `); @@ -322,18 +328,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate p.totalsold AS total_sold, p.country_of_origin, pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids, - CASE WHEN - ${incrementalUpdate ? ` - p.stamp > ? OR - ci.stamp > ? OR - pcp.date_deactive > ? OR - pcp.date_active > ? OR - sid.stamp > ? OR - pnb.date_updated > ? OR - pls.date_sold > ? - ` : 'TRUE'} - THEN 1 ELSE 0 END as needs_update + GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids FROM products p LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid @@ -347,6 +342,15 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate LEFT JOIN current_inventory ci ON p.pid = ci.pid LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + WHERE ${incrementalUpdate ? ` + p.stamp > ? OR + ci.stamp > ? OR + pcp.date_deactive > ? OR + pcp.date_active > ? OR + sid.stamp > ? OR + pnb.date_updated > ? OR + pls.date_sold > ? + ` : 'TRUE'} GROUP BY p.pid `, incrementalUpdate ? 
[lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); @@ -521,67 +525,16 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate async function importMissingProducts(prodConnection, localConnection, missingPids) { try { - // Setup temporary tables - await setupTemporaryTables(localConnection); - - // Get inventory data from production first - const [prodInventory] = await prodConnection.query(` - SELECT - p.pid, - COALESCE(si.available_local, 0) - COALESCE(ps.pending_qty, 0) as stock_quantity, - COALESCE(ps.pending_qty, 0) as pending_qty, - COALESCE(ci.onpreorder, 0) as preorder_count, - COALESCE(pnb.inventory, 0) as notions_inv_count - FROM products p - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid - LEFT JOIN ( - SELECT oi.prod_pid, - SUM(oi.qty_ordered - oi.qty_placed) as pending_qty - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid IN (?) - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0 - GROUP BY oi.prod_pid - ) ps ON p.pid = ps.prod_pid - WHERE p.pid IN (?) - `, [missingPids, missingPids]); - - // Insert inventory data into temp table - if (prodInventory.length > 0) { - const placeholders = prodInventory.map(() => "(?, ?, ?, ?, ?)").join(","); - const values = prodInventory.flatMap(p => [ - p.pid, - p.stock_quantity, - p.pending_qty, - p.preorder_count, - p.notions_inv_count - ]); - - await localConnection.query(` - INSERT INTO temp_inventory_status VALUES ${placeholders} - `, values); - } - - // First get the column names from the table structure + // Get column names first const [columns] = await localConnection.query(` SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'products' ORDER BY ORDINAL_POSITION `); - const columnNames = columns.map((col) => col.COLUMN_NAME); - // Get the missing products from production + // Get the missing products with all their data in one optimized query const [products] = await prodConnection.query(` SELECT p.pid, @@ -591,9 +544,22 @@ async function importMissingProducts(prodConnection, localConnection, missingPid p.date_created, p.datein AS first_received, p.location, - tis.stock_quantity, - tis.preorder_count, - tis.notions_inv_count, + COALESCE(si.available_local, 0) - COALESCE( + (SELECT SUM(oi.qty_ordered - oi.qty_placed) + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.prod_pid = p.pid + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0), 0 + ) as stock_quantity, + COALESCE(ci.onpreorder, 0) as preorder_count, + COALESCE(pnb.inventory, 0) as notions_inv_count, COALESCE(pcp.price_each, 0) as price, COALESCE(p.sellingprice, 0) AS regular_price, COALESCE((SELECT ROUND(AVG(costeach), 5) @@ -610,21 +576,6 @@ async function importMissingProducts(prodConnection, localConnection, missingPid CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, sid.notions_itemnumber AS notions_reference, 
CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-t-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-175x175-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0 AND PI.width = 175) AS image_175, - (SELECT CONCAT('https://sbing.com/i/products/0000/', - SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', - p.pid, '-o-', MIN(PI.iid), '.jpg') - FROM product_images PI - WHERE PI.pid = p.pid AND PI.hidden = 0) AS image_full, pc1.name AS brand, pc2.name AS line, pc3.name AS subline, @@ -649,7 +600,6 @@ async function importMissingProducts(prodConnection, localConnection, missingPid pls.date_sold as date_last_sold, GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids FROM products p - LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid @@ -662,16 +612,24 @@ async function importMissingProducts(prodConnection, localConnection, missingPid LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN ( - SELECT pid, MIN(price_each) as price_each - FROM product_current_prices - WHERE active = 1 - GROUP BY pid - ) pcp ON p.pid = pcp.pid + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid WHERE p.pid IN (?) 
GROUP BY p.pid `, [missingPids]); + // Add image URLs + products.forEach(product => { + const urls = getImageUrls(product.pid); + product.image = urls.image; + product.image_175 = urls.image_175; + product.image_full = urls.image_full; + }); + + let recordsAdded = 0; + let recordsUpdated = 0; + if (products.length > 0) { // Map values in the same order as columns const productValues = products.flatMap(product => @@ -699,21 +657,13 @@ async function importMissingProducts(prodConnection, localConnection, missingPid `; const result = await localConnection.query(query, productValues); - recordsAdded += result.affectedRows - result.changedRows; - recordsUpdated += result.changedRows; - - // Verify products were inserted before proceeding with categories - const [insertedProducts] = await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [products.map(p => p.pid)] - ); - const insertedPids = new Set(insertedProducts.map(p => p.pid)); + recordsAdded = result.affectedRows - result.changedRows; + recordsUpdated = result.changedRows; // Handle category relationships if any const categoryRelationships = []; products.forEach(product => { - // Only add category relationships for products that were successfully inserted - if (insertedPids.has(product.pid) && product.category_ids) { + if (product.category_ids) { const catIds = product.category_ids .split(",") .map(id => id.trim()) @@ -744,10 +694,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid .map(() => "(?, ?)") .join(","); await localConnection.query( - ` - INSERT IGNORE INTO product_categories (cat_id, pid) - VALUES ${catPlaceholders} - `, + `INSERT IGNORE INTO product_categories (cat_id, pid) + VALUES ${catPlaceholders}`, validRelationships.flat() ); } @@ -758,15 +706,10 @@ async function importMissingProducts(prodConnection, localConnection, missingPid status: "complete", totalImported: products.length, recordsAdded, - recordsUpdated, - incrementalUpdate: true, - lastSyncTime + recordsUpdated }; } catch (error) { throw error; - } finally { - // Cleanup temporary tables - await cleanupTemporaryTables(localConnection); } } From a867117c3cac4881506e58f8b12bb97e70826038 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 11:12:38 -0500 Subject: [PATCH 24/33] Import script incremental fixes --- inventory-server/scripts/import-from-prod.js | 16 +-- inventory-server/scripts/import/orders.js | 4 +- inventory-server/scripts/import/products.js | 100 +++++++++++++----- .../scripts/import/purchase-orders.js | 4 +- 4 files changed, 84 insertions(+), 40 deletions(-) diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 1fb63f1..a8390e6 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -161,32 +161,32 @@ async function main() { results.categories = await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.categories?.recordsAdded) totalRecordsAdded += results.categories.recordsAdded; - if (results.categories?.recordsUpdated) totalRecordsUpdated += results.categories.recordsUpdated; + totalRecordsAdded += results.categories?.recordsAdded || 0; + totalRecordsUpdated += results.categories?.recordsUpdated || 0; } if (IMPORT_PRODUCTS) { results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); 
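
The hunk above drops the conditional accumulation in favor of optional chaining with a `|| 0` fallback, so an import step that was skipped or returned no counters contributes zero instead of NaN. The same idea as a self-contained sketch; the result shape mirrors the return objects in these patches, and the helper name is hypothetical:

    // Null-safe accumulation of per-step import counters.
    function addStep(totals, result) {
      return {
        added: totals.added + (result?.recordsAdded || 0),
        updated: totals.updated + (result?.recordsUpdated || 0),
      };
    }

    let totals = { added: 0, updated: 0 };
    totals = addStep(totals, { recordsAdded: 5, recordsUpdated: 2 });
    totals = addStep(totals, undefined); // a skipped step contributes nothing
    console.log(totals); // { added: 5, updated: 2 }
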
completedSteps++; - if (results.products?.recordsAdded) totalRecordsAdded += results.products.recordsAdded; - if (results.products?.recordsUpdated) totalRecordsUpdated += results.products.recordsUpdated; + totalRecordsAdded += results.products?.recordsAdded || 0; + totalRecordsUpdated += results.products?.recordsUpdated || 0; } if (IMPORT_ORDERS) { results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.orders?.recordsAdded) totalRecordsAdded += results.orders.recordsAdded; - if (results.orders?.recordsUpdated) totalRecordsUpdated += results.orders.recordsUpdated; + totalRecordsAdded += results.orders?.recordsAdded || 0; + totalRecordsUpdated += results.orders?.recordsUpdated || 0; } if (IMPORT_PURCHASE_ORDERS) { results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; - if (results.purchaseOrders?.recordsAdded) totalRecordsAdded += results.purchaseOrders.recordsAdded; - if (results.purchaseOrders?.recordsUpdated) totalRecordsUpdated += results.purchaseOrders.recordsUpdated; + totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0; + totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0; } const endTime = Date.now(); diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 1cfdea8..e739e7a 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -432,8 +432,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = return { status: "complete", totalImported: importedCount, - recordsAdded, - recordsUpdated, + recordsAdded: recordsAdded || 0, + recordsUpdated: recordsUpdated || 0, totalSkipped: skippedOrders.size, missingProducts: missingProducts.size, incrementalUpdate, diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index fa1f2ce..99dccc4 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -357,11 +357,46 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate // Insert production data in batches, but only for products that need updates for (let i = 0; i < prodData.length; i += 1000) { const batch = prodData.slice(i, i + 1000); - const placeholders = batch.map(() => "(?)").join(","); + const placeholders = batch.map(() => `(${Array(31).fill("?").join(",")})`).join(","); + // Map each row to exactly match our temp table columns + const values = batch.flatMap(row => [ + row.pid, + row.title, + row.description, + row.SKU, + row.date_created, + row.first_received, + row.location, + row.barcode, + row.harmonized_tariff_code, + row.updated_at, + row.visible, + row.replenishable, + row.vendor, + row.vendor_reference, + row.notions_reference, + row.brand, + row.line, + row.subline, + row.artist, + row.moq, + row.rating, + row.reviews, + row.weight, + row.length, + row.width, + row.height, + row.total_sold, + row.country_of_origin, + row.date_last_sold, + row.category_ids, + true // needs_update + ]); + await localConnection.query(` INSERT INTO temp_prod_data VALUES ${placeholders} - `, batch.map(row => Object.values(row))); + `, values); outputProgress({ status: "running", @@ -378,7 +413,12 @@ async function importProducts(prodConnection, localConnection, 
incrementalUpdate let recordsAdded = 0; let recordsUpdated = 0; - while (processed < totalProducts) { + // Get actual count from temp table + const [[{ actualTotal }]] = await localConnection.query( + "SELECT COUNT(*) as actualTotal FROM temp_prod_data WHERE needs_update = 1" + ); + + while (processed < actualTotal) { const [batch] = await localConnection.query(` SELECT p.*, @@ -394,6 +434,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate WHERE p.needs_update = 1 LIMIT ? OFFSET ? `, [BATCH_SIZE, processed]); + + if (!batch || batch.length === 0) break; // Exit if no more records // Add image URLs batch.forEach(row => { @@ -413,24 +455,25 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate }) ); - // MySQL 8.0 optimized insert - const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`; - const productPlaceholders = Array(batch.length).fill(placeholderGroup).join(","); - - const insertQuery = ` - INSERT INTO products (${columnNames.join(",")}) - VALUES ${productPlaceholders} - AS new_products - ON DUPLICATE KEY UPDATE - ${columnNames - .filter(col => col !== "pid") - .map(col => `${col} = new_products.${col}`) - .join(",")}; - `; + if (productValues.length > 0) { + // MySQL 8.0 optimized insert with proper placeholders + const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`; + const productPlaceholders = Array(batch.length).fill(placeholderGroup).join(","); + + const insertQuery = ` + INSERT INTO products (${columnNames.join(",")}) + VALUES ${productPlaceholders} + ON DUPLICATE KEY UPDATE + ${columnNames + .filter(col => col !== "pid") + .map(col => `${col} = VALUES(${col})`) + .join(",")}; + `; - const result = await localConnection.query(insertQuery, productValues); - recordsAdded += result.affectedRows - result.changedRows; - recordsUpdated += result.changedRows; + const result = await localConnection.query(insertQuery, productValues); + recordsAdded += result.affectedRows - result.changedRows; + recordsUpdated += result.changedRows; + } // Insert category relationships const categoryRelationships = []; @@ -482,15 +525,16 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate } } - processed += batch.length; + processed += batch.length; // Only increment by actual records processed + outputProgress({ status: "running", operation: "Products import", - message: `Processed ${processed} of ${totalProducts} products`, + message: `Processed ${processed} of ${actualTotal} products`, current: processed, - total: totalProducts, + total: actualTotal, elapsed: formatElapsedTime((Date.now() - startTime) / 1000), - remaining: estimateRemaining(startTime, processed, totalProducts), + remaining: estimateRemaining(startTime, processed, actualTotal), rate: calculateRate(startTime, processed) }); @@ -512,10 +556,10 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate return { status: "complete", - totalImported: totalProducts, - recordsAdded, - recordsUpdated, - incrementalUpdate: true, + totalImported: actualTotal, + recordsAdded: recordsAdded || 0, + recordsUpdated: recordsUpdated || 0, + incrementalUpdate, lastSyncTime }; } catch (error) { diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index 369e369..f1add71 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -364,8 +364,8 @@ async function 
importPurchaseOrders(prodConnection, localConnection, incremental return { status: "complete", totalImported: totalItems, - recordsAdded, - recordsUpdated, + recordsAdded: recordsAdded || 0, + recordsUpdated: recordsUpdated || 0, incrementalUpdate, lastSyncTime }; From 1c932e0df5ddc44f084a7e4161de1c50389f9a77 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 13:12:43 -0500 Subject: [PATCH 25/33] More import script updates and fixes, better import_history tracking --- inventory-server/db/config-schema.sql | 1 + inventory-server/db/schema.sql | 1 + inventory-server/scripts/import-from-prod.js | 4 ++ inventory-server/scripts/import/orders.js | 41 ++++++++++++++----- inventory-server/scripts/import/products.js | 6 ++- .../scripts/import/purchase-orders.js | 2 +- 6 files changed, 41 insertions(+), 14 deletions(-) diff --git a/inventory-server/db/config-schema.sql b/inventory-server/db/config-schema.sql index 1dfb3a7..2a28a1e 100644 --- a/inventory-server/db/config-schema.sql +++ b/inventory-server/db/config-schema.sql @@ -184,6 +184,7 @@ CREATE TABLE IF NOT EXISTS import_history ( start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, end_time TIMESTAMP NULL, duration_seconds INT, + duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED, records_added INT DEFAULT 0, records_updated INT DEFAULT 0, is_incremental BOOLEAN DEFAULT FALSE, diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql index b3dc803..9b96a70 100644 --- a/inventory-server/db/schema.sql +++ b/inventory-server/db/schema.sql @@ -118,6 +118,7 @@ CREATE TABLE IF NOT EXISTS orders ( status VARCHAR(20) DEFAULT 'pending', canceled TINYINT(1) DEFAULT 0, PRIMARY KEY (id), + UNIQUE KEY unique_order_line (order_number, pid), KEY order_number (order_number), KEY pid (pid), KEY customer (customer), diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index a8390e6..67b6678 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -161,6 +161,7 @@ async function main() { results.categories = await importCategories(prodConnection, localConnection); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + console.log('Categories import result:', results.categories); totalRecordsAdded += results.categories?.recordsAdded || 0; totalRecordsUpdated += results.categories?.recordsUpdated || 0; } @@ -169,6 +170,7 @@ async function main() { results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + console.log('Products import result:', results.products); totalRecordsAdded += results.products?.recordsAdded || 0; totalRecordsUpdated += results.products?.recordsUpdated || 0; } @@ -177,6 +179,7 @@ async function main() { results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + console.log('Orders import result:', results.orders); totalRecordsAdded += results.orders?.recordsAdded || 0; totalRecordsUpdated += results.orders?.recordsUpdated || 0; } @@ -185,6 +188,7 @@ async function main() { results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE); if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; + console.log('Purchase orders import result:', results.purchaseOrders); 
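
The record counting reworked later in this patch leans on MySQL's bookkeeping for `INSERT ... ON DUPLICATE KEY UPDATE`: `affectedRows` counts 1 for each inserted row, 2 for each row the UPDATE branch actually changed, and 0 for a duplicate left untouched, while `changedRows` counts only the changed rows. Inserts therefore fall out as `affectedRows - 2 * changedRows`. A small sketch of that arithmetic (the packet values are invented for illustration):

    // Split a mysql2 result header into added/updated counts after an
    // INSERT ... ON DUPLICATE KEY UPDATE statement.
    function splitCounts({ affectedRows, changedRows }) {
      return {
        added: affectedRows - 2 * changedRows, // rows that were inserted
        updated: changedRows, // rows the UPDATE branch actually changed
      };
    }

    // 10 rows sent, 7 inserted, 3 updated-and-changed: affectedRows = 7 + 2 * 3
    console.log(splitCounts({ affectedRows: 13, changedRows: 3 }));
    // { added: 7, updated: 3 }
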
totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0; totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0; } diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index e739e7a..a976fa1 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -73,6 +73,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = `); // Get base order items first + console.log('Last sync time:', lastSyncTime); const [orderItems] = await prodConnection.query(` SELECT oi.order_id, @@ -89,15 +90,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = AND o.date_placed_onlydate IS NOT NULL ${incrementalUpdate ? ` AND ( - o.stamp > ? + o.stamp > ? OR oi.stamp > ? - OR o.date_placed > ? - OR o.date_shipped > ? - OR o.date_cancelled > ? - OR o.date_updated > ? ) ` : ''} - `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); + `, incrementalUpdate ? [lastSyncTime, lastSyncTime] : []); + + console.log('Found', orderItems.length, 'orders to process'); const totalOrders = orderItems.length; let processed = 0; @@ -111,7 +110,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = ]); await localConnection.query(` - INSERT INTO temp_order_items VALUES ${placeholders} + INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE + SKU = VALUES(SKU), + price = VALUES(price), + quantity = VALUES(quantity), + base_discount = VALUES(base_discount) `, values); processed += batch.length; @@ -279,12 +284,26 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = INSERT INTO orders (${columnNames.join(",")}) VALUES ${placeholders} ON DUPLICATE KEY UPDATE - ${columnNames.map(col => `${col} = VALUES(${col})`).join(",")} + SKU = VALUES(SKU), + date = VALUES(date), + price = VALUES(price), + quantity = VALUES(quantity), + discount = VALUES(discount), + tax = VALUES(tax), + tax_included = VALUES(tax_included), + shipping = VALUES(shipping), + customer = VALUES(customer), + customer_name = VALUES(customer_name), + status = VALUES(status), + canceled = VALUES(canceled) `; - const result = await localConnection.query(query, values.flat()); - recordsAdded += result.affectedRows - result.changedRows; - recordsUpdated += result.changedRows; + const result = await localConnection.query(query, values); + // For INSERT ... 
ON DUPLICATE KEY UPDATE: + // - affectedRows is 1 for each inserted row and 2 for each updated row + // - changedRows is 1 for each row that was actually changed during update + recordsAdded += result[0].affectedRows - (2 * result[0].changedRows); // New rows + recordsUpdated += result[0].changedRows; // Actually changed rows importedCount += validOrders.length; } diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 99dccc4..ad17c18 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -200,6 +200,8 @@ async function materializeCalculations(prodConnection, localConnection) { async function importProducts(prodConnection, localConnection, incrementalUpdate = true) { const startTime = Date.now(); + let recordsAdded = 0; + let recordsUpdated = 0; try { // Get column names first @@ -471,8 +473,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate `; const result = await localConnection.query(insertQuery, productValues); - recordsAdded += result.affectedRows - result.changedRows; - recordsUpdated += result.changedRows; + recordsAdded += result.affectedRows - (2 * result.changedRows); // New rows + recordsUpdated += result.changedRows; // Actually changed rows } // Insert category relationships diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index f1add71..d2da638 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -329,7 +329,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental `; const result = await localConnection.query(query, values.flat()); - recordsAdded += result.affectedRows - result.changedRows; + recordsAdded += result.affectedRows - (2 * result.changedRows); recordsUpdated += result.changedRows; } From d57239c40c9231016f3b491d95a6b6181dc06df3 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 31 Jan 2025 16:01:21 -0500 Subject: [PATCH 26/33] Finish up import script incremental and reliability updates --- inventory-server/db/schema.sql | 3 +- inventory-server/scripts/import/orders.js | 241 +++++++++++++----- inventory-server/scripts/import/products.js | 183 ++++++++----- .../scripts/import/purchase-orders.js | 105 +++++++- .../scripts/import/purchase_orders.js | 82 ++++++ 5 files changed, 462 insertions(+), 152 deletions(-) create mode 100644 inventory-server/scripts/import/purchase_orders.js diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql index 9b96a70..372dfb6 100644 --- a/inventory-server/db/schema.sql +++ b/inventory-server/db/schema.sql @@ -52,7 +52,7 @@ CREATE TABLE products ( notifies INT UNSIGNED DEFAULT 0, date_last_sold DATE, PRIMARY KEY (pid), - UNIQUE KEY unique_sku (SKU), + INDEX idx_sku (SKU), INDEX idx_vendor (vendor), INDEX idx_brand (brand), INDEX idx_location (location), @@ -148,7 +148,6 @@ CREATE TABLE purchase_orders ( received_by INT, receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag', FOREIGN KEY (pid) REFERENCES products(pid), - FOREIGN KEY (sku) REFERENCES products(SKU), INDEX idx_po_id (po_id), INDEX idx_vendor (vendor), INDEX idx_status (status), diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index a976fa1..1ba7d93 100644 --- a/inventory-server/scripts/import/orders.js +++ 
b/inventory-server/scripts/import/orders.js @@ -21,6 +21,46 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = let recordsUpdated = 0; try { + // Insert temporary table creation queries + await localConnection.query(` + CREATE TABLE IF NOT EXISTS temp_order_items ( + order_id INT UNSIGNED NOT NULL, + pid INT UNSIGNED NOT NULL, + SKU VARCHAR(50) NOT NULL, + price DECIMAL(10,2) NOT NULL, + quantity INT NOT NULL, + base_discount DECIMAL(10,2) DEFAULT 0, + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + `); + await localConnection.query(` + CREATE TABLE IF NOT EXISTS temp_order_meta ( + order_id INT UNSIGNED NOT NULL, + date DATE NOT NULL, + customer VARCHAR(100) NOT NULL, + customer_name VARCHAR(150) NOT NULL, + status INT, + canceled TINYINT(1), + PRIMARY KEY (order_id) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + `); + await localConnection.query(` + CREATE TABLE IF NOT EXISTS temp_order_discounts ( + order_id INT UNSIGNED NOT NULL, + pid INT UNSIGNED NOT NULL, + discount DECIMAL(10,2) NOT NULL, + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + `); + await localConnection.query(` + CREATE TABLE IF NOT EXISTS temp_order_taxes ( + order_id INT UNSIGNED NOT NULL, + pid INT UNSIGNED NOT NULL, + tax DECIMAL(10,2) NOT NULL, + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + `); + // Get column names from the local table const [columns] = await localConnection.query(` SELECT COLUMN_NAME @@ -36,52 +76,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = ); const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - // Create temporary tables for staging data - await localConnection.query(` - CREATE TEMPORARY TABLE temp_order_items ( - order_id INT UNSIGNED, - pid INT UNSIGNED, - SKU VARCHAR(50), - price DECIMAL(10,3), - quantity INT, - base_discount DECIMAL(10,3), - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB; + console.log('Orders: Using last sync time:', lastSyncTime); - CREATE TEMPORARY TABLE temp_order_meta ( - order_id INT UNSIGNED PRIMARY KEY, - date DATE, - customer INT UNSIGNED, - customer_name VARCHAR(100), - status TINYINT UNSIGNED, - canceled TINYINT UNSIGNED - ) ENGINE=InnoDB; - - CREATE TEMPORARY TABLE temp_order_discounts ( - order_id INT UNSIGNED, - pid INT UNSIGNED, - discount DECIMAL(10,3), - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB; - - CREATE TEMPORARY TABLE temp_order_taxes ( - order_id INT UNSIGNED, - pid INT UNSIGNED, - tax DECIMAL(10,3), - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB; - `); - - // Get base order items first - console.log('Last sync time:', lastSyncTime); - const [orderItems] = await prodConnection.query(` - SELECT - oi.order_id, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, - oi.prod_price as price, - oi.qty_ordered as quantity, - COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount + // First get all relevant order items with basic info + const [[{ total }]] = await prodConnection.query(` + SELECT COUNT(*) as total FROM order_items oi USE INDEX (PRIMARY) JOIN _order o ON oi.order_id = o.order_id @@ -92,11 +91,61 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = AND ( o.stamp > ? OR oi.stamp > ? 
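+            -- Note: the discount probe below has no stamp filter, so any line
+            -- that ever had a discount row is re-checked on every incremental
+            -- run; only the tax probe is bounded by the last sync time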
+ OR EXISTS ( + SELECT 1 FROM order_discount_items odi + WHERE odi.order_id = o.order_id + AND odi.pid = oi.prod_pid + ) + OR EXISTS ( + SELECT 1 FROM order_tax_info oti + JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id + WHERE oti.order_id = o.order_id + AND otip.pid = oi.prod_pid + AND oti.stamp > ? + ) ) ` : ''} - `, incrementalUpdate ? [lastSyncTime, lastSyncTime] : []); + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []); - console.log('Found', orderItems.length, 'orders to process'); + console.log('Orders: Found changes:', total); + + // Get order items in batches + const [orderItems] = await prodConnection.query(` + SELECT + oi.order_id, + oi.prod_pid as pid, + oi.prod_itemnumber as SKU, + oi.prod_price as price, + oi.qty_ordered as quantity, + COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount, + oi.stamp as last_modified + FROM order_items oi + USE INDEX (PRIMARY) + JOIN _order o ON oi.order_id = o.order_id + WHERE o.order_status >= 15 + AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR) + AND o.date_placed_onlydate IS NOT NULL + ${incrementalUpdate ? ` + AND ( + o.stamp > ? + OR oi.stamp > ? + OR EXISTS ( + SELECT 1 FROM order_discount_items odi + WHERE odi.order_id = o.order_id + AND odi.pid = oi.prod_pid + ) + OR EXISTS ( + SELECT 1 FROM order_tax_info oti + JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id + WHERE oti.order_id = o.order_id + AND otip.pid = oi.prod_pid + AND oti.stamp > ? + ) + ) + ` : ''} + `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []); + + console.log('Orders: Processing', orderItems.length, 'order items'); const totalOrders = orderItems.length; let processed = 0; @@ -280,30 +329,82 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`; const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(","); - const query = ` - INSERT INTO orders (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - SKU = VALUES(SKU), - date = VALUES(date), - price = VALUES(price), - quantity = VALUES(quantity), - discount = VALUES(discount), - tax = VALUES(tax), - tax_included = VALUES(tax_included), - shipping = VALUES(shipping), - customer = VALUES(customer), - customer_name = VALUES(customer_name), - status = VALUES(status), - canceled = VALUES(canceled) - `; + // First check which orders exist and get their current values + const [existingOrders] = await localConnection.query( + `SELECT ${columnNames.join(',')} FROM orders WHERE (order_number, pid) IN (${validOrders.map(() => "(?,?)").join(",")})`, + validOrders.flatMap(o => [o.order_number, o.pid]) + ); + const existingOrderMap = new Map( + existingOrders.map(o => [`${o.order_number}-${o.pid}`, o]) + ); - const result = await localConnection.query(query, values); - // For INSERT ... 
ON DUPLICATE KEY UPDATE: - // - affectedRows is 1 for each inserted row and 2 for each updated row - // - changedRows is 1 for each row that was actually changed during update - recordsAdded += result[0].affectedRows - (2 * result[0].changedRows); // New rows - recordsUpdated += result[0].changedRows; // Actually changed rows + // Split into inserts and updates + const insertsAndUpdates = validOrders.reduce((acc, order) => { + const key = `${order.order_number}-${order.pid}`; + if (existingOrderMap.has(key)) { + const existing = existingOrderMap.get(key); + // Check if any values are different + const hasChanges = columnNames.some(col => { + const newVal = order[col] ?? null; + const oldVal = existing[col] ?? null; + // Special handling for numbers to avoid type coercion issues + if (typeof newVal === 'number' && typeof oldVal === 'number') { + return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences + } + return newVal !== oldVal; + }); + + if (hasChanges) { + acc.updates.push({ + order_number: order.order_number, + pid: order.pid, + values: columnNames.map(col => order[col] ?? null) + }); + } else { + acc.inserts.push({ + order_number: order.order_number, + pid: order.pid, + values: columnNames.map(col => order[col] ?? null) + }); + } + return acc; + + // Handle inserts + if (insertsAndUpdates.inserts.length > 0) { + const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(singlePlaceholder).join(","); + + const insertResult = await localConnection.query(` + INSERT INTO orders (${columnNames.join(",")}) + VALUES ${insertPlaceholders} + `, insertsAndUpdates.inserts.map(i => i.values).flat()); + + recordsAdded += insertResult[0].affectedRows; + } + + // Handle updates - now we know these actually have changes + if (insertsAndUpdates.updates.length > 0) { + const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(singlePlaceholder).join(","); + + const updateResult = await localConnection.query(` + INSERT INTO orders (${columnNames.join(",")}) + VALUES ${updatePlaceholders} + ON DUPLICATE KEY UPDATE + SKU = VALUES(SKU), + date = VALUES(date), + price = VALUES(price), + quantity = VALUES(quantity), + discount = VALUES(discount), + tax = VALUES(tax), + tax_included = VALUES(tax_included), + shipping = VALUES(shipping), + customer = VALUES(customer), + customer_name = VALUES(customer_name), + status = VALUES(status), + canceled = VALUES(canceled) + `, insertsAndUpdates.updates.map(u => u.values).flat()); + + recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows + } importedCount += validOrders.length; } diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index ad17c18..ce43418 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -13,40 +13,12 @@ const getImageUrls = (pid) => { }; async function setupTemporaryTables(connection) { - await connection.query(` - CREATE TEMPORARY TABLE IF NOT EXISTS temp_categories ( - cat_id INT PRIMARY KEY, - name VARCHAR(255) - ) ENGINE=InnoDB; - - CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_images ( - pid INT, - iid INT, - image_type ENUM('thumbnail', '175', 'full'), - url VARCHAR(255), - PRIMARY KEY (pid, image_type) - ) ENGINE=InnoDB; - - CREATE TEMPORARY TABLE IF NOT EXISTS temp_inventory_status ( - pid INT PRIMARY KEY, - stock_quantity INT, - pending_qty INT, - preorder_count INT, - notions_inv_count INT - ) ENGINE=InnoDB; - - CREATE TEMPORARY 
TABLE IF NOT EXISTS temp_product_prices ( - pid INT PRIMARY KEY, - price DECIMAL(10,2), - regular_price DECIMAL(10,2), - cost_price DECIMAL(10,5) - ) ENGINE=InnoDB; - - INSERT INTO temp_categories - SELECT cat_id, name FROM categories; - - CREATE INDEX idx_temp_cat_id ON temp_categories(cat_id); - `); + await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_categories ( cat_id INT PRIMARY KEY, name VARCHAR(255) ) ENGINE=InnoDB;`); + await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_images ( pid INT, iid INT, image_type ENUM('thumbnail', '175', 'full'), url VARCHAR(255), PRIMARY KEY (pid, image_type) ) ENGINE=InnoDB;`); + await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_inventory_status ( pid INT PRIMARY KEY, stock_quantity INT, pending_qty INT, preorder_count INT, notions_inv_count INT, needs_update BOOLEAN ) ENGINE=InnoDB;`); + await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_prices ( pid INT PRIMARY KEY, price DECIMAL(10,2), regular_price DECIMAL(10,2), cost_price DECIMAL(10,5), needs_update BOOLEAN ) ENGINE=InnoDB;`); + await connection.query(`INSERT INTO temp_categories SELECT cat_id, name FROM categories;`); + await connection.query(`CREATE INDEX idx_temp_cat_id ON temp_categories(cat_id);`); } async function cleanupTemporaryTables(connection) { @@ -108,18 +80,20 @@ async function materializeCalculations(prodConnection, localConnection) { Math.max(0, row.stock_quantity - row.pending_qty), // Calculate final stock quantity row.pending_qty, row.preorder_count, - row.notions_inv_count + row.notions_inv_count, + true // Mark as needing update ]); if (values.length > 0) { await localConnection.query(` - INSERT INTO temp_inventory_status (pid, stock_quantity, pending_qty, preorder_count, notions_inv_count) + INSERT INTO temp_inventory_status (pid, stock_quantity, pending_qty, preorder_count, notions_inv_count, needs_update) VALUES ? ON DUPLICATE KEY UPDATE stock_quantity = VALUES(stock_quantity), pending_qty = VALUES(pending_qty), preorder_count = VALUES(preorder_count), - notions_inv_count = VALUES(notions_inv_count) + notions_inv_count = VALUES(notions_inv_count), + needs_update = TRUE `, [values]); } @@ -168,17 +142,19 @@ async function materializeCalculations(prodConnection, localConnection) { row.pid, row.price, row.regular_price, - row.cost_price + row.cost_price, + true // Mark as needing update ]); if (values.length > 0) { await localConnection.query(` - INSERT INTO temp_product_prices (pid, price, regular_price, cost_price) + INSERT INTO temp_product_prices (pid, price, regular_price, cost_price, needs_update) VALUES ? ON DUPLICATE KEY UPDATE price = VALUES(price), regular_price = VALUES(regular_price), - cost_price = VALUES(cost_price) + cost_price = VALUES(cost_price), + needs_update = TRUE `, [values]); } @@ -218,6 +194,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'products'" ); const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + + console.log('Products: Using last sync time:', lastSyncTime); // Setup temporary tables await setupTemporaryTables(localConnection); @@ -245,6 +223,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate ` : 'TRUE'} `, incrementalUpdate ? 
[lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); + console.log('Products: Found changes:', countResult[0].total); + const totalProducts = countResult[0].total; // Main product query using materialized data - modified for incremental @@ -415,10 +395,16 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate let recordsAdded = 0; let recordsUpdated = 0; - // Get actual count from temp table - const [[{ actualTotal }]] = await localConnection.query( - "SELECT COUNT(*) as actualTotal FROM temp_prod_data WHERE needs_update = 1" - ); + // Get actual count from temp table - only count products that need updates + const [[{ actualTotal }]] = await localConnection.query(` + SELECT COUNT(DISTINCT p.pid) as actualTotal + FROM temp_prod_data p + LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid + LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid + WHERE p.needs_update = 1 + OR tis.needs_update = 1 + OR tpp.needs_update = 1 + `); while (processed < actualTotal) { const [batch] = await localConnection.query(` @@ -433,7 +419,9 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate FROM temp_prod_data p LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid - WHERE p.needs_update = 1 + WHERE p.needs_update = 1 + OR tis.needs_update = 1 + OR tpp.needs_update = 1 LIMIT ? OFFSET ? `, [BATCH_SIZE, processed]); @@ -447,34 +435,93 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate row.image_full = urls.image_full; }); - // Prepare product values - now using columnNames from above - const productValues = batch.flatMap(row => - columnNames.map(col => { - const val = row[col] ?? null; + if (batch.length > 0) { + // MySQL 8.0 optimized insert with proper placeholders + const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`; + + // First check which products already exist and get their current values + const [existingProducts] = await localConnection.query( + `SELECT ${columnNames.join(',')} FROM products WHERE pid IN (?)`, + [batch.map(p => p.pid)] + ); + const existingPidsMap = new Map(existingProducts.map(p => [p.pid, p])); + + // Helper function to map values consistently + const mapValues = (product) => columnNames.map(col => { + const val = product[col] ?? null; if (col === "managing_stock") return 1; if (typeof val === "number") return val || 0; return val; - }) - ); + }); - if (productValues.length > 0) { - // MySQL 8.0 optimized insert with proper placeholders - const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`; - const productPlaceholders = Array(batch.length).fill(placeholderGroup).join(","); - - const insertQuery = ` - INSERT INTO products (${columnNames.join(",")}) - VALUES ${productPlaceholders} - ON DUPLICATE KEY UPDATE - ${columnNames - .filter(col => col !== "pid") - .map(col => `${col} = VALUES(${col})`) - .join(",")}; - `; + // Split into inserts and updates, comparing values for updates + const insertsAndUpdates = batch.reduce((acc, product) => { + if (existingPidsMap.has(product.pid)) { + const existing = existingPidsMap.get(product.pid); + // Check if any values are different + const hasChanges = columnNames.some(col => { + const newVal = product[col] ?? null; + const oldVal = existing[col] ?? 
null; + // Special handling for numbers to avoid type coercion issues + if (typeof newVal === 'number' && typeof oldVal === 'number') { + // Handle NaN and Infinity + if (isNaN(newVal) || isNaN(oldVal)) return isNaN(newVal) !== isNaN(oldVal); + if (!isFinite(newVal) || !isFinite(oldVal)) return !isFinite(newVal) !== !isFinite(oldVal); + // Allow for tiny floating point differences + return Math.abs(newVal - oldVal) > 0.00001; + } + if (col === 'managing_stock') return false; // Skip this as it's always 1 + return newVal !== oldVal; + }); - const result = await localConnection.query(insertQuery, productValues); - recordsAdded += result.affectedRows - (2 * result.changedRows); // New rows - recordsUpdated += result.changedRows; // Actually changed rows + if (hasChanges) { + acc.updates.push({ + pid: product.pid, + values: mapValues(product) + }); + } + } else { + acc.inserts.push({ + pid: product.pid, + values: mapValues(product) + }); + } + return acc; + }, { inserts: [], updates: [] }); + + // Log summary for this batch + if (insertsAndUpdates.inserts.length > 0 || insertsAndUpdates.updates.length > 0) { + console.log(`Batch summary: ${insertsAndUpdates.inserts.length} new products, ${insertsAndUpdates.updates.length} updates`); + } + + // Handle inserts + if (insertsAndUpdates.inserts.length > 0) { + const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(","); + + const insertResult = await localConnection.query(` + INSERT INTO products (${columnNames.join(",")}) + VALUES ${insertPlaceholders} + `, insertsAndUpdates.inserts.map(i => i.values).flat()); + + recordsAdded += insertResult[0].affectedRows; + } + + // Handle updates - now we know these actually have changes + if (insertsAndUpdates.updates.length > 0) { + const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(","); + + const updateResult = await localConnection.query(` + INSERT INTO products (${columnNames.join(",")}) + VALUES ${updatePlaceholders} + ON DUPLICATE KEY UPDATE + ${columnNames + .filter(col => col !== "pid") + .map(col => `${col} = VALUES(${col})`) + .join(",")}; + `, insertsAndUpdates.updates.map(u => u.values).flat()); + + recordsUpdated += insertsAndUpdates.updates.length; + } } // Insert category relationships diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index d2da638..b492e9e 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -12,6 +12,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental ); const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + console.log('Purchase Orders: Using last sync time:', lastSyncTime); + + // Insert temporary table creation query for purchase orders + await localConnection.query(` + CREATE TABLE IF NOT EXISTS temp_purchase_orders ( + po_id INT UNSIGNED NOT NULL, + pid INT UNSIGNED NOT NULL, + vendor VARCHAR(255), + date DATE, + expected_date DATE, + status INT, + notes TEXT, + PRIMARY KEY (po_id, pid) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + `); + outputProgress({ operation: `Starting ${incrementalUpdate ? 
'incremental' : 'full'} purchase orders import`, status: "running", @@ -82,6 +98,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions ] : []); + console.log('Purchase Orders: Found changes:', total); + const [poList] = await prodConnection.query(` SELECT DISTINCT COALESCE(p.po_id, r.receiving_id) as po_id, @@ -221,6 +239,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const values = []; let batchProcessed = 0; + // First check which PO lines already exist and get their current values + const poLines = Array.from(poProductMap.values()) + .filter(p => validPids.has(p.pid)) + .map(p => [p.po_id, p.pid]); + + const [existingPOs] = await localConnection.query( + `SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`, + poLines.flat() + ); + const existingPOMap = new Map( + existingPOs.map(po => [`${po.po_id}-${po.pid}`, po]) + ); + + // Split into inserts and updates + const insertsAndUpdates = { inserts: [], updates: [] }; + for (const po of batch) { const poProducts = Array.from(poProductMap.values()) .filter(p => p.po_id === po.po_id && validPids.has(p.pid)); @@ -280,7 +314,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const firstReceiving = allReceivings[0] || {}; const lastReceiving = allReceivings[allReceivings.length - 1] || {}; - values.push(columnNames.map(col => { + const rowValues = columnNames.map(col => { switch (col) { case 'po_id': return po.po_id; case 'vendor': return po.vendor; @@ -309,28 +343,75 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental }); default: return null; } - })); + }); + + if (existingPOMap.has(key)) { + const existing = existingPOMap.get(key); + // Check if any values are different + const hasChanges = columnNames.some(col => { + const newVal = rowValues[columnNames.indexOf(col)]; + const oldVal = existing[col] ?? 
null; + // Special handling for numbers to avoid type coercion issues + if (typeof newVal === 'number' && typeof oldVal === 'number') { + return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences + } + // Special handling for receiving_history - parse and compare + if (col === 'receiving_history') { + const newHistory = JSON.parse(newVal || '{}'); + const oldHistory = JSON.parse(oldVal || '{}'); + return JSON.stringify(newHistory) !== JSON.stringify(oldHistory); + } + return newVal !== oldVal; + }); + + if (hasChanges) { + insertsAndUpdates.updates.push({ + po_id: po.po_id, + pid: product.pid, + values: rowValues + }); + } + } else { + insertsAndUpdates.inserts.push({ + po_id: po.po_id, + pid: product.pid, + values: rowValues + }); + } batchProcessed++; } } - if (values.length > 0) { - const placeholders = values.map(() => - `(${Array(columnNames.length).fill("?").join(",")})` - ).join(","); + // Handle inserts + if (insertsAndUpdates.inserts.length > 0) { + const insertPlaceholders = insertsAndUpdates.inserts + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) + .join(","); - const query = ` + const insertResult = await localConnection.query(` INSERT INTO purchase_orders (${columnNames.join(",")}) - VALUES ${placeholders} + VALUES ${insertPlaceholders} + `, insertsAndUpdates.inserts.map(i => i.values).flat()); + + recordsAdded += insertResult[0].affectedRows; + } + + // Handle updates - now we know these actually have changes + if (insertsAndUpdates.updates.length > 0) { + const updatePlaceholders = insertsAndUpdates.updates + .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) + .join(","); + + const updateResult = await localConnection.query(` + INSERT INTO purchase_orders (${columnNames.join(",")}) + VALUES ${updatePlaceholders} ON DUPLICATE KEY UPDATE ${columnNames .filter((col) => col !== "po_id" && col !== "pid") .map((col) => `${col} = VALUES(${col})`) .join(",")}; - `; + `, insertsAndUpdates.updates.map(u => u.values).flat()); - const result = await localConnection.query(query, values.flat()); - recordsAdded += result.affectedRows - (2 * result.changedRows); - recordsUpdated += result.changedRows; + recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows } processed += batchProcessed; diff --git a/inventory-server/scripts/import/purchase_orders.js b/inventory-server/scripts/import/purchase_orders.js new file mode 100644 index 0000000..c127c87 --- /dev/null +++ b/inventory-server/scripts/import/purchase_orders.js @@ -0,0 +1,82 @@ +// Split into inserts and updates +const insertsAndUpdates = batch.reduce((acc, po) => { + const key = `${po.po_id}-${po.pid}`; + if (existingPOMap.has(key)) { + const existing = existingPOMap.get(key); + // Check if any values are different + const hasChanges = columnNames.some(col => { + const newVal = po[col] ?? null; + const oldVal = existing[col] ?? null; + // Special handling for numbers to avoid type coercion issues + if (typeof newVal === 'number' && typeof oldVal === 'number') { + return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences + } + // Special handling for receiving_history JSON + if (col === 'receiving_history') { + return JSON.stringify(newVal) !== JSON.stringify(oldVal); + } + return newVal !== oldVal; + }); + + if (hasChanges) { + console.log(`PO line changed: ${key}`, { + po_id: po.po_id, + pid: po.pid, + changes: columnNames.filter(col => { + const newVal = po[col] ?? 
null;
+ const oldVal = existing[col] ?? null;
+ if (typeof newVal === 'number' && typeof oldVal === 'number') {
+ return Math.abs(newVal - oldVal) > 0.00001;
+ }
+ if (col === 'receiving_history') {
+ return JSON.stringify(newVal) !== JSON.stringify(oldVal);
+ }
+ return newVal !== oldVal;
+ })
+ });
+ acc.updates.push({
+ po_id: po.po_id,
+ pid: po.pid,
+ values: columnNames.map(col => po[col] ?? null)
+ });
+ }
+ } else {
+ console.log(`New PO line: ${key}`);
+ acc.inserts.push({
+ po_id: po.po_id,
+ pid: po.pid,
+ values: columnNames.map(col => po[col] ?? null)
+ });
+ }
+ return acc;
+}, { inserts: [], updates: [] });
+
+// Handle inserts
+if (insertsAndUpdates.inserts.length > 0) {
+ const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
+
+ const insertResult = await localConnection.query(`
+ INSERT INTO purchase_orders (${columnNames.join(",")})
+ VALUES ${insertPlaceholders}
+ `, insertsAndUpdates.inserts.map(i => i.values).flat());
+
+ recordsAdded += insertResult[0].affectedRows;
+}
+
+// Handle updates
+if (insertsAndUpdates.updates.length > 0) {
+ const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
+
+ const updateResult = await localConnection.query(`
+ INSERT INTO purchase_orders (${columnNames.join(",")})
+ VALUES ${updatePlaceholders}
+ ON DUPLICATE KEY UPDATE
+ ${columnNames
+ .filter(col => col !== "po_id" && col !== "pid")
+ .map(col => `${col} = VALUES(${col})`)
+ .join(",")};
+ `, insertsAndUpdates.updates.map(u => u.values).flat());
+
+ // Updates were pre-filtered to rows with real changes, so the number of
+ // queued updates is the actual count (affectedRows reports 2 per updated
+ // row and would double-count)
+ recordsUpdated += insertsAndUpdates.updates.length;
+}
\ No newline at end of file

From e77b488cd4d202595e38360de7bbf9300c98ef52 Mon Sep 17 00:00:00 2001
From: Matt
Date: Fri, 31 Jan 2025 18:44:11 -0500
Subject: [PATCH 27/33] Fix/add data to products script

---
 inventory-server/scripts/import/products.js | 76 ++++++++++++++++-----
 1 file changed, 58 insertions(+), 18 deletions(-)

diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js
index ce43418..70fe5e2 100644
--- a/inventory-server/scripts/import/products.js
+++ b/inventory-server/scripts/import/products.js
@@ -2,13 +2,16 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } =
 
 // Utility functions
 const imageUrlBase = 'https://sbing.com/i/products/0000/';
-const getImageUrls = (pid) => {
+const getImageUrls = (pid, iid = 1) => {
 const paddedPid = pid.toString().padStart(6, '0');
- const basePath = `${imageUrlBase}${paddedPid.slice(0, 3)}/${pid}`;
+ // Use padded PID only for the first 3 digits
+ const prefix = paddedPid.slice(0, 3);
+ // Use the actual pid for the rest of the URL
+ const basePath = `${imageUrlBase}${prefix}/${pid}`;
 return {
- image: `${basePath}-t-`,
- image_175: `${basePath}-175x175-`,
- image_full: `${basePath}-o-`
+ image: `${basePath}-t-${iid}.jpg`,
+ image_175: `${basePath}-175x175-${iid}.jpg`,
+ image_full: `${basePath}-o-${iid}.jpg`
 };
 };
 
@@ -118,12 +121,11 @@ async function materializeCalculations(prodConnection, localConnection) {
 p.pid,
 COALESCE(pcp.price_each, 0) as price,
 COALESCE(p.sellingprice, 0) AS regular_price,
- COALESCE(
- (SELECT ROUND(AVG(costeach), 5)
- FROM product_inventory
- WHERE pid = p.pid
- AND COUNT > 0), 0
- ) AS cost_price
+ CASE
+ WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
+ THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid
= p.pid AND count > 0) + ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1) + END AS cost_price FROM products p LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid WHERE pcp.active = 1 @@ -256,6 +258,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate line VARCHAR(100), subline VARCHAR(100), artist VARCHAR(100), + landing_cost_price DECIMAL(10,2) DEFAULT NULL, + permalink VARCHAR(255) DEFAULT NULL, + options TEXT DEFAULT NULL, + tags TEXT DEFAULT NULL, + uom VARCHAR(50) DEFAULT NULL, + baskets INT DEFAULT 0, + notifies INT DEFAULT 0, moq INT, rating TINYINT UNSIGNED, reviews INT UNSIGNED, @@ -286,7 +295,14 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate p.harmonized_tariff_code, p.stamp AS updated_at, CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, - CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, + CASE + WHEN p.reorder < 0 THEN 0 + WHEN ( + ((p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) + AND (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR))) + ) THEN 0 + ELSE 1 + END AS replenishable, s.companyname AS vendor, CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber @@ -297,6 +313,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate pc2.name AS line, pc3.name AS subline, pc4.name AS artist, + NULL AS landing_cost_price, + CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, + NULL AS options, + NULL AS tags, + NULL AS uom, + (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, + (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, COALESCE(CASE WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit ELSE sid.supplier_qty_per_unit @@ -310,7 +333,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate p.totalsold AS total_sold, p.country_of_origin, pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids + GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids, + true // needs_update FROM products p LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 LEFT JOIN supplier_item_data sid ON p.pid = sid.pid @@ -362,6 +386,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate row.line, row.subline, row.artist, + row.landing_cost_price, + row.permalink, + row.options, + row.tags, + row.uom, + row.baskets, + row.notifies, row.moq, row.rating, row.reviews, @@ -655,16 +686,25 @@ async function importMissingProducts(prodConnection, localConnection, missingPid COALESCE(pnb.inventory, 0) as notions_inv_count, COALESCE(pcp.price_each, 0) as price, COALESCE(p.sellingprice, 0) AS regular_price, - COALESCE((SELECT ROUND(AVG(costeach), 5) - FROM product_inventory - WHERE pid = p.pid - AND COUNT > 0), 0) AS cost_price, + CASE + WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0) + THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0) + ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1) + END AS cost_price, NULL AS landing_cost_price, p.upc AS barcode, p.harmonized_tariff_code, p.stamp AS updated_at, CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, - CASE WHEN p.reorder >= 0 THEN 1 ELSE 0 END AS replenishable, + CASE + WHEN p.reorder < 0 THEN 0 + WHEN ( + 
(IFNULL(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURDATE(), INTERVAL 5 YEAR)) + OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) + OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) + ) THEN 0 + ELSE 1 + END AS replenishable, s.companyname AS vendor, CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference, sid.notions_itemnumber AS notions_reference, From 07f14c0017e41aea4c39c9ec383058eb05fe386c Mon Sep 17 00:00:00 2001 From: Matt Date: Sat, 1 Feb 2025 01:06:45 -0500 Subject: [PATCH 28/33] Fix/add data to orders script and fix other import errors --- inventory-server/db/schema.sql | 3 +- inventory-server/scripts/import/orders.js | 50 +- inventory-server/scripts/import/products.js | 855 +++++++++----------- 3 files changed, 409 insertions(+), 499 deletions(-) diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql index 372dfb6..38e2531 100644 --- a/inventory-server/db/schema.sql +++ b/inventory-server/db/schema.sql @@ -39,7 +39,7 @@ CREATE TABLE products ( tags TEXT, moq INT DEFAULT 1, uom INT DEFAULT 1, - rating TINYINT UNSIGNED DEFAULT 0, + rating DECIMAL(10,2) DEFAULT 0.00, reviews INT UNSIGNED DEFAULT 0, weight DECIMAL(10,3), length DECIMAL(10,3), @@ -113,6 +113,7 @@ CREATE TABLE IF NOT EXISTS orders ( tax DECIMAL(10,3) DEFAULT 0.000, tax_included TINYINT(1) DEFAULT 0, shipping DECIMAL(10,3) DEFAULT 0.000, + costeach DECIMAL(10,3) DEFAULT 0.000, customer VARCHAR(50) NOT NULL, customer_name VARCHAR(100), status VARCHAR(20) DEFAULT 'pending', diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 1ba7d93..442d107 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -60,6 +60,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = PRIMARY KEY (order_id, pid) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; `); + await localConnection.query(` + CREATE TABLE IF NOT EXISTS temp_order_costs ( + order_id INT UNSIGNED NOT NULL, + pid INT UNSIGNED NOT NULL, + costeach DECIMAL(10,3) DEFAULT 0.000, + PRIMARY KEY (order_id, pid) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + `); // Get column names from the local table const [columns] = await localConnection.query(` @@ -117,7 +125,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = oi.prod_itemnumber as SKU, oi.prod_price as price, oi.qty_ordered as quantity, - COALESCE(oi.prod_price_reg - oi.prod_price, 0) * oi.qty_ordered as base_discount, + COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount, oi.stamp as last_modified FROM order_items oi USE INDEX (PRIMARY) @@ -271,6 +279,26 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = } } + // Get costeach values in batches + for (let i = 0; i < orderIds.length; i += 5000) { + const batchIds = orderIds.slice(i, i + 5000); + const [costs] = await prodConnection.query(` + SELECT orderid as order_id, pid, costeach + FROM order_costs + WHERE orderid IN (?) 
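+        -- mysql2 expands the array bound to IN (?) into a comma-separated,
+        -- escaped id list, so each 5000-order slice is a single round trip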
+ `, [batchIds]); + + if (costs.length > 0) { + const placeholders = costs.map(() => '(?, ?, ?)').join(","); + const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach]); + await localConnection.query(` + INSERT INTO temp_order_costs (order_id, pid, costeach) + VALUES ${placeholders} + ON DUPLICATE KEY UPDATE costeach = VALUES(costeach) + `, values); + } + } + // Now combine all the data and insert into orders table let importedCount = 0; @@ -302,18 +330,19 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = om.customer, om.customer_name, om.status, - om.canceled + om.canceled, + COALESCE(tc.costeach, 0) as costeach FROM temp_order_items oi JOIN temp_order_meta om ON oi.order_id = om.order_id LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid + LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid WHERE oi.order_id IN (?) `, [batchIds]); // Filter orders and track missing products - do this in a single pass const validOrders = []; const values = []; - for (const order of orders) { if (!existingPids.has(order.pid)) { missingProducts.add(order.pid); @@ -331,7 +360,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = // First check which orders exist and get their current values const [existingOrders] = await localConnection.query( - `SELECT ${columnNames.join(',')} FROM orders WHERE (order_number, pid) IN (${validOrders.map(() => "(?,?)").join(",")})`, + `SELECT ${columnNames.join(",")} FROM orders WHERE (order_number, pid) IN (${validOrders.map(() => "(?,?)").join(",")})`, validOrders.flatMap(o => [o.order_number, o.pid]) ); const existingOrderMap = new Map( @@ -347,13 +376,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = const hasChanges = columnNames.some(col => { const newVal = order[col] ?? null; const oldVal = existing[col] ?? null; - // Special handling for numbers to avoid type coercion issues if (typeof newVal === 'number' && typeof oldVal === 'number') { return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences } return newVal !== oldVal; }); - if (hasChanges) { acc.updates.push({ order_number: order.order_number, @@ -367,7 +394,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = values: columnNames.map(col => order[col] ?? null) }); } - return acc; + } else { + acc.inserts.push({ + order_number: order.order_number, + pid: order.pid, + values: columnNames.map(col => order[col] ?? 
null) + }); + } + return acc; + }, { inserts: [], updates: [] }); // Handle inserts if (insertsAndUpdates.inserts.length > 0) { @@ -427,6 +462,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = DROP TEMPORARY TABLE IF EXISTS temp_order_meta; DROP TEMPORARY TABLE IF EXISTS temp_order_discounts; DROP TEMPORARY TABLE IF EXISTS temp_order_taxes; + DROP TEMPORARY TABLE IF EXISTS temp_order_costs; `); // Import missing products if any diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js index 70fe5e2..5986858 100644 --- a/inventory-server/scripts/import/products.js +++ b/inventory-server/scripts/import/products.js @@ -15,147 +15,299 @@ const getImageUrls = (pid, iid = 1) => { }; }; -async function setupTemporaryTables(connection) { - await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_categories ( cat_id INT PRIMARY KEY, name VARCHAR(255) ) ENGINE=InnoDB;`); - await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_images ( pid INT, iid INT, image_type ENUM('thumbnail', '175', 'full'), url VARCHAR(255), PRIMARY KEY (pid, image_type) ) ENGINE=InnoDB;`); - await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_inventory_status ( pid INT PRIMARY KEY, stock_quantity INT, pending_qty INT, preorder_count INT, notions_inv_count INT, needs_update BOOLEAN ) ENGINE=InnoDB;`); - await connection.query(`CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_prices ( pid INT PRIMARY KEY, price DECIMAL(10,2), regular_price DECIMAL(10,2), cost_price DECIMAL(10,5), needs_update BOOLEAN ) ENGINE=InnoDB;`); - await connection.query(`INSERT INTO temp_categories SELECT cat_id, name FROM categories;`); - await connection.query(`CREATE INDEX idx_temp_cat_id ON temp_categories(cat_id);`); +async function setupAndCleanupTempTables(connection, operation = 'setup') { + if (operation === 'setup') { + await connection.query(` + CREATE TEMPORARY TABLE IF NOT EXISTS temp_products ( + pid BIGINT NOT NULL, + title VARCHAR(255), + description TEXT, + SKU VARCHAR(50), + stock_quantity INT DEFAULT 0, + pending_qty INT DEFAULT 0, + preorder_count INT DEFAULT 0, + notions_inv_count INT DEFAULT 0, + price DECIMAL(10,3) NOT NULL DEFAULT 0, + regular_price DECIMAL(10,3) NOT NULL DEFAULT 0, + cost_price DECIMAL(10,3), + vendor VARCHAR(100), + vendor_reference VARCHAR(100), + notions_reference VARCHAR(100), + brand VARCHAR(100), + line VARCHAR(100), + subline VARCHAR(100), + artist VARCHAR(100), + category_ids TEXT, + created_at DATETIME, + first_received DATETIME, + landing_cost_price DECIMAL(10,3), + barcode VARCHAR(50), + harmonized_tariff_code VARCHAR(50), + updated_at DATETIME, + visible BOOLEAN, + replenishable BOOLEAN, + permalink VARCHAR(255), + moq DECIMAL(10,3), + rating DECIMAL(10,2), + reviews INT, + weight DECIMAL(10,3), + length DECIMAL(10,3), + width DECIMAL(10,3), + height DECIMAL(10,3), + country_of_origin VARCHAR(100), + location VARCHAR(100), + total_sold INT, + baskets INT, + notifies INT, + date_last_sold DATETIME, + needs_update BOOLEAN DEFAULT TRUE, + PRIMARY KEY (pid), + INDEX idx_needs_update (needs_update) + ) ENGINE=InnoDB; + `); + } else { + await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_products;'); + } } -async function cleanupTemporaryTables(connection) { - await connection.query(` - DROP TEMPORARY TABLE IF EXISTS temp_categories; - DROP TEMPORARY TABLE IF EXISTS temp_product_images; - DROP TEMPORARY TABLE IF EXISTS temp_inventory_status; - DROP TEMPORARY TABLE IF EXISTS 
temp_product_prices; - `); -} - -async function materializeCalculations(prodConnection, localConnection) { +async function materializeCalculations(prodConnection, localConnection, incrementalUpdate = true, lastSyncTime = '1970-01-01') { outputProgress({ status: "running", operation: "Products import", - message: "Fetching inventory and order data from production" + message: "Fetching product data from production" }); - // Get all inventory and order data from production in one query - const [prodInventory] = await prodConnection.query(` + // Get all product data in a single optimized query + const [prodData] = await prodConnection.query(` SELECT p.pid, - COALESCE(si.available_local, 0) as stock_quantity, + p.description AS title, + p.notes AS description, + p.itemnumber AS SKU, + p.date_created, + p.datein AS first_received, + p.location, + p.upc AS barcode, + p.harmonized_tariff_code, + p.stamp AS updated_at, + CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, + CASE + WHEN p.reorder < 0 THEN 0 + WHEN ( + (IFNULL(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURDATE(), INTERVAL 5 YEAR)) + OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) + OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) + ) THEN 0 + ELSE 1 + END AS replenishable, + COALESCE(si.available_local, 0) - COALESCE( + (SELECT SUM(oi.qty_ordered - oi.qty_placed) + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.prod_pid = p.pid + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0 + ), 0 + ) as stock_quantity, + COALESCE( + (SELECT SUM(oi.qty_ordered - oi.qty_placed) + FROM order_items oi + JOIN _order o ON oi.order_id = o.order_id + WHERE oi.prod_pid = p.pid + AND o.date_placed != '0000-00-00 00:00:00' + AND o.date_shipped = '0000-00-00 00:00:00' + AND oi.pick_finished = 0 + AND oi.qty_back = 0 + AND o.order_status != 15 + AND o.order_status < 90 + AND oi.qty_ordered >= oi.qty_placed + AND oi.qty_ordered > 0 + ), 0 + ) as pending_qty, COALESCE(ci.onpreorder, 0) as preorder_count, COALESCE(pnb.inventory, 0) as notions_inv_count, - COALESCE( - ( - SELECT SUM(oi.qty_ordered - oi.qty_placed) - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid = p.pid - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0 - ), 0 - ) as pending_qty - FROM products p - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid - `); - - outputProgress({ - status: "running", - operation: "Products import", - message: `Processing ${prodInventory.length} inventory records` - }); - - // Insert inventory data into local temp table in batches - for (let i = 0; i < prodInventory.length; i += 1000) { - const batch = prodInventory.slice(i, i + 1000); - const values = batch.map(row => [ - row.pid, - Math.max(0, row.stock_quantity - row.pending_qty), // Calculate final stock quantity - row.pending_qty, - row.preorder_count, - row.notions_inv_count, - true // Mark as needing 
update - ]); - - if (values.length > 0) { - await localConnection.query(` - INSERT INTO temp_inventory_status (pid, stock_quantity, pending_qty, preorder_count, notions_inv_count, needs_update) - VALUES ? - ON DUPLICATE KEY UPDATE - stock_quantity = VALUES(stock_quantity), - pending_qty = VALUES(pending_qty), - preorder_count = VALUES(preorder_count), - notions_inv_count = VALUES(notions_inv_count), - needs_update = TRUE - `, [values]); - } - - outputProgress({ - status: "running", - operation: "Products import", - message: `Processed ${Math.min(i + 1000, prodInventory.length)} of ${prodInventory.length} inventory records`, - current: i + batch.length, - total: prodInventory.length - }); - } - - outputProgress({ - status: "running", - operation: "Products import", - message: "Fetching pricing data from production" - }); - - // Get prices from production - const [prodPrices] = await prodConnection.query(` - SELECT - p.pid, COALESCE(pcp.price_each, 0) as price, COALESCE(p.sellingprice, 0) AS regular_price, CASE WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0) THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0) ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1) - END AS cost_price + END AS cost_price, + NULL as landing_cost_price, + s.companyname AS vendor, + CASE + WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber + ELSE sid.supplier_itemnumber + END AS vendor_reference, + sid.notions_itemnumber AS notions_reference, + CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, + pc1.name AS brand, + pc2.name AS line, + pc3.name AS subline, + pc4.name AS artist, + COALESCE(CASE + WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit + ELSE sid.supplier_qty_per_unit + END, sid.notions_qty_per_unit) AS moq, + p.rating, + p.rating_votes AS reviews, + p.weight, + p.length, + p.width, + p.height, + p.country_of_origin, + (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, + (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, + p.totalsold AS total_sold, + pls.date_sold as date_last_sold, + GROUP_CONCAT(DISTINCT CASE + WHEN pc.cat_id IS NOT NULL + AND pc.type IN (10, 20, 11, 21, 12, 13) + AND pci.cat_id NOT IN (16, 17) + THEN pci.cat_id + END) as category_ids FROM products p - LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid - WHERE pcp.active = 1 - `); + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 + LEFT JOIN supplier_item_data sid ON p.pid = sid.pid + LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid + LEFT JOIN product_category_index pci ON p.pid = pci.pid + LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id + LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id + LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id + LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id + LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + WHERE ${incrementalUpdate ? ` + p.stamp > ? OR + ci.stamp > ? OR + pcp.date_deactive > ? OR + pcp.date_active > ? OR + pnb.date_updated > ? + ` : 'TRUE'} + GROUP BY p.pid + `, incrementalUpdate ? 
[lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []); outputProgress({ status: "running", operation: "Products import", - message: `Processing ${prodPrices.length} price records` + message: `Processing ${prodData.length} product records` }); - // Insert prices into local temp table in batches - for (let i = 0; i < prodPrices.length; i += 1000) { - const batch = prodPrices.slice(i, i + 1000); + // Insert all product data into temp table in batches + for (let i = 0; i < prodData.length; i += 1000) { + const batch = prodData.slice(i, i + 1000); const values = batch.map(row => [ row.pid, + row.title, + row.description, + row.SKU, + // Set stock quantity to 0 if it's over 5000 + row.stock_quantity > 5000 ? 0 : Math.max(0, row.stock_quantity), + row.pending_qty, + row.preorder_count, + row.notions_inv_count, row.price, row.regular_price, row.cost_price, + row.vendor, + row.vendor_reference, + row.notions_reference, + row.brand, + row.line, + row.subline, + row.artist, + row.category_ids, + row.date_created, // map to created_at + row.first_received, + row.landing_cost_price, + row.barcode, + row.harmonized_tariff_code, + row.updated_at, + row.visible, + row.replenishable, + row.permalink, + row.moq, + row.rating ? Number(row.rating).toFixed(2) : null, + row.reviews, + row.weight, + row.length, + row.width, + row.height, + row.country_of_origin, + row.location, + row.total_sold, + row.baskets, + row.notifies, + row.date_last_sold, true // Mark as needing update ]); if (values.length > 0) { await localConnection.query(` - INSERT INTO temp_product_prices (pid, price, regular_price, cost_price, needs_update) + INSERT INTO temp_products ( + pid, title, description, SKU, + stock_quantity, pending_qty, preorder_count, notions_inv_count, + price, regular_price, cost_price, + vendor, vendor_reference, notions_reference, + brand, line, subline, artist, + category_ids, created_at, first_received, + landing_cost_price, barcode, harmonized_tariff_code, + updated_at, visible, replenishable, permalink, + moq, rating, reviews, weight, length, width, + height, country_of_origin, location, total_sold, + baskets, notifies, date_last_sold, needs_update + ) VALUES ? 
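+          -- bulk form: mysql2 expands the single ? after VALUES from the
+          -- nested array ([[row1...], [row2...]]) into (v1, v2, ...) tuples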
@@ -163,9 +315,9 @@ async function materializeCalculations(prodConnection, localConnection) {
     outputProgress({
       status: "running",
       operation: "Products import",
-      message: `Processed ${Math.min(i + 1000, prodPrices.length)} of ${prodPrices.length} price records`,
+      message: `Processed ${Math.min(i + 1000, prodData.length)} of ${prodData.length} product records`,
       current: i + batch.length,
-      total: prodPrices.length
+      total: prodData.length
     });
   }

@@ -200,263 +352,32 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
   console.log('Products: Using last sync time:', lastSyncTime);

   // Setup temporary tables
-  await setupTemporaryTables(localConnection);
+  await setupAndCleanupTempTables(localConnection, 'setup');

-  // Materialize calculations
-  await materializeCalculations(prodConnection, localConnection);
+  // Materialize calculations - this will populate temp_products
+  await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime);

-  // Optimized count query for changes since last sync
-  const [countResult] = await prodConnection.query(`
-    SELECT COUNT(*) as total
-    FROM products p
-    LEFT JOIN current_inventory ci ON p.pid = ci.pid
-    LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
-    LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
-    LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
-    LEFT JOIN product_last_sold pls ON p.pid = pls.pid
-    WHERE ${incrementalUpdate ? `
-      p.stamp > ? OR
-      ci.stamp > ? OR
-      pcp.date_deactive > ? OR
-      pcp.date_active > ? OR
-      sid.stamp > ? OR
-      pnb.date_updated > ? OR
-      pls.date_sold > ?
-    ` : 'TRUE'}
-  `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
-
-  console.log('Products: Found changes:', countResult[0].total);
-
-  const totalProducts = countResult[0].total;
-
-  // Main product query using materialized data - modified for incremental
-  outputProgress({
-    status: "running",
-    operation: "Products import",
-    message: `Fetching ${incrementalUpdate ? 'updated' : 'all'} product data from production`
-  });
-
-  // Create temporary table for production data
-  await localConnection.query(`
-    CREATE TEMPORARY TABLE temp_prod_data (
-      pid BIGINT NOT NULL,
-      title VARCHAR(255),
-      description TEXT,
-      SKU VARCHAR(50),
-      date_created TIMESTAMP NULL,
-      first_received TIMESTAMP NULL,
-      location VARCHAR(50),
-      barcode VARCHAR(50),
-      harmonized_tariff_code VARCHAR(20),
-      updated_at TIMESTAMP,
-      visible BOOLEAN,
-      replenishable BOOLEAN,
-      vendor VARCHAR(100),
-      vendor_reference VARCHAR(100),
-      notions_reference VARCHAR(100),
-      brand VARCHAR(100),
-      line VARCHAR(100),
-      subline VARCHAR(100),
-      artist VARCHAR(100),
-      landing_cost_price DECIMAL(10,2) DEFAULT NULL,
-      permalink VARCHAR(255) DEFAULT NULL,
-      options TEXT DEFAULT NULL,
-      tags TEXT DEFAULT NULL,
-      uom VARCHAR(50) DEFAULT NULL,
-      baskets INT DEFAULT 0,
-      notifies INT DEFAULT 0,
-      moq INT,
-      rating TINYINT UNSIGNED,
-      reviews INT UNSIGNED,
-      weight DECIMAL(10,3),
-      length DECIMAL(10,3),
-      width DECIMAL(10,3),
-      height DECIMAL(10,3),
-      total_sold INT UNSIGNED,
-      country_of_origin VARCHAR(5),
-      date_last_sold DATE,
-      category_ids TEXT,
-      needs_update BOOLEAN DEFAULT TRUE,
-      PRIMARY KEY (pid)
-    ) ENGINE=InnoDB
-  `);
-
-  // Get data from production and insert into temp table
-  const [prodData] = await prodConnection.query(`
-    SELECT
-      p.pid,
-      p.description AS title,
-      p.notes AS description,
-      p.itemnumber AS SKU,
-      p.date_created,
-      p.datein AS first_received,
-      p.location,
-      p.upc AS barcode,
-      p.harmonized_tariff_code,
-      p.stamp AS updated_at,
-      CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible,
-      CASE
-        WHEN p.reorder < 0 THEN 0
-        WHEN (
-          ((p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
-          AND (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR)))
-        ) THEN 0
-        ELSE 1
-      END AS replenishable,
-      s.companyname AS vendor,
-      CASE WHEN s.companyname = 'Notions'
-        THEN sid.notions_itemnumber
-        ELSE sid.supplier_itemnumber
-      END AS vendor_reference,
-      sid.notions_itemnumber AS notions_reference,
-      pc1.name AS brand,
-      pc2.name AS line,
-      pc3.name AS subline,
-      pc4.name AS artist,
-      NULL AS landing_cost_price,
-      CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
-      NULL AS options,
-      NULL AS tags,
-      NULL AS uom,
-      (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets,
-      (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies,
-      COALESCE(CASE
-        WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
-        ELSE sid.supplier_qty_per_unit
-      END, sid.notions_qty_per_unit) AS moq,
-      p.rating,
-      p.rating_votes AS reviews,
-      p.weight,
-      p.length,
-      p.width,
-      p.height,
-      p.totalsold AS total_sold,
-      p.country_of_origin,
-      pls.date_sold as date_last_sold,
-      GROUP_CONCAT(DISTINCT pci.cat_id) as category_ids,
-      true // needs_update
-    FROM products p
-    LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
-    LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
-    LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
-    LEFT JOIN product_category_index pci ON p.pid = pci.pid
-    LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id
-    LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id
-    LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
-    LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
-    LEFT JOIN product_last_sold pls ON p.pid = pls.pid
-    LEFT JOIN current_inventory ci ON p.pid = ci.pid
-    LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
-    LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
-    WHERE ${incrementalUpdate ? `
-      p.stamp > ? OR
-      ci.stamp > ? OR
-      pcp.date_deactive > ? OR
-      pcp.date_active > ? OR
-      sid.stamp > ? OR
-      pnb.date_updated > ? OR
-      pls.date_sold > ?
-    ` : 'TRUE'}
-    GROUP BY p.pid
-  `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
-
-  // Insert production data in batches, but only for products that need updates
-  for (let i = 0; i < prodData.length; i += 1000) {
-    const batch = prodData.slice(i, i + 1000);
-    const placeholders = batch.map(() => `(${Array(31).fill("?").join(",")})`).join(",");
-
-    // Map each row to exactly match our temp table columns
-    const values = batch.flatMap(row => [
-      row.pid,
-      row.title,
-      row.description,
-      row.SKU,
-      row.date_created,
-      row.first_received,
-      row.location,
-      row.barcode,
-      row.harmonized_tariff_code,
-      row.updated_at,
-      row.visible,
-      row.replenishable,
-      row.vendor,
-      row.vendor_reference,
-      row.notions_reference,
-      row.brand,
-      row.line,
-      row.subline,
-      row.artist,
-      row.landing_cost_price,
-      row.permalink,
-      row.options,
-      row.tags,
-      row.uom,
-      row.baskets,
-      row.notifies,
-      row.moq,
-      row.rating,
-      row.reviews,
-      row.weight,
-      row.length,
-      row.width,
-      row.height,
-      row.total_sold,
-      row.country_of_origin,
-      row.date_last_sold,
-      row.category_ids,
-      true // needs_update
-    ]);
-
-    await localConnection.query(`
-      INSERT INTO temp_prod_data VALUES ${placeholders}
-    `, values);
-
-    outputProgress({
-      status: "running",
-      operation: "Products import",
-      message: `Loaded ${Math.min(i + 1000, prodData.length)} of ${prodData.length} products from production`,
-      current: i + batch.length,
-      total: prodData.length
-    });
-  }
-
-  // Now join with local temp tables and process in batches, but only for products that need updates
-  const BATCH_SIZE = 2500;
-  let processed = 0;
-  let recordsAdded = 0;
-  let recordsUpdated = 0;
-
   // Get actual count from temp table - only count products that need updates
   const [[{ actualTotal }]] = await localConnection.query(`
-    SELECT COUNT(DISTINCT p.pid) as actualTotal
-    FROM temp_prod_data p
-    LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
-    LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
-    WHERE p.needs_update = 1
-      OR tis.needs_update = 1
-      OR tpp.needs_update = 1
+    SELECT COUNT(DISTINCT pid) as actualTotal
+    FROM temp_products
+    WHERE needs_update = 1
   `);
+
+  console.log('Products: Found changes:', actualTotal);
+
+  // Process in batches
+  const BATCH_SIZE = 5000;
+  let processed = 0;

   while (processed < actualTotal) {
     const [batch] = await localConnection.query(`
-      SELECT
-        p.*,
-        COALESCE(tis.stock_quantity, 0) as stock_quantity,
-        COALESCE(tis.preorder_count, 0) as preorder_count,
-        COALESCE(tis.notions_inv_count, 0) as notions_inv_count,
-        COALESCE(tpp.price, 0) as price,
-        COALESCE(tpp.regular_price, 0) as regular_price,
-        COALESCE(tpp.cost_price, 0) as cost_price
-      FROM temp_prod_data p
-      LEFT JOIN temp_inventory_status tis ON p.pid = tis.pid
-      LEFT JOIN temp_product_prices tpp ON p.pid = tpp.pid
-      WHERE p.needs_update = 1
-        OR tis.needs_update = 1
-        OR tpp.needs_update = 1
+      SELECT * FROM temp_products
+      WHERE needs_update = 1
       LIMIT ? OFFSET ?
     `, [BATCH_SIZE, processed]);
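
One caveat with the `LIMIT ? OFFSET ?` loop above: MySQL still reads and discards every skipped row, so later batches get slower as `processed` grows. A keyset variant, sketched under the assumption that `temp_products` keeps `pid` as its primary key and runs inside the same async import function, seeks instead of skipping:

// Sketch only: keyset (seek) pagination over the staging table.
let lastPid = 0;
while (true) {
  const [batch] = await localConnection.query(`
    SELECT * FROM temp_products
    WHERE needs_update = 1 AND pid > ?
    ORDER BY pid
    LIMIT ?
  `, [lastPid, BATCH_SIZE]);
  if (batch.length === 0) break;
  lastPid = batch[batch.length - 1].pid; // seek past the processed rows
  // ...same per-batch processing as below...
}
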

-    if (!batch || batch.length === 0) break; // Exit if no more records
+    if (!batch || batch.length === 0) break;

     // Add image URLs
     batch.forEach(row => {
@@ -467,25 +388,14 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
     });

     if (batch.length > 0) {
-      // MySQL 8.0 optimized insert with proper placeholders
-      const placeholderGroup = `(${Array(columnNames.length).fill("?").join(",")})`;
-
-      // First check which products already exist and get their current values
+      // Get existing products in one query
       const [existingProducts] = await localConnection.query(
         `SELECT ${columnNames.join(',')} FROM products WHERE pid IN (?)`,
         [batch.map(p => p.pid)]
       );
       const existingPidsMap = new Map(existingProducts.map(p => [p.pid, p]));

-      // Helper function to map values consistently
-      const mapValues = (product) => columnNames.map(col => {
-        const val = product[col] ?? null;
-        if (col === "managing_stock") return 1;
-        if (typeof val === "number") return val || 0;
-        return val;
-      });
-
-      // Split into inserts and updates, comparing values for updates
+      // Split into inserts and updates
       const insertsAndUpdates = batch.reduce((acc, product) => {
         if (existingPidsMap.has(product.pid)) {
           const existing = existingPidsMap.get(product.pid);
@@ -493,119 +403,114 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
           const hasChanges = columnNames.some(col => {
             const newVal = product[col] ?? null;
             const oldVal = existing[col] ?? null;
-            // Special handling for numbers to avoid type coercion issues
+            if (col === "managing_stock") return false; // Skip this as it's always 1
             if (typeof newVal === 'number' && typeof oldVal === 'number') {
-              // Handle NaN and Infinity
-              if (isNaN(newVal) || isNaN(oldVal)) return isNaN(newVal) !== isNaN(oldVal);
-              if (!isFinite(newVal) || !isFinite(oldVal)) return !isFinite(newVal) !== !isFinite(oldVal);
-              // Allow for tiny floating point differences
               return Math.abs(newVal - oldVal) > 0.00001;
             }
-            if (col === 'managing_stock') return false; // Skip this as it's always 1
             return newVal !== oldVal;
           });
           if (hasChanges) {
-            acc.updates.push({
-              pid: product.pid,
-              values: mapValues(product)
-            });
+            acc.updates.push(product);
           }
         } else {
-          acc.inserts.push({
-            pid: product.pid,
-            values: mapValues(product)
-          });
+          acc.inserts.push(product);
         }
         return acc;
       }, { inserts: [], updates: [] });

-      // Log summary for this batch
-      if (insertsAndUpdates.inserts.length > 0 || insertsAndUpdates.updates.length > 0) {
-        console.log(`Batch summary: ${insertsAndUpdates.inserts.length} new products, ${insertsAndUpdates.updates.length} updates`);
-      }
-
-      // Handle inserts
+      // Process inserts
       if (insertsAndUpdates.inserts.length > 0) {
-        const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
-
+        const insertValues = insertsAndUpdates.inserts.map(product =>
+          columnNames.map(col => {
+            const val = product[col] ?? null;
+            if (col === "managing_stock") return 1;
+            return val;
+          })
+        );
+
+        const insertPlaceholders = insertsAndUpdates.inserts
+          .map(() => `(${Array(columnNames.length).fill('?').join(',')})`)
+          .join(',');
+
         const insertResult = await localConnection.query(`
-          INSERT INTO products (${columnNames.join(",")})
+          INSERT INTO products (${columnNames.join(',')})
           VALUES ${insertPlaceholders}
-        `, insertsAndUpdates.inserts.map(i => i.values).flat());
-
+        `, insertValues.flat());
+
         recordsAdded += insertResult[0].affectedRows;
       }

-      // Handle updates - now we know these actually have changes
+      // Process updates
       if (insertsAndUpdates.updates.length > 0) {
-        const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
-
+        const updateValues = insertsAndUpdates.updates.map(product =>
+          columnNames.map(col => {
+            const val = product[col] ?? null;
+            if (col === "managing_stock") return 1;
+            return val;
+          })
+        );
+
+        const updatePlaceholders = insertsAndUpdates.updates
+          .map(() => `(${Array(columnNames.length).fill('?').join(',')})`)
+          .join(',');
+
         const updateResult = await localConnection.query(`
-          INSERT INTO products (${columnNames.join(",")})
+          INSERT INTO products (${columnNames.join(',')})
           VALUES ${updatePlaceholders}
           ON DUPLICATE KEY UPDATE
-          ${columnNames
-            .filter(col => col !== "pid")
-            .map(col => `${col} = VALUES(${col})`)
-            .join(",")};
-        `, insertsAndUpdates.updates.map(u => u.values).flat());
-
+          ${columnNames
+            .filter(col => col !== 'pid')
+            .map(col => `${col} = VALUES(${col})`)
+            .join(',')};
+        `, updateValues.flat());
+
         recordsUpdated += insertsAndUpdates.updates.length;
       }
-    }

-      // Insert category relationships
-      const categoryRelationships = [];
-      batch.forEach(row => {
-        if (row.category_ids) {
-          const catIds = row.category_ids
-            .split(",")
-            .map(id => id.trim())
-            .filter(id => id)
-            .map(Number);
-
-          catIds.forEach(catId => {
-            if (catId) categoryRelationships.push([row.pid, catId]);
-          });
-        }
-      });
+      // Process category relationships
+      if (batch.some(p => p.category_ids)) {
+        const categoryRelationships = batch
+          .filter(p => p.category_ids)
+          .flatMap(product =>
+            product.category_ids
+              .split(',')
+              .map(id => id.trim())
+              .filter(id => id)
+              .map(Number)
+              .filter(id => !isNaN(id))
+              .map(catId => [catId, product.pid])
+          );

-      if (categoryRelationships.length > 0) {
-        // First verify categories exist
-        const uniqueCatIds = [...new Set(categoryRelationships.map(([_, catId]) => catId))];
-        const [existingCats] = await localConnection.query(
-          "SELECT cat_id FROM categories WHERE cat_id IN (?)",
-          [uniqueCatIds]
-        );
-        const existingCatIds = new Set(existingCats.map(c => c.cat_id));
+        if (categoryRelationships.length > 0) {
+          // Verify categories exist before inserting relationships
+          const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))];
+          const [existingCats] = await localConnection.query(
+            "SELECT cat_id FROM categories WHERE cat_id IN (?)",
+            [uniqueCatIds]
+          );
+          const existingCatIds = new Set(existingCats.map(c => c.cat_id));

-        // Filter relationships to only include existing categories
-        const validRelationships = categoryRelationships.filter(([_, catId]) =>
-          existingCatIds.has(catId)
-        );
+          // Filter relationships to only include existing categories
+          const validRelationships = categoryRelationships.filter(([catId]) =>
+            existingCatIds.has(catId)
+          );

-        if (validRelationships.length > 0) {
-          // Delete existing relationships for these products first
-          await localConnection.query(
-            "DELETE FROM product_categories WHERE pid IN (?)",
-            [batch.map(p => p.pid)]
-          );
-
-          // Insert new relationships using INSERT IGNORE
-          const catPlaceholders = validRelationships
-            .map(() => "(?, ?)")
-            .join(",");
-
-          await localConnection.query(
-            `INSERT IGNORE INTO product_categories (pid, cat_id)
-            VALUES ${catPlaceholders}`,
-            validRelationships.flat()
-          );
+          if (validRelationships.length > 0) {
+            const catPlaceholders = validRelationships
+              .map(() => "(?, ?)")
+              .join(",");
+            await localConnection.query(
+              `INSERT IGNORE INTO product_categories (cat_id, pid)
+              VALUES ${catPlaceholders}`,
+              validRelationships.flat()
+            );
+          }
        }
      }
    }

-    processed += batch.length; // Only increment by actual records processed
+    processed += batch.length;

    outputProgress({
      status: "running",
@@ -617,15 +522,10 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
      remaining: estimateRemaining(startTime, processed, actualTotal),
      rate: calculateRate(startTime, processed)
    });
-
-    // Force garbage collection between batches
-    if (global.gc) {
-      global.gc();
-    }
  }

  // Drop temporary tables
-  await cleanupTemporaryTables(localConnection);
+  await setupAndCleanupTempTables(localConnection, 'cleanup');

  // Only update sync status if we get here (no errors thrown)
  await localConnection.query(`
@@ -668,30 +568,6 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
      p.date_created,
      p.datein AS first_received,
      p.location,
-      COALESCE(si.available_local, 0) - COALESCE(
-        (SELECT SUM(oi.qty_ordered - oi.qty_placed)
-         FROM order_items oi
-         JOIN _order o ON oi.order_id = o.order_id
-         WHERE oi.prod_pid = p.pid
-         AND o.date_placed != '0000-00-00 00:00:00'
-         AND o.date_shipped = '0000-00-00 00:00:00'
-         AND oi.pick_finished = 0
-         AND oi.qty_back = 0
-         AND o.order_status != 15
-         AND o.order_status < 90
-         AND oi.qty_ordered >= oi.qty_placed
-         AND oi.qty_ordered > 0), 0
-      ) as stock_quantity,
-      COALESCE(ci.onpreorder, 0) as preorder_count,
-      COALESCE(pnb.inventory, 0) as notions_inv_count,
-      COALESCE(pcp.price_each, 0) as price,
-      COALESCE(p.sellingprice, 0) AS regular_price,
-      CASE
-        WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
-        THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0)
-        ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
-      END AS cost_price,
-      NULL AS landing_cost_price,
      p.upc AS barcode,
      p.harmonized_tariff_code,
      p.stamp AS updated_at,
@@ -705,21 +581,18 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
      ) THEN 0
      ELSE 1
      END AS replenishable,
-      s.companyname AS vendor,
-      CASE WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber ELSE sid.supplier_itemnumber END AS vendor_reference,
-      sid.notions_itemnumber AS notions_reference,
-      CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
-      pc1.name AS brand,
-      pc2.name AS line,
-      pc3.name AS subline,
-      pc4.name AS artist,
-      NULL AS options,
-      NULL AS tags,
-      COALESCE(CASE
-        WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
-        ELSE sid.supplier_qty_per_unit
-      END, sid.notions_qty_per_unit) AS moq,
-      NULL AS uom,
+      COALESCE(si.available_local, 0) as stock_quantity,
+      COALESCE(pq.qty, 0) as pending_qty,
+      COALESCE(ci.onpreorder, 0) as preorder_count,
+      COALESCE(pnb.inventory, 0) as notions_inv_count,
+      COALESCE(pcp.price_each, 0) as price,
+      COALESCE(p.sellingprice, 0) AS regular_price,
+      CASE
+        WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
+        THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0)
+        ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
+      END AS cost_price,
+      NULL AS landing_cost_price,
      p.rating,
      p.rating_votes AS reviews,
      p.weight,
@@ -786,7 +659,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
      ON DUPLICATE KEY UPDATE ${columnNames
        .filter((col) => col !== "pid")
        .map((col) => `${col} = VALUES(${col})`)
-        .join(",")}
+        .join(",")};
    `;

    const result = await localConnection.query(query, productValues);
@@ -849,4 +722,4 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
 module.exports = {
  importProducts,
  importMissingProducts
-};
\ No newline at end of file
+};
\ No newline at end of file

From 50b86d6d8adfd2433fbdeac234088155eeab82b0 Mon Sep 17 00:00:00 2001
From: Matt
Date: Sat, 1 Feb 2025 10:51:47 -0500
Subject: [PATCH 29/33] Fix/add data to PO script

---
 inventory-server/db/schema.sql                |   2 +
 inventory-server/scripts/import-from-prod.js  |   6 +-
 .../scripts/import/purchase-orders.js         | 114 +++++++++++++-----
 3 files changed, 91 insertions(+), 31 deletions(-)

diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql
index 38e2531..a202739 100644
--- a/inventory-server/db/schema.sql
+++ b/inventory-server/db/schema.sql
@@ -137,7 +137,9 @@ CREATE TABLE purchase_orders (
   expected_date DATE,
   pid BIGINT NOT NULL,
   sku VARCHAR(50) NOT NULL,
+  name VARCHAR(100) NOT NULL COMMENT 'Product name from products.description',
   cost_price DECIMAL(10, 3) NOT NULL,
+  po_cost_price DECIMAL(10, 3) NOT NULL COMMENT 'Original cost from PO, before receiving adjustments',
   status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done',
   receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid',
   notes TEXT,
diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js
index 67b6678..f5bc680 100644
--- a/inventory-server/scripts/import-from-prod.js
+++ b/inventory-server/scripts/import-from-prod.js
@@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders');
 dotenv.config({ path: path.join(__dirname, "../.env") });

 // Constants to control which imports run
-const IMPORT_CATEGORIES = true;
-const IMPORT_PRODUCTS = true;
-const IMPORT_ORDERS = true;
+const IMPORT_CATEGORIES = false;
+const IMPORT_PRODUCTS = false;
+const IMPORT_ORDERS = false;
 const IMPORT_PURCHASE_ORDERS = true;

 // Add flag for incremental updates
diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js
index b492e9e..7dacde7 100644
--- a/inventory-server/scripts/import/purchase-orders.js
+++ b/inventory-server/scripts/import/purchase-orders.js
@@ -108,31 +108,62 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         NULLIF(s2.companyname, ''),
         'Unknown Vendor'
       ) as vendor,
-      CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_ordered) END as date,
-      CASE WHEN p.po_id IS NOT NULL THEN DATE(p.date_estin) END as expected_date,
+      CASE
+        WHEN p.po_id IS NOT NULL THEN
+          DATE(COALESCE(
+            NULLIF(p.date_ordered, '0000-00-00 00:00:00'),
+            p.date_created
+          ))
+        WHEN r.receiving_id IS NOT NULL THEN
+          DATE(r.date_created)
+      END as date,
+      NULLIF(p.date_estin, '0000-00-00') as expected_date,
       COALESCE(p.status, 50) as status,
-      COALESCE(p.short_note, '') as notes,
-      COALESCE(p.notes, '') as long_note
+      p.short_note as notes,
+      p.notes as long_note
     FROM (
       SELECT po_id
       FROM po USE INDEX (idx_date_created)
       WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
-        AND (date_ordered > ?
-          OR date_updated > ?)
+        ${incrementalUpdate ? `
+          AND (
+            date_ordered > ?
+            OR date_updated > ?
+            OR date_estin > ?
+          )
+        ` : ''}
       UNION
       SELECT DISTINCT r.receiving_id as po_id
       FROM receivings r
       JOIN receivings_products rp USE INDEX (received_date)
         ON r.receiving_id = rp.receiving_id
       WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
-        AND (rp.received_date > ?
-          OR rp.stamp > ?)
+        ${incrementalUpdate ? `
+          AND (
+            r.date_created > ?
+            OR r.date_checked > ?
+            OR rp.stamp > ?
+            OR rp.received_date > ?
+          )
+        ` : ''}
     ) ids
     LEFT JOIN po p ON ids.po_id = p.po_id
     LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid
     LEFT JOIN receivings r ON ids.po_id = r.receiving_id
     LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid
     ORDER BY po_id
-  `, [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]);
+  `, incrementalUpdate ? [
+    lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
+    lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
+  ] : []);
+
+  console.log('Sample PO dates:', poList.slice(0, 5).map(po => ({
+    po_id: po.po_id,
+    raw_date_ordered: po.raw_date_ordered,
+    raw_date_created: po.raw_date_created,
+    raw_date_estin: po.raw_date_estin,
+    computed_date: po.date,
+    expected_date: po.expected_date
+  })));

   const totalItems = total;
   let processed = 0;
@@ -156,7 +187,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         pop.po_id,
         pop.pid,
         pr.itemnumber as sku,
-        pop.cost_each as cost_price,
+        pr.description as name,
+        pop.cost_each,
         pop.qty_each as ordered
       FROM po_products pop
       USE INDEX (PRIMARY)
@@ -171,7 +203,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
     const productPids = [...new Set(productBatch.map(p => p.pid))];
     const batchPoIds = [...new Set(productBatch.map(p => p.po_id))];

-    // Get receivings for this batch
+    // Get receivings for this batch with employee names
     const [receivings] = await prodConnection.query(`
       SELECT
         r.po_id,
         rp.pid,
         rp.receiving_id,
         rp.qty_each,
         rp.cost_each,
-        DATE(NULLIF(rp.received_date, '0000-00-00 00:00:00')) as received_date,
+        COALESCE(rp.received_date, r.date_created) as received_date,
         rp.received_by,
+        CONCAT(e.firstname, ' ', e.lastname) as received_by_name,
         CASE
           WHEN r.po_id IS NULL THEN 2  -- No PO
           WHEN r.po_id IN (?) THEN 0  -- Original PO
           ELSE 1
         END as type
       FROM receivings_products rp
       USE INDEX (received_date)
       LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
+      LEFT JOIN employees e ON rp.received_by = e.employeeid
       WHERE rp.pid IN (?)
-        AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+        AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
       ORDER BY r.po_id, rp.pid, rp.received_date
     `, [batchPoIds, productPids]);
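
The fulfillment code below applies these receivings to each PO line oldest-first by received date. Stripped of the cost and progress bookkeeping, the core FIFO allocation reduces to the sketch below (names hypothetical, receivings assumed pre-sorted by received_date ascending):

// Sketch only: FIFO allocation of received quantities against an order.
function allocateFifo(orderedQty, receivings) {
  let remaining = orderedQty;
  let totalReceived = 0;
  const applied = [];
  for (const r of receivings) {
    const qty = Math.min(remaining, r.qty_each);
    if (qty > 0) {
      applied.push({ receiving_id: r.receiving_id, qty, cost: r.cost_each });
      remaining -= qty;
    }
    totalReceived += r.qty_each;
  }
  return { applied, remaining, excess: Math.max(0, totalReceived - orderedQty) };
}
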
@@ -235,10 +269,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
     );
     const validPids = new Set(existingPids.map(p => p.pid));

-    // Prepare values for this sub-batch
-    const values = [];
-    let batchProcessed = 0;
-
     // First check which PO lines already exist and get their current values
     const poLines = Array.from(poProductMap.values())
       .filter(p => validPids.has(p.pid))
@@ -254,6 +284,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental

     // Split into inserts and updates
     const insertsAndUpdates = { inserts: [], updates: [] };
+    let batchProcessed = 0;

     for (const po of batch) {
       const poProducts = Array.from(poProductMap.values())
@@ -270,16 +301,29 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           ...receivingHistory.map(r => ({ ...r, type: 'original' })),
           ...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })),
           ...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' }))
-        ].sort((a, b) => new Date(a.received_date) - new Date(b.received_date));
+        ].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31'));
+
+        // Split receivings into original PO and others
+        const originalPOReceivings = allReceivings.filter(r => r.type === 'original');
+        const otherReceivings = allReceivings.filter(r => r.type !== 'original');

         // Track FIFO fulfillment
         let remainingToFulfill = product.ordered;
         const fulfillmentTracking = [];
         let totalReceived = 0;
+        let actualCost = null; // Will store the cost of the first receiving that fulfills this PO
+        let firstFulfillmentReceiving = null;
+        let lastFulfillmentReceiving = null;

         for (const receiving of allReceivings) {
           const qtyToApply = Math.min(remainingToFulfill, receiving.qty_each);
           if (qtyToApply > 0) {
+            // If this is the first receiving being applied, use its cost
+            if (actualCost === null) {
+              actualCost = receiving.cost_each;
+              firstFulfillmentReceiving = receiving;
+            }
+            lastFulfillmentReceiving = receiving;
             fulfillmentTracking.push({
               receiving_id: receiving.receiving_id,
               qty_applied: qtyToApply,
@@ -287,6 +331,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
               cost: receiving.cost_each,
               date: receiving.received_date,
               received_by: receiving.received_by,
+              received_by_name: receiving.received_by_name || 'Unknown',
               type: receiving.type,
               remaining_qty: receiving.qty_each - qtyToApply
             });
@@ -300,29 +345,40 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
               cost: receiving.cost_each,
               date: receiving.received_date,
               received_by: receiving.received_by,
+              received_by_name: receiving.received_by_name || 'Unknown',
               type: receiving.type,
               is_excess: true
             });
           }
           totalReceived += receiving.qty_each;
         }
-
+
         const receiving_status = !totalReceived ? 1 : // created
           remainingToFulfill > 0 ? 30 : // partial
          40; // full

-        const firstReceiving = allReceivings[0] || {};
-        const lastReceiving = allReceivings[allReceivings.length - 1] || {};
+        function formatDate(dateStr) {
+          if (!dateStr) return null;
+          try {
+            const date = new Date(dateStr);
+            if (isNaN(date.getTime())) return null;
+            return date.toISOString().split('T')[0];
+          } catch (e) {
+            return null;
+          }
+        }

         const rowValues = columnNames.map(col => {
           switch (col) {
             case 'po_id': return po.po_id;
             case 'vendor': return po.vendor;
-            case 'date': return po.date;
-            case 'expected_date': return po.expected_date;
+            case 'date': return formatDate(po.date);
+            case 'expected_date': return formatDate(po.expected_date);
             case 'pid': return product.pid;
             case 'sku': return product.sku;
-            case 'cost_price': return product.cost_price;
+            case 'name': return product.name;
+            case 'cost_price': return actualCost || product.cost_each;
+            case 'po_cost_price': return product.cost_each;
             case 'status': return po.status;
             case 'notes': return po.notes;
             case 'long_note': return po.long_note;
@@ -330,16 +386,18 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
             case 'received': return totalReceived;
             case 'unfulfilled': return remainingToFulfill;
             case 'excess_received': return Math.max(0, totalReceived - product.ordered);
-            case 'received_date': return firstReceiving.received_date || null;
-            case 'last_received_date': return lastReceiving.received_date || null;
-            case 'received_by': return firstReceiving.received_by || null;
+            case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date);
+            case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date);
+            case 'received_by': return firstFulfillmentReceiving?.received_by_name || null;
             case 'receiving_status': return receiving_status;
             case 'receiving_history': return JSON.stringify({
               fulfillment: fulfillmentTracking,
               ordered_qty: product.ordered,
               total_received: totalReceived,
               remaining_unfulfilled: remainingToFulfill,
-              excess_received: Math.max(0, totalReceived - product.ordered)
+              excess_received: Math.max(0, totalReceived - product.ordered),
+              po_cost: product.cost_each,
+              actual_cost: actualCost || product.cost_each
             });
             default: return null;
           }

From 2d0089dc5214d0847e8183b3e4345bd7bd746cc8 Mon Sep 17 00:00:00 2001
From: Matt
Date: Sat, 1 Feb 2025 11:03:42 -0500
Subject: [PATCH 30/33] Incremental import order fixes

---
 inventory-server/scripts/import-from-prod.js |  6 +++---
 inventory-server/scripts/import/orders.js    | 21 ++++++++++++++++++++
 2 files changed, 24 insertions(+), 3 deletions(-)

diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js
index f5bc680..67b6678 100644
--- a/inventory-server/scripts/import-from-prod.js
+++ b/inventory-server/scripts/import-from-prod.js
@@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders');
 dotenv.config({ path: path.join(__dirname, "../.env") });

 // Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
-const IMPORT_ORDERS = false;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
+const IMPORT_ORDERS = true;
 const IMPORT_PURCHASE_ORDERS = true;

 // Add flag for incremental updates
diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js
index 442d107..58d9329 100644
--- a/inventory-server/scripts/import/orders.js
+++ b/inventory-server/scripts/import/orders.js
@@ -188,10 +188,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
     // Get unique order IDs
     const orderIds = [...new Set(orderItems.map(item => item.order_id))];
+    console.log('Total unique order IDs:', orderIds.length);

     // Get order metadata in batches
     for (let i = 0; i < orderIds.length; i += 5000) {
       const batchIds = orderIds.slice(i, i + 5000);
+      console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
+      console.log('Sample of batch IDs:', batchIds.slice(0, 5));
+
       const [orders] = await prodConnection.query(`
         SELECT
           o.order_id,
@@ -204,6 +208,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         LEFT JOIN users u ON o.order_cid = u.cid
         WHERE o.order_id IN (?)
       `, [batchIds]);
+
+      console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
+      const duplicates = orders.filter((order, index, self) =>
+        self.findIndex(o => o.order_id === order.order_id) !== index
+      );
+      if (duplicates.length > 0) {
+        console.log('Found duplicates:', duplicates);
+      }

       const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
       const values = orders.flatMap(order => [

       await localConnection.query(`
         INSERT INTO temp_order_meta VALUES ${placeholders}
+        ON DUPLICATE KEY UPDATE
+        date = VALUES(date),
+        customer = VALUES(customer),
+        customer_name = VALUES(customer_name),
+        status = VALUES(status),
+        canceled = VALUES(canceled)
       `, values);

       outputProgress({
@@ -239,6 +257,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         await localConnection.query(`
           INSERT INTO temp_order_discounts VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE
+          discount = VALUES(discount)
         `, values);
       }
     }
@@ -274,6 +294,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
         await localConnection.query(`
           INSERT INTO temp_order_taxes VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE tax = VALUES(tax)
         `, values);
       }
     }
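
For context on the incremental fixes in the next commit: each importer keys off a per-table watermark, filters production rows by timestamp columns newer than it, and advances it only after a clean run so a failed import retries the same window. A sketch of that pattern, assuming the sync_status(table_name, last_sync_timestamp) table these scripts already write to:

// Sketch only: the watermark read/advance behind the incremental imports.
async function getLastSync(conn, tableName) {
  const [rows] = await conn.query(
    'SELECT last_sync_timestamp FROM sync_status WHERE table_name = ?',
    [tableName]
  );
  return rows[0]?.last_sync_timestamp ?? '1970-01-01 00:00:00';
}

async function markSynced(conn, tableName) {
  // Written only after a successful run so failures retry the same window.
  await conn.query(`
    INSERT INTO sync_status (table_name, last_sync_timestamp)
    VALUES (?, NOW())
    ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
  `, [tableName]);
}
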
From 1003ff3cf2f0b365b774f61455b18df21578f0f2 Mon Sep 17 00:00:00 2001
From: Matt
Date: Sat, 1 Feb 2025 11:42:51 -0500
Subject: [PATCH 31/33] More incremental import fixes

---
 inventory-server/scripts/import/orders.js    | 116 ++++++++++--------
 inventory-server/scripts/import/products.js  |  16 ++-
 .../scripts/import/purchase-orders.js        |  10 +-
 3 files changed, 87 insertions(+), 55 deletions(-)

diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js
index 58d9329..af7f307 100644
--- a/inventory-server/scripts/import/orders.js
+++ b/inventory-server/scripts/import/orders.js
@@ -19,6 +19,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
   const missingProducts = new Set();
   let recordsAdded = 0;
   let recordsUpdated = 0;
+  let processedCount = 0;
+  let importedCount = 0;
+  let totalOrderItems = 0;
+  let totalUniqueOrders = 0;

   try {
     // Insert temporary table creation queries
@@ -86,7 +90,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =

     console.log('Orders: Using last sync time:', lastSyncTime);

-    // First get all relevant order items with basic info
+    // First get count of order items
     const [[{ total }]] = await prodConnection.query(`
       SELECT COUNT(*) as total
       FROM order_items oi
@@ -115,7 +119,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       ` : ''}
     `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);

-    console.log('Orders: Found changes:', total);
+    totalOrderItems = total;
+    console.log('Orders: Found changes:', totalOrderItems);

     // Get order items in batches
     const [orderItems] = await prodConnection.query(`
@@ -155,9 +160,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =

     console.log('Orders: Processing', orderItems.length, 'order items');

-    const totalOrders = orderItems.length;
-    let processed = 0;
-
     // Insert order items in batches
     for (let i = 0; i < orderItems.length; i += 5000) {
       const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
@@ -176,19 +178,23 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
           base_discount = VALUES(base_discount)
       `, values);

-      processed += batch.length;
+      processedCount = i + batch.length;
       outputProgress({
         status: "running",
         operation: "Orders import",
-        message: `Loading order items: ${processed} of ${totalOrders}`,
-        current: processed,
-        total: totalOrders
+        message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
+        current: processedCount,
+        total: totalOrderItems
       });
     }

     // Get unique order IDs
     const orderIds = [...new Set(orderItems.map(item => item.order_id))];
-    console.log('Total unique order IDs:', orderIds.length);
+    totalUniqueOrders = orderIds.length;
+    console.log('Total unique order IDs:', totalUniqueOrders);
+
+    // Reset processed count for order processing phase
+    processedCount = 0;

     // Get order metadata in batches
     for (let i = 0; i < orderIds.length; i += 5000) {
@@ -232,15 +238,19 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
           canceled = VALUES(canceled)
       `, values);

+      processedCount = i + orders.length;
       outputProgress({
         status: "running",
         operation: "Orders import",
-        message: `Loading order metadata: ${i + orders.length} of ${orderIds.length}`,
-        current: i + orders.length,
-        total: orderIds.length
+        message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
+        current: processedCount,
+        total: totalUniqueOrders
       });
     }

+    // Reset processed count for final phase
+    processedCount = 0;
+
     // Get promotional discounts in batches
     for (let i = 0; i < orderIds.length; i += 5000) {
       const batchIds = orderIds.slice(i, i + 5000);
@@ -321,8 +331,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
     }

     // Now combine all the data and insert into orders table
-    let importedCount = 0;
-
     // Pre-check all products at once instead of per batch
     const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
     const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
@@ -403,24 +411,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
             return newVal !== oldVal;
           });
           if (hasChanges) {
-            acc.updates.push({
-              order_number: order.order_number,
-              pid: order.pid,
-              values: columnNames.map(col => order[col] ?? null)
-            });
-          } else {
-            acc.inserts.push({
-              order_number: order.order_number,
-              pid: order.pid,
-              values: columnNames.map(col => order[col] ?? null)
-            });
+            acc.updates.push(order);
           }
         } else {
-          acc.inserts.push({
-            order_number: order.order_number,
-            pid: order.pid,
-            values: columnNames.map(col => order[col] ?? null)
-          });
+          acc.inserts.push(order);
         }
         return acc;
       }, { inserts: [], updates: [] });
@@ -432,9 +426,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         const insertResult = await localConnection.query(`
           INSERT INTO orders (${columnNames.join(",")})
           VALUES ${insertPlaceholders}
-        `, insertsAndUpdates.inserts.map(i => i.values).flat());
+        `, insertsAndUpdates.inserts.map(i => columnNames.map(col => i[col] ?? null)).flat());

         recordsAdded += insertResult[0].affectedRows;
+        importedCount += insertResult[0].affectedRows;
       }

       // Handle updates - now we know these actually have changes
@@ -456,24 +451,26 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
           customer = VALUES(customer),
           customer_name = VALUES(customer_name),
           status = VALUES(status),
-          canceled = VALUES(canceled)
-        `, insertsAndUpdates.updates.map(u => u.values).flat());
+          canceled = VALUES(canceled),
+          costeach = VALUES(costeach)
+        `, insertsAndUpdates.updates.map(u => columnNames.map(col => u[col] ?? null)).flat());

         recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
+        importedCount += updateResult[0].affectedRows / 2;
       }
-
-      importedCount += validOrders.length;
     }

+    // Update progress based on batch size - this is the number of order items we've processed
+    processedCount = i + batchIds.length * (totalOrderItems / totalUniqueOrders);
     outputProgress({
       status: "running",
       operation: "Orders import",
-      message: `Imported ${importedCount} of ${totalOrders} orders`,
-      current: importedCount,
-      total: totalOrders,
+      message: `Imported ${Math.floor(importedCount)} orders (${Math.floor(processedCount)} of ${totalOrderItems} items processed)`,
+      current: Math.floor(processedCount),
+      total: totalOrderItems,
       elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-      remaining: estimateRemaining(startTime, importedCount, totalOrders),
-      rate: calculateRate(startTime, importedCount)
+      remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
+      rate: calculateRate(startTime, processedCount)
     });
   }

@@ -577,21 +574,34 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
           INSERT INTO orders (${columnNames.join(", ")})
           VALUES ${skippedPlaceholders}
           ON DUPLICATE KEY UPDATE
-          ${columnNames.map(col => `${col} = VALUES(${col})`).join(", ")}
+          SKU = VALUES(SKU),
+          date = VALUES(date),
+          price = VALUES(price),
+          quantity = VALUES(quantity),
+          discount = VALUES(discount),
+          tax = VALUES(tax),
+          tax_included = VALUES(tax_included),
+          shipping = VALUES(shipping),
+          customer = VALUES(customer),
+          customer_name = VALUES(customer_name),
+          status = VALUES(status),
+          canceled = VALUES(canceled),
+          costeach = VALUES(costeach)
         `;

         // Execute the insert query
         if (skippedOrderValues.length > 0) {
-          await localConnection.query(skippedInsertQuery, skippedOrderValues.flat());
+          const result = await localConnection.query(skippedInsertQuery, skippedOrderValues.flat());
+          const addedOrUpdated = Math.floor(result[0].affectedRows / 2); // Round down to avoid fractional orders
+          importedCount += addedOrUpdated;
+          recordsUpdated += addedOrUpdated;
+
+          outputProgress({
+            status: "running",
+            operation: "Orders import",
+            message: `Successfully imported ${addedOrUpdated} previously skipped orders`,
+          });
         }
-
-        importedCount += skippedProdOrders.length;
-
-        outputProgress({
-          status: "running",
-          operation: "Orders import",
-          message: `Successfully imported ${skippedProdOrders.length} previously skipped orders`,
-        });
       }
     } catch (error) {
       console.warn('Warning: Failed to import missing products:', error.message);
@@ -608,9 +618,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =

     return {
       status: "complete",
-      totalImported: importedCount,
+      totalImported: Math.floor(importedCount), // Round down to avoid fractional orders
       recordsAdded: recordsAdded || 0,
-      recordsUpdated: recordsUpdated || 0,
+      recordsUpdated: Math.floor(recordsUpdated), // Round down to avoid fractional orders
       totalSkipped: skippedOrders.size,
       missingProducts: missingProducts.size,
       incrementalUpdate,
diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js
index 5986858..979cca7 100644
--- a/inventory-server/scripts/import/products.js
+++ b/inventory-server/scripts/import/products.js
@@ -582,7 +582,21 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
       ELSE 1
       END AS replenishable,
       COALESCE(si.available_local, 0) as stock_quantity,
-      COALESCE(pq.qty, 0) as pending_qty,
+      COALESCE(
+        (SELECT SUM(oi.qty_ordered - oi.qty_placed)
+         FROM order_items oi
+         JOIN _order o ON oi.order_id = o.order_id
+         WHERE oi.prod_pid = p.pid
+         AND o.date_placed != '0000-00-00 00:00:00'
+         AND o.date_shipped = '0000-00-00 00:00:00'
+         AND oi.pick_finished = 0
+         AND oi.qty_back = 0
+         AND o.order_status != 15
+         AND o.order_status < 90
+         AND oi.qty_ordered >= oi.qty_placed
+         AND oi.qty_ordered > 0
+        ), 0
+      ) as pending_qty,
       COALESCE(ci.onpreorder, 0) as preorder_count,
       COALESCE(pnb.inventory, 0) as notions_inv_count,
       COALESCE(pcp.price_each, 0) as price,
diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js
index 7dacde7..cd87139 100644
--- a/inventory-server/scripts/import/purchase-orders.js
+++ b/inventory-server/scripts/import/purchase-orders.js
@@ -117,7 +117,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         WHEN r.receiving_id IS NOT NULL THEN
           DATE(r.date_created)
       END as date,
-      NULLIF(p.date_estin, '0000-00-00') as expected_date,
+      CASE
+        WHEN p.date_estin = '0000-00-00' THEN NULL
+        WHEN p.date_estin IS NULL THEN NULL
+        WHEN p.date_estin NOT REGEXP '^[0-9]{4}-[0-9]{2}-[0-9]{2}$' THEN NULL
+        ELSE p.date_estin
+      END as expected_date,
       COALESCE(p.status, 50) as status,
       p.short_note as notes,
       p.notes as long_note
@@ -359,9 +364,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         function formatDate(dateStr) {
           if (!dateStr) return null;
+          if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null;
+          if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null;
           try {
             const date = new Date(dateStr);
             if (isNaN(date.getTime())) return null;
+            if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null;
             return date.toISOString().split('T')[0];
           } catch (e) {
             return null;
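
The next commit derives insert/update counts from bulk-upsert results. MySQL reports one affected row per inserted row, two per duplicate that actually changed, and zero per duplicate left untouched, so the `Math.floor(affectedRows / 2)` split it uses is a heuristic. When the batch size is known and unchanged duplicates are assumed away, the split is exact, as in this sketch:

// Sketch only: affectedRows = inserts + 2 * updates and
// inserts + updates = n, assuming every duplicate row actually changes.
function splitUpsertCounts(affectedRows, n) {
  const updates = Math.max(0, affectedRows - n);
  const inserts = n - updates;
  return { inserts, updates };
}
// e.g. n = 100 rows with affectedRows = 130 -> 70 inserts, 30 updates
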
From 5bfd6f6d04472b316e4dc67acea2b27f75801cb8 Mon Sep 17 00:00:00 2001
From: Matt
Date: Sat, 1 Feb 2025 12:54:33 -0500
Subject: [PATCH 32/33] Fix import script order count issues

---
 inventory-server/scripts/import/orders.js     | 325 +++++++-----------
 .../scripts/import/purchase-orders.js         |  21 +-
 2 files changed, 146 insertions(+), 200 deletions(-)

diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js
index af7f307..5bcaf33 100644
--- a/inventory-server/scripts/import/orders.js
+++ b/inventory-server/scripts/import/orders.js
@@ -24,6 +24,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
   let totalOrderItems = 0;
   let totalUniqueOrders = 0;

+  // Add a cumulative counter for processed orders before the loop
+  let cumulativeProcessedOrders = 0;
+
   try {
     // Insert temporary table creation queries
     await localConnection.query(`
@@ -372,6 +375,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       // Filter orders and track missing products - do this in a single pass
       const validOrders = [];
       const values = [];
+      const processedOrderItems = new Set(); // Track unique order items
+      const processedOrders = new Set(); // Track unique orders
+
       for (const order of orders) {
         if (!existingPids.has(order.pid)) {
           missingProducts.add(order.pid);
@@ -380,65 +386,111 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         }
         validOrders.push(order);
         values.push(...columnNames.map(col => order[col] ?? null));
+        processedOrderItems.add(`${order.order_number}-${order.pid}`);
+        processedOrders.add(order.order_number);
       }

       if (validOrders.length > 0) {
         // Pre-compute the placeholders string once
         const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
         const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");
+
+        const result = await localConnection.query(`
+          INSERT INTO orders (${columnNames.join(",")})
+          VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE
+          SKU = VALUES(SKU),
+          date = VALUES(date),
+          price = VALUES(price),
+          quantity = VALUES(quantity),
+          discount = VALUES(discount),
+          tax = VALUES(tax),
+          tax_included = VALUES(tax_included),
+          shipping = VALUES(shipping),
+          customer = VALUES(customer),
+          customer_name = VALUES(customer_name),
+          status = VALUES(status),
+          canceled = VALUES(canceled),
+          costeach = VALUES(costeach)
+        `, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());
+
+        const affectedRows = result[0].affectedRows;
+        const updates = Math.floor(affectedRows / 2);
+        const inserts = affectedRows - (updates * 2);
+
+        recordsAdded += inserts;
+        recordsUpdated += updates;
+        importedCount += processedOrderItems.size; // Count unique order items processed
+      }

-        // First check which orders exist and get their current values
-        const [existingOrders] = await localConnection.query(
-          `SELECT ${columnNames.join(",")} FROM orders WHERE (order_number, pid) IN (${validOrders.map(() => "(?,?)").join(",")})`,
-          validOrders.flatMap(o => [o.order_number, o.pid])
-        );
-        const existingOrderMap = new Map(
-          existingOrders.map(o => [`${o.order_number}-${o.pid}`, o])
-        );
+      // Update progress based on unique orders processed
+      cumulativeProcessedOrders += processedOrders.size;
+      outputProgress({
+        status: "running",
+        operation: "Orders import",
+        message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
+        current: cumulativeProcessedOrders,
+        total: totalUniqueOrders,
+        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+        remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
+        rate: calculateRate(startTime, cumulativeProcessedOrders)
+      });
+    }

-        // Split into inserts and updates
-        const insertsAndUpdates = validOrders.reduce((acc, order) => {
-          const key = `${order.order_number}-${order.pid}`;
-          if (existingOrderMap.has(key)) {
-            const existing = existingOrderMap.get(key);
-            // Check if any values are different
-            const hasChanges = columnNames.some(col => {
-              const newVal = order[col] ?? null;
-              const oldVal = existing[col] ?? null;
-              if (typeof newVal === 'number' && typeof oldVal === 'number') {
-                return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
-              }
-              return newVal !== oldVal;
-            });
-            if (hasChanges) {
-              acc.updates.push(order);
-            }
-          } else {
-            acc.inserts.push(order);
-          }
-          return acc;
-        }, { inserts: [], updates: [] });
+    // Now try to import any orders that were skipped due to missing products
+    if (skippedOrders.size > 0) {
+      try {
+        outputProgress({
+          status: "running",
+          operation: "Orders import",
+          message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
+        });

-        // Handle inserts
-        if (insertsAndUpdates.inserts.length > 0) {
-          const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(singlePlaceholder).join(",");
-
-          const insertResult = await localConnection.query(`
-            INSERT INTO orders (${columnNames.join(",")})
-            VALUES ${insertPlaceholders}
-          `, insertsAndUpdates.inserts.map(i => columnNames.map(col => i[col] ?? null)).flat());
-
-          recordsAdded += insertResult[0].affectedRows;
-          importedCount += insertResult[0].affectedRows;
-        }
+        // Get the orders that were skipped
+        const [skippedProdOrders] = await localConnection.query(`
+          SELECT DISTINCT
+            oi.order_id as order_number,
+            oi.pid,
+            oi.SKU,
+            om.date,
+            oi.price,
+            oi.quantity,
+            oi.base_discount + COALESCE(od.discount, 0) as discount,
+            COALESCE(ot.tax, 0) as tax,
+            0 as tax_included,
+            0 as shipping,
+            om.customer,
+            om.customer_name,
+            om.status,
+            om.canceled,
+            COALESCE(tc.costeach, 0) as costeach
+          FROM temp_order_items oi
+          JOIN temp_order_meta om ON oi.order_id = om.order_id
+          LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
+          LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
+          LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
+          WHERE oi.order_id IN (?)
+        `, [Array.from(skippedOrders)]);

-        // Handle updates - now we know these actually have changes
-        if (insertsAndUpdates.updates.length > 0) {
-          const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(singlePlaceholder).join(",");
-
-          const updateResult = await localConnection.query(`
-            INSERT INTO orders (${columnNames.join(",")})
-            VALUES ${updatePlaceholders}
+        // Check which products exist now
+        const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
+        const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
+          "SELECT pid FROM products WHERE pid IN (?)",
+          [skippedPids]
+        ) : [[]];
+        const existingPids = new Set(existingProducts.map(p => p.pid));
+
+        // Filter orders that can now be imported
+        const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
+        const retryOrderItems = new Set(); // Track unique order items in retry
+
+        if (validOrders.length > 0) {
+          const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
+          const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();
+
+          const result = await localConnection.query(`
+            INSERT INTO orders (${columnNames.join(", ")})
+            VALUES ${placeholders}
             ON DUPLICATE KEY UPDATE
             SKU = VALUES(SKU),
             date = VALUES(date),
             price = VALUES(price),
             quantity = VALUES(quantity),
             discount = VALUES(discount),
             tax = VALUES(tax),
             tax_included = VALUES(tax_included),
             shipping = VALUES(shipping),
             customer = VALUES(customer),
             customer_name = VALUES(customer_name),
             status = VALUES(status),
             canceled = VALUES(canceled),
             costeach = VALUES(costeach)
-          `, insertsAndUpdates.updates.map(u => columnNames.map(col => u[col] ?? null)).flat());
-
-          recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
-          importedCount += updateResult[0].affectedRows / 2;
-        }
-      }
+          `, values);

-      // Update progress based on batch size - this is the number of order items we've processed
-      processedCount = i + batchIds.length * (totalOrderItems / totalUniqueOrders);
-      outputProgress({
-        status: "running",
-        operation: "Orders import",
-        message: `Imported ${Math.floor(importedCount)} orders (${Math.floor(processedCount)} of ${totalOrderItems} items processed)`,
-        current: Math.floor(processedCount),
-        total: totalOrderItems,
-        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-        remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
-        rate: calculateRate(startTime, processedCount)
-      });
-    }
+          const affectedRows = result[0].affectedRows;
+          const updates = Math.floor(affectedRows / 2);
+          const inserts = affectedRows - (updates * 2);
+
+          // Track unique order items
+          validOrders.forEach(order => {
+            retryOrderItems.add(`${order.order_number}-${order.pid}`);
+          });
+
+          outputProgress({
+            status: "running",
+            operation: "Orders import",
+            message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
+          });
+
+          // Update the main counters
+          recordsAdded += inserts;
+          recordsUpdated += updates;
+          importedCount += retryOrderItems.size;
+        }
+      } catch (error) {
+        console.warn('Warning: Failed to retry skipped orders:', error.message);
+        console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
+      }
+    }

-    // Clean up temporary tables
+    // Clean up temporary tables after ALL processing is complete
     await localConnection.query(`
       DROP TEMPORARY TABLE IF EXISTS temp_order_items;
       DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
@@ -483,132 +542,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
     `);

-    // Import missing products if any
-    if (missingProducts.size > 0) {
-      try {
-        // Import missing products directly without materialization
-        await importMissingProducts(prodConnection, localConnection, Array.from(missingProducts));
-
-        // Retry skipped orders after importing products
-        if (skippedOrders.size > 0) {
-          outputProgress({
-            status: "running",
-            operation: "Orders import",
-            message: `Retrying import of ${skippedOrders.size} orders with previously missing products`
-          });
-
-          const skippedOrdersArray = Array.from(skippedOrders);
-          const [skippedProdOrders] = skippedOrdersArray.length > 0 ? await prodConnection.query(`
-            SELECT
-              o.order_id,
-              CASE
-                WHEN o.date_placed = '0000-00-00 00:00:00' OR o.date_placed IS NULL THEN o.stamp
-                ELSE o.date_placed
-              END as date,
-              o.order_cid,
-              o.bill_firstname,
-              o.bill_lastname,
-              o.order_email,
-              o.order_status,
-              o.date_shipped,
-              o.date_cancelled,
-              oi.prod_pid,
-              oi.prod_itemnumber,
-              oi.prod_price,
-              oi.qty_ordered,
-              oi.qty_back,
-              oi.qty_placed,
-              oi.qty_placed_2,
-              oi.discounted,
-              oi.summary_cogs,
-              oi.summary_profit,
-              oi.summary_orderdate,
-              oi.summary_paiddate,
-              oi.date_added,
-              oi.stamp
-            FROM order_items oi
-            JOIN _order o ON oi.order_id = o.order_id
-            WHERE o.order_id IN (?)
- `, [skippedOrdersArray]) : [[]]; - - // Prepare values for insertion - const skippedOrderValues = skippedProdOrders.flatMap(order => { - if (!order.date) { - console.log(`Warning: Skipped order ${order.order_id} has null date:`, JSON.stringify(order, null, 2)); - return []; - } - - const canceled = order.date_cancelled !== '0000-00-00 00:00:00' ? 1 : 0; - const customerName = `${order.bill_firstname} ${order.bill_lastname}`; - - // Create an object with keys based on column names - const orderData = { - id: order.order_id, - order_number: order.order_id, - pid: order.prod_pid, - SKU: order.prod_itemnumber, - date: order.date ? ( - order.date instanceof Date ? - order.date.toJSON()?.slice(0,10) || null : - (typeof order.date === 'string' ? order.date.split(' ')[0] : null) - ) : null, - price: order.prod_price, - quantity: order.qty_ordered, - discount: order.discounted, - tax: 0, // Placeholder, will be calculated later - tax_included: 0, // Placeholder, will be calculated later - shipping: 0, // Placeholder, will be calculated later - customer: order.order_email, - customer_name: customerName, - status: order.order_status, - canceled: canceled, - }; - - // Map column names to values, handling missing columns - return [columnNames.map(colName => orderData[colName] !== undefined ? orderData[colName] : null)]; - }); - - // Construct the insert query dynamically - const skippedPlaceholders = skippedProdOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); - const skippedInsertQuery = ` - INSERT INTO orders (${columnNames.join(", ")}) - VALUES ${skippedPlaceholders} - ON DUPLICATE KEY UPDATE - SKU = VALUES(SKU), - date = VALUES(date), - price = VALUES(price), - quantity = VALUES(quantity), - discount = VALUES(discount), - tax = VALUES(tax), - tax_included = VALUES(tax_included), - shipping = VALUES(shipping), - customer = VALUES(customer), - customer_name = VALUES(customer_name), - status = VALUES(status), - canceled = VALUES(canceled), - costeach = VALUES(costeach) - `; - - // Execute the insert query - if (skippedOrderValues.length > 0) { - const result = await localConnection.query(skippedInsertQuery, skippedOrderValues.flat()); - const addedOrUpdated = Math.floor(result[0].affectedRows / 2); // Round down to avoid fractional orders - importedCount += addedOrUpdated; - recordsUpdated += addedOrUpdated; - - outputProgress({ - status: "running", - operation: "Orders import", - message: `Successfully imported ${addedOrUpdated} previously skipped orders`, - }); - } - } - } catch (error) { - console.warn('Warning: Failed to import missing products:', error.message); - console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`); - } - } - // Only update sync status if we get here (no errors thrown) await localConnection.query(` INSERT INTO sync_status (table_name, last_sync_timestamp) @@ -618,9 +551,9 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = return { status: "complete", - totalImported: Math.floor(importedCount), // Round down to avoid fractional orders + totalImported: Math.floor(importedCount), recordsAdded: recordsAdded || 0, - recordsUpdated: Math.floor(recordsUpdated), // Round down to avoid fractional orders + recordsUpdated: Math.floor(recordsUpdated), totalSkipped: skippedOrders.size, missingProducts: missingProducts.size, incrementalUpdate, diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index cd87139..64e5a97 100644 
--- a/inventory-server/scripts/import/purchase-orders.js
+++ b/inventory-server/scripts/import/purchase-orders.js
@@ -459,7 +459,15 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         VALUES ${insertPlaceholders}
       `, insertsAndUpdates.inserts.map(i => i.values).flat());
 
-      recordsAdded += insertResult[0].affectedRows;
+      const affectedRows = insertResult[0].affectedRows;
+      // For an upsert, MySQL reports 1 affected row per inserted row and 2 per
+      // changed update, so affectedRows = inserts + (updates * 2)
+      const updates = Math.floor(affectedRows / 2);
+      const inserts = affectedRows - (updates * 2); // estimate; exact only when the batch has at most one insert
+
+      recordsAdded += inserts;
+      recordsUpdated += updates; // already an integer; no fractional updates possible
+      processed += batchProcessed;
     }
 
     // Handle updates - now we know these actually have changes
@@ -477,10 +485,15 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           .join(",")};
       `, insertsAndUpdates.updates.map(u => u.values).flat());
 
-      recordsUpdated += updateResult[0].affectedRows / 2; // Each update counts as 2 in affectedRows
+      const affectedRows = updateResult[0].affectedRows;
+      // For an upsert, MySQL reports 1 affected row per inserted row and 2 per
+      // changed update, so affectedRows = inserts + (updates * 2)
+      const updates = Math.floor(affectedRows / 2);
+      const inserts = affectedRows - (updates * 2); // remainder; expected to be 0 in this update-only branch
+
+      recordsUpdated += updates;
+      processed += batchProcessed;
     }
-
-    processed += batchProcessed;
 
     // Update progress based on time interval
     const now = Date.now();

From 9e1989ac6659164432d17b85753d35babcee0ae6 Mon Sep 17 00:00:00 2001
From: Matt
Date: Sat, 1 Feb 2025 14:08:27 -0500
Subject: [PATCH 33/33] Cleanup

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index 2b9c0f9..19e2d35 100644
--- a/.gitignore
+++ b/.gitignore
@@ -57,3 +57,4 @@ csv/**/*
 **/csv/**/*
 !csv/.gitkeep
 inventory/tsconfig.tsbuildinfo
+inventory-server/scripts/.fuse_hidden00000fa20000000a
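A note on the affectedRows arithmetic used in the order and purchase-order import patches above: for INSERT ... ON DUPLICATE KEY UPDATE, MySQL reports 1 affected row per newly inserted row, 2 per row updated to new values, and 0 per row left unchanged (1 if the connection sets CLIENT_FOUND_ROWS). The floor(affectedRows / 2) split used in the patches is therefore only an estimate, exact when a batch contains at most one insert. Below is a minimal sketch, not part of the patches, of the exact split when the batch size is known; splitAffectedRows and batchSize are hypothetical names introduced here for illustration.

// Minimal sketch: split an upsert's affectedRows into inserts and updates.
// Assumes every row in the batch was either inserted (counted once) or
// updated to new values (counted twice); rows left unchanged count 0 and
// make an exact split impossible.
function splitAffectedRows(affectedRows, batchSize) {
  // inserts + updates = batchSize and inserts + 2 * updates = affectedRows
  const updates = affectedRows - batchSize;
  if (updates < 0 || updates > batchSize) {
    // The batch contained no-op updates (or inconsistent counts),
    // so the assumption does not hold and the split is not exact.
    return { inserts: null, updates: null, exact: false };
  }
  return { inserts: batchSize - updates, updates, exact: true };
}

// Example: a batch of 10 rows reporting 14 affected rows
console.log(splitAffectedRows(14, 10)); // { inserts: 6, updates: 4, exact: true }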