From 22ad2d44db42368009fd79828e9143dd489739a2 Mon Sep 17 00:00:00 2001
From: Matt
Date: Sun, 2 Feb 2025 15:06:20 -0500
Subject: [PATCH] Set up change tracking in core tables

---
 inventory-server/db/config-schema.sql         | 74 ++++++++++++-------
 inventory-server/db/metrics-schema.sql        | 57 +++++++++-----
 inventory-server/db/schema.sql                | 30 +++-----
 inventory-server/scripts/import-from-prod.js  | 10 +++
 inventory-server/scripts/import/categories.js | 15 ++++
 inventory-server/scripts/import/orders.js     | 26 ++++++-
 inventory-server/scripts/import/products.js   | 15 ++++
 .../scripts/import/purchase-orders.js         | 24 ++++++
 inventory-server/scripts/reset-db.js          | 59 ++++++++-------
 9 files changed, 216 insertions(+), 94 deletions(-)

diff --git a/inventory-server/db/config-schema.sql b/inventory-server/db/config-schema.sql
index 2a28a1e..9b6e0ef 100644
--- a/inventory-server/db/config-schema.sql
+++ b/inventory-server/db/config-schema.sql
@@ -100,44 +100,44 @@ CREATE TABLE IF NOT EXISTS sales_seasonality (
 
 -- Insert default global thresholds if not exists
 INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
-VALUES (1, NULL, NULL, 7, 14, 90)
-ON DUPLICATE KEY UPDATE
-    critical_days = VALUES(critical_days),
-    reorder_days = VALUES(reorder_days),
-    overstock_days = VALUES(overstock_days);
+VALUES (1, NULL, NULL, 7, 14, 90) AS new_values
+ON DUPLICATE KEY UPDATE
+    critical_days = new_values.critical_days,
+    reorder_days = new_values.reorder_days,
+    overstock_days = new_values.overstock_days;
 
 INSERT INTO lead_time_thresholds (id, category_id, vendor, target_days, warning_days, critical_days)
-VALUES (1, NULL, NULL, 14, 21, 30)
-ON DUPLICATE KEY UPDATE
-    target_days = VALUES(target_days),
-    warning_days = VALUES(warning_days),
-    critical_days = VALUES(critical_days);
+VALUES (1, NULL, NULL, 14, 21, 28) AS new_values
+ON DUPLICATE KEY UPDATE
+    target_days = new_values.target_days,
+    warning_days = new_values.warning_days,
+    critical_days = new_values.critical_days;
 
 INSERT INTO sales_velocity_config (id, category_id, vendor, daily_window_days, weekly_window_days, monthly_window_days)
-VALUES (1, NULL, NULL, 30, 7, 90)
-ON DUPLICATE KEY UPDATE
-    daily_window_days = VALUES(daily_window_days),
-    weekly_window_days = VALUES(weekly_window_days),
-    monthly_window_days = VALUES(monthly_window_days);
+VALUES (1, NULL, NULL, 30, 90, 180) AS new_values
+ON DUPLICATE KEY UPDATE
+    daily_window_days = new_values.daily_window_days,
+    weekly_window_days = new_values.weekly_window_days,
+    monthly_window_days = new_values.monthly_window_days;
 
 INSERT INTO abc_classification_config (id, a_threshold, b_threshold, classification_period_days)
-VALUES (1, 20.0, 50.0, 90)
-ON DUPLICATE KEY UPDATE
-    a_threshold = VALUES(a_threshold),
-    b_threshold = VALUES(b_threshold),
-    classification_period_days = VALUES(classification_period_days);
+VALUES (1, 80, 95, 180) AS new_values
+ON DUPLICATE KEY UPDATE
+    a_threshold = new_values.a_threshold,
+    b_threshold = new_values.b_threshold,
+    classification_period_days = new_values.classification_period_days;
 
 INSERT INTO safety_stock_config (id, category_id, vendor, coverage_days, service_level)
-VALUES (1, NULL, NULL, 14, 95.0)
+VALUES (1, NULL, NULL, 14, 95.0) AS new_values
 ON DUPLICATE KEY UPDATE
-    coverage_days = VALUES(coverage_days),
-    service_level = VALUES(service_level);
+    coverage_days = new_values.coverage_days,
+    service_level = new_values.service_level;
 
 INSERT INTO turnover_config (id, category_id, vendor, calculation_period_days, target_rate)
-VALUES (1, NULL, NULL, 30, 1.0)
+VALUES (1, NULL, NULL, 30, 1.0) AS new_values
 ON DUPLICATE KEY UPDATE
-    calculation_period_days = VALUES(calculation_period_days),
-    target_rate = VALUES(target_rate);
+    calculation_period_days = new_values.calculation_period_days,
+    target_rate = new_values.target_rate;
 
 -- Insert default seasonality factors (neutral)
 INSERT INTO sales_seasonality (month, seasonality_factor)
@@ -193,4 +193,24 @@ CREATE TABLE IF NOT EXISTS import_history (
     additional_info JSON,
     INDEX idx_table_time (table_name, start_time),
     INDEX idx_status (status)
-);
\ No newline at end of file
+);
+
+-- Configuration for metric calculation windows
+CREATE TABLE IF NOT EXISTS metric_calculation_config (
+    id INT NOT NULL PRIMARY KEY,
+    calculation_window_days INT NOT NULL DEFAULT 90,
+    incremental_enabled BOOLEAN DEFAULT TRUE,
+    force_full_calculation_days INT DEFAULT 7,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
+);
+
+
+
+-- Insert default metric calculation config
+INSERT INTO metric_calculation_config (id, calculation_window_days, incremental_enabled, force_full_calculation_days)
+VALUES (1, 90, TRUE, 7) AS new_values
+ON DUPLICATE KEY UPDATE
+    calculation_window_days = new_values.calculation_window_days,
+    incremental_enabled = new_values.incremental_enabled,
+    force_full_calculation_days = new_values.force_full_calculation_days;
\ No newline at end of file
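
Note on the config-schema hunk above: the upserts switch from the VALUES() function to the row-alias form (VALUES ... AS new_values), presumably because VALUES() inside ON DUPLICATE KEY UPDATE is deprecated; the alias form requires MySQL 8.0.19 or newer. The sketch below shows how a calculation job might read the new metric_calculation_config row. It assumes the mysql2/promise setup the import scripts use; getMetricCalculationConfig and the connection details are illustrative, not part of this patch.

// Illustrative only: read the metric-calculation settings seeded above, falling
// back to the column defaults if the row has not been inserted yet.
const mysql = require('mysql2/promise');

async function getMetricCalculationConfig(pool) {
  const [rows] = await pool.query(
    'SELECT calculation_window_days, incremental_enabled, force_full_calculation_days FROM metric_calculation_config WHERE id = 1'
  );
  if (rows.length === 0) {
    // Mirrors the DEFAULTs declared on metric_calculation_config.
    return { calculationWindowDays: 90, incrementalEnabled: true, forceFullCalculationDays: 7 };
  }
  return {
    calculationWindowDays: rows[0].calculation_window_days,
    incrementalEnabled: Boolean(rows[0].incremental_enabled),
    forceFullCalculationDays: rows[0].force_full_calculation_days
  };
}

// Example wiring (placeholder credentials):
// const pool = mysql.createPool({ host: 'localhost', user: 'inventory', database: 'inventory' });
// const config = await getMetricCalculationConfig(pool);
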
diff --git a/inventory-server/db/metrics-schema.sql b/inventory-server/db/metrics-schema.sql
index ffa9984..220eb97 100644
--- a/inventory-server/db/metrics-schema.sql
+++ b/inventory-server/db/metrics-schema.sql
@@ -1,6 +1,44 @@
 -- Disable foreign key checks
 SET FOREIGN_KEY_CHECKS = 0;
 
+-- Create metric status tracking table
+CREATE TABLE IF NOT EXISTS product_metric_status (
+    pid BIGINT NOT NULL,
+    last_calculated_at TIMESTAMP NULL,
+    needs_recalculation BOOLEAN DEFAULT TRUE,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    PRIMARY KEY (pid),
+    INDEX idx_needs_recalc (needs_recalculation),
+    INDEX idx_last_calc (last_calculated_at),
+    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
+) ENGINE=InnoDB;
+
+-- Add triggers for metric recalculation
+DROP TRIGGER IF EXISTS orders_after_insert_update;
+CREATE TRIGGER orders_after_insert_update AFTER INSERT ON orders FOR EACH ROW
+    INSERT INTO product_metric_status (pid, needs_recalculation)
+    VALUES (NEW.pid, TRUE)
+    ON DUPLICATE KEY UPDATE
+        needs_recalculation = TRUE,
+        updated_at = CURRENT_TIMESTAMP;
+
+DROP TRIGGER IF EXISTS purchase_orders_after_insert_update;
+CREATE TRIGGER purchase_orders_after_insert_update AFTER INSERT ON purchase_orders FOR EACH ROW
+    INSERT INTO product_metric_status (pid, needs_recalculation)
+    VALUES (NEW.pid, TRUE)
+    ON DUPLICATE KEY UPDATE
+        needs_recalculation = TRUE,
+        updated_at = CURRENT_TIMESTAMP;
+
+DROP TRIGGER IF EXISTS products_after_insert_update;
+CREATE TRIGGER products_after_insert_update AFTER INSERT ON products FOR EACH ROW
+    INSERT INTO product_metric_status (pid, needs_recalculation)
+    VALUES (NEW.pid, TRUE)
+    ON DUPLICATE KEY UPDATE
+        needs_recalculation = TRUE,
+        updated_at = CURRENT_TIMESTAMP;
+
 -- Temporary tables for batch metrics processing
 CREATE TABLE IF NOT EXISTS temp_sales_metrics (
     pid BIGINT NOT NULL,
@@ -410,21 +448,4 @@ LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id;
 
 
 -- Re-enable foreign key checks
-SET FOREIGN_KEY_CHECKS = 1;
-
--- Create table for sales seasonality factors
-CREATE TABLE IF NOT EXISTS sales_seasonality (
-    month INT NOT NULL,
-    seasonality_factor DECIMAL(5,3) DEFAULT 0,
-    last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (month),
-    CHECK (month BETWEEN 1 AND 12),
-    CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
-);
-
--- Insert default seasonality factors (neutral)
-INSERT INTO sales_seasonality (month, seasonality_factor)
-VALUES
-    (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
-    (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
-ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP;
\ No newline at end of file
+SET FOREIGN_KEY_CHECKS = 1;
\ No newline at end of file
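
The product_metric_status table and the triggers above only mark rows dirty; nothing in this patch consumes the flag. A minimal sketch of the consuming side, assuming the same mysql2/promise connections the import scripts use — recalcFn stands in for the existing metrics calculation and is not defined by this patch:

// Illustrative consumer of product_metric_status: pick up flagged products in
// batches, recalculate them, then clear the flag.
async function processDirtyProducts(pool, recalcFn, batchSize = 500) {
  const [rows] = await pool.query(
    'SELECT pid FROM product_metric_status WHERE needs_recalculation = TRUE LIMIT ?',
    [batchSize]
  );
  if (rows.length === 0) return 0;

  const pids = rows.map(r => r.pid);
  await recalcFn(pool, pids); // placeholder for the existing metrics logic

  // Clear the flag only after the recalculation succeeded, so a failed batch
  // stays flagged and is retried on the next run.
  await pool.query(
    `UPDATE product_metric_status
        SET needs_recalculation = FALSE,
            last_calculated_at = CURRENT_TIMESTAMP
      WHERE pid IN (?)`,
    [pids]
  );

  return pids.length;
}

One caveat: a row re-flagged by a trigger while the batch is running would also be cleared here; if that matters, the UPDATE could additionally compare updated_at against the time the batch was read.
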
diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql
index 3f779cc..2d5f8f1 100644
--- a/inventory-server/db/schema.sql
+++ b/inventory-server/db/schema.sql
@@ -19,7 +19,7 @@ CREATE TABLE products (
     landing_cost_price DECIMAL(10, 3),
     barcode VARCHAR(50),
     harmonized_tariff_code VARCHAR(20),
-    updated_at TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
     visible BOOLEAN DEFAULT true,
     managing_stock BOOLEAN DEFAULT true,
     replenishable BOOLEAN DEFAULT true,
@@ -57,7 +57,8 @@ CREATE TABLE products (
     INDEX idx_brand (brand),
     INDEX idx_location (location),
     INDEX idx_total_sold (total_sold),
-    INDEX idx_date_last_sold (date_last_sold)
+    INDEX idx_date_last_sold (date_last_sold),
+    INDEX idx_updated (updated_at)
 ) ENGINE=InnoDB;
 
 -- Create categories table with hierarchy support
@@ -74,19 +75,8 @@ CREATE TABLE categories (
     INDEX idx_parent (parent_id),
     INDEX idx_type (type),
     INDEX idx_status (status),
-    INDEX idx_name_type (name, type)
-) ENGINE=InnoDB;
-
--- Create vendor_details table
-CREATE TABLE vendor_details (
-    vendor VARCHAR(100) PRIMARY KEY,
-    contact_name VARCHAR(100),
-    email VARCHAR(255),
-    phone VARCHAR(50),
-    status VARCHAR(20) DEFAULT 'active',
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-    INDEX idx_status (status)
+    INDEX idx_name_type (name, type),
+    INDEX idx_updated (updated_at)
 ) ENGINE=InnoDB;
 
 -- Create product_categories junction table
@@ -118,6 +108,7 @@ CREATE TABLE IF NOT EXISTS orders (
     customer_name VARCHAR(100),
     status VARCHAR(20) DEFAULT 'pending',
     canceled TINYINT(1) DEFAULT 0,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     UNIQUE KEY unique_order_line (order_number, pid),
     KEY order_number (order_number),
@@ -125,8 +116,9 @@ CREATE TABLE IF NOT EXISTS orders (
     KEY customer (customer),
     KEY date (date),
     KEY status (status),
-    INDEX idx_orders_metrics (pid, date, canceled)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+    INDEX idx_orders_metrics (pid, date, canceled, updated_at),
+    INDEX idx_updated (updated_at)
+) ENGINE=InnoDB;
 
 -- Create purchase_orders table with its indexes
 CREATE TABLE purchase_orders (
@@ -150,15 +142,17 @@ CREATE TABLE purchase_orders (
     last_received_date DATE COMMENT 'Date of most recent receiving',
     received_by VARCHAR(100) COMMENT 'Name of person who first received this PO line',
     receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
     FOREIGN KEY (pid) REFERENCES products(pid),
     INDEX idx_po_id (po_id),
     INDEX idx_vendor (vendor),
     INDEX idx_status (status),
     INDEX idx_receiving_status (receiving_status),
-    INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received),
+    INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received, updated_at),
     INDEX idx_po_metrics (pid, date, receiving_status, received_date),
     INDEX idx_po_product_date (pid, date),
     INDEX idx_po_product_status (pid, status),
+    INDEX idx_updated (updated_at),
     UNIQUE KEY unique_po_product (po_id, pid)
 ) ENGINE=InnoDB;
 
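
With updated_at now maintained by ON UPDATE CURRENT_TIMESTAMP and covered by idx_updated on products, categories, orders, and purchase_orders, incremental consumers can fetch only rows touched since a checkpoint. A hedged sketch of that pattern for orders; where the checkpoint is stored (for example the sync_status table referenced later in reset-db.js) is outside this patch, so it is just a parameter here:

// Illustrative only: pull order lines modified since the last checkpoint.
// `pool` is a mysql2/promise pool; `since` is a Date or 'YYYY-MM-DD HH:MM:SS' string.
async function fetchChangedOrders(pool, since) {
  const [rows] = await pool.query(
    `SELECT id, order_number, pid, date, status, updated_at
       FROM orders
      WHERE updated_at > ?
      ORDER BY updated_at ASC`,
    [since]
  );
  return rows;
}
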
diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js
index 67b6678..c80cc63 100644
--- a/inventory-server/scripts/import-from-prod.js
+++ b/inventory-server/scripts/import-from-prod.js
@@ -102,6 +102,16 @@ async function main() {
 
     if (isImportCancelled) throw new Error("Import cancelled");
 
+    // Initialize product_metric_status table for any missing products
+    await localConnection.query(`
+      INSERT IGNORE INTO product_metric_status (pid)
+      SELECT pid FROM products p
+      WHERE NOT EXISTS (
+        SELECT 1 FROM product_metric_status pms
+        WHERE pms.pid = p.pid
+      )
+    `);
+
     // Clean up any previously running imports that weren't completed
     await localConnection.query(`
       UPDATE import_history
diff --git a/inventory-server/scripts/import/categories.js b/inventory-server/scripts/import/categories.js
index 7dffc3c..94b11cb 100644
--- a/inventory-server/scripts/import/categories.js
+++ b/inventory-server/scripts/import/categories.js
@@ -136,6 +136,21 @@ async function importCategories(prodConnection, localConnection) {
       total: totalInserted,
       elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
     });
+
+    // Mark all products in these categories for recalculation
+    if (categoriesToInsert.length > 0) {
+      const affectedCatIds = categoriesToInsert.map(c => c.cat_id);
+
+      await localConnection.query(`
+        INSERT INTO product_metric_status (pid, needs_recalculation)
+        SELECT DISTINCT pc.pid, TRUE
+        FROM product_categories pc
+        WHERE pc.cat_id IN (?)
+        ON DUPLICATE KEY UPDATE
+          needs_recalculation = TRUE,
+          updated_at = CURRENT_TIMESTAMP
+      `, [affectedCatIds]);
+    }
   }
 
   // After all imports, if we skipped any categories, throw an error
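
The categories change above binds an array to a single placeholder (WHERE pc.cat_id IN (?)). With mysql2's query()/format() the array is expanded into a value list, which is what the statement relies on, and the length > 0 guard matters because an empty array would otherwise produce invalid SQL. A quick check of the expansion with made-up ids (the expansion is a feature of the text-protocol query()/format() path and should not be assumed for execute()):

const mysql = require('mysql2');

// mysql2's format() uses the same placeholder expansion as query().
const sql = mysql.format(
  'SELECT DISTINCT pc.pid FROM product_categories pc WHERE pc.cat_id IN (?)',
  [[101, 102, 103]]
);

console.log(sql);
// SELECT DISTINCT pc.pid FROM product_categories pc WHERE pc.cat_id IN (101, 102, 103)
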
diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js
index cb9d5a7..60b06c6 100644
--- a/inventory-server/scripts/import/orders.js
+++ b/inventory-server/scripts/import/orders.js
@@ -472,7 +472,19 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 
       recordsAdded += inserts;
       recordsUpdated += updates;
-      importedCount += processedOrderItems.size; // Count unique order items processed
+      importedCount += processedOrderItems.size;
+
+      // Mark affected products for recalculation
+      const affectedPids = [...new Set(validOrders.map(o => o.pid))];
+      if (affectedPids.length > 0) {
+        await localConnection.query(`
+          INSERT INTO product_metric_status (pid, needs_recalculation)
+          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
+          ON DUPLICATE KEY UPDATE
+            needs_recalculation = TRUE,
+            updated_at = CURRENT_TIMESTAMP
+        `, affectedPids);
+      }
     }
 
     // Update progress based on unique orders processed
@@ -585,6 +597,18 @@
         recordsAdded += inserts;
         recordsUpdated += updates;
         importedCount += retryOrderItems.size;
+
+        // Mark affected products for recalculation
+        const affectedPids = [...new Set(validOrders.map(o => o.pid))];
+        if (affectedPids.length > 0) {
+          await localConnection.query(`
+            INSERT INTO product_metric_status (pid, needs_recalculation)
+            VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
+            ON DUPLICATE KEY UPDATE
+              needs_recalculation = TRUE,
+              updated_at = CURRENT_TIMESTAMP
+          `, affectedPids);
+        }
       }
     } catch (error) {
       console.warn('Warning: Failed to retry skipped orders:', error.message);
diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js
index 094808b..a17162c 100644
--- a/inventory-server/scripts/import/products.js
+++ b/inventory-server/scripts/import/products.js
@@ -468,6 +468,21 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
       recordsUpdated += insertsAndUpdates.updates.length;
     }
 
+    if (insertsAndUpdates.updates.length > 0 || insertsAndUpdates.inserts.length > 0) {
+      const affectedPids = [
+        ...insertsAndUpdates.updates.map(p => p.pid),
+        ...insertsAndUpdates.inserts.map(p => p.pid)
+      ];
+
+      await localConnection.query(`
+        INSERT INTO product_metric_status (pid, needs_recalculation)
+        VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
+        ON DUPLICATE KEY UPDATE
+          needs_recalculation = TRUE,
+          updated_at = CURRENT_TIMESTAMP
+      `, affectedPids);
+    }
+
     // Process category relationships
     if (batch.some(p => p.category_ids)) {
       // First get all valid categories
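
orders.js, products.js, and purchase-orders.js (next) all inline the same flag-marking upsert. If that ever gets factored out, a shared helper along these lines would keep the behaviour identical while also chunking very large pid lists so the generated VALUES list stays bounded; the chunk size is an assumption, not something this patch specifies:

// Sketch of a shared helper equivalent to the inline statements in the importers.
async function markProductsForRecalculation(connection, pids, chunkSize = 1000) {
  const unique = [...new Set(pids)];
  for (let i = 0; i < unique.length; i += chunkSize) {
    const chunk = unique.slice(i, i + chunkSize);
    await connection.query(
      `INSERT INTO product_metric_status (pid, needs_recalculation)
       VALUES ${chunk.map(() => '(?, TRUE)').join(',')}
       ON DUPLICATE KEY UPDATE
         needs_recalculation = TRUE,
         updated_at = CURRENT_TIMESTAMP`,
      chunk
    );
  }
}
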
diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js
index 1f40883..197cbed 100644
--- a/inventory-server/scripts/import/purchase-orders.js
+++ b/inventory-server/scripts/import/purchase-orders.js
@@ -474,6 +474,18 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
       recordsAdded += inserts;
       recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
       processed += batchProcessed;
+
+      // Mark affected products for recalculation
+      const affectedPids = [...new Set(productBatch.map(p => p.pid))];
+      if (affectedPids.length > 0) {
+        await localConnection.query(`
+          INSERT INTO product_metric_status (pid, needs_recalculation)
+          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
+          ON DUPLICATE KEY UPDATE
+            needs_recalculation = TRUE,
+            updated_at = CURRENT_TIMESTAMP
+        `, affectedPids);
+      }
     }
 
     // Handle updates - now we know these actually have changes
@@ -499,6 +511,18 @@
 
       recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
       processed += batchProcessed;
+
+      // Mark affected products for recalculation
+      const affectedPids = [...new Set(productBatch.map(p => p.pid))];
+      if (affectedPids.length > 0) {
+        await localConnection.query(`
+          INSERT INTO product_metric_status (pid, needs_recalculation)
+          VALUES ${affectedPids.map(() => '(?, TRUE)').join(',')}
+          ON DUPLICATE KEY UPDATE
+            needs_recalculation = TRUE,
+            updated_at = CURRENT_TIMESTAMP
+        `, affectedPids);
+      }
     }
 
     // Update progress based on time interval
diff --git a/inventory-server/scripts/reset-db.js b/inventory-server/scripts/reset-db.js
index 8cc437f..767dd50 100644
--- a/inventory-server/scripts/reset-db.js
+++ b/inventory-server/scripts/reset-db.js
@@ -41,7 +41,10 @@ const CONFIG_TABLES = [
   'abc_classification_config',
   'safety_stock_config',
   'sales_seasonality',
-  'turnover_config'
+  'turnover_config',
+  'sync_status',
+  'metric_calculation_config',
+  'product_metric_status'
 ];
 
 // Split SQL into individual statements
@@ -175,7 +178,7 @@ async function resetDatabase() {
 
       DROP TABLE IF EXISTS ${tables[0].tables
         .split(',')
-        .filter(table => table !== 'users')
+        .filter(table => !['users', 'import_history'].includes(table))
        .map(table => '`' + table + '`')
        .join(', ')}
     `;
@@ -436,34 +439,7 @@ async function resetDatabase() {
      }
    }

-    // Verify config tables were created
-    const [showConfigTables] = await connection.query('SHOW TABLES');
-    const existingConfigTables = showConfigTables.map(t => Object.values(t)[0]);
-
-    outputProgress({
-      operation: 'Config tables verification',
-      message: {
-        found: existingConfigTables,
-        expected: CONFIG_TABLES
-      }
-    });
-
-    const missingConfigTables = CONFIG_TABLES.filter(
-      t => !existingConfigTables.includes(t)
-    );
-
-    if (missingConfigTables.length > 0) {
-      throw new Error(
-        `Failed to create config tables: ${missingConfigTables.join(', ')}`
-      );
-    }
-
-    outputProgress({
-      operation: 'Config tables created',
-      message: `Successfully created tables: ${CONFIG_TABLES.join(', ')}`
-    });
-
-    // Read and execute metrics schema (metrics tables)
+    // Read and execute metrics schema
     outputProgress({
      operation: 'Running metrics setup',
      message: 'Creating metrics tables...'
@@ -525,6 +501,29 @@ async function resetDatabase() {
      }
    }

+    // Verify triggers exist
+    const [triggers] = await connection.query('SHOW TRIGGERS');
+    const expectedTriggers = [
+      'orders_after_insert_update',
+      'purchase_orders_after_insert_update',
+      'products_after_insert_update'
+    ];
+
+    const missingTriggers = expectedTriggers.filter(
+      triggerName => !triggers.some(t => t.Trigger === triggerName)
+    );
+
+    if (missingTriggers.length > 0) {
+      throw new Error(
+        `Missing required triggers: ${missingTriggers.join(', ')}`
+      );
+    }
+
+    outputProgress({
+      operation: 'Triggers verified',
+      message: `Successfully verified triggers: ${expectedTriggers.join(', ')}`
+    });
+
     outputProgress({
       status: 'complete',
       operation: 'Database reset complete',