Clean up, fix file path issues with import scripts, adjust data management page for new metrics calcs
@@ -110,4 +110,54 @@ SELECT
FROM
public.products p

ON CONFLICT (pid) DO NOTHING; -- IMPORTANT: Do not overwrite existing product-specific settings
ON CONFLICT (pid) DO NOTHING; -- IMPORTANT: Do not overwrite existing product-specific settings

-- History and status tables
CREATE TABLE IF NOT EXISTS calculate_history (
id BIGSERIAL PRIMARY KEY,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
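-- NOTE: duration_minutes above is a stored generated column; PostgreSQL derives it from duration_seconds automatically.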
total_products INTEGER DEFAULT 0,
total_orders INTEGER DEFAULT 0,
total_purchase_orders INTEGER DEFAULT 0,
processed_products INTEGER DEFAULT 0,
processed_orders INTEGER DEFAULT 0,
processed_purchase_orders INTEGER DEFAULT 0,
status calculation_status DEFAULT 'running',
error_message TEXT,
additional_info JSONB
);

CREATE TABLE IF NOT EXISTS calculate_status (
module_name module_name PRIMARY KEY,
last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS sync_status (
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);

CREATE TABLE IF NOT EXISTS import_history (
id BIGSERIAL PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
records_added INTEGER DEFAULT 0,
records_updated INTEGER DEFAULT 0,
is_incremental BOOLEAN DEFAULT FALSE,
status calculation_status DEFAULT 'running',
error_message TEXT,
additional_info JSONB
);

-- Create all indexes after tables are fully created
CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);
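-- Illustrative query (not from the commit): the composite index above suits
-- recent-history lookups for a single table, for example:
--   SELECT start_time, duration_seconds, records_added, records_updated, status
--   FROM import_history
--   WHERE table_name = 'products'
--   ORDER BY start_time DESC
--   LIMIT 10;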

@@ -255,53 +255,4 @@
-- ELSE 4
-- END,
-- c.name,
-- st.vendor;

-- History and status tables
CREATE TABLE IF NOT EXISTS calculate_history (
id BIGSERIAL PRIMARY KEY,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
total_products INTEGER DEFAULT 0,
total_orders INTEGER DEFAULT 0,
total_purchase_orders INTEGER DEFAULT 0,
processed_products INTEGER DEFAULT 0,
processed_orders INTEGER DEFAULT 0,
processed_purchase_orders INTEGER DEFAULT 0,
status calculation_status DEFAULT 'running',
error_message TEXT,
additional_info JSONB
);

CREATE TABLE IF NOT EXISTS calculate_status (
module_name module_name PRIMARY KEY,
last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS sync_status (
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);

CREATE TABLE IF NOT EXISTS import_history (
id BIGSERIAL PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
records_added INTEGER DEFAULT 0,
records_updated INTEGER DEFAULT 0,
is_incremental BOOLEAN DEFAULT FALSE,
status calculation_status DEFAULT 'running',
error_message TEXT,
additional_info JSONB
);

-- Create all indexes after tables are fully created
CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);
-- st.vendor;

@@ -246,6 +246,89 @@ process.on('unhandledRejection', (reason, promise) => {

// --- Core Logic ---

/**
* Ensures all products have entries in the settings_product table
* This is important after importing new products
*/
async function syncSettingsProductTable() {
let conn = null;
try {
currentStep = 'Syncing settings_product table';
progressUtils.outputProgress({
operation: 'Syncing product settings',
message: 'Ensuring all products have settings entries'
});

conn = await getConnection();

// Get counts before sync
const beforeCounts = await conn.query(`
SELECT
(SELECT COUNT(*) FROM products) AS products_count,
(SELECT COUNT(*) FROM settings_product) AS settings_count
`);

const productsCount = parseInt(beforeCounts.rows[0].products_count);
const settingsCount = parseInt(beforeCounts.rows[0].settings_count);

progressUtils.outputProgress({
operation: 'Settings product sync',
message: `Found ${productsCount} products and ${settingsCount} settings entries`
});

// Insert missing product settings
const result = await conn.query(`
INSERT INTO settings_product (
pid,
lead_time_days,
days_of_stock,
safety_stock,
forecast_method,
exclude_from_forecast
)
SELECT
p.pid,
CAST(NULL AS INTEGER),
CAST(NULL AS INTEGER),
COALESCE((SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_safety_stock_units'), 0),
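-- Falls back to 0 when settings_global has no 'default_safety_stock_units' row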
CAST(NULL AS VARCHAR),
FALSE
FROM
public.products p
WHERE
NOT EXISTS (
SELECT 1 FROM settings_product sp WHERE sp.pid = p.pid
)
ON CONFLICT (pid) DO NOTHING
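-- Defensive guard, mirroring the schema seed: never overwrite existing product-specific settings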
`);

// Get counts after sync
const afterCounts = await conn.query(`
SELECT COUNT(*) AS settings_count FROM settings_product
`);

const newSettingsCount = parseInt(afterCounts.rows[0].settings_count);
const addedCount = newSettingsCount - settingsCount;

progressUtils.outputProgress({
operation: 'Settings product sync',
message: `Added ${addedCount} new settings entries. Now have ${newSettingsCount} total entries.`,
status: 'complete'
});

conn.release();
return addedCount;
} catch (err) {
progressUtils.outputProgress({
status: 'error',
operation: 'Settings product sync failed',
error: err.message
});
if (conn) conn.release();
throw err;
}
}
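
// Illustrative usage (an assumption, based on the module.exports shown further down):
//   const { syncSettingsProductTable } = require('./calculate-metrics-new');
//   syncSettingsProductTable()
//     .then(added => console.log(`settings_product rows added: ${added}`))
//     .catch(err => { console.error('settings sync failed:', err); });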

/**
* Executes a single SQL calculation step.
* @param {object} config - Configuration for the step.

@@ -552,6 +635,26 @@ async function runAllCalculations() {
let overallSuccess = true;

try {
// First, sync the settings_product table to ensure all products have entries
progressUtils.outputProgress({
operation: 'Starting metrics calculation',
message: 'Preparing product settings...'
});

try {
const addedCount = await syncSettingsProductTable();

progressUtils.outputProgress({
operation: 'Preparation complete',
message: `Added ${addedCount} missing product settings entries`,
status: 'complete'
});
} catch (syncError) {
console.error('Warning: Failed to sync product settings, continuing with metrics calculations:', syncError);
// Don't fail the entire process if settings sync fails
}

// Now run the calculation steps
for (const step of steps) {
if (step.run) {
if (isCancelled) {

@@ -599,6 +702,7 @@ if (require.main === module) {
module.exports = {
runAllCalculations,
cancelCalculation,
syncSettingsProductTable,
// Expose individual steps if useful, wrapping them slightly
runDailySnapshots: () => executeSqlStep({ name: 'Daily Snapshots Update', sqlFile: 'update_daily_snapshots.sql', historyType: 'daily_snapshots', statusModule: 'daily_snapshots' }, progressUtils),
runProductMetrics: () => executeSqlStep({ name: 'Product Metrics Update', sqlFile: 'update_product_metrics.sql', historyType: 'product_metrics', statusModule: 'product_metrics' }, progressUtils),

@@ -88,7 +88,7 @@ async function fullReset() {
operation: 'Starting metrics calculation',
message: 'Step 3/3: Calculating metrics...'
});
await runScript(path.join(__dirname, 'calculate-metrics.js'));
await runScript(path.join(__dirname, 'calculate-metrics-new.js'));

// Final completion message
outputProgress({

@@ -68,7 +68,7 @@ async function fullUpdate() {
operation: 'Starting metrics calculation',
message: 'Step 2/2: Calculating metrics...'
});
await runScript(path.join(__dirname, 'calculate-metrics.js'));
await runScript(path.join(__dirname, 'calculate-metrics-new.js'));
outputProgress({
status: 'complete',
operation: 'Metrics step complete',

@@ -1,6 +1,6 @@
const dotenv = require("dotenv");
const path = require("path");
const { outputProgress, formatElapsedTime } = require('./metrics/utils/progress');
const { outputProgress, formatElapsedTime } = require('./metrics-new/utils/progress');
const { setupConnections, closeConnections } = require('./import/utils');
const importCategories = require('./import/categories');
const { importProducts } = require('./import/products');

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime } = require('../metrics/utils/progress');
const { outputProgress, formatElapsedTime } = require('../metrics-new/utils/progress');

async function importCategories(prodConnection, localConnection) {
outputProgress({

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
const fs = require('fs');
const path = require('path');
const { pipeline } = require('stream');

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');

/**

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
const BATCH_SIZE = 1000; // Smaller batch size for better progress tracking
const MAX_RETRIES = 3;
const RETRY_DELAY = 5000; // 5 seconds

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');

/**
* Validates a date from MySQL before inserting it into PostgreSQL

@@ -384,7 +384,7 @@ async function resetDatabase() {
message: 'Creating configuration tables...'
});
const configSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/config-schema.sql'),
path.join(__dirname, '../db/config-schema-new.sql'),
'utf8'
);

@@ -433,7 +433,7 @@ async function resetDatabase() {
message: 'Creating metrics tables...'
});
const metricsSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/metrics-schema.sql'),
path.join(__dirname, '../db/metrics-schema-new.sql'),
'utf8'
);

@@ -843,14 +843,10 @@ router.get('/status/table-counts', async (req, res) => {
const tables = [
// Core tables
'products', 'categories', 'product_categories', 'orders', 'purchase_orders',
// Metrics tables
'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics',
'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts',
// New metrics tables
'product_metrics', 'daily_product_snapshots',
// Config tables
'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
'abc_classification_config', 'safety_stock_config', 'turnover_config',
'sales_seasonality', 'financial_calc_config'
'settings_global', 'settings_vendor', 'settings_product'
];

const counts = await Promise.all(

@@ -871,16 +867,8 @@ router.get('/status/table-counts', async (req, res) => {
// Group tables by type
const groupedCounts = {
core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)),
metrics: counts.filter(c => [
'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics',
'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts'
].includes(c.table_name)),
config: counts.filter(c => [
'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
'abc_classification_config', 'safety_stock_config', 'turnover_config',
'sales_seasonality', 'financial_calc_config'
].includes(c.table_name))
metrics: counts.filter(c => ['product_metrics', 'daily_product_snapshots'].includes(c.table_name)),
config: counts.filter(c => ['settings_global', 'settings_vendor', 'settings_product'].includes(c.table_name))
};
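// Response shape (illustrative; field names other than table_name are assumptions):
//   { core: [{ table_name: 'products', count: ... }, ...], metrics: [...], config: [...] }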
res.json(groupedCounts);