Clean up, fix file path issues with import scripts, adjust data management page for new metrics calcs

This commit is contained in:
2025-04-01 00:15:06 -04:00
parent 5035dda733
commit 97296946f1
15 changed files with 181 additions and 87 deletions

View File

@@ -111,3 +111,53 @@ FROM
public.products p public.products p
ON CONFLICT (pid) DO NOTHING; -- IMPORTANT: Do not overwrite existing product-specific settings ON CONFLICT (pid) DO NOTHING; -- IMPORTANT: Do not overwrite existing product-specific settings
-- History and status tables
-- calculate_history: one row per metrics-calculation run, recording timing,
-- per-entity progress counters, final status, and any error details.
CREATE TABLE IF NOT EXISTS calculate_history (
id BIGSERIAL PRIMARY KEY,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL, -- NULL while the run is still in progress
duration_seconds INTEGER,
-- Derived column: PostgreSQL keeps this in sync with duration_seconds automatically.
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
total_products INTEGER DEFAULT 0,
total_orders INTEGER DEFAULT 0,
total_purchase_orders INTEGER DEFAULT 0,
processed_products INTEGER DEFAULT 0,
processed_orders INTEGER DEFAULT 0,
processed_purchase_orders INTEGER DEFAULT 0,
status calculation_status DEFAULT 'running', -- enum type declared elsewhere in the schema
error_message TEXT,
additional_info JSONB
);
-- calculate_status: latest successful calculation timestamp per metrics module.
CREATE TABLE IF NOT EXISTS calculate_status (
module_name module_name PRIMARY KEY,
last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- sync_status: per-table watermark for incremental imports (timestamp + last seen id).
CREATE TABLE IF NOT EXISTS sync_status (
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);
-- import_history: one row per import run of a given table, mirroring the
-- timing/status shape of calculate_history plus added/updated record counts.
CREATE TABLE IF NOT EXISTS import_history (
id BIGSERIAL PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
records_added INTEGER DEFAULT 0,
records_updated INTEGER DEFAULT 0,
is_incremental BOOLEAN DEFAULT FALSE, -- TRUE for delta imports, FALSE for full reloads
status calculation_status DEFAULT 'running',
error_message TEXT,
additional_info JSONB
);
-- Create all indexes after tables are fully created
CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
-- Composite index supports the common "history for table X, newest first" query.
CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);

View File

@@ -256,52 +256,3 @@
-- END, -- END,
-- c.name, -- c.name,
-- st.vendor; -- st.vendor;
-- History and status tables
-- NOTE(review): this block duplicates the history/status DDL defined in the
-- other schema file of this commit — confirm only one copy is executed.
CREATE TABLE IF NOT EXISTS calculate_history (
id BIGSERIAL PRIMARY KEY,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL, -- NULL while the run is still in progress
duration_seconds INTEGER,
-- Derived column: PostgreSQL keeps this in sync with duration_seconds automatically.
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
total_products INTEGER DEFAULT 0,
total_orders INTEGER DEFAULT 0,
total_purchase_orders INTEGER DEFAULT 0,
processed_products INTEGER DEFAULT 0,
processed_orders INTEGER DEFAULT 0,
processed_purchase_orders INTEGER DEFAULT 0,
status calculation_status DEFAULT 'running', -- enum type declared elsewhere in the schema
error_message TEXT,
additional_info JSONB
);
-- calculate_status: latest successful calculation timestamp per metrics module.
CREATE TABLE IF NOT EXISTS calculate_status (
module_name module_name PRIMARY KEY,
last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- sync_status: per-table watermark for incremental imports (timestamp + last seen id).
CREATE TABLE IF NOT EXISTS sync_status (
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);
-- import_history: one row per import run of a given table.
CREATE TABLE IF NOT EXISTS import_history (
id BIGSERIAL PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
records_added INTEGER DEFAULT 0,
records_updated INTEGER DEFAULT 0,
is_incremental BOOLEAN DEFAULT FALSE, -- TRUE for delta imports, FALSE for full reloads
status calculation_status DEFAULT 'running',
error_message TEXT,
additional_info JSONB
);
-- Create all indexes after tables are fully created
CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);

View File

@@ -246,6 +246,89 @@ process.on('unhandledRejection', (reason, promise) => {
// --- Core Logic --- // --- Core Logic ---
/**
 * Ensures all products have entries in the settings_product table.
 * This is important after importing new products, because downstream
 * metrics calculations expect a settings row for every product.
 *
 * New rows are seeded with NULL per-product overrides and a safety stock
 * taken from the 'default_safety_stock_units' key in settings_global
 * (falling back to 0 when the key is absent).
 *
 * @returns {Promise<number>} Number of settings_product rows added.
 * @throws Re-throws any database error after reporting it via
 *   progressUtils so the caller can decide whether to abort.
 */
async function syncSettingsProductTable() {
  let conn = null;
  try {
    currentStep = 'Syncing settings_product table';
    progressUtils.outputProgress({
      operation: 'Syncing product settings',
      message: 'Ensuring all products have settings entries'
    });

    conn = await getConnection();

    // Count both tables up front so we can report how many rows get added.
    const beforeCounts = await conn.query(`
      SELECT
        (SELECT COUNT(*) FROM products) AS products_count,
        (SELECT COUNT(*) FROM settings_product) AS settings_count
    `);
    const productsCount = Number.parseInt(beforeCounts.rows[0].products_count, 10);
    const settingsCount = Number.parseInt(beforeCounts.rows[0].settings_count, 10);

    progressUtils.outputProgress({
      operation: 'Settings product sync',
      message: `Found ${productsCount} products and ${settingsCount} settings entries`
    });

    // Insert a settings row for each product that doesn't have one yet.
    // NOT EXISTS already filters existing rows; ON CONFLICT is a guard
    // against a concurrent insert racing between the two checks.
    await conn.query(`
      INSERT INTO settings_product (
        pid,
        lead_time_days,
        days_of_stock,
        safety_stock,
        forecast_method,
        exclude_from_forecast
      )
      SELECT
        p.pid,
        CAST(NULL AS INTEGER),
        CAST(NULL AS INTEGER),
        COALESCE((SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_safety_stock_units'), 0),
        CAST(NULL AS VARCHAR),
        FALSE
      FROM
        public.products p
      WHERE
        NOT EXISTS (
          SELECT 1 FROM settings_product sp WHERE sp.pid = p.pid
        )
      ON CONFLICT (pid) DO NOTHING
    `);

    // Re-count to compute how many rows were actually added.
    const afterCounts = await conn.query(`
      SELECT COUNT(*) AS settings_count FROM settings_product
    `);
    const newSettingsCount = Number.parseInt(afterCounts.rows[0].settings_count, 10);
    const addedCount = newSettingsCount - settingsCount;

    progressUtils.outputProgress({
      operation: 'Settings product sync',
      message: `Added ${addedCount} new settings entries. Now have ${newSettingsCount} total entries.`,
      status: 'complete'
    });

    return addedCount;
  } catch (err) {
    progressUtils.outputProgress({
      status: 'error',
      operation: 'Settings product sync failed',
      error: err.message
    });
    throw err;
  } finally {
    // Release exactly once on every path; the original released separately
    // in both the try and catch branches, which leaks the connection if
    // the error-reporting call itself throws.
    if (conn) conn.release();
  }
}
/** /**
* Executes a single SQL calculation step. * Executes a single SQL calculation step.
* @param {object} config - Configuration for the step. * @param {object} config - Configuration for the step.
@@ -552,6 +635,26 @@ async function runAllCalculations() {
let overallSuccess = true; let overallSuccess = true;
try { try {
// First, sync the settings_product table to ensure all products have entries
progressUtils.outputProgress({
operation: 'Starting metrics calculation',
message: 'Preparing product settings...'
});
try {
const addedCount = await syncSettingsProductTable();
progressUtils.outputProgress({
operation: 'Preparation complete',
message: `Added ${addedCount} missing product settings entries`,
status: 'complete'
});
} catch (syncError) {
console.error('Warning: Failed to sync product settings, continuing with metrics calculations:', syncError);
// Don't fail the entire process if settings sync fails
}
// Now run the calculation steps
for (const step of steps) { for (const step of steps) {
if (step.run) { if (step.run) {
if (isCancelled) { if (isCancelled) {
@@ -599,6 +702,7 @@ if (require.main === module) {
module.exports = { module.exports = {
runAllCalculations, runAllCalculations,
cancelCalculation, cancelCalculation,
syncSettingsProductTable,
// Expose individual steps if useful, wrapping them slightly // Expose individual steps if useful, wrapping them slightly
runDailySnapshots: () => executeSqlStep({ name: 'Daily Snapshots Update', sqlFile: 'update_daily_snapshots.sql', historyType: 'daily_snapshots', statusModule: 'daily_snapshots' }, progressUtils), runDailySnapshots: () => executeSqlStep({ name: 'Daily Snapshots Update', sqlFile: 'update_daily_snapshots.sql', historyType: 'daily_snapshots', statusModule: 'daily_snapshots' }, progressUtils),
runProductMetrics: () => executeSqlStep({ name: 'Product Metrics Update', sqlFile: 'update_product_metrics.sql', historyType: 'product_metrics', statusModule: 'product_metrics' }, progressUtils), runProductMetrics: () => executeSqlStep({ name: 'Product Metrics Update', sqlFile: 'update_product_metrics.sql', historyType: 'product_metrics', statusModule: 'product_metrics' }, progressUtils),

View File

@@ -88,7 +88,7 @@ async function fullReset() {
operation: 'Starting metrics calculation', operation: 'Starting metrics calculation',
message: 'Step 3/3: Calculating metrics...' message: 'Step 3/3: Calculating metrics...'
}); });
await runScript(path.join(__dirname, 'calculate-metrics.js')); await runScript(path.join(__dirname, 'calculate-metrics-new.js'));
// Final completion message // Final completion message
outputProgress({ outputProgress({

View File

@@ -68,7 +68,7 @@ async function fullUpdate() {
operation: 'Starting metrics calculation', operation: 'Starting metrics calculation',
message: 'Step 2/2: Calculating metrics...' message: 'Step 2/2: Calculating metrics...'
}); });
await runScript(path.join(__dirname, 'calculate-metrics.js')); await runScript(path.join(__dirname, 'calculate-metrics-new.js'));
outputProgress({ outputProgress({
status: 'complete', status: 'complete',
operation: 'Metrics step complete', operation: 'Metrics step complete',

View File

@@ -1,6 +1,6 @@
const dotenv = require("dotenv"); const dotenv = require("dotenv");
const path = require("path"); const path = require("path");
const { outputProgress, formatElapsedTime } = require('./metrics/utils/progress'); const { outputProgress, formatElapsedTime } = require('./metrics-new/utils/progress');
const { setupConnections, closeConnections } = require('./import/utils'); const { setupConnections, closeConnections } = require('./import/utils');
const importCategories = require('./import/categories'); const importCategories = require('./import/categories');
const { importProducts } = require('./import/products'); const { importProducts } = require('./import/products');

View File

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime } = require('../metrics/utils/progress'); const { outputProgress, formatElapsedTime } = require('../metrics-new/utils/progress');
async function importCategories(prodConnection, localConnection) { async function importCategories(prodConnection, localConnection) {
outputProgress({ outputProgress({

View File

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const { pipeline } = require('stream'); const { pipeline } = require('stream');

View File

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products'); const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');
/** /**

View File

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
const BATCH_SIZE = 1000; // Smaller batch size for better progress tracking const BATCH_SIZE = 1000; // Smaller batch size for better progress tracking
const MAX_RETRIES = 3; const MAX_RETRIES = 3;
const RETRY_DELAY = 5000; // 5 seconds const RETRY_DELAY = 5000; // 5 seconds

View File

@@ -1,4 +1,4 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
/** /**
* Validates a date from MySQL before inserting it into PostgreSQL * Validates a date from MySQL before inserting it into PostgreSQL

View File

@@ -384,7 +384,7 @@ async function resetDatabase() {
message: 'Creating configuration tables...' message: 'Creating configuration tables...'
}); });
const configSchemaSQL = fs.readFileSync( const configSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/config-schema.sql'), path.join(__dirname, '../db/config-schema-new.sql'),
'utf8' 'utf8'
); );
@@ -433,7 +433,7 @@ async function resetDatabase() {
message: 'Creating metrics tables...' message: 'Creating metrics tables...'
}); });
const metricsSchemaSQL = fs.readFileSync( const metricsSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/metrics-schema.sql'), path.join(__dirname, '../db/metrics-schema-new.sql'),
'utf8' 'utf8'
); );

View File

@@ -843,14 +843,10 @@ router.get('/status/table-counts', async (req, res) => {
const tables = [ const tables = [
// Core tables // Core tables
'products', 'categories', 'product_categories', 'orders', 'purchase_orders', 'products', 'categories', 'product_categories', 'orders', 'purchase_orders',
// Metrics tables // New metrics tables
'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics', 'product_metrics', 'daily_product_snapshots',
'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts',
// Config tables // Config tables
'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config', 'settings_global', 'settings_vendor', 'settings_product'
'abc_classification_config', 'safety_stock_config', 'turnover_config',
'sales_seasonality', 'financial_calc_config'
]; ];
const counts = await Promise.all( const counts = await Promise.all(
@@ -871,16 +867,8 @@ router.get('/status/table-counts', async (req, res) => {
// Group tables by type // Group tables by type
const groupedCounts = { const groupedCounts = {
core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)), core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)),
metrics: counts.filter(c => [ metrics: counts.filter(c => ['product_metrics', 'daily_product_snapshots'].includes(c.table_name)),
'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics', config: counts.filter(c => ['settings_global', 'settings_vendor', 'settings_product'].includes(c.table_name))
'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts'
].includes(c.table_name)),
config: counts.filter(c => [
'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
'abc_classification_config', 'safety_stock_config', 'turnover_config',
'sales_seasonality', 'financial_calc_config'
].includes(c.table_name))
}; };
res.json(groupedCounts); res.json(groupedCounts);

View File

@@ -767,11 +767,11 @@ export function DataManagement() {
return new Intl.NumberFormat().format(num); return new Intl.NumberFormat().format(num);
}; };
// Update renderTableCountsSection to use skeletons // Update renderTableCountsSection to show new metrics tables
const renderTableCountsSection = () => { const renderTableCountsSection = () => {
const renderTableCountsSkeleton = () => ( const renderTableCountsSkeleton = () => (
<div> <div>
{Array.from({ length: 18 }).map((_, i) => ( {Array.from({ length: 11 }).map((_, i) => (
<div key={i} className="flex justify-between text-sm items-center py-2 border-b last:border-0"> <div key={i} className="flex justify-between text-sm items-center py-2 border-b last:border-0">
<Skeleton className="h-4 w-[120px]" /> <Skeleton className="h-4 w-[120px]" />
<Skeleton className="h-4 w-[60px]" /> <Skeleton className="h-4 w-[60px]" />
@@ -801,12 +801,12 @@ export function DataManagement() {
); );
return ( return (
<Card className="md:col-start-2 md:row-span-2 h-[670px]"> <Card className="md:col-start-2 md:row-span-2 h-[550px]">
<CardHeader className="pb-3"> <CardHeader className="pb-3">
<CardTitle>Table Record Counts</CardTitle> <CardTitle>Table Record Counts</CardTitle>
</CardHeader> </CardHeader>
<CardContent> <CardContent>
{isLoading ? ( {isLoading && !tableCounts ? (
<div className="px-2"> <div className="px-2">
{renderTableCountsSkeleton()} {renderTableCountsSkeleton()}
</div> </div>
@@ -820,7 +820,8 @@ export function DataManagement() {
) : ( ) : (
<div> <div>
<div className="bg-sky-50/50 rounded-t-md px-2">{renderTableGroup('Core Tables', tableCounts?.core || [])}</div> <div className="bg-sky-50/50 rounded-t-md px-2">{renderTableGroup('Core Tables', tableCounts?.core || [])}</div>
<div className="bg-green-50/50 rounded-b-md px-2">{renderTableGroup('Metrics Tables', tableCounts?.metrics || [])}</div> <div className="bg-green-50/50 px-2">{renderTableGroup('Metrics Tables', tableCounts?.metrics || [])}</div>
<div className="bg-amber-50/50 rounded-b-md px-2">{renderTableGroup('Config Tables', tableCounts?.config || [])}</div>
</div> </div>
)} )}
</CardContent> </CardContent>
@@ -952,12 +953,12 @@ export function DataManagement() {
<div className="grid gap-4 md:grid-cols-2"> <div className="grid gap-4 md:grid-cols-2">
{/* Table Status */} {/* Table Status */}
<div className="space-y-4 flex flex-col h-[670px]"> <div className="space-y-4 flex flex-col h-[550px]">
<Card className="flex-1"> <Card className="flex-1">
<CardHeader className="pb-3"> <CardHeader className="pb-3">
<CardTitle>Last Import Times</CardTitle> <CardTitle>Last Import Times</CardTitle>
</CardHeader> </CardHeader>
<CardContent className="h-[calc(50%)]"> <CardContent className="h-auto">
<div className=""> <div className="">
{isLoading && !tableStatus.length ? ( {isLoading && !tableStatus.length ? (
<div> <div>
@@ -999,11 +1000,11 @@ export function DataManagement() {
<CardHeader className="pb-3"> <CardHeader className="pb-3">
<CardTitle>Last Calculation Times</CardTitle> <CardTitle>Last Calculation Times</CardTitle>
</CardHeader> </CardHeader>
<CardContent className="h-[calc(50%)]"> <CardContent className="h-auto">
<div className=""> <div className="">
{isLoading && !moduleStatus.length ? ( {isLoading && !moduleStatus.length ? (
<div> <div>
{Array.from({ length: 7 }).map((_, i) => ( {Array.from({ length: 3 }).map((_, i) => (
<div key={i} className="flex justify-between text-sm items-center py-2 border-b last:border-0"> <div key={i} className="flex justify-between text-sm items-center py-2 border-b last:border-0">
<Skeleton className="h-4 w-[120px]" /> <Skeleton className="h-4 w-[120px]" />
<Skeleton className="h-4 w-[60px]" /> <Skeleton className="h-4 w-[60px]" />