Try to synchronize time zones across import

2025-04-05 16:20:43 -04:00
parent c9b656d34b
commit a4c1a19d2e
4 changed files with 252 additions and 158 deletions


@@ -13,6 +13,22 @@ const dbConfig = {
port: process.env.DB_PORT || 5432
};
// Tables to always protect from being dropped
const PROTECTED_TABLES = [
'users',
'permissions',
'user_permissions',
'calculate_history',
'import_history',
'ai_prompts',
'ai_validation_performance',
'templates',
'reusable_images',
'imported_daily_inventory',
'imported_product_stat_history',
'imported_product_current_prices'
];
// Helper function to output progress in JSON format
function outputProgress(data) {
if (!data.status) {
@@ -33,17 +49,6 @@ const CORE_TABLES = [
'product_categories'
];
// Config tables that must be created
const CONFIG_TABLES = [
'stock_thresholds',
'lead_time_thresholds',
'sales_velocity_config',
'abc_classification_config',
'safety_stock_config',
'sales_seasonality',
'turnover_config'
];
// Split SQL into individual statements
function splitSQLStatements(sql) {
// First, normalize line endings
@@ -184,8 +189,8 @@ async function resetDatabase() {
SELECT string_agg(tablename, ', ') as tables
FROM pg_tables
WHERE schemaname = 'public'
AND tablename NOT IN ('users', 'permissions', 'user_permissions', 'calculate_history', 'import_history', 'ai_prompts', 'ai_validation_performance', 'templates', 'reusable_images');
`);
AND tablename NOT IN (SELECT unnest($1::text[]));
`, [PROTECTED_TABLES]);
if (!tablesResult.rows[0].tables) {
outputProgress({
@@ -204,7 +209,7 @@ async function resetDatabase() {
// Drop all tables except protected ones
const tables = tablesResult.rows[0].tables.split(', ');
for (const table of tables) {
if (!['users', 'reusable_images'].includes(table)) {
if (!PROTECTED_TABLES.includes(table)) {
await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
}
}
@@ -259,7 +264,9 @@ async function resetDatabase() {
'category_metrics',
'brand_metrics',
'sales_forecasts',
'abc_classification'
'abc_classification',
'daily_snapshots',
'periodic_metrics'
)
`);
}
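For reference, the `NOT IN (SELECT unnest($1::text[]))` change above passes the protected-table list as a single array parameter instead of interpolating table names into the SQL string; node-postgres serializes a JavaScript string array into a Postgres text[] bind value. A minimal standalone sketch of that pattern (the list and function name here are illustrative, not taken from this repo):

const { Client } = require('pg');

// Hypothetical list; the real script uses the PROTECTED_TABLES constant defined above.
const protectedTables = ['users', 'permissions'];

async function listDroppableTables(client) {
  // $1 is bound as a Postgres text[]; unnest() expands it into rows,
  // so no table names need to be spliced into the query text.
  const result = await client.query(
    `SELECT tablename
       FROM pg_tables
      WHERE schemaname = 'public'
        AND tablename NOT IN (SELECT unnest($1::text[]))`,
    [protectedTables]
  );
  return result.rows.map(row => row.tablename);
}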
@@ -301,51 +308,67 @@ async function resetDatabase() {
}
});
for (let i = 0; i < statements.length; i++) {
const stmt = statements[i];
try {
const result = await client.query(stmt);
// Verify if table was created (if this was a CREATE TABLE statement)
if (stmt.trim().toLowerCase().startsWith('create table')) {
const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1];
if (tableName) {
const tableExists = await client.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = $1
`, [tableName]);
outputProgress({
operation: 'Table Creation Verification',
message: {
table: tableName,
exists: tableExists.rows[0].count > 0
}
});
// Start a transaction for better error handling
await client.query('BEGIN');
try {
for (let i = 0; i < statements.length; i++) {
const stmt = statements[i];
try {
const result = await client.query(stmt);
// Verify if table was created (if this was a CREATE TABLE statement)
if (stmt.trim().toLowerCase().startsWith('create table')) {
const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1];
if (tableName) {
const tableExists = await client.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = $1
`, [tableName]);
outputProgress({
operation: 'Table Creation Verification',
message: {
table: tableName,
exists: tableExists.rows[0].count > 0
}
});
}
}
outputProgress({
operation: 'SQL Progress',
message: {
statement: i + 1,
total: statements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
rowCount: result.rowCount
}
});
// Commit in chunks of 10 statements to avoid long-running transactions
if (i > 0 && i % 10 === 0) {
await client.query('COMMIT');
await client.query('BEGIN');
}
} catch (sqlError) {
await client.query('ROLLBACK');
outputProgress({
status: 'error',
operation: 'SQL Error',
error: sqlError.message,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
outputProgress({
operation: 'SQL Progress',
message: {
statement: i + 1,
total: statements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
rowCount: result.rowCount
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'SQL Error',
error: sqlError.message,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
// Commit the final transaction
await client.query('COMMIT');
} catch (error) {
await client.query('ROLLBACK');
throw error;
}
// Verify core tables were created
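The statement loop introduced above wraps execution in a transaction and commits every 10 statements. A minimal sketch of that pattern in isolation (the helper name and chunk size parameter are illustrative, not part of the actual script); note that once a chunk has been committed, a later ROLLBACK only undoes the statements issued since the most recent COMMIT:

// Hypothetical helper; the real script inlines this logic for each schema file.
async function runStatementsInChunks(client, statements, chunkSize = 10) {
  await client.query('BEGIN');
  try {
    for (let i = 0; i < statements.length; i++) {
      await client.query(statements[i]);
      // Periodic commit keeps any single transaction short.
      if (i > 0 && i % chunkSize === 0) {
        await client.query('COMMIT');
        await client.query('BEGIN');
      }
    }
    // Commit whatever remains in the final chunk.
    await client.query('COMMIT');
  } catch (err) {
    // Only the current, uncommitted chunk is rolled back here.
    await client.query('ROLLBACK');
    throw err;
  }
}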
@@ -383,11 +406,25 @@ async function resetDatabase() {
operation: 'Running config setup',
message: 'Creating configuration tables...'
});
const configSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/config-schema-new.sql'),
'utf8'
);
const configSchemaPath = path.join(__dirname, '../db/config-schema-new.sql');
// Verify file exists
if (!fs.existsSync(configSchemaPath)) {
throw new Error(`Config schema file not found at: ${configSchemaPath}`);
}
const configSchemaSQL = fs.readFileSync(configSchemaPath, 'utf8');
outputProgress({
operation: 'Config Schema file',
message: {
path: configSchemaPath,
exists: fs.existsSync(configSchemaPath),
size: fs.statSync(configSchemaPath).size,
firstFewLines: configSchemaSQL.split('\n').slice(0, 5).join('\n')
}
});
// Execute config schema statements one at a time
const configStatements = splitSQLStatements(configSchemaSQL);
outputProgress({
@@ -401,30 +438,46 @@ async function resetDatabase() {
}
});
for (let i = 0; i < configStatements.length; i++) {
const stmt = configStatements[i];
try {
const result = await client.query(stmt);
outputProgress({
operation: 'Config SQL Progress',
message: {
statement: i + 1,
total: configStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
rowCount: result.rowCount
// Start a transaction for better error handling
await client.query('BEGIN');
try {
for (let i = 0; i < configStatements.length; i++) {
const stmt = configStatements[i];
try {
const result = await client.query(stmt);
outputProgress({
operation: 'Config SQL Progress',
message: {
statement: i + 1,
total: configStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
rowCount: result.rowCount
}
});
// Commit in chunks of 10 statements to avoid long-running transactions
if (i > 0 && i % 10 === 0) {
await client.query('COMMIT');
await client.query('BEGIN');
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'Config SQL Error',
error: sqlError.message,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
} catch (sqlError) {
await client.query('ROLLBACK');
outputProgress({
status: 'error',
operation: 'Config SQL Error',
error: sqlError.message,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
}
// Commit the final transaction
await client.query('COMMIT');
} catch (error) {
await client.query('ROLLBACK');
throw error;
}
// Read and execute metrics schema (metrics tables)
@@ -432,11 +485,25 @@ async function resetDatabase() {
operation: 'Running metrics setup',
message: 'Creating metrics tables...'
});
const metricsSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/metrics-schema-new.sql'),
'utf8'
);
const metricsSchemaPath = path.join(__dirname, '../db/metrics-schema-new.sql');
// Verify file exists
if (!fs.existsSync(metricsSchemaPath)) {
throw new Error(`Metrics schema file not found at: ${metricsSchemaPath}`);
}
const metricsSchemaSQL = fs.readFileSync(metricsSchemaPath, 'utf8');
outputProgress({
operation: 'Metrics Schema file',
message: {
path: metricsSchemaPath,
exists: fs.existsSync(metricsSchemaPath),
size: fs.statSync(metricsSchemaPath).size,
firstFewLines: metricsSchemaSQL.split('\n').slice(0, 5).join('\n')
}
});
// Execute metrics schema statements one at a time
const metricsStatements = splitSQLStatements(metricsSchemaSQL);
outputProgress({
@@ -450,30 +517,46 @@ async function resetDatabase() {
}
});
for (let i = 0; i < metricsStatements.length; i++) {
const stmt = metricsStatements[i];
try {
const result = await client.query(stmt);
outputProgress({
operation: 'Metrics SQL Progress',
message: {
statement: i + 1,
total: metricsStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
rowCount: result.rowCount
// Start a transaction for better error handling
await client.query('BEGIN');
try {
for (let i = 0; i < metricsStatements.length; i++) {
const stmt = metricsStatements[i];
try {
const result = await client.query(stmt);
outputProgress({
operation: 'Metrics SQL Progress',
message: {
statement: i + 1,
total: metricsStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
rowCount: result.rowCount
}
});
// Commit in chunks of 10 statements to avoid long-running transactions
if (i > 0 && i % 10 === 0) {
await client.query('COMMIT');
await client.query('BEGIN');
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'Metrics SQL Error',
error: sqlError.message,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
} catch (sqlError) {
await client.query('ROLLBACK');
outputProgress({
status: 'error',
operation: 'Metrics SQL Error',
error: sqlError.message,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
}
// Commit the final transaction
await client.query('COMMIT');
} catch (error) {
await client.query('ROLLBACK');
throw error;
}
outputProgress({
@@ -490,6 +573,14 @@ async function resetDatabase() {
});
process.exit(1);
} finally {
// Make sure to re-enable foreign key checks if they were disabled
try {
await client.query('SET session_replication_role = \'origin\'');
} catch (e) {
console.error('Error re-enabling foreign key checks:', e.message);
}
// Close the database connection
await client.end();
}
}