const { Client } = require('pg');
const path = require('path');
const dotenv = require('dotenv');
const fs = require('fs');

dotenv.config({ path: path.join(__dirname, '../.env') });

const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  // DB_PORT arrives as a string from the environment; default to the
  // standard PostgreSQL port when unset or unparseable.
  port: Number.parseInt(process.env.DB_PORT, 10) || 5432,
};

/**
 * Emit a progress record as a single JSON line on stdout so callers
 * (e.g. a parent process) can stream-parse the output.
 * @param {object} data - Arbitrary progress payload; a `status` of
 *   'running' is added when none is present.
 */
function outputProgress(data) {
  if (!data.status) {
    data = { status: 'running', ...data };
  }
  console.log(JSON.stringify(data));
}

// Core tables that must exist after the main schema runs.
const CORE_TABLES = [
  'products',
  'orders',
  'purchase_orders',
  'categories',
  'product_categories',
];

// Configuration tables that must exist after the config schema runs.
const CONFIG_TABLES = [
  'stock_thresholds',
  'lead_time_thresholds',
  'sales_velocity_config',
  'abc_classification_config',
  'safety_stock_config',
  'sales_seasonality',
  'turnover_config',
];

/**
 * Split a SQL script into individual executable statements.
 *
 * Semicolons are statement terminators EXCEPT when they appear inside:
 *  - single/double quoted text (honoring SQL's doubled-quote escape ''
 *    and backslash escapes as used by E'...' strings);
 *  - PostgreSQL dollar-quoted strings ($$...$$ or $tag$...$tag$), which
 *    routinely contain semicolons inside function bodies;
 *  - line (--) or block comments, which are stripped from the output.
 *
 * @param {string} sql - Full SQL script text.
 * @returns {string[]} Trimmed, non-empty statements in source order.
 */
function splitSQLStatements(sql) {
  // Normalize line endings first so comment handling only sees '\n'.
  sql = sql.replace(/\r\n/g, '\n');

  const statements = [];
  let current = '';
  let i = 0;

  while (i < sql.length) {
    const char = sql[i];
    const next = sql[i + 1] || '';

    // Line comment: skip to end of line. The '\n' itself is left for the
    // next iteration so tokens on either side cannot run together.
    if (char === '-' && next === '-') {
      while (i < sql.length && sql[i] !== '\n') i++;
      continue;
    }

    // Block comment: skip past the closing */ (or to EOF if unterminated).
    if (char === '/' && next === '*') {
      i += 2;
      while (i < sql.length && !(sql[i] === '*' && sql[i + 1] === '/')) i++;
      i += 2;
      continue;
    }

    // Dollar-quoted string: copy verbatim through the matching closing tag.
    if (char === '$') {
      const m = /^\$[A-Za-z_]*\$/.exec(sql.slice(i));
      if (m) {
        const tag = m[0];
        const end = sql.indexOf(tag, i + tag.length);
        const stop = end === -1 ? sql.length : end + tag.length;
        current += sql.slice(i, stop);
        i = stop;
        continue;
      }
    }

    // Quoted literal ('...') or identifier ("..."): copy verbatim.
    if (char === "'" || char === '"') {
      const quote = char;
      current += sql[i++];
      while (i < sql.length) {
        // Backslash escape (E'...' strings): keep the pair, stay inside.
        if (sql[i] === '\\' && quote === "'") {
          current += sql[i] + (sql[i + 1] || '');
          i += 2;
          continue;
        }
        if (sql[i] === quote) {
          // Doubled quote is SQL's escape for the quote char itself.
          if (sql[i + 1] === quote) {
            current += quote + quote;
            i += 2;
            continue;
          }
          current += sql[i++]; // genuine closing quote
          break;
        }
        current += sql[i++];
      }
      continue;
    }

    // Statement boundary.
    if (char === ';') {
      if (current.trim()) {
        statements.push(current.trim());
      }
      current = '';
      i++;
      continue;
    }

    current += char;
    i++;
  }

  // Flush a trailing statement with no terminating semicolon.
  if (current.trim()) {
    statements.push(current.trim());
  }
  return statements;
}

/**
 * Truncate a statement for progress output.
 * @param {string} stmt
 * @returns {string} At most 100 chars, with '...' appended when truncated.
 */
function preview(stmt) {
  return stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '');
}

/**
 * Confirm that a CREATE TABLE statement actually produced a table, and
 * report the result. No-op when the table name cannot be parsed out.
 * @param {Client} client - Connected pg client.
 * @param {string} stmt - The CREATE TABLE statement just executed.
 */
async function verifyTableCreated(client, stmt) {
  const tableName = stmt.match(
    /create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i
  )?.[1];
  if (!tableName) return;

  const tableExists = await client.query(
    `
    SELECT COUNT(*) as count
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = $1
  `,
    [tableName]
  );
  outputProgress({
    operation: 'Table Creation Verification',
    message: {
      table: tableName,
      // pg returns COUNT(*) as a string (bigint); compare numerically.
      exists: Number(tableExists.rows[0].count) > 0,
    },
  });
}

/**
 * Execute SQL statements one at a time with per-statement progress output.
 * Re-throws the first failing statement's error after reporting it.
 * @param {Client} client - Connected pg client.
 * @param {string[]} statements - Statements from splitSQLStatements().
 * @param {string} label - Progress prefix, e.g. 'SQL' or 'Config SQL'.
 * @param {boolean} [verifyCreates=false] - When true, verify each
 *   CREATE TABLE statement actually created its table.
 */
async function executeStatements(client, statements, label, verifyCreates = false) {
  outputProgress({
    operation: `${label} Execution`,
    message: {
      totalStatements: statements.length,
      statements: statements.map((stmt, i) => ({
        number: i + 1,
        preview: preview(stmt),
      })),
    },
  });

  for (let i = 0; i < statements.length; i++) {
    const stmt = statements[i];
    try {
      const result = await client.query(stmt);

      if (verifyCreates && stmt.trim().toLowerCase().startsWith('create table')) {
        await verifyTableCreated(client, stmt);
      }

      outputProgress({
        operation: `${label} Progress`,
        message: {
          statement: i + 1,
          total: statements.length,
          preview: preview(stmt),
          rowCount: result.rowCount,
        },
      });
    } catch (sqlError) {
      outputProgress({
        status: 'error',
        operation: `${label} Error`,
        error: sqlError.message,
        statement: stmt,
        statementNumber: i + 1,
      });
      throw sqlError;
    }
  }
}

/**
 * Verify that every required table exists in the public schema.
 * @param {Client} client - Connected pg client.
 * @param {string[]} required - Table names that must exist.
 * @param {string} label - 'Core' or 'Config' (used in progress/error text).
 * @throws {Error} When any required table is missing.
 */
async function verifyRequiredTables(client, required, label) {
  const result = await client.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
  `);
  const existing = result.rows.map((t) => t.table_name);

  outputProgress({
    operation: `${label} tables verification`,
    message: { found: existing, expected: required },
  });

  const missing = required.filter((t) => !existing.includes(t));
  if (missing.length > 0) {
    throw new Error(
      `Failed to create ${label.toLowerCase()} tables: ${missing.join(', ')}`
    );
  }

  outputProgress({
    operation: `${label} tables created`,
    message: `Successfully created tables: ${required.join(', ')}`,
  });
}

/**
 * Reset the database: drop all non-protected tables, recreate enum types,
 * then run the core, config and metrics schema files in order, verifying
 * required tables after each of the first two phases.
 *
 * Progress and errors are streamed as JSON lines via outputProgress().
 * On failure the connection is closed and the process exits with code 1.
 */
async function resetDatabase() {
  outputProgress({
    operation: 'Starting database reset',
    message: 'Connecting to database...',
  });

  // Debug: log current directory and resolved file paths.
  outputProgress({
    operation: 'Debug paths',
    message: {
      currentDir: process.cwd(),
      __dirname: __dirname,
      schemaPath: path.join(__dirname, '../db/schema.sql'),
    },
  });

  const client = new Client(dbConfig);
  let failed = false;

  try {
    // Connect inside the try block so connection failures are reported
    // through the same JSON error channel and cleanup still runs.
    await client.connect();

    // Report PostgreSQL version and the connected user/database.
    outputProgress({
      operation: 'Checking database',
      message: 'Verifying PostgreSQL version and user privileges...',
    });
    const versionResult = await client.query('SELECT version()');
    const userResult = await client.query('SELECT current_user, current_database()');
    outputProgress({
      operation: 'Database info',
      message: {
        version: versionResult.rows[0].version,
        user: userResult.rows[0].current_user,
        database: userResult.rows[0].current_database,
      },
    });

    // Collect every public table except the protected ones.
    outputProgress({
      operation: 'Getting table list',
      message: 'Retrieving all table names...',
    });
    const tablesResult = await client.query(`
      SELECT string_agg(tablename, ', ') as tables
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename NOT IN ('users', 'calculate_history', 'import_history');
    `);

    if (!tablesResult.rows[0].tables) {
      outputProgress({
        operation: 'No tables found',
        message: 'Database is already empty',
      });
    } else {
      outputProgress({
        operation: 'Dropping tables',
        message: 'Dropping all existing tables...',
      });

      // Disable trigger firing / FK enforcement during the drops.
      await client.query("SET session_replication_role = 'replica';");

      const tables = tablesResult.rows[0].tables.split(', ');
      for (const table of tables) {
        // 'users' is excluded in the query above; keep this guard as a
        // second line of defense against dropping it.
        if (!['users'].includes(table)) {
          await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
        }
      }

      // Drop enum types so they can be recreated cleanly below.
      await client.query('DROP TYPE IF EXISTS calculation_status CASCADE;');
      await client.query('DROP TYPE IF EXISTS module_name CASCADE;');

      // Restore normal trigger / FK behavior.
      await client.query("SET session_replication_role = 'origin';");
    }

    // Recreate the enum types the schemas depend on.
    outputProgress({
      operation: 'Creating enum types',
      message: 'Setting up required enum types...',
    });
    await client.query(`
      CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled')
    `);
    await client.query(`
      CREATE TYPE module_name AS ENUM (
        'product_metrics', 'time_aggregates', 'financial_metrics',
        'vendor_metrics', 'category_metrics', 'brand_metrics',
        'sales_forecasts', 'abc_classification'
      )
    `);

    // Phase 1: core tables.
    outputProgress({
      operation: 'Running database setup',
      message: 'Creating core tables...',
    });
    const schemaPath = path.join(__dirname, '../db/schema.sql');
    if (!fs.existsSync(schemaPath)) {
      throw new Error(`Schema file not found at: ${schemaPath}`);
    }
    const schemaSQL = fs.readFileSync(schemaPath, 'utf8');
    outputProgress({
      operation: 'Schema file',
      message: {
        path: schemaPath,
        exists: fs.existsSync(schemaPath),
        size: fs.statSync(schemaPath).size,
        firstFewLines: schemaSQL.split('\n').slice(0, 5).join('\n'),
      },
    });
    await executeStatements(client, splitSQLStatements(schemaSQL), 'SQL', true);

    // List all tables (with sizes) after the core schema ran.
    const allTables = await client.query(`
      SELECT
        table_schema,
        table_name,
        pg_size_pretty(pg_total_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name))) as size,
        pg_relation_size(quote_ident(table_schema) || '.' || quote_ident(table_name)) as raw_size
      FROM information_schema.tables
      WHERE table_schema = 'public'
    `);
    if (allTables.rows.length === 0) {
      outputProgress({
        operation: 'Warning',
        message: 'No tables found in database after schema execution',
      });
    } else {
      outputProgress({
        operation: 'Tables after schema execution',
        message: {
          count: allTables.rows.length,
          tables: allTables.rows.map((t) => ({
            schema: t.table_schema,
            name: t.table_name,
            size: t.size,
            rawSize: t.raw_size,
          })),
        },
      });
    }
    await verifyRequiredTables(client, CORE_TABLES, 'Core');

    // Phase 2: configuration tables.
    outputProgress({
      operation: 'Running config setup',
      message: 'Creating configuration tables...',
    });
    const configSchemaSQL = fs.readFileSync(
      path.join(__dirname, '../db/config-schema.sql'),
      'utf8'
    );
    await executeStatements(
      client,
      splitSQLStatements(configSchemaSQL),
      'Config SQL'
    );
    await verifyRequiredTables(client, CONFIG_TABLES, 'Config');

    // Phase 3: metrics tables (no required-table verification).
    outputProgress({
      operation: 'Running metrics setup',
      message: 'Creating metrics tables...',
    });
    const metricsSchemaSQL = fs.readFileSync(
      path.join(__dirname, '../db/metrics-schema.sql'),
      'utf8'
    );
    await executeStatements(
      client,
      splitSQLStatements(metricsSchemaSQL),
      'Metrics SQL'
    );

    outputProgress({
      status: 'complete',
      operation: 'Database reset complete',
      message: 'Database has been reset and all tables recreated',
    });
  } catch (error) {
    outputProgress({
      status: 'error',
      operation: 'Failed to reset database',
      error: error.message,
      stack: error.stack,
    });
    // Defer process.exit so the finally block can close the connection
    // first (process.exit inside catch would skip the finally await).
    failed = true;
  } finally {
    await client.end();
  }

  if (failed) {
    process.exit(1);
  }
}

// Export if required as a module
if (typeof module !== 'undefined' && module.exports) {
  module.exports = resetDatabase;
}

// Run if called directly
if (require.main === module) {
  resetDatabase().catch((error) => {
    console.error('Error:', error);
    process.exit(1);
  });
}