// reset-database.js — drops and recreates the application's PostgreSQL tables
// from the schema files in ../db, preserving user/permission/history tables.
const { Client } = require('pg');
const path = require('path');
const dotenv = require('dotenv');
const fs = require('fs');

// Resolve .env relative to this file so the script works from any cwd.
dotenv.config({ path: path.join(__dirname, '../.env') });

// Connection settings for the pg Client, read from DB_* env vars.
// The port is parsed to a number with an explicit radix; an unset, empty,
// or non-numeric DB_PORT falls back to the PostgreSQL default (5432),
// matching the previous `|| 5432` behavior while rejecting garbage values.
const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: Number.parseInt(process.env.DB_PORT, 10) || 5432
};
|
|
|
|
// Emit one machine-readable progress line (JSON) on stdout.
// Records without an explicit `status` field are treated as in-flight
// updates and wrapped with `status: 'running'`.
function outputProgress(data) {
  const payload = data.status ? data : { status: 'running', ...data };
  console.log(JSON.stringify(payload));
}
|
|
|
|
// Core tables that schema.sql must create; the reset verifies these exist
// after the core pass and aborts if any are missing.
const CORE_TABLES = [
  'products',
  'orders',
  'purchase_orders',
  'categories',
  'product_categories'
];

// Configuration tables expected from config-schema-new.sql.
// NOTE(review): this list is not referenced anywhere in this script — the
// config pass is never verified against it. Confirm whether verification
// was intended here.
const CONFIG_TABLES = [
  'stock_thresholds',
  'lead_time_thresholds',
  'sales_velocity_config',
  'abc_classification_config',
  'safety_stock_config',
  'sales_seasonality',
  'turnover_config'
];
|
|
|
|
// Split a SQL script into individual statements on top-level semicolons.
//
// Handles, without splitting inside them:
//   - single- and double-quoted literals (backslash-escape aware),
//   - PostgreSQL dollar-quoted bodies ($$...$$, $tag$...$tag$),
//   - `--` line comments and `/* ... */` block comments (both stripped).
//
// FIX: removed comments are now replaced with whitespace ('\n' for line
// comments, ' ' for block comments). Previously the comment AND its
// terminating newline were dropped outright, which could fuse adjacent
// tokens (e.g. "SELECT a--c\nFROM t" became "SELECT aFROM t").
//
// @param {string} sql - full script text (CRLF normalized to LF)
// @returns {string[]} trimmed, non-empty statements without trailing ';'
function splitSQLStatements(sql) {
  // Normalize line endings first so comment handling only sees '\n'.
  sql = sql.replace(/\r\n/g, '\n');

  const statements = [];
  let currentStatement = '';
  let inString = false;
  let stringChar = '';
  let inDollarQuote = false;
  let dollarQuoteTag = '';

  for (let i = 0; i < sql.length; i++) {
    const char = sql[i];
    const nextChar = sql[i + 1] || '';

    // Dollar quotes: on '$', look ahead to the next '$' to form a candidate
    // tag; open a quote, or close it if the candidate matches the open tag.
    if (char === '$' && !inString) {
      let tag = '$';
      let j = i + 1;
      while (j < sql.length && sql[j] !== '$') {
        tag += sql[j];
        j++;
      }
      tag += '$';

      if (j < sql.length) { // Found a closing '$' for the candidate tag
        if (!inDollarQuote) {
          inDollarQuote = true;
          dollarQuoteTag = tag;
          currentStatement += tag;
          i = j;
          continue;
        } else if (sql.substring(i, j + 1) === dollarQuoteTag) {
          inDollarQuote = false;
          dollarQuoteTag = '';
          currentStatement += tag;
          i = j;
          continue;
        }
      }
    }

    // String literals (ignored while inside a dollar quote).
    if (!inDollarQuote && (char === "'" || char === '"') && sql[i - 1] !== '\\') {
      if (!inString) {
        inString = true;
        stringChar = char;
      } else if (char === stringChar) {
        inString = false;
      }
    }

    // Comments (only meaningful outside strings and dollar quotes).
    if (!inString && !inDollarQuote) {
      if (char === '-' && nextChar === '-') {
        // Skip to end of line; keep the newline so tokens don't merge.
        while (i < sql.length && sql[i] !== '\n') i++;
        currentStatement += '\n';
        continue;
      }

      if (char === '/' && nextChar === '*') {
        // Skip until the closing */; a comment acts as a token separator.
        i += 2;
        while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
        i++; // Skip the closing '/'
        currentStatement += ' ';
        continue;
      }
    }

    // Statement boundary: a top-level semicolon ends the current statement.
    if (!inString && !inDollarQuote && char === ';') {
      if (currentStatement.trim()) {
        statements.push(currentStatement.trim());
      }
      currentStatement = '';
    } else {
      currentStatement += char;
    }
  }

  // Flush a trailing statement with no terminating semicolon.
  if (currentStatement.trim()) {
    statements.push(currentStatement.trim());
  }

  return statements;
}
|
|
|
|
/**
 * Execute a list of SQL statements sequentially over an open client,
 * emitting a JSON progress line per statement via outputProgress.
 *
 * @param {Client} client - connected pg client
 * @param {string[]} statements - statements from splitSQLStatements
 * @param {string} label - prefix for operation names so the three schema
 *   passes stay distinguishable ('SQL', 'Config SQL', 'Metrics SQL')
 * @param {boolean} [verifyCreatedTables=false] - when true, each
 *   CREATE TABLE statement is followed by an information_schema lookup
 *   confirming the table now exists
 * @throws the first failing statement's error, after reporting it
 */
async function executeStatements(client, statements, label, verifyCreatedTables = false) {
  outputProgress({
    operation: `${label} Execution`,
    message: {
      totalStatements: statements.length,
      statements: statements.map((stmt, i) => ({
        number: i + 1,
        preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
      }))
    }
  });

  for (let i = 0; i < statements.length; i++) {
    const stmt = statements[i];
    try {
      const result = await client.query(stmt);

      // Verify if table was created (if this was a CREATE TABLE statement)
      if (verifyCreatedTables && stmt.trim().toLowerCase().startsWith('create table')) {
        const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1];
        if (tableName) {
          const tableExists = await client.query(`
            SELECT COUNT(*) as count
            FROM information_schema.tables
            WHERE table_schema = 'public'
            AND table_name = $1
          `, [tableName]);

          outputProgress({
            operation: 'Table Creation Verification',
            message: {
              table: tableName,
              // NOTE: pg returns COUNT(*) as a string; '1' > 0 coerces true.
              exists: tableExists.rows[0].count > 0
            }
          });
        }
      }

      outputProgress({
        operation: `${label} Progress`,
        message: {
          statement: i + 1,
          total: statements.length,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
          rowCount: result.rowCount
        }
      });
    } catch (sqlError) {
      outputProgress({
        status: 'error',
        operation: `${label} Error`,
        error: sqlError.message,
        statement: stmt,
        statementNumber: i + 1
      });
      throw sqlError;
    }
  }
}

/**
 * Reset the application database: drop every non-preserved public table,
 * (re)create the enum types, then run the core, config, and metrics schema
 * files in order, verifying the core tables afterwards.
 *
 * Emits JSON progress lines via outputProgress throughout. On any failure
 * it reports the error, closes the connection, and exits with code 1.
 *
 * FIXES vs. previous version:
 *  - client.connect() now runs inside the try block, so connection
 *    failures are reported through the JSON error channel too.
 *  - The error path closes the client before process.exit(1); exit()
 *    terminates immediately and would otherwise skip the `finally`.
 *  - The three duplicated statement-execution loops are factored into
 *    executeStatements() above (identical progress/error output).
 */
async function resetDatabase() {
  outputProgress({
    operation: 'Starting database reset',
    message: 'Connecting to database...'
  });

  // Debug: log current directory and resolved schema path to diagnose
  // bad working directories.
  outputProgress({
    operation: 'Debug paths',
    message: {
      currentDir: process.cwd(),
      __dirname: __dirname,
      schemaPath: path.join(__dirname, '../db/schema.sql')
    }
  });

  const client = new Client(dbConfig);

  try {
    await client.connect();

    // Check PostgreSQL version and user
    outputProgress({
      operation: 'Checking database',
      message: 'Verifying PostgreSQL version and user privileges...'
    });

    const versionResult = await client.query('SELECT version()');
    const userResult = await client.query('SELECT current_user, current_database()');

    outputProgress({
      operation: 'Database info',
      message: {
        version: versionResult.rows[0].version,
        user: userResult.rows[0].current_user,
        database: userResult.rows[0].current_database
      }
    });

    // Collect every public table except the ones this reset must preserve.
    outputProgress({
      operation: 'Getting table list',
      message: 'Retrieving all table names...'
    });

    const tablesResult = await client.query(`
      SELECT string_agg(tablename, ', ') as tables
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename NOT IN ('users', 'permissions', 'user_permissions', 'calculate_history', 'import_history', 'ai_prompts', 'ai_validation_performance', 'templates', 'reusable_images');
    `);

    if (!tablesResult.rows[0].tables) {
      outputProgress({
        operation: 'No tables found',
        message: 'Database is already empty'
      });
    } else {
      outputProgress({
        operation: 'Dropping tables',
        message: 'Dropping all existing tables...'
      });

      // Disable triggers/foreign key checks for the duration of the drops.
      await client.query("SET session_replication_role = 'replica';");

      const tables = tablesResult.rows[0].tables.split(', ');
      for (const table of tables) {
        // Belt-and-braces guard: never drop these even if the SQL filter
        // above changes.
        if (!['users', 'reusable_images'].includes(table)) {
          await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
        }
      }

      // Only drop the enum types when the history tables (which use them)
      // are not being preserved.
      const historyTablesExist = await client.query(`
        SELECT EXISTS (
          SELECT FROM pg_tables
          WHERE schemaname = 'public'
          AND tablename IN ('calculate_history', 'import_history')
        );
      `);

      if (!historyTablesExist.rows[0].exists) {
        await client.query('DROP TYPE IF EXISTS calculation_status CASCADE;');
        await client.query('DROP TYPE IF EXISTS module_name CASCADE;');
      }

      // Re-enable triggers/foreign key checks.
      await client.query("SET session_replication_role = 'origin';");
    }

    // Create enum types if they don't exist
    outputProgress({
      operation: 'Creating enum types',
      message: 'Setting up required enum types...'
    });

    const typesExist = await client.query(`
      SELECT EXISTS (
        SELECT 1 FROM pg_type
        WHERE typname = 'calculation_status'
      ) as calc_status_exists,
      EXISTS (
        SELECT 1 FROM pg_type
        WHERE typname = 'module_name'
      ) as module_name_exists;
    `);

    if (!typesExist.rows[0].calc_status_exists) {
      await client.query(`CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled')`);
    }

    if (!typesExist.rows[0].module_name_exists) {
      await client.query(`
        CREATE TYPE module_name AS ENUM (
          'product_metrics',
          'time_aggregates',
          'financial_metrics',
          'vendor_metrics',
          'category_metrics',
          'brand_metrics',
          'sales_forecasts',
          'abc_classification'
        )
      `);
    }

    // Pass 1: core schema (core tables must exist before config/metrics).
    outputProgress({
      operation: 'Running database setup',
      message: 'Creating core tables...'
    });
    const schemaPath = path.join(__dirname, '../db/schema.sql');

    if (!fs.existsSync(schemaPath)) {
      throw new Error(`Schema file not found at: ${schemaPath}`);
    }

    const schemaSQL = fs.readFileSync(schemaPath, 'utf8');

    outputProgress({
      operation: 'Schema file',
      message: {
        path: schemaPath,
        exists: fs.existsSync(schemaPath),
        size: fs.statSync(schemaPath).size,
        firstFewLines: schemaSQL.split('\n').slice(0, 5).join('\n')
      }
    });

    await executeStatements(client, splitSQLStatements(schemaSQL), 'SQL', true);

    // Verify core tables were created before moving on.
    const existingTables = (await client.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
    `)).rows.map(t => t.table_name);

    outputProgress({
      operation: 'Core tables verification',
      message: {
        found: existingTables,
        expected: CORE_TABLES
      }
    });

    const missingCoreTables = CORE_TABLES.filter(
      t => !existingTables.includes(t)
    );

    if (missingCoreTables.length > 0) {
      throw new Error(
        `Failed to create core tables: ${missingCoreTables.join(', ')}`
      );
    }

    outputProgress({
      operation: 'Core tables created',
      message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
    });

    // Pass 2: configuration schema (depends on the core tables).
    outputProgress({
      operation: 'Running config setup',
      message: 'Creating configuration tables...'
    });
    const configSchemaSQL = fs.readFileSync(
      path.join(__dirname, '../db/config-schema-new.sql'),
      'utf8'
    );

    await executeStatements(client, splitSQLStatements(configSchemaSQL), 'Config SQL');

    // Pass 3: metrics schema.
    outputProgress({
      operation: 'Running metrics setup',
      message: 'Creating metrics tables...'
    });
    const metricsSchemaSQL = fs.readFileSync(
      path.join(__dirname, '../db/metrics-schema-new.sql'),
      'utf8'
    );

    await executeStatements(client, splitSQLStatements(metricsSchemaSQL), 'Metrics SQL');

    outputProgress({
      status: 'complete',
      operation: 'Database reset complete',
      message: 'Database has been reset and all tables recreated'
    });
  } catch (error) {
    outputProgress({
      status: 'error',
      operation: 'Failed to reset database',
      error: error.message,
      stack: error.stack
    });
    // Close the connection before exiting: process.exit() terminates the
    // process immediately, so the `finally` below would never run.
    try { await client.end(); } catch (endError) { /* best-effort close */ }
    process.exit(1);
  } finally {
    // Success path only (the error path exits above). The client may never
    // have connected if connect() threw, so swallow close errors.
    try { await client.end(); } catch (endError) { /* ignore */ }
  }
}
|
|
|
|
// Module/CLI wiring.
// When loaded with require(), expose the reset routine for programmatic use.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = resetDatabase;
}

// When executed directly (node reset-database.js), run immediately and
// fail the process on any unhandled rejection.
if (require.main === module) {
  resetDatabase().catch((err) => {
    console.error('Error:', err);
    process.exit(1);
  });
}
|