// inventory/inventory-server/scripts/reset-db.js
const mysql = require('mysql2/promise');
const path = require('path');
const dotenv = require('dotenv');
const fs = require('fs');
dotenv.config({ path: path.join(__dirname, '../.env') });
const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  multipleStatements: true
};
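// The ../.env file loaded above is expected to define the connection settings used
// in dbConfig. Illustrative values only; the real names and credentials come from
// your environment:
//   DB_HOST=localhost
//   DB_USER=inventory_user
//   DB_PASSWORD=********
//   DB_NAME=inventory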
// Helper function to output progress in JSON format
function outputProgress(data) {
  if (!data.status) {
    data = {
      status: 'running',
      ...data
    };
  }
  console.log(JSON.stringify(data));
}
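// Each call prints one JSON object per line on stdout, presumably consumed by a parent
// process that streams reset progress. Shape (values shown are from calls below):
//   {"status":"running","operation":"Starting database reset","message":"Connecting to database..."}
//   {"status":"complete","operation":"Database reset complete","message":"..."}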
// Core tables that must be created
const CORE_TABLES = [
  'products',
  'orders',
  'purchase_orders',
  'categories',
  'product_categories'
];
// Config tables that must be created
const CONFIG_TABLES = [
  'stock_thresholds',
  'lead_time_thresholds',
  'sales_velocity_config',
  'abc_classification_config',
  'safety_stock_config',
  'sales_seasonality',
  'turnover_config',
  'sync_status',
  'metric_calculation_config'
];
// Split SQL into individual statements
function splitSQLStatements(sql) {
  // First, normalize line endings
  sql = sql.replace(/\r\n/g, '\n');
  // Track statement boundaries
  let statements = [];
  let currentStatement = '';
  let inString = false;
  let stringChar = '';
  // Process character by character
  for (let i = 0; i < sql.length; i++) {
    const char = sql[i];
    const nextChar = sql[i + 1] || '';
    // Handle string literals
    if ((char === "'" || char === '"') && sql[i - 1] !== '\\') {
      if (!inString) {
        inString = true;
        stringChar = char;
      } else if (char === stringChar) {
        inString = false;
      }
    }
    // Handle -- line comments
    if (!inString && char === '-' && nextChar === '-') {
      // Skip to end of line
      while (i < sql.length && sql[i] !== '\n') i++;
      continue;
    }
    // Handle /* ... */ block comments
    if (!inString && char === '/' && nextChar === '*') {
      // Skip until the closing */
      i += 2;
      while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
      i++; // Step onto the closing '/'; the loop increment moves past it
      continue;
    }
    // Handle statement boundaries
    if (!inString && char === ';') {
      if (currentStatement.trim()) {
        statements.push(currentStatement.trim());
      }
      currentStatement = '';
    } else {
      currentStatement += char;
    }
  }
  // Add the last statement if it exists
  if (currentStatement.trim()) {
    statements.push(currentStatement.trim());
  }
  return statements;
}
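// Illustrative example of the splitter's behavior (not part of the script's output):
//   splitSQLStatements("CREATE TABLE a (id INT); -- note\nINSERT INTO a VALUES (1);")
//   -> ["CREATE TABLE a (id INT)", "INSERT INTO a VALUES (1)"]
// Comments are dropped and the statement-terminating semicolons are not included.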
async function resetDatabase() {
  outputProgress({
    operation: 'Starting database reset',
    message: 'Connecting to database...'
  });
  // Debug: Log current directory and file paths
  outputProgress({
    operation: 'Debug paths',
    message: {
      currentDir: process.cwd(),
      __dirname: __dirname,
      schemaPath: path.join(__dirname, '../db/schema.sql')
    }
  });
  const connection = await mysql.createConnection(dbConfig);
  try {
    // Check MySQL privileges
    outputProgress({
      operation: 'Checking privileges',
      message: 'Verifying MySQL user privileges...'
    });
    const [grants] = await connection.query('SHOW GRANTS');
    outputProgress({
      operation: 'User privileges',
      message: {
        grants: grants.map(g => Object.values(g)[0])
      }
    });
    // Make sure Note-level diagnostics are recorded so SHOW WARNINGS reports them
    await connection.query('SET SESSION sql_notes = 1');
    // Log database config (without sensitive info)
    outputProgress({
      operation: 'Database config',
      message: `Using database: ${dbConfig.database} on host: ${dbConfig.host}`
    });
    // Get list of all tables in the current database
    outputProgress({
      operation: 'Getting table list',
      message: 'Retrieving all table names...'
    });
    const [tables] = await connection.query(`
      SELECT GROUP_CONCAT(table_name) as tables
      FROM information_schema.tables
      WHERE table_schema = DATABASE()
        AND table_name NOT IN ('users', 'import_history')
    `);
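    // GROUP_CONCAT returns a single comma-separated string of table names
    // (e.g. "products,orders,categories", illustrative), or NULL when no tables match,
    // which is what the emptiness check here and the split(',') below rely on.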
    if (!tables[0].tables) {
      outputProgress({
        operation: 'No tables found',
        message: 'Database is already empty'
      });
    } else {
      outputProgress({
        operation: 'Dropping tables',
        message: 'Dropping all existing tables...'
      });
      await connection.query('SET FOREIGN_KEY_CHECKS = 0');
      const dropQuery = `
        DROP TABLE IF EXISTS
        ${tables[0].tables
          .split(',')
          .filter(table => !['users', 'import_history'].includes(table))
          .map(table => '`' + table + '`')
          .join(', ')}
      `;
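      // The generated statement ends up roughly like this (illustrative table list):
      //   DROP TABLE IF EXISTS `products`, `orders`, `purchase_orders`, ...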
      await connection.query(dropQuery);
      await connection.query('SET FOREIGN_KEY_CHECKS = 1');
    }
    // Read and execute main schema (core tables)
    outputProgress({
      operation: 'Running database setup',
      message: 'Creating core tables...'
    });
    const schemaPath = path.join(__dirname, '../db/schema.sql');
    // Verify file exists
    if (!fs.existsSync(schemaPath)) {
      throw new Error(`Schema file not found at: ${schemaPath}`);
    }
    const schemaSQL = fs.readFileSync(schemaPath, 'utf8');
    outputProgress({
      operation: 'Schema file',
      message: {
        path: schemaPath,
        exists: fs.existsSync(schemaPath),
        size: fs.statSync(schemaPath).size,
        firstFewLines: schemaSQL.split('\n').slice(0, 5).join('\n')
      }
    });
    // Execute schema statements one at a time
    const statements = splitSQLStatements(schemaSQL);
    outputProgress({
      operation: 'SQL Execution',
      message: {
        totalStatements: statements.length,
        statements: statements.map((stmt, i) => ({
          number: i + 1,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
        }))
      }
    });
    for (let i = 0; i < statements.length; i++) {
      const stmt = statements[i];
      try {
        const [result, fields] = await connection.query(stmt);
        // Check for warnings
        const [warnings] = await connection.query('SHOW WARNINGS');
        if (warnings && warnings.length > 0) {
          outputProgress({
            status: 'warning',
            operation: 'SQL Warning',
            statement: i + 1,
            warnings: warnings
          });
        }
        // If this was a CREATE TABLE statement, verify the table now exists
        if (stmt.trim().toLowerCase().startsWith('create table')) {
          const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1];
          if (tableName) {
            const [tableExists] = await connection.query(`
              SELECT COUNT(*) as count
              FROM information_schema.tables
              WHERE table_schema = DATABASE()
                AND table_name = ?
            `, [tableName]);
            outputProgress({
              operation: 'Table Creation Verification',
              message: {
                table: tableName,
                exists: tableExists[0].count > 0
              }
            });
          }
        }
        outputProgress({
          operation: 'SQL Progress',
          message: {
            statement: i + 1,
            total: statements.length,
            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
            affectedRows: result.affectedRows
          }
        });
      } catch (sqlError) {
        outputProgress({
          status: 'error',
          operation: 'SQL Error',
          error: sqlError.message,
          sqlState: sqlError.sqlState,
          errno: sqlError.errno,
          statement: stmt,
          statementNumber: i + 1
        });
        throw sqlError;
      }
    }
    // List all tables in the database after schema execution
    outputProgress({
      operation: 'Debug database',
      message: {
        currentDatabase: (await connection.query('SELECT DATABASE() as db'))[0][0].db
      }
    });
    const [allTables] = await connection.query(`
      SELECT
        table_schema,
        table_name,
        engine,
        create_time,
        table_rows
      FROM information_schema.tables
      WHERE table_schema = DATABASE()
    `);
    if (allTables.length === 0) {
      outputProgress({
        operation: 'Warning',
        message: 'No tables found in database after schema execution'
      });
    } else {
      outputProgress({
        operation: 'Tables after schema execution',
        message: {
          count: allTables.length,
          tables: allTables.map(t => ({
            schema: t.table_schema,
            name: t.table_name,
            engine: t.engine,
            created: t.create_time,
            rows: t.table_rows
          }))
        }
      });
    }
    // Also check table status
    const [tableStatus] = await connection.query('SHOW TABLE STATUS');
    outputProgress({
      operation: 'Table Status',
      message: {
        tables: tableStatus.map(t => ({
          name: t.Name,
          engine: t.Engine,
          version: t.Version,
          rowFormat: t.Row_format,
          rows: t.Rows,
          createTime: t.Create_time,
          updateTime: t.Update_time
        }))
      }
    });
    // Verify core tables were created using SHOW TABLES
    const [showTables] = await connection.query('SHOW TABLES');
    const existingTables = showTables.map(t => Object.values(t)[0]);
    outputProgress({
      operation: 'Core tables verification',
      message: {
        found: existingTables,
        expected: CORE_TABLES
      }
    });
    const missingCoreTables = CORE_TABLES.filter(
      t => !existingTables.includes(t)
    );
    if (missingCoreTables.length > 0) {
      throw new Error(
        `Failed to create core tables: ${missingCoreTables.join(', ')}`
      );
    }
    // Verify all core tables use InnoDB
    const [engineStatus] = await connection.query('SHOW TABLE STATUS WHERE Name IN (?)', [CORE_TABLES]);
    const nonInnoDBTables = engineStatus.filter(t => t.Engine !== 'InnoDB');
    if (nonInnoDBTables.length > 0) {
      throw new Error(
        `Tables using non-InnoDB engine: ${nonInnoDBTables.map(t => t.Name).join(', ')}`
      );
    }
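    // Presumably this engine check exists because the schema relies on foreign keys
    // (note the FOREIGN_KEY_CHECKS toggling earlier); MyISAM and most other engines
    // accept FOREIGN KEY clauses but do not enforce them.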
    outputProgress({
      operation: 'Core tables created',
      message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
    });
    // Read and execute config schema
    outputProgress({
      operation: 'Running config setup',
      message: 'Creating configuration tables...'
    });
    const configSchemaSQL = fs.readFileSync(
      path.join(__dirname, '../db/config-schema.sql'),
      'utf8'
    );
    // Execute config schema statements one at a time
    const configStatements = splitSQLStatements(configSchemaSQL);
    outputProgress({
      operation: 'Config SQL Execution',
      message: {
        totalStatements: configStatements.length,
        statements: configStatements.map((stmt, i) => ({
          number: i + 1,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
        }))
      }
    });
    for (let i = 0; i < configStatements.length; i++) {
      const stmt = configStatements[i];
      try {
        const [result, fields] = await connection.query(stmt);
        // Check for warnings
        const [warnings] = await connection.query('SHOW WARNINGS');
        if (warnings && warnings.length > 0) {
          outputProgress({
            status: 'warning',
            operation: 'Config SQL Warning',
            statement: i + 1,
            warnings: warnings
          });
        }
        outputProgress({
          operation: 'Config SQL Progress',
          message: {
            statement: i + 1,
            total: configStatements.length,
            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
            affectedRows: result.affectedRows
          }
        });
      } catch (sqlError) {
        outputProgress({
          status: 'error',
          operation: 'Config SQL Error',
          error: sqlError.message,
          sqlState: sqlError.sqlState,
          errno: sqlError.errno,
          statement: stmt,
          statementNumber: i + 1
        });
        throw sqlError;
      }
    }
    // Read and execute metrics schema
    outputProgress({
      operation: 'Running metrics setup',
      message: 'Creating metrics tables...'
    });
    const metricsSchemaSQL = fs.readFileSync(
      path.join(__dirname, '../db/metrics-schema.sql'),
      'utf8'
    );
    // Execute metrics schema statements one at a time
    const metricsStatements = splitSQLStatements(metricsSchemaSQL);
    outputProgress({
      operation: 'Metrics SQL Execution',
      message: {
        totalStatements: metricsStatements.length,
        statements: metricsStatements.map((stmt, i) => ({
          number: i + 1,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
        }))
      }
    });
    for (let i = 0; i < metricsStatements.length; i++) {
      const stmt = metricsStatements[i];
      try {
        const [result, fields] = await connection.query(stmt);
        // Check for warnings
        const [warnings] = await connection.query('SHOW WARNINGS');
        if (warnings && warnings.length > 0) {
          outputProgress({
            status: 'warning',
            operation: 'Metrics SQL Warning',
            statement: i + 1,
            warnings: warnings
          });
        }
        outputProgress({
          operation: 'Metrics SQL Progress',
          message: {
            statement: i + 1,
            total: metricsStatements.length,
            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
            affectedRows: result.affectedRows
          }
        });
      } catch (sqlError) {
        outputProgress({
          status: 'error',
          operation: 'Metrics SQL Error',
          error: sqlError.message,
          sqlState: sqlError.sqlState,
          errno: sqlError.errno,
          statement: stmt,
          statementNumber: i + 1
        });
        throw sqlError;
      }
    }
    outputProgress({
      status: 'complete',
      operation: 'Database reset complete',
      message: 'Database has been reset and all tables recreated'
    });
  } catch (error) {
    outputProgress({
      status: 'error',
      operation: 'Failed to reset database',
      error: error.message,
      stack: error.stack
    });
    process.exit(1);
  } finally {
    await connection.end();
  }
}
// Run the reset
resetDatabase();
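// The .env and db/*.sql paths above are resolved via __dirname, so the script can be
// invoked from any working directory, e.g.:
//   node inventory/inventory-server/scripts/reset-db.js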