Merge branch 'master' into add-product-upload-page
@@ -1,4 +1,4 @@
const mysql = require('mysql2/promise');
const { Client } = require('pg');
const path = require('path');
const dotenv = require('dotenv');
const fs = require('fs');
@@ -10,7 +10,7 @@ const dbConfig = {
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
multipleStatements: true
port: process.env.DB_PORT || 5432
};

// Helper function to output progress in JSON format
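With the move from mysql2 to pg, the mysql2-only multipleStatements flag disappears from dbConfig and an explicit port default appears instead. A minimal connection sketch built from the same environment variables (the DB_HOST variable name and the fallback values are assumptions, not part of this commit):

const { Client } = require('pg');
const dotenv = require('dotenv');
dotenv.config();

// Sketch only: mirrors the dbConfig fields visible in the hunk above.
const client = new Client({
  host: process.env.DB_HOST || 'localhost', // assumed variable name
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: process.env.DB_PORT || 5432
});

async function main() {
  await client.connect();
  const { rows } = await client.query('SELECT version()');
  console.log(rows[0].version);
  await client.end();
}

main().catch(err => { console.error(err); process.exit(1); });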
@@ -54,14 +54,44 @@ function splitSQLStatements(sql) {
let currentStatement = '';
let inString = false;
let stringChar = '';
let inDollarQuote = false;
let dollarQuoteTag = '';

// Process character by character
for (let i = 0; i < sql.length; i++) {
const char = sql[i];
const nextChar = sql[i + 1] || '';

// Handle string literals
if ((char === "'" || char === '"') && sql[i - 1] !== '\\') {
// Handle dollar quotes
if (char === '$' && !inString) {
// Look ahead to find the dollar quote tag
let tag = '$';
let j = i + 1;
while (j < sql.length && sql[j] !== '$') {
tag += sql[j];
j++;
}
tag += '$';

if (j < sql.length) { // Found closing $
if (!inDollarQuote) {
inDollarQuote = true;
dollarQuoteTag = tag;
currentStatement += tag;
i = j;
continue;
} else if (sql.substring(i, j + 1) === dollarQuoteTag) {
inDollarQuote = false;
dollarQuoteTag = '';
currentStatement += tag;
i = j;
continue;
}
}
}

// Handle string literals (only if not in dollar quote)
if (!inDollarQuote && (char === "'" || char === '"') && sql[i - 1] !== '\\') {
if (!inString) {
inString = true;
stringChar = char;
@@ -70,23 +100,25 @@ function splitSQLStatements(sql) {
}
}

// Handle comments
if (!inString && char === '-' && nextChar === '-') {
// Skip to end of line
while (i < sql.length && sql[i] !== '\n') i++;
continue;
// Handle comments (only if not in string or dollar quote)
if (!inString && !inDollarQuote) {
if (char === '-' && nextChar === '-') {
// Skip to end of line
while (i < sql.length && sql[i] !== '\n') i++;
continue;
}

if (char === '/' && nextChar === '*') {
// Skip until closing */
i += 2;
while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
i++; // Skip the closing /
continue;
}
}

if (!inString && char === '/' && nextChar === '*') {
// Skip until closing */
i += 2;
while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
i++; // Skip the closing /
continue;
}

// Handle statement boundaries
if (!inString && char === ';') {
// Handle statement boundaries (only if not in string or dollar quote)
if (!inString && !inDollarQuote && char === ';') {
if (currentStatement.trim()) {
statements.push(currentStatement.trim());
}
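The new dollar-quote and comment handling in splitSQLStatements matters because PostgreSQL function bodies contain semicolons that must not be treated as statement boundaries. An illustrative input (assumes the committed function is in scope; the sample SQL and expected count are illustrative, not from the commit):

// The semicolons inside the $$...$$ body and inside the comments are not boundaries,
// so this input should split into two statements: the CREATE FUNCTION and the CREATE TABLE.
const sampleSql = `
-- trigger helper; this comment semicolon is skipped
CREATE FUNCTION touch_updated_at() RETURNS trigger AS $$
BEGIN
  NEW.updated_at = now();
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

/* second statement */
CREATE TABLE demo (id serial PRIMARY KEY);
`;

const statements = splitSQLStatements(sampleSql);
console.log(statements.length); // expected: 2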
@@ -120,30 +152,26 @@ async function resetDatabase() {
}
});

const connection = await mysql.createConnection(dbConfig);
const client = new Client(dbConfig);
await client.connect();

try {
// Check MySQL privileges
// Check PostgreSQL version and user
outputProgress({
operation: 'Checking privileges',
message: 'Verifying MySQL user privileges...'
operation: 'Checking database',
message: 'Verifying PostgreSQL version and user privileges...'
});

const [grants] = await connection.query('SHOW GRANTS');
outputProgress({
operation: 'User privileges',
message: {
grants: grants.map(g => Object.values(g)[0])
}
});

// Enable warnings as errors
await connection.query('SET SESSION sql_notes = 1');
const versionResult = await client.query('SELECT version()');
const userResult = await client.query('SELECT current_user, current_database()');

// Log database config (without sensitive info)
outputProgress({
operation: 'Database config',
message: `Using database: ${dbConfig.database} on host: ${dbConfig.host}`
operation: 'Database info',
message: {
version: versionResult.rows[0].version,
user: userResult.rows[0].current_user,
database: userResult.rows[0].current_database
}
});

// Get list of all tables in the current database
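A recurring change in this hunk: mysql2/promise resolves query() to a [rows, fields] tuple (hence the destructuring of grants), while pg resolves to a single result object whose rows live on .rows. A self-contained sketch of the pg shape (connection details assumed to come from the same dbConfig):

const { Client } = require('pg');

// Sketch: the pg result object carries rows and rowCount directly.
async function showResultShape(dbConfig) {
  const client = new Client(dbConfig);
  await client.connect();
  try {
    const result = await client.query('SELECT current_user, current_database()');
    console.log(result.rows);     // e.g. [{ current_user: 'app', current_database: 'appdb' }]
    console.log(result.rowCount); // 1
  } finally {
    await client.end();
  }
}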
@@ -152,14 +180,14 @@ async function resetDatabase() {
message: 'Retrieving all table names...'
});

const [tables] = await connection.query(`
SELECT GROUP_CONCAT(table_name) as tables
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name NOT IN ('users', 'import_history', 'calculate_history')
const tablesResult = await client.query(`
SELECT string_agg(tablename, ', ') as tables
FROM pg_tables
WHERE schemaname = 'public'
AND tablename NOT IN ('users', 'calculate_history', 'import_history');
`);

if (!tables[0].tables) {
if (!tablesResult.rows[0].tables) {
outputProgress({
operation: 'No tables found',
message: 'Database is already empty'
@@ -170,20 +198,73 @@ async function resetDatabase() {
message: 'Dropping all existing tables...'
});

await connection.query('SET FOREIGN_KEY_CHECKS = 0');
const dropQuery = `
DROP TABLE IF EXISTS
${tables[0].tables
.split(',')
.filter(table => !['users', 'calculate_history'].includes(table))
.map(table => '`' + table + '`')
.join(', ')}
`;
await connection.query(dropQuery);
await connection.query('SET FOREIGN_KEY_CHECKS = 1');
// Disable triggers/foreign key checks
await client.query('SET session_replication_role = \'replica\';');

// Drop all tables except users
const tables = tablesResult.rows[0].tables.split(', ');
for (const table of tables) {
if (!['users'].includes(table)) {
await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
}
}

// Only drop types if we're not preserving history tables
const historyTablesExist = await client.query(`
SELECT EXISTS (
SELECT FROM pg_tables
WHERE schemaname = 'public'
AND tablename IN ('calculate_history', 'import_history')
);
`);

if (!historyTablesExist.rows[0].exists) {
await client.query('DROP TYPE IF EXISTS calculation_status CASCADE;');
await client.query('DROP TYPE IF EXISTS module_name CASCADE;');
}

// Re-enable triggers/foreign key checks
await client.query('SET session_replication_role = \'origin\';');
}

// Read and execute main schema (core tables)
// Create enum types if they don't exist
outputProgress({
operation: 'Creating enum types',
message: 'Setting up required enum types...'
});

// Check if types exist before creating
const typesExist = await client.query(`
SELECT EXISTS (
SELECT 1 FROM pg_type
WHERE typname = 'calculation_status'
) as calc_status_exists,
EXISTS (
SELECT 1 FROM pg_type
WHERE typname = 'module_name'
) as module_name_exists;
`);

if (!typesExist.rows[0].calc_status_exists) {
await client.query(`CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled')`);
}

if (!typesExist.rows[0].module_name_exists) {
await client.query(`
CREATE TYPE module_name AS ENUM (
'product_metrics',
'time_aggregates',
'financial_metrics',
'vendor_metrics',
'category_metrics',
'brand_metrics',
'sales_forecasts',
'abc_classification'
)
`);
}

// Read and execute main schema first (core tables)
outputProgress({
operation: 'Running database setup',
message: 'Creating core tables...'
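The check-then-create sequence above works, but PostgreSQL also allows making enum creation idempotent in a single statement by catching duplicate_object inside a DO block. A hedged alternative sketch (same type name and labels as the commit; the DO-block approach itself is not part of this change and assumes the same client as above):

// Sketch: create the enum only if it does not already exist, in one round trip.
await client.query(`
  DO $do$
  BEGIN
    CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled');
  EXCEPTION
    WHEN duplicate_object THEN NULL;
  END
  $do$;
`);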
@@ -223,35 +304,24 @@ async function resetDatabase() {
for (let i = 0; i < statements.length; i++) {
const stmt = statements[i];
try {
const [result, fields] = await connection.query(stmt);

// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'SQL Warning',
statement: i + 1,
warnings: warnings
});
}
const result = await client.query(stmt);

// Verify if table was created (if this was a CREATE TABLE statement)
if (stmt.trim().toLowerCase().startsWith('create table')) {
const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1];
const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1];
if (tableName) {
const [tableExists] = await connection.query(`
const tableExists = await client.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = ?
WHERE table_schema = 'public'
AND table_name = $1
`, [tableName]);

outputProgress({
operation: 'Table Creation Verification',
message: {
table: tableName,
exists: tableExists[0].count > 0
exists: tableExists.rows[0].count > 0
}
});
}
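Besides the result shape, placeholder syntax changes here: mysql2 interpolates positional ? markers, pg uses numbered $1, $2, ... with a values array, and COUNT(*) comes back from pg as a string. A self-contained sketch of the same existence check (the helper name is illustrative, not from the commit):

const { Client } = require('pg');

// Sketch: numbered placeholder with a values array, the pg equivalent of mysql2's `?`.
async function tableExistsInPublic(dbConfig, tableName) {
  const client = new Client(dbConfig);
  await client.connect();
  try {
    const res = await client.query(
      `SELECT COUNT(*) AS count
       FROM information_schema.tables
       WHERE table_schema = 'public' AND table_name = $1`,
      [tableName]
    );
    return Number(res.rows[0].count) > 0; // count arrives as a string (bigint)
  } finally {
    await client.end();
  }
}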
@@ -263,7 +333,7 @@ async function resetDatabase() {
statement: i + 1,
total: statements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
affectedRows: result.affectedRows
rowCount: result.rowCount
}
});
} catch (sqlError) {
@@ -271,8 +341,6 @@ async function resetDatabase() {
status: 'error',
operation: 'SQL Error',
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
});
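The MySQL-specific sqlState and errno fields are dropped from the error report. If similar detail is wanted under pg, server-raised errors expose the SQLSTATE on error.code plus fields such as detail and position; a hedged sketch of surfacing them (not part of this commit; assumes the same client and outputProgress as in this script):

// Sketch: logging PostgreSQL error details from a failed query.
try {
  await client.query('SELECT * FROM definitely_missing_table'); // illustrative failing query
} catch (sqlError) {
  outputProgress({
    status: 'error',
    operation: 'SQL Error',
    error: sqlError.message,
    code: sqlError.code,       // SQLSTATE, e.g. '42P01' for undefined_table
    detail: sqlError.detail,   // may be undefined for client-side errors
    position: sqlError.position
  });
}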
@@ -280,66 +348,12 @@ async function resetDatabase() {
}
}

// List all tables in the database after schema execution
outputProgress({
operation: 'Debug database',
message: {
currentDatabase: (await connection.query('SELECT DATABASE() as db'))[0][0].db
}
});

const [allTables] = await connection.query(`
SELECT
table_schema,
table_name,
engine,
create_time,
table_rows
// Verify core tables were created
const existingTables = (await client.query(`
SELECT table_name
FROM information_schema.tables
WHERE table_schema = DATABASE()
`);

if (allTables.length === 0) {
outputProgress({
operation: 'Warning',
message: 'No tables found in database after schema execution'
});
} else {
outputProgress({
operation: 'Tables after schema execution',
message: {
count: allTables.length,
tables: allTables.map(t => ({
schema: t.table_schema,
name: t.table_name,
engine: t.engine,
created: t.create_time,
rows: t.table_rows
}))
}
});
}

// Also check table status
const [tableStatus] = await connection.query('SHOW TABLE STATUS');
outputProgress({
operation: 'Table Status',
message: {
tables: tableStatus.map(t => ({
name: t.Name,
engine: t.Engine,
version: t.Version,
rowFormat: t.Row_format,
rows: t.Rows,
createTime: t.Create_time,
updateTime: t.Update_time
}))
}
});

// Verify core tables were created using SHOW TABLES
const [showTables] = await connection.query('SHOW TABLES');
const existingTables = showTables.map(t => Object.values(t)[0]);
WHERE table_schema = 'public'
`)).rows.map(t => t.table_name);

outputProgress({
operation: 'Core tables verification',
@@ -359,22 +373,12 @@ async function resetDatabase() {
);
}

// Verify all core tables use InnoDB
const [engineStatus] = await connection.query('SHOW TABLE STATUS WHERE Name IN (?)', [CORE_TABLES]);
const nonInnoDBTables = engineStatus.filter(t => t.Engine !== 'InnoDB');

if (nonInnoDBTables.length > 0) {
throw new Error(
`Tables using non-InnoDB engine: ${nonInnoDBTables.map(t => t.Name).join(', ')}`
);
}

outputProgress({
operation: 'Core tables created',
message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
});

// Read and execute config schema
// Now read and execute config schema (since core tables exist)
outputProgress({
operation: 'Running config setup',
message: 'Creating configuration tables...'
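The removed MySQL check relied on mysql2 expanding an array into IN (?). The pg equivalent is to pass the whole array as one parameter and compare with = ANY($1). A self-contained sketch of a bulk missing-tables check (the helper name is illustrative, not from the commit):

const { Client } = require('pg');

// Sketch: pass a JS array as a single parameter and match with ANY.
async function findMissingTables(dbConfig, expected) {
  const client = new Client(dbConfig);
  await client.connect();
  try {
    const { rows } = await client.query(
      `SELECT table_name
       FROM information_schema.tables
       WHERE table_schema = 'public' AND table_name = ANY($1::text[])`,
      [expected]
    );
    const found = rows.map(r => r.table_name);
    return expected.filter(t => !found.includes(t));
  } finally {
    await client.end();
  }
}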
@@ -400,18 +404,7 @@ async function resetDatabase() {
for (let i = 0; i < configStatements.length; i++) {
const stmt = configStatements[i];
try {
const [result, fields] = await connection.query(stmt);

// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'Config SQL Warning',
statement: i + 1,
warnings: warnings
});
}
const result = await client.query(stmt);

outputProgress({
operation: 'Config SQL Progress',
@@ -419,7 +412,7 @@ async function resetDatabase() {
statement: i + 1,
total: configStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
affectedRows: result.affectedRows
rowCount: result.rowCount
}
});
} catch (sqlError) {
@@ -427,8 +420,6 @@ async function resetDatabase() {
status: 'error',
operation: 'Config SQL Error',
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
});
@@ -436,33 +427,6 @@ async function resetDatabase() {
}
}

// Verify config tables were created
const [showConfigTables] = await connection.query('SHOW TABLES');
const existingConfigTables = showConfigTables.map(t => Object.values(t)[0]);

outputProgress({
operation: 'Config tables verification',
message: {
found: existingConfigTables,
expected: CONFIG_TABLES
}
});

const missingConfigTables = CONFIG_TABLES.filter(
t => !existingConfigTables.includes(t)
);

if (missingConfigTables.length > 0) {
throw new Error(
`Failed to create config tables: ${missingConfigTables.join(', ')}`
);
}

outputProgress({
operation: 'Config tables created',
message: `Successfully created tables: ${CONFIG_TABLES.join(', ')}`
});

// Read and execute metrics schema (metrics tables)
outputProgress({
operation: 'Running metrics setup',
@@ -489,18 +453,7 @@ async function resetDatabase() {
for (let i = 0; i < metricsStatements.length; i++) {
const stmt = metricsStatements[i];
try {
const [result, fields] = await connection.query(stmt);

// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'Metrics SQL Warning',
statement: i + 1,
warnings: warnings
});
}
const result = await client.query(stmt);

outputProgress({
operation: 'Metrics SQL Progress',
@@ -508,7 +461,7 @@ async function resetDatabase() {
statement: i + 1,
total: metricsStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
affectedRows: result.affectedRows
rowCount: result.rowCount
}
});
} catch (sqlError) {
@@ -516,8 +469,6 @@ async function resetDatabase() {
status: 'error',
operation: 'Metrics SQL Error',
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
});
@@ -539,7 +490,7 @@ async function resetDatabase() {
});
process.exit(1);
} finally {
await connection.end();
await client.end();
}
}