Fix up reset scripts again

This commit is contained in:
2025-01-12 00:44:53 -05:00
parent e48911ae24
commit 03ad15c731
4 changed files with 472 additions and 103 deletions

View File

@@ -85,10 +85,6 @@ CREATE TABLE IF NOT EXISTS vendor_metrics (
-- Re-enable foreign key checks -- Re-enable foreign key checks
SET FOREIGN_KEY_CHECKS = 1; SET FOREIGN_KEY_CHECKS = 1;
-- Create optimized indexes for metrics calculations
CREATE INDEX idx_orders_metrics ON orders (product_id, date, canceled, quantity, price);
CREATE INDEX idx_purchase_orders_metrics ON purchase_orders (product_id, date, status, ordered, received);
-- Create view for inventory health (after all tables are created) -- Create view for inventory health (after all tables are created)
CREATE OR REPLACE VIEW inventory_health AS CREATE OR REPLACE VIEW inventory_health AS
SELECT SELECT

View File

@@ -1,5 +1,9 @@
-- Create tables if they don't exist -- Enable strict error reporting
CREATE TABLE IF NOT EXISTS products ( SET sql_mode = 'STRICT_ALL_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ZERO_DATE,NO_ZERO_IN_DATE,NO_ENGINE_SUBSTITUTION';
SET FOREIGN_KEY_CHECKS = 0;
-- Create tables
CREATE TABLE products (
product_id BIGINT NOT NULL, product_id BIGINT NOT NULL,
title VARCHAR(255) NOT NULL, title VARCHAR(255) NOT NULL,
SKU VARCHAR(50) NOT NULL, SKU VARCHAR(50) NOT NULL,
@@ -28,9 +32,29 @@ CREATE TABLE IF NOT EXISTS products (
UNIQUE KEY unique_sku (SKU), UNIQUE KEY unique_sku (SKU),
INDEX idx_vendor (vendor), INDEX idx_vendor (vendor),
INDEX idx_brand (brand) INDEX idx_brand (brand)
); ) ENGINE=InnoDB;
CREATE TABLE IF NOT EXISTS orders ( -- Create categories table first (referenced by product_categories)
CREATE TABLE categories (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(100) NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE KEY unique_name (name)
) ENGINE=InnoDB;
-- Create product_categories junction table
CREATE TABLE product_categories (
product_id BIGINT NOT NULL,
category_id BIGINT NOT NULL,
PRIMARY KEY (product_id, category_id),
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
INDEX idx_category (category_id),
INDEX idx_product (product_id)
) ENGINE=InnoDB;
-- Create orders table with its indexes
CREATE TABLE orders (
id BIGINT AUTO_INCREMENT PRIMARY KEY, id BIGINT AUTO_INCREMENT PRIMARY KEY,
order_number VARCHAR(50) NOT NULL, order_number VARCHAR(50) NOT NULL,
product_id BIGINT NOT NULL, product_id BIGINT NOT NULL,
@@ -55,10 +79,12 @@ CREATE TABLE IF NOT EXISTS orders (
INDEX idx_customer (customer), INDEX idx_customer (customer),
INDEX idx_date (date), INDEX idx_date (date),
INDEX idx_status (status), INDEX idx_status (status),
INDEX idx_orders_metrics (product_id, date, canceled, quantity, price),
UNIQUE KEY unique_order_product (order_number, product_id) UNIQUE KEY unique_order_product (order_number, product_id)
); ) ENGINE=InnoDB;
CREATE TABLE IF NOT EXISTS purchase_orders ( -- Create purchase_orders table with its indexes
CREATE TABLE purchase_orders (
id BIGINT AUTO_INCREMENT PRIMARY KEY, id BIGINT AUTO_INCREMENT PRIMARY KEY,
po_id VARCHAR(50) NOT NULL, po_id VARCHAR(50) NOT NULL,
vendor VARCHAR(100) NOT NULL, vendor VARCHAR(100) NOT NULL,
@@ -77,27 +103,11 @@ CREATE TABLE IF NOT EXISTS purchase_orders (
INDEX idx_po_id (po_id), INDEX idx_po_id (po_id),
INDEX idx_vendor (vendor), INDEX idx_vendor (vendor),
INDEX idx_status (status), INDEX idx_status (status),
INDEX idx_purchase_orders_metrics (product_id, date, status, ordered, received),
UNIQUE KEY unique_po_product (po_id, product_id) UNIQUE KEY unique_po_product (po_id, product_id)
); ) ENGINE=InnoDB;
-- Create categories table SET FOREIGN_KEY_CHECKS = 1;
CREATE TABLE IF NOT EXISTS categories (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(100) NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE KEY unique_name (name)
);
-- Create product_categories junction table
CREATE TABLE IF NOT EXISTS product_categories (
product_id BIGINT NOT NULL,
category_id BIGINT NOT NULL,
PRIMARY KEY (product_id, category_id),
FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
INDEX idx_category (category_id),
INDEX idx_product (product_id)
);
-- Create views for common calculations -- Create views for common calculations
CREATE OR REPLACE VIEW product_sales_trends AS CREATE OR REPLACE VIEW product_sales_trends AS

View File

@@ -24,27 +24,128 @@ function outputProgress(data) {
console.log(JSON.stringify(data)); console.log(JSON.stringify(data));
} }
// Names of the core tables the schema is required to create.
const CORE_TABLES = ['products', 'orders', 'purchase_orders', 'categories', 'product_categories'];
// Split a SQL script into individual executable statements.
//
// Walks the script character by character, tracking single- and
// double-quoted string literals so that semicolons, `--` and `/* */`
// sequences inside literals are not misinterpreted. `--` line comments
// and `/* */` block comments outside literals are stripped.
//
// Fix: the original dropped skipped comments (and the newline ending a
// line comment) without leaving any separator, which glued adjacent
// tokens together ("SELECT a--c\nFROM t" became "SELECT aFROM t").
// A newline (line comment) or space (block comment) is now emitted in
// place of each skipped comment so the surrounding SQL stays valid.
//
// @param {string} sql - Full SQL script text (may contain CRLF endings).
// @returns {string[]} Trimmed, non-empty statements without trailing ';'.
function splitSQLStatements(sql) {
  // Normalize Windows line endings so comment handling only sees '\n'.
  const text = sql.replace(/\r\n/g, '\n');
  const statements = [];
  let current = '';
  let inString = false;
  let quoteChar = '';
  for (let i = 0; i < text.length; i++) {
    const ch = text[i];
    const next = text[i + 1] || '';
    // Toggle string-literal state on unescaped quotes; remember which
    // quote character opened the literal so the other kind can nest.
    if ((ch === "'" || ch === '"') && text[i - 1] !== '\\') {
      if (!inString) {
        inString = true;
        quoteChar = ch;
      } else if (ch === quoteChar) {
        inString = false;
      }
    }
    // Line comment: skip to end of line, keep the newline as a separator.
    if (!inString && ch === '-' && next === '-') {
      while (i < text.length && text[i] !== '\n') i++;
      current += '\n';
      continue;
    }
    // Block comment: skip past the closing */, keep a space separator.
    if (!inString && ch === '/' && next === '*') {
      i += 2;
      while (i < text.length && (text[i] !== '*' || text[i + 1] !== '/')) i++;
      i++; // land on the closing '/'; the loop increment moves past it
      current += ' ';
      continue;
    }
    // An unquoted semicolon ends the current statement.
    if (!inString && ch === ';') {
      if (current.trim()) {
        statements.push(current.trim());
      }
      current = '';
    } else {
      current += ch;
    }
  }
  // Flush a trailing statement that has no terminating semicolon.
  if (current.trim()) {
    statements.push(current.trim());
  }
  return statements;
}
async function resetDatabase() { async function resetDatabase() {
outputProgress({ outputProgress({
operation: 'Starting database reset', operation: 'Starting database reset',
message: 'Connecting to database...' message: 'Connecting to database...'
}); });
// Debug: Log current directory and file paths
outputProgress({
operation: 'Debug paths',
message: {
currentDir: process.cwd(),
__dirname: __dirname,
schemaPath: path.join(__dirname, '../db/schema.sql')
}
});
const connection = await mysql.createConnection(dbConfig); const connection = await mysql.createConnection(dbConfig);
try { try {
// Get list of all tables efficiently // Check MySQL privileges
outputProgress({
operation: 'Checking privileges',
message: 'Verifying MySQL user privileges...'
});
const [grants] = await connection.query('SHOW GRANTS');
outputProgress({
operation: 'User privileges',
message: {
grants: grants.map(g => Object.values(g)[0])
}
});
// Enable warnings as errors
await connection.query('SET SESSION sql_notes = 1');
// Log database config (without sensitive info)
outputProgress({
operation: 'Database config',
message: `Using database: ${dbConfig.database} on host: ${dbConfig.host}`
});
// Get list of all tables in the current database
outputProgress({ outputProgress({
operation: 'Getting table list', operation: 'Getting table list',
message: 'Retrieving all table names...' message: 'Retrieving all table names...'
}); });
// More efficient query to get table names
const [tables] = await connection.query(` const [tables] = await connection.query(`
SELECT GROUP_CONCAT(table_name) as tables SELECT GROUP_CONCAT(table_name) as tables
FROM information_schema.tables FROM information_schema.tables
WHERE table_schema = DATABASE()` WHERE table_schema = DATABASE()
); `);
if (!tables[0].tables) { if (!tables[0].tables) {
outputProgress({ outputProgress({
@@ -52,38 +153,275 @@ async function resetDatabase() {
message: 'Database is already empty' message: 'Database is already empty'
}); });
} else { } else {
// Disable foreign key checks and drop all tables in one query
outputProgress({ outputProgress({
operation: 'Dropping tables', operation: 'Dropping tables',
message: 'Dropping all tables...' message: 'Dropping all existing tables...'
}); });
await connection.query('SET FOREIGN_KEY_CHECKS = 0'); await connection.query('SET FOREIGN_KEY_CHECKS = 0');
const dropQuery = `
// Create DROP TABLE statements for all tables at once DROP TABLE IF EXISTS
const dropQuery = `DROP TABLE IF EXISTS ${tables[0].tables.split(',').map(table => '`' + table + '`').join(', ')}`; ${tables[0].tables
.split(',')
.map(table => '`' + table + '`')
.join(', ')}
`;
await connection.query(dropQuery); await connection.query(dropQuery);
await connection.query('SET FOREIGN_KEY_CHECKS = 1'); await connection.query('SET FOREIGN_KEY_CHECKS = 1');
} }
// Read and execute main schema // Read and execute main schema (core tables)
outputProgress({ outputProgress({
operation: 'Running database setup', operation: 'Running database setup',
message: 'Creating core tables...' message: 'Creating core tables...'
}); });
const schemaPath = path.join(__dirname, '../db/schema.sql');
// Verify file exists
if (!fs.existsSync(schemaPath)) {
throw new Error(`Schema file not found at: ${schemaPath}`);
}
const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8'); const schemaSQL = fs.readFileSync(schemaPath, 'utf8');
await connection.query(schemaSQL);
outputProgress({
operation: 'Schema file',
message: {
path: schemaPath,
exists: fs.existsSync(schemaPath),
size: fs.statSync(schemaPath).size,
firstFewLines: schemaSQL.split('\n').slice(0, 5).join('\n')
}
});
// Read and execute metrics schema // Execute schema statements one at a time
const statements = splitSQLStatements(schemaSQL);
outputProgress({
operation: 'SQL Execution',
message: {
totalStatements: statements.length,
statements: statements.map((stmt, i) => ({
number: i + 1,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
}))
}
});
for (let i = 0; i < statements.length; i++) {
const stmt = statements[i];
try {
const [result, fields] = await connection.query(stmt);
// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'SQL Warning',
statement: i + 1,
warnings: warnings
});
}
// Verify if table was created (if this was a CREATE TABLE statement)
if (stmt.trim().toLowerCase().startsWith('create table')) {
const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1];
if (tableName) {
const [tableExists] = await connection.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = ?
`, [tableName]);
outputProgress({
operation: 'Table Creation Verification',
message: {
table: tableName,
exists: tableExists[0].count > 0
}
});
}
}
outputProgress({
operation: 'SQL Progress',
message: {
statement: i + 1,
total: statements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
affectedRows: result.affectedRows
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'SQL Error',
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
}
// List all tables in the database after schema execution
outputProgress({
operation: 'Debug database',
message: {
currentDatabase: (await connection.query('SELECT DATABASE() as db'))[0][0].db
}
});
const [allTables] = await connection.query(`
SELECT
table_schema,
table_name,
engine,
create_time,
table_rows
FROM information_schema.tables
WHERE table_schema = DATABASE()
`);
if (allTables.length === 0) {
outputProgress({
operation: 'Warning',
message: 'No tables found in database after schema execution'
});
} else {
outputProgress({
operation: 'Tables after schema execution',
message: {
count: allTables.length,
tables: allTables.map(t => ({
schema: t.table_schema,
name: t.table_name,
engine: t.engine,
created: t.create_time,
rows: t.table_rows
}))
}
});
}
// Also check table status
const [tableStatus] = await connection.query('SHOW TABLE STATUS');
outputProgress({
operation: 'Table Status',
message: {
tables: tableStatus.map(t => ({
name: t.Name,
engine: t.Engine,
version: t.Version,
rowFormat: t.Row_format,
rows: t.Rows,
createTime: t.Create_time,
updateTime: t.Update_time
}))
}
});
// Verify core tables were created using SHOW TABLES
const [showTables] = await connection.query('SHOW TABLES');
const existingTables = showTables.map(t => Object.values(t)[0]);
outputProgress({
operation: 'Core tables verification',
message: {
found: existingTables,
expected: CORE_TABLES
}
});
const missingCoreTables = CORE_TABLES.filter(
t => !existingTables.includes(t)
);
if (missingCoreTables.length > 0) {
throw new Error(
`Failed to create core tables: ${missingCoreTables.join(', ')}`
);
}
// Verify all core tables use InnoDB
const [engineStatus] = await connection.query('SHOW TABLE STATUS WHERE Name IN (?)', [CORE_TABLES]);
const nonInnoDBTables = engineStatus.filter(t => t.Engine !== 'InnoDB');
if (nonInnoDBTables.length > 0) {
throw new Error(
`Tables using non-InnoDB engine: ${nonInnoDBTables.map(t => t.Name).join(', ')}`
);
}
outputProgress({
operation: 'Core tables created',
message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
});
// Read and execute metrics schema (metrics tables)
outputProgress({ outputProgress({
operation: 'Running metrics setup', operation: 'Running metrics setup',
message: 'Creating metrics tables...' message: 'Creating metrics tables...'
}); });
const metricsSchemaSQL = fs.readFileSync(
path.join(__dirname, '../db/metrics-schema.sql'),
'utf8'
);
// Execute metrics schema statements one at a time
const metricsStatements = splitSQLStatements(metricsSchemaSQL);
outputProgress({
operation: 'Metrics SQL Execution',
message: {
totalStatements: metricsStatements.length,
statements: metricsStatements.map((stmt, i) => ({
number: i + 1,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
}))
}
});
const metricsSchemaSQL = fs.readFileSync(path.join(__dirname, '../db/metrics-schema.sql'), 'utf8'); for (let i = 0; i < metricsStatements.length; i++) {
await connection.query(metricsSchemaSQL); const stmt = metricsStatements[i];
try {
const [result, fields] = await connection.query(stmt);
// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'Metrics SQL Warning',
statement: i + 1,
warnings: warnings
});
}
outputProgress({
operation: 'Metrics SQL Progress',
message: {
statement: i + 1,
total: metricsStatements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
affectedRows: result.affectedRows
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'Metrics SQL Error',
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
});
throw sqlError;
}
}
outputProgress({ outputProgress({
status: 'complete', status: 'complete',
@@ -93,7 +431,9 @@ async function resetDatabase() {
} catch (error) { } catch (error) {
outputProgress({ outputProgress({
status: 'error', status: 'error',
error: error.message operation: 'Failed to reset database',
error: error.message,
stack: error.stack
}); });
process.exit(1); process.exit(1);
} finally { } finally {

View File

@@ -24,6 +24,13 @@ const METRICS_TABLES = [
'vendor_metrics' 'vendor_metrics'
]; ];
// Core tables that must already exist before metrics can be rebuilt.
const REQUIRED_CORE_TABLES = ['products', 'orders', 'purchase_orders'];
async function resetMetrics() { async function resetMetrics() {
outputProgress({ outputProgress({
status: 'running', status: 'running',
@@ -34,91 +41,107 @@ async function resetMetrics() {
const connection = await mysql.createConnection(dbConfig); const connection = await mysql.createConnection(dbConfig);
try { try {
// First verify that core tables exist
outputProgress({
status: 'running',
operation: 'Verifying core tables exist',
percentage: '10'
});
// Use SHOW TABLES to verify core tables exist
const [showTables] = await connection.query('SHOW TABLES');
const existingTables = showTables.map(t => Object.values(t)[0]);
outputProgress({
operation: 'Core tables verification',
message: {
found: existingTables,
required: REQUIRED_CORE_TABLES
}
});
// Check if any core tables are missing
const missingCoreTables = REQUIRED_CORE_TABLES.filter(
t => !existingTables.includes(t)
);
if (missingCoreTables.length > 0) {
throw new Error(
`Core tables missing: ${missingCoreTables.join(', ')}. Please run reset-db.js first.`
);
}
// Verify all core tables use InnoDB
const [engineStatus] = await connection.query('SHOW TABLE STATUS WHERE Name IN (?)', [REQUIRED_CORE_TABLES]);
const nonInnoDBTables = engineStatus.filter(t => t.Engine !== 'InnoDB');
if (nonInnoDBTables.length > 0) {
throw new Error(
`Tables using non-InnoDB engine: ${nonInnoDBTables.map(t => t.Name).join(', ')}`
);
}
// Disable foreign key checks first // Disable foreign key checks first
await connection.query('SET FOREIGN_KEY_CHECKS = 0'); await connection.query('SET FOREIGN_KEY_CHECKS = 0');
// Get list of existing metrics tables // Drop only the metrics tables if they exist
const [tables] = await connection.query(` const [existing] = await connection.query(`
SELECT GROUP_CONCAT(table_name) as tables SELECT GROUP_CONCAT(table_name) as tables
FROM information_schema.tables FROM information_schema.tables
WHERE table_schema = DATABASE() WHERE table_schema = DATABASE()
AND table_name IN (${METRICS_TABLES.map(table => `'${table}'`).join(',')})` AND table_name IN (${METRICS_TABLES.map(table => `'${table}'`).join(',')})
); `);
if (tables[0].tables) { if (existing[0].tables) {
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Dropping existing metrics tables', operation: 'Dropping existing metrics tables',
percentage: '20' percentage: '20'
}); });
const dropQuery = `
// Drop all existing metrics tables in one query DROP TABLE IF EXISTS
const dropQuery = `DROP TABLE IF EXISTS ${tables[0].tables.split(',').map(table => '`' + table + '`').join(', ')}`; ${existing[0].tables
.split(',')
.map(table => '`' + table + '`')
.join(', ')}
`;
await connection.query(dropQuery); await connection.query(dropQuery);
} }
// Read metrics schema // Read metrics schema in its entirety
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Creating metrics tables', operation: 'Creating metrics tables',
percentage: '40' percentage: '40'
}); });
const schemaPath = path.join(__dirname, '../db/metrics-schema.sql'); const schemaPath = path.join(__dirname, '../db/metrics-schema.sql');
const schemaSQL = fs.readFileSync(schemaPath, 'utf8'); const schemaSQL = fs.readFileSync(schemaPath, 'utf8');
// Split schema into parts
const parts = schemaSQL.split('-- Create optimized indexes');
const tableSchema = parts[0];
// Execute table creation first
await connection.query(tableSchema);
// Verify all tables were created // Run the entire metrics-schema so it creates
// the metrics tables and indexes in one shot
await connection.query(schemaSQL);
// Verify all tables were actually created using SHOW TABLES
const [verifyTables] = await connection.query('SHOW TABLES');
const tablesAfterCreation = verifyTables.map(t => Object.values(t)[0]);
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Verifying tables', operation: 'Verifying metrics tables',
percentage: '60' message: {
found: tablesAfterCreation,
required: METRICS_TABLES
}
}); });
const [verifyTables] = await connection.query(` const missingTables = METRICS_TABLES.filter(
SELECT table_name t => !tablesAfterCreation.includes(t)
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name IN (${METRICS_TABLES.map(table => `'${table}'`).join(',')})`
);
const missingTables = METRICS_TABLES.filter(table =>
!verifyTables.find(t => t.table_name === table)
); );
if (missingTables.length > 0) { if (missingTables.length > 0) {
throw new Error(`Failed to create tables: ${missingTables.join(', ')}`); throw new Error(
} `Failed to create tables: ${missingTables.join(', ')}`
);
// Create indexes
outputProgress({
status: 'running',
operation: 'Creating indexes',
percentage: '80'
});
// Drop existing indexes if they exist
try {
await connection.query('DROP INDEX IF EXISTS idx_orders_metrics ON orders');
await connection.query('DROP INDEX IF EXISTS idx_purchase_orders_metrics ON purchase_orders');
} catch (err) {
// Ignore any errors dropping indexes
console.warn('Warning dropping indexes:', err.message);
}
// Create new indexes
try {
await connection.query('CREATE INDEX idx_orders_metrics ON orders (product_id, date, canceled, quantity, price)');
await connection.query('CREATE INDEX idx_purchase_orders_metrics ON purchase_orders (product_id, date, status, ordered, received)');
} catch (err) {
// Log index creation errors but don't fail
console.warn('Warning creating indexes:', err.message);
} }
// Re-enable foreign key checks // Re-enable foreign key checks
@@ -144,12 +167,12 @@ async function resetMetrics() {
} }
} }
// Export the function if being required as a module // Export if required as a module
if (typeof module !== 'undefined' && module.exports) { if (typeof module !== 'undefined' && module.exports) {
module.exports = resetMetrics; module.exports = resetMetrics;
} }
// Run directly if called from command line // Run if called from command line
if (require.main === module) { if (require.main === module) {
resetMetrics().catch(error => { resetMetrics().catch(error => {
console.error('Error:', error); console.error('Error:', error);