diff --git a/ecosystem.config.js b/ecosystem.config.js index 61cd2aa..1f4881d 100644 --- a/ecosystem.config.js +++ b/ecosystem.config.js @@ -1,5 +1,209 @@ +// ecosystem.config.js +const path = require('path'); +const dotenv = require('dotenv'); + +// Load environment variables safely with error handling +const loadEnvFile = (envPath) => { + try { + console.log('Loading env from:', envPath); + const result = dotenv.config({ path: envPath }); + if (result.error) { + console.warn(`Warning: .env file not found or invalid at ${envPath}:`, result.error.message); + return {}; + } + console.log('Env variables loaded from', envPath, ':', Object.keys(result.parsed || {})); + return result.parsed || {}; + } catch (error) { + console.warn(`Warning: Error loading .env file at ${envPath}:`, error.message); + return {}; + } +} + +// Load environment variables for each server +const authEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/auth-server/.env')); +const aircallEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/aircall-server/.env')); +const klaviyoEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/klaviyo-server/.env')); +const metaEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/meta-server/.env')); +const googleAnalyticsEnv = require('dotenv').config({ + path: path.resolve(__dirname, 'dashboard/google-server/.env') +}).parsed || {}; +const typeformEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/typeform-server/.env')); +const inventoryEnv = loadEnvFile(path.resolve(__dirname, 'inventory/.env')); + +// Common log settings for all apps +const logSettings = { + log_rotate: true, + max_size: '10M', + retain: '10', + log_date_format: 'YYYY-MM-DD HH:mm:ss' +}; + +// Common app settings +const commonSettings = { + instances: 1, + exec_mode: 'fork', + autorestart: true, + watch: false, + max_memory_restart: '1G', + time: true, + ...logSettings, + ignore_watch: [ + 'node_modules', + 'logs', + '.git', + '*.log' + ], + min_uptime: 5000, + max_restarts: 5, + restart_delay: 4000, + listen_timeout: 50000, + kill_timeout: 5000, + node_args: '--max-old-space-size=1536' +}; + module.exports = { apps: [ + { + ...commonSettings, + name: 'auth-server', + script: './dashboard/auth-server/index.js', + env: { + NODE_ENV: 'production', + PORT: 3003, + ...authEnv + }, + error_file: 'dashboard/auth-server/logs/pm2/err.log', + out_file: 'dashboard/auth-server/logs/pm2/out.log', + log_file: 'dashboard/auth-server/logs/pm2/combined.log', + env_production: { + NODE_ENV: 'production', + PORT: 3003 + }, + env_development: { + NODE_ENV: 'development', + PORT: 3003 + } + }, + { + ...commonSettings, + name: 'aircall-server', + script: './dashboard/aircall-server/server.js', + env: { + NODE_ENV: 'production', + AIRCALL_PORT: 3002, + ...aircallEnv + }, + error_file: 'dashboard/aircall-server/logs/pm2/err.log', + out_file: 'dashboard/aircall-server/logs/pm2/out.log', + log_file: 'dashboard/aircall-server/logs/pm2/combined.log', + env_production: { + NODE_ENV: 'production', + AIRCALL_PORT: 3002 + } + }, + { + ...commonSettings, + name: 'klaviyo-server', + script: './dashboard/klaviyo-server/server.js', + env: { + NODE_ENV: 'production', + KLAVIYO_PORT: 3004, + ...klaviyoEnv + }, + error_file: 'dashboard/klaviyo-server/logs/pm2/err.log', + out_file: 'dashboard/klaviyo-server/logs/pm2/out.log', + log_file: 'dashboard/klaviyo-server/logs/pm2/combined.log', + env_production: { + NODE_ENV: 'production', + KLAVIYO_PORT: 3004 + } + }, + { + ...commonSettings, + name: 'meta-server', + script: 
'./dashboard/meta-server/server.js', + env: { + NODE_ENV: 'production', + PORT: 3005, + ...metaEnv + }, + error_file: 'dashboard/meta-server/logs/pm2/err.log', + out_file: 'dashboard/meta-server/logs/pm2/out.log', + log_file: 'dashboard/meta-server/logs/pm2/combined.log', + env_production: { + NODE_ENV: 'production', + PORT: 3005 + } + }, + { + name: "gorgias-server", + script: "./dashboard/gorgias-server/server.js", + env: { + NODE_ENV: "development", + PORT: 3006 + }, + env_production: { + NODE_ENV: "production", + PORT: 3006 + }, + error_file: "dashboard/logs/gorgias-server-error.log", + out_file: "dashboard/logs/gorgias-server-out.log", + log_file: "dashboard/logs/gorgias-server-combined.log", + time: true + }, + { + ...commonSettings, + name: 'google-server', + script: path.resolve(__dirname, 'dashboard/google-server/server.js'), + watch: false, + env: { + NODE_ENV: 'production', + GOOGLE_ANALYTICS_PORT: 3007, + ...googleAnalyticsEnv + }, + error_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/err.log'), + out_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/out.log'), + log_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/combined.log'), + env_production: { + NODE_ENV: 'production', + GOOGLE_ANALYTICS_PORT: 3007 + } + }, + { + ...commonSettings, + name: 'typeform-server', + script: './dashboard/typeform-server/server.js', + env: { + NODE_ENV: 'production', + TYPEFORM_PORT: 3008, + ...typeformEnv + }, + error_file: 'dashboard/typeform-server/logs/pm2/err.log', + out_file: 'dashboard/typeform-server/logs/pm2/out.log', + log_file: 'dashboard/typeform-server/logs/pm2/combined.log', + env_production: { + NODE_ENV: 'production', + TYPEFORM_PORT: 3008 + } + }, + { + ...commonSettings, + name: 'inventory-server', + script: './inventory/src/server.js', + env: { + NODE_ENV: 'production', + PORT: 3010, + ...inventoryEnv + }, + error_file: 'inventory/logs/pm2/err.log', + out_file: 'inventory/logs/pm2/out.log', + log_file: 'inventory/logs/pm2/combined.log', + env_production: { + NODE_ENV: 'production', + PORT: 3010, + ...inventoryEnv + } + }, { ...commonSettings, name: 'new-auth-server', @@ -7,16 +211,12 @@ module.exports = { env: { NODE_ENV: 'production', AUTH_PORT: 3011, + ...inventoryEnv, JWT_SECRET: process.env.JWT_SECRET }, error_file: 'inventory-server/auth/logs/pm2/err.log', out_file: 'inventory-server/auth/logs/pm2/out.log', - log_file: 'inventory-server/auth/logs/pm2/combined.log', - env_production: { - NODE_ENV: 'production', - AUTH_PORT: 3011, - JWT_SECRET: process.env.JWT_SECRET - } + log_file: 'inventory-server/auth/logs/pm2/combined.log' } ] -}; \ No newline at end of file +}; diff --git a/inventory-server/auth/add-user.js b/inventory-server/auth/add-user.js new file mode 100644 index 0000000..d6857bc --- /dev/null +++ b/inventory-server/auth/add-user.js @@ -0,0 +1,103 @@ +require('dotenv').config({ path: '../.env' }); +const bcrypt = require('bcrypt'); +const { Pool } = require('pg'); +const inquirer = require('inquirer'); + +// Log connection details for debugging (remove in production) +console.log('Attempting to connect with:', { + host: process.env.DB_HOST, + user: process.env.DB_USER, + database: process.env.DB_NAME, + port: process.env.DB_PORT +}); + +const pool = new Pool({ + host: process.env.DB_HOST, + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + port: process.env.DB_PORT, +}); + +async function promptUser() { + const questions = [ + { + type: 'input', + name: 'username', + 
message: 'Enter username:', + validate: (input) => { + if (input.length < 3) { + return 'Username must be at least 3 characters long'; + } + return true; + } + }, + { + type: 'password', + name: 'password', + message: 'Enter password:', + mask: '*', + validate: (input) => { + if (input.length < 8) { + return 'Password must be at least 8 characters long'; + } + return true; + } + }, + { + type: 'password', + name: 'confirmPassword', + message: 'Confirm password:', + mask: '*', + validate: (input, answers) => { + if (input !== answers.password) { + return 'Passwords do not match'; + } + return true; + } + } + ]; + + return inquirer.prompt(questions); +} + +async function addUser() { + try { + // Get user input + const answers = await promptUser(); + const { username, password } = answers; + + // Hash password + const saltRounds = 10; + const hashedPassword = await bcrypt.hash(password, saltRounds); + + // Check if user already exists + const checkResult = await pool.query( + 'SELECT id FROM users WHERE username = $1', + [username] + ); + + if (checkResult.rows.length > 0) { + console.error('Error: Username already exists'); + process.exit(1); + } + + // Insert new user + const result = await pool.query( + 'INSERT INTO users (username, password) VALUES ($1, $2) RETURNING id', + [username, hashedPassword] + ); + + console.log(`User ${username} created successfully with id ${result.rows[0].id}`); + } catch (error) { + console.error('Error creating user:', error); + console.error('Error details:', error.message); + if (error.code) { + console.error('Error code:', error.code); + } + } finally { + await pool.end(); + } +} + +addUser(); \ No newline at end of file diff --git a/inventory-server/auth/add_user.js b/inventory-server/auth/add_user.js deleted file mode 100644 index 113c332..0000000 --- a/inventory-server/auth/add_user.js +++ /dev/null @@ -1,41 +0,0 @@ -const bcrypt = require('bcrypt'); -const mysql = require('mysql2/promise'); -const readline = require('readline').createInterface({ - input: process.stdin, - output: process.stdout, -}); -require('dotenv').config({ path: '../.env' }); - -const dbConfig = { - host: process.env.DB_HOST, - user: process.env.DB_USER, - password: process.env.DB_PASSWORD, - database: process.env.DB_NAME, -}; - -async function addUser() { - const username = await askQuestion('Enter username: '); - const password = await askQuestion('Enter password: '); - - const hashedPassword = await bcrypt.hash(password, 10); - - const connection = await mysql.createConnection(dbConfig); - - try { - await connection.query('INSERT INTO users (username, password) VALUES (?, ?)', [username, hashedPassword]); - console.log(`User ${username} added successfully.`); - } catch (error) { - console.error('Error adding user:', error); - } finally { - connection.end(); - readline.close(); - } -} - -function askQuestion(query) { - return new Promise(resolve => readline.question(query, ans => { - resolve(ans); - })); -} - -addUser(); \ No newline at end of file diff --git a/inventory-server/auth/package-lock.json b/inventory-server/auth/package-lock.json index 1088a11..f104d88 100644 --- a/inventory-server/auth/package-lock.json +++ b/inventory-server/auth/package-lock.json @@ -1,21 +1,21 @@ { - "name": "auth-server", + "name": "inventory-auth-server", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "auth-server", + "name": "inventory-auth-server", "version": "1.0.0", "dependencies": { "bcrypt": "^5.1.1", "cors": "^2.8.5", - "dotenv": "^16.4.5", + 
"dotenv": "^16.4.7", "express": "^4.18.2", - "jsonwebtoken": "^9.0.2" - }, - "devDependencies": { - "nodemon": "^3.1.0" + "inquirer": "^8.2.6", + "jsonwebtoken": "^9.0.2", + "morgan": "^1.10.0", + "pg": "^8.11.3" } }, "node_modules/@mapbox/node-pre-gyp": { @@ -92,6 +92,21 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "license": "MIT" }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -101,18 +116,19 @@ "node": ">=8" } }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "license": "ISC", + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" + "color-convert": "^2.0.1" }, "engines": { - "node": ">= 8" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/aproba": { @@ -147,6 +163,44 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/basic-auth/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, "node_modules/bcrypt": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", @@ -161,17 +215,15 @@ "node": ">= 10.0.0" } }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": 
true, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" } }, "node_modules/body-parser": { @@ -208,17 +260,28 @@ "concat-map": "0.0.1" } }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], "license": "MIT", "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" } }, "node_modules/buffer-equal-constant-time": { @@ -265,31 +328,28 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "license": "MIT", "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">= 8.10.0" + "node": ">=10" }, "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" + "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "license": "MIT" + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -299,6 +359,66 @@ "node": ">=10" } }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "license": "MIT", + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "license": "MIT", + "engines": { + 
"node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "license": "ISC", + "engines": { + "node": ">= 10" + } + }, + "node_modules/clone": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", + "integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, "node_modules/color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", @@ -378,6 +498,18 @@ "ms": "2.0.0" } }, + "node_modules/defaults": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", + "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", + "license": "MIT", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", @@ -504,6 +636,15 @@ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", "license": "MIT" }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", @@ -559,17 +700,33 @@ "url": "https://opencollective.com/express" } }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, + "node_modules/external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", "license": "MIT", "dependencies": { - "to-regex-range": "^5.0.1" + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": 
"sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.5" }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/finalhandler": { @@ -638,21 +795,6 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "license": "ISC" }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -741,19 +883,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -767,13 +896,12 @@ } }, "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/has-symbols": { @@ -870,12 +998,25 @@ "node": ">=0.10.0" } }, - "node_modules/ignore-by-default": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", - "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==", - "dev": true, - "license": "ISC" + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" }, "node_modules/inflight": { "version": "1.0.6", @@ -894,6 +1035,32 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "license": "ISC" }, + "node_modules/inquirer": { + "version": "8.2.6", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.6.tgz", + "integrity": "sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==", + "license": "MIT", + "dependencies": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.1", + "cli-cursor": "^3.1.0", 
+ "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.21", + "mute-stream": "0.0.8", + "ora": "^5.4.1", + "run-async": "^2.4.0", + "rxjs": "^7.5.5", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6", + "wrap-ansi": "^6.0.1" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -903,29 +1070,6 @@ "node": ">= 0.10" } }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -935,27 +1079,25 @@ "node": ">=8" } }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, + "node_modules/is-interactive": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", + "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", "license": "MIT", "engines": { - "node": ">=0.12.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/jsonwebtoken": { @@ -1007,6 +1149,12 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "license": "MIT" + }, "node_modules/lodash.includes": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", @@ -1049,6 +1197,22 @@ "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", "license": "MIT" }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": 
"sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -1142,6 +1306,15 @@ "node": ">= 0.6" } }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -1200,12 +1373,46 @@ "node": ">=10" } }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "license": "MIT", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/morgan/node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "license": "MIT" }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "license": "ISC" + }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -1241,60 +1448,6 @@ } } }, - "node_modules/nodemon": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.9.tgz", - "integrity": "sha512-hdr1oIb2p6ZSxu3PB2JWWYS7ZQ0qvaZsc3hK8DR8f02kRzc8rjYmxAIvdz+aYC+8F2IjNaB7HMcSDg8nQpJxyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^3.5.2", - "debug": "^4", - "ignore-by-default": "^1.0.1", - "minimatch": "^3.1.2", - "pstree.remy": "^1.1.8", - "semver": "^7.5.3", - "simple-update-notifier": "^2.0.0", - "supports-color": "^5.5.0", - "touch": "^3.1.0", - "undefsafe": "^2.0.5" - }, - "bin": { - "nodemon": "bin/nodemon.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/nodemon" - } - }, - "node_modules/nodemon/node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { 
- "supports-color": { - "optional": true - } - } - }, - "node_modules/nodemon/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, "node_modules/nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", @@ -1310,16 +1463,6 @@ "node": ">=6" } }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/npmlog": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", @@ -1366,6 +1509,15 @@ "node": ">= 0.8" } }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -1375,6 +1527,53 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "license": "MIT", + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -1399,17 +1598,132 @@ "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", "license": "MIT" }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, + "node_modules/pg": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.13.3.tgz", + "integrity": "sha512-P6tPt9jXbL9HVu/SSRERNYaYG++MjnscnegFh9pPHihfoBSujsrka0hyuymMzeJKFWrcG8wvCKy8rCe8e5nDUQ==", + "license": "MIT", + 
"dependencies": { + "pg-connection-string": "^2.7.0", + "pg-pool": "^3.7.1", + "pg-protocol": "^1.7.1", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.1.1" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz", + "integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.7.0.tgz", + "integrity": "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.7.1.tgz", + "integrity": "sha512-xIOsFoh7Vdhojas6q3596mXFsR8nwBQBXX5JiV7p9buEVAGqYL4yFzclON5P9vFrpu1u7Zwl2oriyDa89n0wbw==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.7.1.tgz", + "integrity": "sha512-gjTHWGYWsEgy9MsY0Gp6ZJxV24IjDqdpTW7Eh0x+WfJLFsm/TJx1MzL6T0D88mBvkpxotCQ6TwW6N+Kko7lhgQ==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", "license": "MIT", "engines": { - "node": ">=8.6" + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", 
+ "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" + "engines": { + "node": ">=0.10.0" } }, "node_modules/proxy-addr": { @@ -1425,13 +1739,6 @@ "node": ">= 0.10" } }, - "node_modules/pstree.remy": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", - "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", - "dev": true, - "license": "MIT" - }, "node_modules/qs": { "version": "6.13.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", @@ -1485,17 +1792,17 @@ "node": ">= 6" } }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", "license": "MIT", "dependencies": { - "picomatch": "^2.2.1" + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" }, "engines": { - "node": ">=8.10.0" + "node": ">=8" } }, "node_modules/rimraf": { @@ -1514,6 +1821,24 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -1696,17 +2021,13 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "license": "ISC" }, - "node_modules/simple-update-notifier": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", - "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", "engines": { - "node": ">=10" + "node": ">= 10.x" } }, "node_modules/statuses": { @@ -1754,16 +2075,15 @@ } }, "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": 
"sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "license": "MIT", "dependencies": { - "has-flag": "^3.0.0" + "has-flag": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/tar": { @@ -1783,17 +2103,22 @@ "node": ">=10" } }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "license": "MIT" + }, + "node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", "license": "MIT", "dependencies": { - "is-number": "^7.0.0" + "os-tmpdir": "~1.0.2" }, "engines": { - "node": ">=8.0" + "node": ">=0.6.0" } }, "node_modules/toidentifier": { @@ -1805,22 +2130,30 @@ "node": ">=0.6" } }, - "node_modules/touch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", - "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", - "dev": true, - "license": "ISC", - "bin": { - "nodetouch": "bin/nodetouch.js" - } - }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", "license": "MIT" }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -1834,13 +2167,6 @@ "node": ">= 0.6" } }, - "node_modules/undefsafe": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", - "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", - "dev": true, - "license": "MIT" - }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -1874,6 +2200,15 @@ "node": ">= 0.8" } }, + "node_modules/wcwidth": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", + "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", 
+ "license": "MIT", + "dependencies": { + "defaults": "^1.0.3" + } + }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", @@ -1899,12 +2234,35 @@ "string-width": "^1.0.2 || 2 || 3 || 4" } }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/inventory-server/auth/package.json b/inventory-server/auth/package.json index 51cbb85..bbbc607 100644 --- a/inventory-server/auth/package.json +++ b/inventory-server/auth/package.json @@ -1,21 +1,19 @@ { - "name": "auth-server", + "name": "inventory-auth-server", "version": "1.0.0", - "description": "Authentication server for inventory management", + "description": "Authentication server for inventory management system", "main": "server.js", "scripts": { - "start": "node server.js", - "dev": "nodemon server.js", - "add_user": "node add_user.js" + "start": "node server.js" }, "dependencies": { "bcrypt": "^5.1.1", "cors": "^2.8.5", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "express": "^4.18.2", - "jsonwebtoken": "^9.0.2" - }, - "devDependencies": { - "nodemon": "^3.1.0" + "inquirer": "^8.2.6", + "jsonwebtoken": "^9.0.2", + "morgan": "^1.10.0", + "pg": "^8.11.3" } -} \ No newline at end of file +} diff --git a/inventory-server/auth/schema.sql b/inventory-server/auth/schema.sql index f143019..32a9562 100644 --- a/inventory-server/auth/schema.sql +++ b/inventory-server/auth/schema.sql @@ -1,6 +1,6 @@ -CREATE TABLE `users` ( - `id` INT AUTO_INCREMENT PRIMARY KEY, - `username` VARCHAR(255) NOT NULL UNIQUE, - `password` VARCHAR(255) NOT NULL, - `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + username VARCHAR(255) NOT NULL UNIQUE, + password VARCHAR(255) NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ); \ No newline at end of file diff --git a/inventory-server/auth/server.js b/inventory-server/auth/server.js index 64bd964..e2b2d81 100644 --- a/inventory-server/auth/server.js +++ b/inventory-server/auth/server.js @@ -1,135 +1,102 @@ +require('dotenv').config({ path: '../.env' }); const express = require('express'); +const cors = require('cors'); const bcrypt = require('bcrypt'); const jwt = require('jsonwebtoken'); -const cors = require('cors'); -const mysql = require('mysql2/promise'); -require('dotenv').config({ path: '../.env' }); +const { Pool } = require('pg'); +const morgan = require('morgan'); + +// Log startup configuration +console.log('Starting auth server with config:', { + host: 
process.env.DB_HOST, + user: process.env.DB_USER, + database: process.env.DB_NAME, + port: process.env.DB_PORT, + auth_port: process.env.AUTH_PORT +}); const app = express(); -const PORT = process.env.AUTH_PORT || 3011; +const port = process.env.AUTH_PORT || 3011; // Database configuration -const dbConfig = { +const pool = new Pool({ host: process.env.DB_HOST, user: process.env.DB_USER, password: process.env.DB_PASSWORD, database: process.env.DB_NAME, -}; + port: process.env.DB_PORT, +}); -// Create a connection pool -const pool = mysql.createPool(dbConfig); - -app.use(cors({ - origin: [ - 'https://inventory.kent.pw', - 'http://localhost:5173', - 'http://127.0.0.1:5173', - /^http:\/\/192\.168\.\d+\.\d+(:\d+)?$/, - /^http:\/\/10\.\d+\.\d+\.\d+(:\d+)?$/ - ], - methods: ['GET', 'POST', 'OPTIONS'], - allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With'], - credentials: true, - exposedHeaders: ['set-cookie'] -})); +// Middleware app.use(express.json()); - -// Debug middleware to log request details -app.use((req, res, next) => { - console.log('Request details:', { - method: req.method, - url: req.url, - origin: req.get('Origin'), - headers: req.headers, - body: req.body, - }); - next(); -}); - -// Registration endpoint -app.post('/register', async (req, res) => { - try { - const { username, password } = req.body; - const hashedPassword = await bcrypt.hash(password, 10); - - const connection = await pool.getConnection(); - await connection.query('INSERT INTO users (username, password) VALUES (?, ?)', [username, hashedPassword]); - connection.release(); - - res.status(201).json({ message: 'User registered successfully' }); - } catch (error) { - console.error('Registration error:', error); - res.status(500).json({ error: 'Registration failed' }); - } -}); +app.use(morgan('combined')); +app.use(cors({ + origin: ['http://localhost:5173', 'https://inventory.kent.pw'], + credentials: true +})); // Login endpoint app.post('/login', async (req, res) => { + const { username, password } = req.body; + try { - const { username, password } = req.body; - console.log(`Login attempt for user: ${username}`); - - const connection = await pool.getConnection(); - const [rows] = await connection.query( - 'SELECT * FROM users WHERE username = ?', - [username], + // Get user from database + const result = await pool.query( + 'SELECT id, username, password FROM users WHERE username = $1', + [username] ); - connection.release(); - if (rows.length === 1) { - const user = rows[0]; - const passwordMatch = await bcrypt.compare(password, user.password); + const user = result.rows[0]; - if (passwordMatch) { - console.log(`User ${username} authenticated successfully`); - const token = jwt.sign( - { username: user.username }, - process.env.JWT_SECRET, - { expiresIn: '1h' }, - ); - res.json({ token }); - } else { - console.error(`Invalid password for user: ${username}`); - res.status(401).json({ error: 'Invalid credentials' }); - } - } else { - console.error(`User not found: ${username}`); - res.status(401).json({ error: 'Invalid credentials' }); + // Check if user exists and password is correct + if (!user || !(await bcrypt.compare(password, user.password))) { + return res.status(401).json({ error: 'Invalid username or password' }); } + + // Generate JWT token + const token = jwt.sign( + { userId: user.id, username: user.username }, + process.env.JWT_SECRET, + { expiresIn: '24h' } + ); + + res.json({ token }); } catch (error) { console.error('Login error:', error); - res.status(500).json({ error: 'Login failed' 
}); + res.status(500).json({ error: 'Internal server error' }); } }); -// Protected endpoint example +// Protected route to verify token app.get('/protected', async (req, res) => { const authHeader = req.headers.authorization; + if (!authHeader) { - return res.status(401).json({ error: 'Unauthorized' }); + return res.status(401).json({ error: 'No token provided' }); } - const token = authHeader.split(' ')[1]; try { + const token = authHeader.split(' ')[1]; const decoded = jwt.verify(token, process.env.JWT_SECRET); - - // Optionally, you can fetch the user from the database here - // to verify that the user still exists or to get more user information - const connection = await pool.getConnection(); - const [rows] = await connection.query('SELECT * FROM users WHERE username = ?', [decoded.username]); - connection.release(); - - if (rows.length === 0) { - return res.status(401).json({ error: 'User not found' }); - } - - res.json({ message: 'Protected resource accessed', user: decoded }); + res.json({ userId: decoded.userId, username: decoded.username }); } catch (error) { - console.error('Protected endpoint error:', error); - res.status(403).json({ error: 'Invalid token' }); + console.error('Token verification error:', error); + res.status(401).json({ error: 'Invalid token' }); } }); -app.listen(PORT, "0.0.0.0", () => { - console.log(`Auth server running on port ${PORT}`); -}); \ No newline at end of file +// Health check endpoint +app.get('/health', (req, res) => { + res.json({ status: 'healthy' }); +}); + +// Error handling middleware +app.use((err, req, res, next) => { + console.error(err.stack); + res.status(500).json({ error: 'Something broke!' }); +}); + +// Start server +app.listen(port, () => { + console.log(`Auth server running on port ${port}`); +}); diff --git a/inventory-server/db/config-schema.sql b/inventory-server/db/config-schema.sql index 9aa8330..a72539b 100644 --- a/inventory-server/db/config-schema.sql +++ b/inventory-server/db/config-schema.sql @@ -1,150 +1,207 @@ -- Configuration tables schema +-- Create function for updating timestamps if it doesn't exist +CREATE OR REPLACE FUNCTION update_updated_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated = CURRENT_TIMESTAMP; + RETURN NEW; +END; +$$ language 'plpgsql'; + +-- Create function for updating updated_at timestamps +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = CURRENT_TIMESTAMP; + RETURN NEW; +END; +$$ language 'plpgsql'; + -- Stock threshold configurations -CREATE TABLE IF NOT EXISTS stock_thresholds ( - id INT NOT NULL, +CREATE TABLE stock_thresholds ( + id INTEGER NOT NULL, category_id BIGINT, -- NULL means default/global threshold vendor VARCHAR(100), -- NULL means applies to all vendors - critical_days INT NOT NULL DEFAULT 7, - reorder_days INT NOT NULL DEFAULT 14, - overstock_days INT NOT NULL DEFAULT 90, - low_stock_threshold INT NOT NULL DEFAULT 5, - min_reorder_quantity INT NOT NULL DEFAULT 1, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + critical_days INTEGER NOT NULL DEFAULT 7, + reorder_days INTEGER NOT NULL DEFAULT 14, + overstock_days INTEGER NOT NULL DEFAULT 90, + low_stock_threshold INTEGER NOT NULL DEFAULT 5, + min_reorder_quantity INTEGER NOT NULL DEFAULT 1, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (id), FOREIGN KEY (category_id) REFERENCES categories(cat_id) 
ON DELETE CASCADE, - UNIQUE KEY unique_category_vendor (category_id, vendor), - INDEX idx_st_metrics (category_id, vendor) + UNIQUE (category_id, vendor) ); +CREATE TRIGGER update_stock_thresholds_updated + BEFORE UPDATE ON stock_thresholds + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); + +CREATE INDEX idx_st_metrics ON stock_thresholds(category_id, vendor); + -- Lead time threshold configurations -CREATE TABLE IF NOT EXISTS lead_time_thresholds ( - id INT NOT NULL, +CREATE TABLE lead_time_thresholds ( + id INTEGER NOT NULL, category_id BIGINT, -- NULL means default/global threshold vendor VARCHAR(100), -- NULL means applies to all vendors - target_days INT NOT NULL DEFAULT 14, - warning_days INT NOT NULL DEFAULT 21, - critical_days INT NOT NULL DEFAULT 30, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + target_days INTEGER NOT NULL DEFAULT 14, + warning_days INTEGER NOT NULL DEFAULT 21, + critical_days INTEGER NOT NULL DEFAULT 30, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (id), FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE, - UNIQUE KEY unique_category_vendor (category_id, vendor) + UNIQUE (category_id, vendor) ); +CREATE TRIGGER update_lead_time_thresholds_updated + BEFORE UPDATE ON lead_time_thresholds + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); + -- Sales velocity window configurations -CREATE TABLE IF NOT EXISTS sales_velocity_config ( - id INT NOT NULL, +CREATE TABLE sales_velocity_config ( + id INTEGER NOT NULL, category_id BIGINT, -- NULL means default/global threshold vendor VARCHAR(100), -- NULL means applies to all vendors - daily_window_days INT NOT NULL DEFAULT 30, - weekly_window_days INT NOT NULL DEFAULT 7, - monthly_window_days INT NOT NULL DEFAULT 90, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + daily_window_days INTEGER NOT NULL DEFAULT 30, + weekly_window_days INTEGER NOT NULL DEFAULT 7, + monthly_window_days INTEGER NOT NULL DEFAULT 90, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (id), FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE, - UNIQUE KEY unique_category_vendor (category_id, vendor), - INDEX idx_sv_metrics (category_id, vendor) + UNIQUE (category_id, vendor) ); +CREATE TRIGGER update_sales_velocity_config_updated + BEFORE UPDATE ON sales_velocity_config + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); + +CREATE INDEX idx_sv_metrics ON sales_velocity_config(category_id, vendor); + -- ABC Classification configurations -CREATE TABLE IF NOT EXISTS abc_classification_config ( - id INT NOT NULL PRIMARY KEY, +CREATE TABLE abc_classification_config ( + id INTEGER NOT NULL PRIMARY KEY, a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0, b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0, - classification_period_days INT NOT NULL DEFAULT 90, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP + classification_period_days INTEGER NOT NULL DEFAULT 90, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP ); +CREATE TRIGGER update_abc_classification_config_updated + BEFORE UPDATE 
+CREATE TRIGGER update_stock_thresholds_updated
+    BEFORE UPDATE ON stock_thresholds
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
+CREATE INDEX idx_st_metrics ON stock_thresholds(category_id, vendor);
+
 -- Lead time threshold configurations
-CREATE TABLE IF NOT EXISTS lead_time_thresholds (
-    id INT NOT NULL,
+CREATE TABLE lead_time_thresholds (
+    id INTEGER NOT NULL,
     category_id BIGINT, -- NULL means default/global threshold
     vendor VARCHAR(100), -- NULL means applies to all vendors
-    target_days INT NOT NULL DEFAULT 14,
-    warning_days INT NOT NULL DEFAULT 21,
-    critical_days INT NOT NULL DEFAULT 30,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    target_days INTEGER NOT NULL DEFAULT 14,
+    warning_days INTEGER NOT NULL DEFAULT 21,
+    critical_days INTEGER NOT NULL DEFAULT 30,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor)
+    UNIQUE (category_id, vendor)
 );
+CREATE TRIGGER update_lead_time_thresholds_updated
+    BEFORE UPDATE ON lead_time_thresholds
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
 -- Sales velocity window configurations
-CREATE TABLE IF NOT EXISTS sales_velocity_config (
-    id INT NOT NULL,
+CREATE TABLE sales_velocity_config (
+    id INTEGER NOT NULL,
     category_id BIGINT, -- NULL means default/global threshold
     vendor VARCHAR(100), -- NULL means applies to all vendors
-    daily_window_days INT NOT NULL DEFAULT 30,
-    weekly_window_days INT NOT NULL DEFAULT 7,
-    monthly_window_days INT NOT NULL DEFAULT 90,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    daily_window_days INTEGER NOT NULL DEFAULT 30,
+    weekly_window_days INTEGER NOT NULL DEFAULT 7,
+    monthly_window_days INTEGER NOT NULL DEFAULT 90,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor),
-    INDEX idx_sv_metrics (category_id, vendor)
+    UNIQUE (category_id, vendor)
 );
+CREATE TRIGGER update_sales_velocity_config_updated
+    BEFORE UPDATE ON sales_velocity_config
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
+CREATE INDEX idx_sv_metrics ON sales_velocity_config(category_id, vendor);
+
 -- ABC Classification configurations
-CREATE TABLE IF NOT EXISTS abc_classification_config (
-    id INT NOT NULL PRIMARY KEY,
+CREATE TABLE abc_classification_config (
+    id INTEGER NOT NULL PRIMARY KEY,
     a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0,
     b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0,
-    classification_period_days INT NOT NULL DEFAULT 90,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
+    classification_period_days INTEGER NOT NULL DEFAULT 90,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
 );
+CREATE TRIGGER update_abc_classification_config_updated
+    BEFORE UPDATE ON abc_classification_config
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
 -- Safety stock configurations
-CREATE TABLE IF NOT EXISTS safety_stock_config (
-    id INT NOT NULL,
+CREATE TABLE safety_stock_config (
+    id INTEGER NOT NULL,
     category_id BIGINT, -- NULL means default/global threshold
     vendor VARCHAR(100), -- NULL means applies to all vendors
-    coverage_days INT NOT NULL DEFAULT 14,
+    coverage_days INTEGER NOT NULL DEFAULT 14,
     service_level DECIMAL(5,2) NOT NULL DEFAULT 95.0,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor),
-    INDEX idx_ss_metrics (category_id, vendor)
+    UNIQUE (category_id, vendor)
 );
+CREATE TRIGGER update_safety_stock_config_updated
+    BEFORE UPDATE ON safety_stock_config
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
+CREATE INDEX idx_ss_metrics ON safety_stock_config(category_id, vendor);
+
 -- Turnover rate configurations
-CREATE TABLE IF NOT EXISTS turnover_config (
-    id INT NOT NULL,
+CREATE TABLE turnover_config (
+    id INTEGER NOT NULL,
     category_id BIGINT, -- NULL means default/global threshold
     vendor VARCHAR(100), -- NULL means applies to all vendors
-    calculation_period_days INT NOT NULL DEFAULT 30,
+    calculation_period_days INTEGER NOT NULL DEFAULT 30,
     target_rate DECIMAL(10,2) NOT NULL DEFAULT 1.0,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (id),
     FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE KEY unique_category_vendor (category_id, vendor)
+    UNIQUE (category_id, vendor)
 );
+CREATE TRIGGER update_turnover_config_updated
+    BEFORE UPDATE ON turnover_config
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
 -- Create table for sales seasonality factors
-CREATE TABLE IF NOT EXISTS sales_seasonality (
-    month INT NOT NULL,
+CREATE TABLE sales_seasonality (
+    month INTEGER NOT NULL,
     seasonality_factor DECIMAL(5,3) DEFAULT 0,
-    last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (month),
-    CHECK (month BETWEEN 1 AND 12),
-    CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
+    CONSTRAINT month_range CHECK (month BETWEEN 1 AND 12),
+    CONSTRAINT seasonality_range CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
 );
 
--- Insert default global thresholds if not exists
+-- sales_seasonality tracks last_updated rather than updated_at, so the shared
+-- update_updated_at_column() trigger does not apply here; the timestamp is
+-- maintained explicitly by the seed upsert below.
+
+-- Insert default global thresholds
 INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
 VALUES (1, NULL, NULL, 7, 14, 90)
-ON DUPLICATE KEY UPDATE
-    critical_days = VALUES(critical_days),
-    reorder_days = VALUES(reorder_days),
-    overstock_days = VALUES(overstock_days);
+ON CONFLICT (id) DO UPDATE SET
+    critical_days = EXCLUDED.critical_days,
+    reorder_days = EXCLUDED.reorder_days,
+    overstock_days = EXCLUDED.overstock_days;
 
 INSERT INTO lead_time_thresholds (id, category_id, vendor, target_days, warning_days, critical_days)
 VALUES (1, NULL, NULL, 14, 21, 30)
-ON DUPLICATE KEY UPDATE
-    target_days = VALUES(target_days),
-    warning_days = VALUES(warning_days),
-    critical_days = VALUES(critical_days);
+ON CONFLICT (id) DO UPDATE SET
+    target_days = EXCLUDED.target_days,
+    warning_days = EXCLUDED.warning_days,
+    critical_days = EXCLUDED.critical_days;
 
 INSERT INTO sales_velocity_config (id, category_id, vendor, daily_window_days, weekly_window_days, monthly_window_days)
 VALUES (1, NULL, NULL, 30, 7, 90)
-ON DUPLICATE KEY UPDATE
-    daily_window_days = VALUES(daily_window_days),
-    weekly_window_days = VALUES(weekly_window_days),
-    monthly_window_days = VALUES(monthly_window_days);
+ON CONFLICT (id) DO UPDATE SET
+    daily_window_days = EXCLUDED.daily_window_days,
+    weekly_window_days = EXCLUDED.weekly_window_days,
+    monthly_window_days = EXCLUDED.monthly_window_days;
 
 INSERT INTO abc_classification_config (id, a_threshold, b_threshold, classification_period_days)
 VALUES (1, 20.0, 50.0, 90)
-ON DUPLICATE KEY UPDATE
-    a_threshold = VALUES(a_threshold),
-    b_threshold = VALUES(b_threshold),
-    classification_period_days = VALUES(classification_period_days);
+ON CONFLICT (id) DO UPDATE SET
+    a_threshold = EXCLUDED.a_threshold,
+    b_threshold = EXCLUDED.b_threshold,
+    classification_period_days = EXCLUDED.classification_period_days;
 
 INSERT INTO safety_stock_config (id, category_id, vendor, coverage_days, service_level)
 VALUES (1, NULL, NULL, 14, 95.0)
-ON DUPLICATE KEY UPDATE
-    coverage_days = VALUES(coverage_days),
-    service_level = VALUES(service_level);
+ON CONFLICT (id) DO UPDATE SET
+    coverage_days = EXCLUDED.coverage_days,
+    service_level = EXCLUDED.service_level;
 
 INSERT INTO turnover_config (id, category_id, vendor, calculation_period_days, target_rate)
 VALUES (1, NULL, NULL, 30, 1.0)
-ON DUPLICATE KEY UPDATE
-    calculation_period_days = VALUES(calculation_period_days),
-    target_rate = VALUES(target_rate);
+ON CONFLICT (id) DO UPDATE SET
+    calculation_period_days = EXCLUDED.calculation_period_days,
+    target_rate = EXCLUDED.target_rate;
 
 -- Insert default seasonality factors (neutral)
 INSERT INTO sales_seasonality (month, seasonality_factor)
 VALUES
     (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
     (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
-ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP;
+ON CONFLICT (month) DO UPDATE SET
+    last_updated = CURRENT_TIMESTAMP;
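+
+-- Example (sketch, hypothetical values): per-scope overrides reuse the same
+-- upsert pattern; id 2, category 42 and the vendor name are illustrative only.
+-- INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
+-- VALUES (2, 42, 'Acme Distribution', 5, 10, 60)
+-- ON CONFLICT (id) DO UPDATE SET
+--     critical_days = EXCLUDED.critical_days,
+--     reorder_days = EXCLUDED.reorder_days,
+--     overstock_days = EXCLUDED.overstock_days;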
 
 -- View to show thresholds with category names
 CREATE OR REPLACE VIEW stock_thresholds_view AS
@@ -153,9 +210,9 @@
 SELECT
     c.name as category_name,
     CASE
         WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 'Global Default'
-        WHEN st.category_id IS NULL THEN CONCAT('Vendor: ', st.vendor)
-        WHEN st.vendor IS NULL THEN CONCAT('Category: ', c.name)
-        ELSE CONCAT('Category: ', c.name, ' / Vendor: ', st.vendor)
+        WHEN st.category_id IS NULL THEN 'Vendor: ' || st.vendor
+        WHEN st.vendor IS NULL THEN 'Category: ' || c.name
+        ELSE 'Category: ' || c.name || ' / Vendor: ' || st.vendor
     END as threshold_scope
 FROM
     stock_thresholds st
@@ -171,59 +228,51 @@
 ORDER BY
     c.name, st.vendor;
 
+-- History and status tables
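+-- Sketch: the calculation_status and module_name enum types used below replace
+-- the former MySQL ENUM columns and are assumed to be created earlier in the
+-- migration. If they are not, definitions like these are needed (values taken
+-- from the replaced ENUMs); the DO blocks keep the script re-runnable.
+DO $$ BEGIN
+    CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled');
+EXCEPTION WHEN duplicate_object THEN NULL; END $$;
+DO $$ BEGIN
+    CREATE TYPE module_name AS ENUM (
+        'product_metrics', 'time_aggregates', 'financial_metrics', 'vendor_metrics',
+        'category_metrics', 'brand_metrics', 'sales_forecasts', 'abc_classification');
+EXCEPTION WHEN duplicate_object THEN NULL; END $$;
+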
 CREATE TABLE IF NOT EXISTS calculate_history (
-    id BIGINT AUTO_INCREMENT PRIMARY KEY,
-    start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    end_time TIMESTAMP NULL,
-    duration_seconds INT,
-    duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
-    total_products INT DEFAULT 0,
-    total_orders INT DEFAULT 0,
-    total_purchase_orders INT DEFAULT 0,
-    processed_products INT DEFAULT 0,
-    processed_orders INT DEFAULT 0,
-    processed_purchase_orders INT DEFAULT 0,
-    status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running',
+    id BIGSERIAL PRIMARY KEY,
+    start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    end_time TIMESTAMP WITH TIME ZONE NULL,
+    duration_seconds INTEGER,
+    duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
+    total_products INTEGER DEFAULT 0,
+    total_orders INTEGER DEFAULT 0,
+    total_purchase_orders INTEGER DEFAULT 0,
+    processed_products INTEGER DEFAULT 0,
+    processed_orders INTEGER DEFAULT 0,
+    processed_purchase_orders INTEGER DEFAULT 0,
+    status calculation_status DEFAULT 'running',
     error_message TEXT,
-    additional_info JSON,
-    INDEX idx_status_time (status, start_time)
+    additional_info JSONB
 );
 
 CREATE TABLE IF NOT EXISTS calculate_status (
-    module_name ENUM(
-        'product_metrics',
-        'time_aggregates',
-        'financial_metrics',
-        'vendor_metrics',
-        'category_metrics',
-        'brand_metrics',
-        'sales_forecasts',
-        'abc_classification'
-    ) PRIMARY KEY,
-    last_calculation_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    INDEX idx_last_calc (last_calculation_timestamp)
+    module_name module_name PRIMARY KEY,
+    last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
 );
 
 CREATE TABLE IF NOT EXISTS sync_status (
     table_name VARCHAR(50) PRIMARY KEY,
-    last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    last_sync_id BIGINT,
-    INDEX idx_last_sync (last_sync_timestamp)
+    last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    last_sync_id BIGINT
 );
 
 CREATE TABLE IF NOT EXISTS import_history (
-    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    id BIGSERIAL PRIMARY KEY,
     table_name VARCHAR(50) NOT NULL,
-    start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    end_time TIMESTAMP NULL,
-    duration_seconds INT,
-    duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
-    records_added INT DEFAULT 0,
-    records_updated INT DEFAULT 0,
+    start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    end_time TIMESTAMP WITH TIME ZONE NULL,
+    duration_seconds INTEGER,
+    duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
+    records_added INTEGER DEFAULT 0,
+    records_updated INTEGER DEFAULT 0,
     is_incremental BOOLEAN DEFAULT FALSE,
-    status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running',
+    status calculation_status DEFAULT 'running',
     error_message TEXT,
-    additional_info JSON,
-    INDEX idx_table_time (table_name, start_time),
-    INDEX idx_status (status)
-);
\ No newline at end of file
+    additional_info JSONB
+);
+
+-- Create all indexes after tables are fully created
+CREATE INDEX IF NOT EXISTS idx_status_time ON calculate_history(status, start_time);
+CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
+CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
+CREATE INDEX IF NOT EXISTS idx_import_history_status ON import_history(status);
+CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);
\ No newline at end of file
diff --git a/inventory-server/db/metrics-schema.sql b/inventory-server/db/metrics-schema.sql
index 50bd903..6a1808a 100644
--- a/inventory-server/db/metrics-schema.sql
+++ b/inventory-server/db/metrics-schema.sql
@@ -1,8 +1,8 @@
 -- Disable foreign key checks
-SET FOREIGN_KEY_CHECKS = 0;
+SET session_replication_role = 'replica';
 
 -- Temporary tables for batch metrics processing
-CREATE TABLE IF NOT EXISTS temp_sales_metrics (
+CREATE TABLE temp_sales_metrics (
     pid BIGINT NOT NULL,
     daily_sales_avg DECIMAL(10,3),
     weekly_sales_avg DECIMAL(10,3),
@@ -14,9 +14,9 @@ CREATE TABLE IF NOT EXISTS temp_sales_metrics (
     PRIMARY KEY (pid)
 );
 
-CREATE TABLE IF NOT EXISTS temp_purchase_metrics (
+CREATE TABLE temp_purchase_metrics (
     pid BIGINT NOT NULL,
-    avg_lead_time_days INT,
+    avg_lead_time_days INTEGER,
     last_purchase_date DATE,
     first_received_date DATE,
     last_received_date DATE,
@@ -24,7 +24,7 @@ CREATE TABLE IF NOT EXISTS temp_purchase_metrics (
 );
 
 -- New table for product metrics
-CREATE TABLE IF NOT EXISTS product_metrics (
+CREATE TABLE product_metrics (
     pid BIGINT NOT NULL,
     last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     -- Sales velocity metrics
@@ -32,16 +32,16 @@ CREATE TABLE IF NOT EXISTS product_metrics (
     weekly_sales_avg DECIMAL(10,3),
     monthly_sales_avg DECIMAL(10,3),
     avg_quantity_per_order DECIMAL(10,3),
-    number_of_orders INT,
+    number_of_orders INTEGER,
     first_sale_date DATE,
     last_sale_date DATE,
     -- Stock metrics
-    days_of_inventory INT,
-    weeks_of_inventory INT,
-    reorder_point INT,
-    safety_stock INT,
-    reorder_qty INT DEFAULT 0,
-    overstocked_amt INT DEFAULT 0,
+    days_of_inventory INTEGER,
+    weeks_of_inventory INTEGER,
+    reorder_point INTEGER,
+    safety_stock INTEGER,
+    reorder_qty INTEGER DEFAULT 0,
+    overstocked_amt INTEGER DEFAULT 0,
     -- Financial metrics
     avg_margin_percent DECIMAL(10,3),
     total_revenue DECIMAL(10,3),
@@ -50,7 +50,7 @@ CREATE TABLE IF NOT EXISTS product_metrics (
     gross_profit DECIMAL(10,3),
     gmroi DECIMAL(10,3),
     -- Purchase metrics
-    avg_lead_time_days INT,
+    avg_lead_time_days INTEGER,
     last_purchase_date DATE,
     first_received_date DATE,
     last_received_date DATE,
@@ -60,48 +60,50 @@
     -- Turnover metrics
     turnover_rate DECIMAL(12,3),
     -- Lead time metrics
-    current_lead_time INT,
-    target_lead_time INT,
+    current_lead_time INTEGER,
+    target_lead_time INTEGER,
     lead_time_status VARCHAR(20),
     -- Forecast metrics
     forecast_accuracy DECIMAL(5,2) DEFAULT NULL,
     forecast_bias DECIMAL(5,2) DEFAULT NULL,
     last_forecast_date DATE DEFAULT NULL,
     PRIMARY KEY (pid),
-    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
-    INDEX idx_metrics_revenue (total_revenue),
-    INDEX idx_metrics_stock_status (stock_status),
-    INDEX idx_metrics_lead_time (lead_time_status),
-    INDEX idx_metrics_turnover (turnover_rate),
-    INDEX idx_metrics_last_calculated (last_calculated_at),
-    INDEX idx_metrics_abc (abc_class),
-    INDEX idx_metrics_sales (daily_sales_avg, weekly_sales_avg, monthly_sales_avg),
-    INDEX idx_metrics_forecast (forecast_accuracy, forecast_bias)
+    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
 );
+CREATE INDEX idx_metrics_revenue ON product_metrics(total_revenue);
+CREATE INDEX idx_metrics_stock_status ON product_metrics(stock_status);
+CREATE INDEX idx_metrics_lead_time ON product_metrics(lead_time_status);
+CREATE INDEX idx_metrics_turnover ON product_metrics(turnover_rate);
+CREATE INDEX idx_metrics_last_calculated ON product_metrics(last_calculated_at);
+CREATE INDEX idx_metrics_abc ON product_metrics(abc_class);
+CREATE INDEX idx_metrics_sales ON product_metrics(daily_sales_avg, weekly_sales_avg, monthly_sales_avg);
+CREATE INDEX idx_metrics_forecast ON product_metrics(forecast_accuracy, forecast_bias);
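+
+-- Example (sketch, hypothetical usage): reorder_point/safety_stock support a
+-- replenishment check along these lines.
+-- SELECT p.pid, p.SKU, p.stock_quantity, pm.reorder_point, pm.reorder_qty
+-- FROM products p
+-- JOIN product_metrics pm ON p.pid = pm.pid
+-- WHERE p.managing_stock = true
+--   AND p.stock_quantity <= pm.reorder_point
+-- ORDER BY pm.daily_sales_avg DESC;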
+
 -- New table for time-based aggregates
-CREATE TABLE IF NOT EXISTS product_time_aggregates (
+CREATE TABLE product_time_aggregates (
     pid BIGINT NOT NULL,
-    year INT NOT NULL,
-    month INT NOT NULL,
+    year INTEGER NOT NULL,
+    month INTEGER NOT NULL,
     -- Sales metrics
-    total_quantity_sold INT DEFAULT 0,
+    total_quantity_sold INTEGER DEFAULT 0,
     total_revenue DECIMAL(10,3) DEFAULT 0,
     total_cost DECIMAL(10,3) DEFAULT 0,
-    order_count INT DEFAULT 0,
+    order_count INTEGER DEFAULT 0,
     -- Stock changes
-    stock_received INT DEFAULT 0,
-    stock_ordered INT DEFAULT 0,
+    stock_received INTEGER DEFAULT 0,
+    stock_ordered INTEGER DEFAULT 0,
     -- Calculated fields
     avg_price DECIMAL(10,3),
     profit_margin DECIMAL(10,3),
     inventory_value DECIMAL(10,3),
     gmroi DECIMAL(10,3),
     PRIMARY KEY (pid, year, month),
-    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
-    INDEX idx_date (year, month)
+    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
 );
+CREATE INDEX idx_date ON product_time_aggregates(year, month);
+
 -- Create vendor_details table
 CREATE TABLE vendor_details (
     vendor VARCHAR(100) PRIMARY KEY,
@@ -110,45 +112,47 @@ CREATE TABLE vendor_details (
     phone VARCHAR(50),
     status VARCHAR(20) DEFAULT 'active',
     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-    INDEX idx_status (status)
-) ENGINE=InnoDB;
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX idx_vendor_details_status ON vendor_details(status);
 
 -- New table for vendor metrics
-CREATE TABLE IF NOT EXISTS vendor_metrics (
+CREATE TABLE vendor_metrics (
     vendor VARCHAR(100) NOT NULL,
     last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     -- Performance metrics
     avg_lead_time_days DECIMAL(10,3),
     on_time_delivery_rate DECIMAL(5,2),
     order_fill_rate DECIMAL(5,2),
-    total_orders INT DEFAULT 0,
-    total_late_orders INT DEFAULT 0,
+    total_orders INTEGER DEFAULT 0,
+    total_late_orders INTEGER DEFAULT 0,
     total_purchase_value DECIMAL(10,3) DEFAULT 0,
     avg_order_value DECIMAL(10,3),
     -- Product metrics
-    active_products INT DEFAULT 0,
-    total_products INT DEFAULT 0,
+    active_products INTEGER DEFAULT 0,
+    total_products INTEGER DEFAULT 0,
     -- Financial metrics
     total_revenue DECIMAL(10,3) DEFAULT 0,
     avg_margin_percent DECIMAL(5,2),
     -- Status
     status VARCHAR(20) DEFAULT 'active',
     PRIMARY KEY (vendor),
-    FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE,
-    INDEX idx_vendor_performance (on_time_delivery_rate),
-    INDEX idx_vendor_status (status),
-    INDEX idx_metrics_last_calculated (last_calculated_at),
-    INDEX idx_vendor_metrics_orders (total_orders, total_late_orders)
+    FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE
 );
+CREATE INDEX idx_vendor_performance ON vendor_metrics(on_time_delivery_rate);
+CREATE INDEX idx_vendor_status ON vendor_metrics(status);
+CREATE INDEX idx_vendor_metrics_last_calculated ON vendor_metrics(last_calculated_at);
+CREATE INDEX idx_vendor_metrics_orders ON vendor_metrics(total_orders, total_late_orders);
+
 -- New table for category metrics
-CREATE TABLE IF NOT EXISTS category_metrics (
+CREATE TABLE category_metrics (
     category_id BIGINT NOT NULL,
     last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     -- Product metrics
-    product_count INT DEFAULT 0,
-    active_products INT DEFAULT 0,
+    product_count INTEGER DEFAULT 0,
+    active_products INTEGER DEFAULT 0,
     -- Financial metrics
     total_value DECIMAL(15,3) DEFAULT 0,
     avg_margin DECIMAL(5,2),
@@ -157,255 +161,215 @@ CREATE TABLE IF NOT EXISTS category_metrics (
     -- Status
     status VARCHAR(20) DEFAULT 'active',
     PRIMARY KEY (category_id),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    INDEX idx_category_status (status),
-    INDEX idx_category_growth (growth_rate),
-    INDEX idx_metrics_last_calculated (last_calculated_at),
-    INDEX idx_category_metrics_products (product_count, active_products)
+    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
 );
+CREATE INDEX idx_category_status ON category_metrics(status);
+CREATE INDEX idx_category_growth ON category_metrics(growth_rate);
+CREATE INDEX idx_metrics_last_calculated_cat ON category_metrics(last_calculated_at);
+CREATE INDEX idx_category_metrics_products ON category_metrics(product_count, active_products);
+
 -- New table for vendor time-based metrics
-CREATE TABLE IF NOT EXISTS vendor_time_metrics (
+CREATE TABLE vendor_time_metrics (
     vendor VARCHAR(100) NOT NULL,
-    year INT NOT NULL,
-    month INT NOT NULL,
+    year INTEGER NOT NULL,
+    month INTEGER NOT NULL,
     -- Order metrics
-    total_orders INT DEFAULT 0,
-    late_orders INT DEFAULT 0,
+    total_orders INTEGER DEFAULT 0,
+    late_orders INTEGER DEFAULT 0,
     avg_lead_time_days DECIMAL(10,3),
     -- Financial metrics
     total_purchase_value DECIMAL(10,3) DEFAULT 0,
     total_revenue DECIMAL(10,3) DEFAULT 0,
     avg_margin_percent DECIMAL(5,2),
     PRIMARY KEY (vendor, year, month),
-    FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE,
-    INDEX idx_vendor_date (year, month)
+    FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE
 );
+CREATE INDEX idx_vendor_date ON vendor_time_metrics(year, month);
+
 -- New table for category time-based metrics
-CREATE TABLE IF NOT EXISTS category_time_metrics (
+CREATE TABLE category_time_metrics (
     category_id BIGINT NOT NULL,
-    year INT NOT NULL,
-    month INT NOT NULL,
+    year INTEGER NOT NULL,
+    month INTEGER NOT NULL,
     -- Product metrics
-    product_count INT DEFAULT 0,
-    active_products INT DEFAULT 0,
+    product_count INTEGER DEFAULT 0,
+    active_products INTEGER DEFAULT 0,
     -- Financial metrics
     total_value DECIMAL(15,3) DEFAULT 0,
     total_revenue DECIMAL(15,3) DEFAULT 0,
     avg_margin DECIMAL(5,2),
     turnover_rate DECIMAL(12,3),
     PRIMARY KEY (category_id, year, month),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    INDEX idx_category_date (year, month)
+    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
 );
+CREATE INDEX idx_category_date ON category_time_metrics(year, month);
+
 -- New table for category-based sales metrics
-CREATE TABLE IF NOT EXISTS category_sales_metrics (
+CREATE TABLE category_sales_metrics (
     category_id BIGINT NOT NULL,
     brand VARCHAR(100) NOT NULL,
     period_start DATE NOT NULL,
     period_end DATE NOT NULL,
     avg_daily_sales DECIMAL(10,3) DEFAULT 0,
-    total_sold INT DEFAULT 0,
-    num_products INT DEFAULT 0,
+    total_sold INTEGER DEFAULT 0,
+    num_products INTEGER DEFAULT 0,
     avg_price DECIMAL(10,3) DEFAULT 0,
     last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (category_id, brand, period_start, period_end),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    INDEX idx_category_brand (category_id, brand),
-    INDEX idx_period (period_start, period_end)
+    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
 );
+CREATE INDEX idx_category_brand ON category_sales_metrics(category_id, brand);
+CREATE INDEX idx_period ON category_sales_metrics(period_start, period_end);
+
 -- New table for brand metrics
-CREATE TABLE IF NOT EXISTS brand_metrics (
+CREATE TABLE brand_metrics (
     brand VARCHAR(100) NOT NULL,
     last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     -- Product metrics
-    product_count INT DEFAULT 0,
-    active_products INT DEFAULT 0,
+    product_count INTEGER DEFAULT 0,
+    active_products INTEGER DEFAULT 0,
     -- Stock metrics
-    total_stock_units INT DEFAULT 0,
+    total_stock_units INTEGER DEFAULT 0,
     total_stock_cost DECIMAL(15,2) DEFAULT 0,
     total_stock_retail DECIMAL(15,2) DEFAULT 0,
     -- Sales metrics
     total_revenue DECIMAL(15,2) DEFAULT 0,
     avg_margin DECIMAL(5,2) DEFAULT 0,
     growth_rate DECIMAL(5,2) DEFAULT 0,
-    PRIMARY KEY (brand),
-    INDEX idx_brand_metrics_last_calculated (last_calculated_at),
-    INDEX idx_brand_metrics_revenue (total_revenue),
-    INDEX idx_brand_metrics_growth (growth_rate)
+    PRIMARY KEY (brand)
 );
+CREATE INDEX idx_brand_metrics_last_calculated ON brand_metrics(last_calculated_at);
+CREATE INDEX idx_brand_metrics_revenue ON brand_metrics(total_revenue);
+CREATE INDEX idx_brand_metrics_growth ON brand_metrics(growth_rate);
+
 -- New table for brand time-based metrics
-CREATE TABLE IF NOT EXISTS brand_time_metrics (
+CREATE TABLE brand_time_metrics (
     brand VARCHAR(100) NOT NULL,
-    year INT NOT NULL,
-    month INT NOT NULL,
+    year INTEGER NOT NULL,
+    month INTEGER NOT NULL,
     -- Product metrics
-    product_count INT DEFAULT 0,
-    active_products INT DEFAULT 0,
+    product_count INTEGER DEFAULT 0,
+    active_products INTEGER DEFAULT 0,
     -- Stock metrics
-    total_stock_units INT DEFAULT 0,
+    total_stock_units INTEGER DEFAULT 0,
     total_stock_cost DECIMAL(15,2) DEFAULT 0,
     total_stock_retail DECIMAL(15,2) DEFAULT 0,
     -- Sales metrics
     total_revenue DECIMAL(15,2) DEFAULT 0,
     avg_margin DECIMAL(5,2) DEFAULT 0,
-    PRIMARY KEY (brand, year, month),
-    INDEX idx_brand_date (year, month)
+    growth_rate DECIMAL(5,2) DEFAULT 0,
+    PRIMARY KEY (brand, year, month)
 );
+CREATE INDEX idx_brand_time_date ON brand_time_metrics(year, month);
+
 -- New table for sales forecasts
-CREATE TABLE IF NOT EXISTS sales_forecasts (
+CREATE TABLE sales_forecasts (
    pid BIGINT NOT NULL,
     forecast_date DATE NOT NULL,
-    forecast_units DECIMAL(10,2) DEFAULT 0,
-    forecast_revenue DECIMAL(10,2) DEFAULT 0,
-    confidence_level DECIMAL(5,2) DEFAULT 0,
-    last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    forecast_quantity INTEGER,
+    confidence_level DECIMAL(5,2),
+    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (pid, forecast_date),
-    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
-    INDEX idx_forecast_date (forecast_date),
-    INDEX idx_forecast_last_calculated (last_calculated_at)
+    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
 );
+CREATE INDEX idx_forecast_date ON sales_forecasts(forecast_date);
+
 -- New table for category forecasts
-CREATE TABLE IF NOT EXISTS category_forecasts (
+CREATE TABLE category_forecasts (
     category_id BIGINT NOT NULL,
     forecast_date DATE NOT NULL,
-    forecast_units DECIMAL(10,2) DEFAULT 0,
-    forecast_revenue DECIMAL(10,2) DEFAULT 0,
-    confidence_level DECIMAL(5,2) DEFAULT 0,
-    last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    forecast_revenue DECIMAL(15,2),
+    forecast_units INTEGER,
+    confidence_level DECIMAL(5,2),
+    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     PRIMARY KEY (category_id, forecast_date),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    INDEX idx_category_forecast_date (forecast_date),
-    INDEX idx_category_forecast_last_calculated (last_calculated_at)
+    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
 );
--- Create view for inventory health
+CREATE INDEX idx_cat_forecast_date ON category_forecasts(forecast_date);
+
+-- Create views for common calculations
 CREATE OR REPLACE VIEW inventory_health AS
-WITH product_thresholds AS (
+WITH stock_levels AS (
     SELECT
         p.pid,
-        COALESCE(
-            -- Try category+vendor specific
-            (SELECT critical_days FROM stock_thresholds st
-             JOIN product_categories pc ON st.category_id = pc.cat_id
-             WHERE pc.pid = p.pid
-             AND st.vendor = p.vendor LIMIT 1),
-            -- Try category specific
-            (SELECT critical_days FROM stock_thresholds st
-             JOIN product_categories pc ON st.category_id = pc.cat_id
-             WHERE pc.pid = p.pid
-             AND st.vendor IS NULL LIMIT 1),
-            -- Try vendor specific
-            (SELECT critical_days FROM stock_thresholds st
-             WHERE st.category_id IS NULL
-             AND st.vendor = p.vendor LIMIT 1),
-            -- Fall back to default
-            (SELECT critical_days FROM stock_thresholds st
-             WHERE st.category_id IS NULL
-             AND st.vendor IS NULL LIMIT 1),
-            7
-        ) as critical_days,
-        COALESCE(
-            -- Try category+vendor specific
-            (SELECT reorder_days FROM stock_thresholds st
-             JOIN product_categories pc ON st.category_id = pc.cat_id
-             WHERE pc.pid = p.pid
-             AND st.vendor = p.vendor LIMIT 1),
-            -- Try category specific
-            (SELECT reorder_days FROM stock_thresholds st
-             JOIN product_categories pc ON st.category_id = pc.cat_id
-             WHERE pc.pid = p.pid
-             AND st.vendor IS NULL LIMIT 1),
-            -- Try vendor specific
-            (SELECT reorder_days FROM stock_thresholds st
-             WHERE st.category_id IS NULL
-             AND st.vendor = p.vendor LIMIT 1),
-            -- Fall back to default
-            (SELECT reorder_days FROM stock_thresholds st
-             WHERE st.category_id IS NULL
-             AND st.vendor IS NULL LIMIT 1),
-            14
-        ) as reorder_days,
-        COALESCE(
-            -- Try category+vendor specific
-            (SELECT overstock_days FROM stock_thresholds st
-             JOIN product_categories pc ON st.category_id = pc.cat_id
-             WHERE pc.pid = p.pid
-             AND st.vendor = p.vendor LIMIT 1),
-            -- Try category specific
-            (SELECT overstock_days FROM stock_thresholds st
-             JOIN product_categories pc ON st.category_id = pc.cat_id
-             WHERE pc.pid = p.pid
-             AND st.vendor IS NULL LIMIT 1),
-            -- Try vendor specific
-            (SELECT overstock_days FROM stock_thresholds st
-             WHERE st.category_id IS NULL
-             AND st.vendor = p.vendor LIMIT 1),
-            -- Fall back to default
-            (SELECT overstock_days FROM stock_thresholds st
-             WHERE st.category_id IS NULL
-             AND st.vendor IS NULL LIMIT 1),
-            90
-        ) as overstock_days
+        p.title,
+        p.SKU,
+        p.stock_quantity,
+        p.preorder_count,
+        pm.daily_sales_avg,
+        pm.weekly_sales_avg,
+        pm.monthly_sales_avg,
+        pm.reorder_point,
+        pm.safety_stock,
+        pm.days_of_inventory,
+        pm.weeks_of_inventory,
+        pm.stock_status,
+        pm.abc_class,
+        pm.turnover_rate,
+        pm.avg_lead_time_days,
+        pm.current_lead_time,
+        pm.target_lead_time,
+        pm.lead_time_status,
+        p.cost_price,
+        p.price,
+        pm.inventory_value,
+        pm.gmroi
     FROM products p
+    LEFT JOIN product_metrics pm ON p.pid = pm.pid
+    WHERE p.managing_stock = true AND p.visible = true
 )
 SELECT
-    p.pid,
-    p.SKU,
-    p.title,
-    p.stock_quantity,
-    COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
-    COALESCE(pm.days_of_inventory, 0) as days_of_inventory,
-    COALESCE(pm.reorder_point, 0) as reorder_point,
-    COALESCE(pm.safety_stock, 0) as safety_stock,
+    *,
     CASE
-        WHEN pm.daily_sales_avg = 0 THEN 'New'
-        WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * pt.critical_days) THEN 'Critical'
-        WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * pt.reorder_days) THEN 'Reorder'
-        WHEN p.stock_quantity > (pm.daily_sales_avg * pt.overstock_days) THEN 'Overstocked'
+        WHEN stock_quantity <= safety_stock THEN 'Critical'
+        WHEN stock_quantity <= reorder_point THEN 'Low'
+        WHEN stock_quantity > (reorder_point * 3) THEN 'Excess'
         ELSE 'Healthy'
-    END as stock_status
-FROM
-    products p
-LEFT JOIN
-    product_metrics pm ON p.pid = pm.pid
-LEFT JOIN
-    product_thresholds pt ON p.pid = pt.pid
-WHERE
-    p.managing_stock = true;
+    END as inventory_status,
+    CASE
+        WHEN lead_time_status = 'delayed' AND stock_status = 'low' THEN 'High'
+        WHEN lead_time_status = 'delayed' OR stock_status = 'low' THEN 'Medium'
+        ELSE 'Low'
+    END as risk_level
+FROM stock_levels;
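+
+-- Example (sketch, hypothetical usage): surface at-risk SKUs from the view.
+-- SELECT pid, SKU, title, stock_quantity, inventory_status, risk_level
+-- FROM inventory_health
+-- WHERE risk_level IN ('High', 'Medium')
+-- ORDER BY risk_level, days_of_inventory;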
 
 -- Create view for category performance trends
 CREATE OR REPLACE VIEW category_performance_trends AS
+WITH monthly_trends AS (
+    SELECT
+        c.cat_id,
+        c.name as category_name,
+        ctm.year,
+        ctm.month,
+        ctm.product_count,
+        ctm.active_products,
+        ctm.total_value,
+        ctm.total_revenue,
+        ctm.avg_margin,
+        ctm.turnover_rate,
+        LAG(ctm.total_revenue) OVER (PARTITION BY c.cat_id ORDER BY ctm.year, ctm.month) as prev_month_revenue,
+        LAG(ctm.turnover_rate) OVER (PARTITION BY c.cat_id ORDER BY ctm.year, ctm.month) as prev_month_turnover
+    FROM categories c
+    JOIN category_time_metrics ctm ON c.cat_id = ctm.category_id
+)
 SELECT
-    c.cat_id as category_id,
-    c.name,
-    c.description,
-    p.name as parent_name,
-    c.status,
-    cm.product_count,
-    cm.active_products,
-    cm.total_value,
-    cm.avg_margin,
-    cm.turnover_rate,
-    cm.growth_rate,
+    *,
     CASE
-        WHEN cm.growth_rate >= 20 THEN 'High Growth'
-        WHEN cm.growth_rate >= 5 THEN 'Growing'
-        WHEN cm.growth_rate >= -5 THEN 'Stable'
-        ELSE 'Declining'
-    END as performance_rating
-FROM
-    categories c
-LEFT JOIN
-    categories p ON c.parent_id = p.cat_id
-LEFT JOIN
-    category_metrics cm ON c.cat_id = cm.category_id;
+        WHEN prev_month_revenue IS NULL THEN 0
+        ELSE ((total_revenue - prev_month_revenue) / NULLIF(prev_month_revenue, 0)) * 100
+    END as revenue_growth_percent,
+    CASE
+        WHEN prev_month_turnover IS NULL THEN 0
+        ELSE ((turnover_rate - prev_month_turnover) / NULLIF(prev_month_turnover, 0)) * 100
+    END as turnover_growth_percent
+FROM monthly_trends;
 
--- Re-enable foreign key checks
-SET FOREIGN_KEY_CHECKS = 1;
\ No newline at end of file
+SET session_replication_role = 'origin';
\ No newline at end of file
diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql
index ffef4e8..a01b13f 100644
--- a/inventory-server/db/schema.sql
+++ b/inventory-server/db/schema.sql
@@ -1,6 +1,13 @@
--- Enable strict error reporting
-SET sql_mode = 'STRICT_ALL_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ZERO_DATE,NO_ZERO_IN_DATE,NO_ENGINE_SUBSTITUTION';
-SET FOREIGN_KEY_CHECKS = 0;
+SET session_replication_role = 'replica'; -- Disable foreign key checks temporarily
+
+-- Create function for updating timestamps
+CREATE OR REPLACE FUNCTION update_updated_column() RETURNS TRIGGER AS $func$
+BEGIN
+    NEW.updated = CURRENT_TIMESTAMP;
+    RETURN NEW;
+END;
+$func$ language plpgsql;
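+
+-- Sketch (assumption, not in the original migration): categories below and the
+-- threshold tables in config-schema use an updated_at column rather than
+-- updated, so their triggers need this companion function; the body is modeled
+-- on update_updated_column() above.
+CREATE OR REPLACE FUNCTION update_updated_at_column() RETURNS TRIGGER AS $func$
+BEGIN
+    NEW.updated_at = CURRENT_TIMESTAMP;
+    RETURN NEW;
+END;
+$func$ language plpgsql;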
 
 -- Create tables
 CREATE TABLE products (
@@ -8,18 +15,18 @@
     title VARCHAR(255) NOT NULL,
     description TEXT,
     SKU VARCHAR(50) NOT NULL,
-    created_at TIMESTAMP NULL,
-    first_received TIMESTAMP NULL,
-    stock_quantity INT DEFAULT 0,
-    preorder_count INT DEFAULT 0,
-    notions_inv_count INT DEFAULT 0,
+    created_at TIMESTAMP WITH TIME ZONE,
+    first_received TIMESTAMP WITH TIME ZONE,
+    stock_quantity INTEGER DEFAULT 0,
+    preorder_count INTEGER DEFAULT 0,
+    notions_inv_count INTEGER DEFAULT 0,
     price DECIMAL(10, 3) NOT NULL,
     regular_price DECIMAL(10, 3) NOT NULL,
     cost_price DECIMAL(10, 3),
     landing_cost_price DECIMAL(10, 3),
     barcode VARCHAR(50),
     harmonized_tariff_code VARCHAR(20),
-    updated_at TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE,
     visible BOOLEAN DEFAULT true,
     managing_stock BOOLEAN DEFAULT true,
     replenishable BOOLEAN DEFAULT true,
@@ -37,47 +44,64 @@ CREATE TABLE products (
     artist VARCHAR(100),
     options TEXT,
     tags TEXT,
-    moq INT DEFAULT 1,
-    uom INT DEFAULT 1,
+    moq INTEGER DEFAULT 1,
+    uom INTEGER DEFAULT 1,
     rating DECIMAL(10,2) DEFAULT 0.00,
-    reviews INT UNSIGNED DEFAULT 0,
+    reviews INTEGER DEFAULT 0,
     weight DECIMAL(10,3),
     length DECIMAL(10,3),
     width DECIMAL(10,3),
     height DECIMAL(10,3),
     country_of_origin VARCHAR(5),
     location VARCHAR(50),
-    total_sold INT UNSIGNED DEFAULT 0,
-    baskets INT UNSIGNED DEFAULT 0,
-    notifies INT UNSIGNED DEFAULT 0,
+    total_sold INTEGER DEFAULT 0,
+    baskets INTEGER DEFAULT 0,
+    notifies INTEGER DEFAULT 0,
     date_last_sold DATE,
-    updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-    PRIMARY KEY (pid),
-    INDEX idx_sku (SKU),
-    INDEX idx_vendor (vendor),
-    INDEX idx_brand (brand),
-    INDEX idx_location (location),
-    INDEX idx_total_sold (total_sold),
-    INDEX idx_date_last_sold (date_last_sold),
-    INDEX idx_updated (updated)
-) ENGINE=InnoDB;
+    updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (pid)
+);
+
+-- Create trigger for products
+CREATE TRIGGER update_products_updated
+    BEFORE UPDATE ON products
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_column();
+
+-- Create indexes for products table
+CREATE INDEX idx_products_sku ON products(SKU);
+CREATE INDEX idx_products_vendor ON products(vendor);
+CREATE INDEX idx_products_brand ON products(brand);
+CREATE INDEX idx_products_location ON products(location);
+CREATE INDEX idx_products_total_sold ON products(total_sold);
+CREATE INDEX idx_products_date_last_sold ON products(date_last_sold);
+CREATE INDEX idx_products_updated ON products(updated);
 
 -- Create categories table with hierarchy support
 CREATE TABLE categories (
     cat_id BIGINT PRIMARY KEY,
     name VARCHAR(100) NOT NULL,
-    type SMALLINT NOT NULL COMMENT '10=section, 11=category, 12=subcategory, 13=subsubcategory, 1=company, 2=line, 3=subline, 40=artist',
+    type SMALLINT NOT NULL,
     parent_id BIGINT,
     description TEXT,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
     status VARCHAR(20) DEFAULT 'active',
-    FOREIGN KEY (parent_id) REFERENCES categories(cat_id),
-    INDEX idx_parent (parent_id),
-    INDEX idx_type (type),
-    INDEX idx_status (status),
-    INDEX idx_name_type (name, type)
-) ENGINE=InnoDB;
+    FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
+);
+
+-- Create trigger for categories (categories has updated_at, not updated)
+CREATE TRIGGER update_categories_updated_at
+    BEFORE UPDATE ON categories
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
+COMMENT ON COLUMN categories.type IS '10=section, 11=category, 12=subcategory, 13=subsubcategory, 1=company, 2=line, 3=subline, 40=artist';
+
+CREATE INDEX idx_categories_parent ON categories(parent_id);
+CREATE INDEX idx_categories_type ON categories(type);
+CREATE INDEX idx_categories_status ON categories(status);
+CREATE INDEX idx_categories_name_type ON categories(name, type);
 
 -- Create product_categories junction table
 CREATE TABLE product_categories (
@@ -85,78 +109,98 @@ CREATE TABLE product_categories (
     pid BIGINT NOT NULL,
     PRIMARY KEY (pid, cat_id),
     FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
-    FOREIGN KEY (cat_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    INDEX idx_category (cat_id),
-    INDEX idx_product (pid)
-) ENGINE=InnoDB;
+    FOREIGN KEY (cat_id) REFERENCES categories(cat_id) ON DELETE CASCADE
+);
+
+CREATE INDEX idx_product_categories_category ON product_categories(cat_id);
+CREATE INDEX idx_product_categories_product ON product_categories(pid);
 
 -- Create orders table with its indexes
-CREATE TABLE IF NOT EXISTS orders (
-    id BIGINT NOT NULL AUTO_INCREMENT,
+CREATE TABLE orders (
+    id BIGSERIAL PRIMARY KEY,
     order_number VARCHAR(50) NOT NULL,
     pid BIGINT NOT NULL,
     SKU VARCHAR(50) NOT NULL,
     date DATE NOT NULL,
     price DECIMAL(10,3) NOT NULL,
-    quantity INT NOT NULL,
+    quantity INTEGER NOT NULL,
     discount DECIMAL(10,3) DEFAULT 0.000,
     tax DECIMAL(10,3) DEFAULT 0.000,
-    tax_included TINYINT(1) DEFAULT 0,
+    tax_included BOOLEAN DEFAULT false,
     shipping DECIMAL(10,3) DEFAULT 0.000,
     costeach DECIMAL(10,3) DEFAULT 0.000,
     customer VARCHAR(50) NOT NULL,
     customer_name VARCHAR(100),
     status VARCHAR(20) DEFAULT 'pending',
-    canceled TINYINT(1) DEFAULT 0,
-    updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-    PRIMARY KEY (id),
-    UNIQUE KEY unique_order_line (order_number, pid),
-    KEY order_number (order_number),
-    KEY pid (pid),
-    KEY customer (customer),
-    KEY date (date),
-    KEY status (status),
-    INDEX idx_orders_metrics (pid, date, canceled),
-    INDEX idx_updated (updated)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+    canceled BOOLEAN DEFAULT false,
+    updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    UNIQUE (order_number, pid)
+);
+
+-- Create trigger for orders
+CREATE TRIGGER update_orders_updated
+    BEFORE UPDATE ON orders
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_column();
+
+CREATE INDEX idx_orders_number ON orders(order_number);
+CREATE INDEX idx_orders_pid ON orders(pid);
+CREATE INDEX idx_orders_customer ON orders(customer);
+CREATE INDEX idx_orders_date ON orders(date);
+CREATE INDEX idx_orders_status ON orders(status);
+CREATE INDEX idx_orders_metrics ON orders(pid, date, canceled);
+CREATE INDEX idx_orders_updated ON orders(updated);
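+
+-- Example (sketch, hypothetical values): UNIQUE (order_number, pid) lets an
+-- importer upsert order lines idempotently.
+-- INSERT INTO orders (order_number, pid, SKU, date, price, quantity, customer)
+-- VALUES ('WEB-1001', 123456, 'SKU-0001', CURRENT_DATE, 9.990, 2, 'CUST-42')
+-- ON CONFLICT (order_number, pid) DO UPDATE SET
+--     quantity = EXCLUDED.quantity,
+--     price = EXCLUDED.price;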
 
 -- Create purchase_orders table with its indexes
 CREATE TABLE purchase_orders (
-    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    id BIGSERIAL PRIMARY KEY,
     po_id VARCHAR(50) NOT NULL,
     vendor VARCHAR(100) NOT NULL,
     date DATE NOT NULL,
     expected_date DATE,
     pid BIGINT NOT NULL,
     sku VARCHAR(50) NOT NULL,
-    name VARCHAR(100) NOT NULL COMMENT 'Product name from products.description',
+    name VARCHAR(100) NOT NULL,
     cost_price DECIMAL(10, 3) NOT NULL,
-    po_cost_price DECIMAL(10, 3) NOT NULL COMMENT 'Original cost from PO, before receiving adjustments',
-    status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done',
-    receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid',
+    po_cost_price DECIMAL(10, 3) NOT NULL,
+    status SMALLINT DEFAULT 1,
+    receiving_status SMALLINT DEFAULT 1,
     notes TEXT,
     long_note TEXT,
-    ordered INT NOT NULL,
-    received INT DEFAULT 0,
-    received_date DATE COMMENT 'Date of first receiving',
-    last_received_date DATE COMMENT 'Date of most recent receiving',
-    received_by VARCHAR(100) COMMENT 'Name of person who first received this PO line',
-    receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
-    updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    ordered INTEGER NOT NULL,
+    received INTEGER DEFAULT 0,
+    received_date DATE,
+    last_received_date DATE,
+    received_by VARCHAR(100),
+    receiving_history JSONB,
+    updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (pid) REFERENCES products(pid),
-    INDEX idx_po_id (po_id),
-    INDEX idx_vendor (vendor),
-    INDEX idx_status (status),
-    INDEX idx_receiving_status (receiving_status),
-    INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received),
-    INDEX idx_po_metrics (pid, date, receiving_status, received_date),
-    INDEX idx_po_product_date (pid, date),
-    INDEX idx_po_product_status (pid, status),
-    INDEX idx_updated (updated),
-    UNIQUE KEY unique_po_product (po_id, pid)
-) ENGINE=InnoDB;
+    UNIQUE (po_id, pid)
+);
 
-SET FOREIGN_KEY_CHECKS = 1;
+-- Create trigger for purchase_orders
+CREATE TRIGGER update_purchase_orders_updated
+    BEFORE UPDATE ON purchase_orders
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_column();
+
+COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
+COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
+COMMENT ON COLUMN purchase_orders.status IS '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done';
+COMMENT ON COLUMN purchase_orders.receiving_status IS '0=canceled,1=created,30=partial_received,40=full_received,50=paid';
+COMMENT ON COLUMN purchase_orders.receiving_history IS 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag';
+
+CREATE INDEX idx_po_id ON purchase_orders(po_id);
+CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
+CREATE INDEX idx_po_status ON purchase_orders(status);
+CREATE INDEX idx_po_receiving_status ON purchase_orders(receiving_status);
+CREATE INDEX idx_po_metrics ON purchase_orders(pid, date, status, ordered, received);
+CREATE INDEX idx_po_metrics_receiving ON purchase_orders(pid, date, receiving_status, received_date);
+CREATE INDEX idx_po_product_date ON purchase_orders(pid, date);
+CREATE INDEX idx_po_product_status ON purchase_orders(pid, status);
+CREATE INDEX idx_po_updated ON purchase_orders(updated);
+
+SET session_replication_role = 'origin'; -- Re-enable foreign key checks
 
 -- Create views for common calculations
 -- product_sales_trends view moved to metrics-schema.sql
\ No newline at end of file
diff --git a/inventory-server/scripts/old_csv/import-csv.js b/inventory-server/old/old_csv/import-csv.js
similarity index 100%
rename from inventory-server/scripts/old_csv/import-csv.js
rename to inventory-server/old/old_csv/import-csv.js
diff --git a/inventory-server/scripts/old_csv/update-csv.js b/inventory-server/old/old_csv/update-csv.js
similarity index 100%
rename from inventory-server/scripts/old_csv/update-csv.js
rename to inventory-server/old/old_csv/update-csv.js
diff --git a/inventory-server/scripts/scripts.js b/inventory-server/old/scripts.js
similarity index 100%
rename from inventory-server/scripts/scripts.js
rename to inventory-server/old/scripts.js
diff --git a/inventory-server/src/routes/test-connection.js b/inventory-server/old/test-connection.js
similarity index 100%
rename from inventory-server/src/routes/test-connection.js
rename to inventory-server/old/test-connection.js
diff --git a/inventory-server/scripts/test-prod-connection.js b/inventory-server/old/test-prod-connection.js
similarity index 100%
rename from inventory-server/scripts/test-prod-connection.js
rename to inventory-server/old/test-prod-connection.js
diff --git a/inventory-server/package-lock.json b/inventory-server/package-lock.json
index c667c9f..f73617e
100755 --- a/inventory-server/package-lock.json +++ b/inventory-server/package-lock.json @@ -9,6 +9,7 @@ "version": "1.0.0", "license": "ISC", "dependencies": { + "bcrypt": "^5.1.1", "cors": "^2.8.5", "csv-parse": "^5.6.0", "dotenv": "^16.4.7", @@ -22,9 +23,79 @@ "uuid": "^9.0.1" }, "devDependencies": { + "@types/express": "^4.17.21", + "@types/pg": "^8.11.2", "nodemon": "^3.0.2" } }, + "node_modules/@mapbox/node-pre-gyp": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", + "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", + "license": "BSD-3-Clause", + "dependencies": { + "detect-libc": "^2.0.0", + "https-proxy-agent": "^5.0.0", + "make-dir": "^3.1.0", + "node-fetch": "^2.6.7", + "nopt": "^5.0.0", + "npmlog": "^5.0.1", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.11" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, "node_modules/@pm2/agent": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/@pm2/agent/-/agent-2.0.4.tgz", @@ -320,6 +391,67 @@ "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", "license": "MIT" }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": 
"4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", + "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/node": { "version": "18.19.76", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.76.tgz", @@ -339,6 +471,123 @@ "form-data": "^4.0.0" } }, + "node_modules/@types/pg": { + "version": "8.11.11", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.11.tgz", + "integrity": "sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^4.0.1" + } + }, + "node_modules/@types/pg/node_modules/pg-types": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-4.0.2.tgz", + "integrity": "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==", + "dev": true, + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "pg-numeric": "1.0.2", + "postgres-array": "~3.0.1", + "postgres-bytea": "~3.0.0", + "postgres-date": "~2.1.0", + "postgres-interval": "^3.0.0", + "postgres-range": "^1.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@types/pg/node_modules/postgres-array": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.2.tgz", + "integrity": "sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/pg/node_modules/postgres-bytea": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-3.0.0.tgz", + "integrity": "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "obuf": "~1.1.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/pg/node_modules/postgres-date": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-2.1.0.tgz", + "integrity": 
"sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/pg/node_modules/postgres-interval": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz", + "integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/qs": { + "version": "6.9.18", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz", + "integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "license": "ISC" + }, "node_modules/abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", @@ -409,6 +658,15 @@ "node": ">=6" } }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -443,6 +701,40 @@ "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==", "license": "MIT" }, + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", + "license": "ISC" + }, + "node_modules/are-we-there-yet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", + "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "delegates": "^1.0.0", + 
"readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/are-we-there-yet/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -507,7 +799,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true, "license": "MIT" }, "node_modules/basic-ftp": { @@ -519,6 +810,20 @@ "node": ">=10.0.0" } }, + "node_modules/bcrypt": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", + "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.11", + "node-addon-api": "^5.0.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -586,7 +891,6 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -712,6 +1016,15 @@ "fsevents": "~2.3.2" } }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "license": "ISC", + "engines": { + "node": ">=10" + } + }, "node_modules/cli-tableau": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/cli-tableau/-/cli-tableau-2.0.1.tgz", @@ -741,6 +1054,15 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -763,7 +1085,6 @@ "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true, "license": "MIT" }, "node_modules/concat-stream": { @@ -781,6 +1102,12 @@ "typedarray": "^0.0.6" } }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": 
"sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "license": "ISC" + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -915,6 +1242,12 @@ "node": ">=0.4.0" } }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "license": "MIT" + }, "node_modules/denque": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", @@ -943,6 +1276,15 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/detect-libc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, "node_modules/dotenv": { "version": "16.4.7", "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", @@ -975,6 +1317,12 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", "license": "MIT" }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, "node_modules/encodeurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", @@ -1304,6 +1652,36 @@ "node": ">= 0.6" } }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -1327,6 +1705,27 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gauge": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", + "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": 
"^1.1.2" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/generate-function": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz", @@ -1422,6 +1821,27 @@ "integrity": "sha512-2e/nZezdVlyCopOCYHeW0onkbZg7xP1Ad6pndPy1rCygeRykefUS6r7oA5cJRGEFvseiaz5a/qUHFVX1dd6Isg==", "license": "MIT" }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -1482,6 +1902,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "license": "ISC" + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -1610,6 +2036,17 @@ "dev": true, "license": "ISC" }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -1686,6 +2123,15 @@ "node": ">=0.10.0" } }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -1801,6 +2247,30 @@ "url": "https://github.com/sponsors/wellwelwel" } }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -1874,7 +2344,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" @@ -1892,6 +2361,40 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", @@ -2053,6 +2556,12 @@ "node": ">= 0.4.0" } }, + "node_modules/node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": 
"sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==", + "license": "MIT" + }, "node_modules/node-domexception": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", @@ -2169,6 +2678,21 @@ "node": ">=4" } }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "license": "ISC", + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -2178,6 +2702,19 @@ "node": ">=0.10.0" } }, + "node_modules/npmlog": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", + "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" + } + }, "node_modules/nssocket": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/nssocket/-/nssocket-0.6.0.tgz", @@ -2218,6 +2755,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/obuf": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", + "dev": true, + "license": "MIT" + }, "node_modules/on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", @@ -2230,6 +2774,15 @@ "node": ">= 0.8" } }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/openai": { "version": "4.85.3", "resolved": "https://registry.npmjs.org/openai/-/openai-4.85.3.tgz", @@ -2330,6 +2883,15 @@ "node": ">= 0.8" } }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", @@ -2391,6 +2953,16 @@ "node": ">=4.0.0" } }, + "node_modules/pg-numeric": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pg-numeric/-/pg-numeric-1.0.2.tgz", + "integrity": "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=4" + } + }, "node_modules/pg-pool": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.7.1.tgz", @@ -2713,6 +3285,13 @@ "node": ">=0.10.0" } }, + "node_modules/postgres-range": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/postgres-range/-/postgres-range-1.1.4.tgz", + "integrity": 
"sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==", + "dev": true, + "license": "MIT" + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -2937,6 +3516,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/run-series": { "version": "1.1.9", "resolved": "https://registry.npmjs.org/run-series/-/run-series-1.1.9.tgz", @@ -3060,6 +3655,12 @@ "node": ">= 0.8.0" } }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "license": "ISC" + }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -3316,6 +3917,32 @@ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", "license": "MIT" }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -3367,6 +3994,23 @@ "url": "https://www.buymeacoffee.com/systeminfo" } }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -3571,6 +4215,21 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + 
}, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, "node_modules/ws": { "version": "8.18.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", diff --git a/inventory-server/package.json b/inventory-server/package.json index bd1451a..05aa4c6 100755 --- a/inventory-server/package.json +++ b/inventory-server/package.json @@ -18,6 +18,7 @@ "author": "", "license": "ISC", "dependencies": { + "bcrypt": "^5.1.1", "cors": "^2.8.5", "csv-parse": "^5.6.0", "dotenv": "^16.4.7", @@ -31,6 +32,8 @@ "uuid": "^9.0.1" }, "devDependencies": { + "@types/express": "^4.17.21", + "@types/pg": "^8.11.2", "nodemon": "^3.0.2" } } diff --git a/inventory-server/scripts/full-reset.js b/inventory-server/scripts/full-reset.js index 9a6aff8..3e96105 100644 --- a/inventory-server/scripts/full-reset.js +++ b/inventory-server/scripts/full-reset.js @@ -14,7 +14,15 @@ function outputProgress(data) { function runScript(scriptPath) { return new Promise((resolve, reject) => { const child = spawn('node', [scriptPath], { - stdio: ['inherit', 'pipe', 'pipe'] + stdio: ['inherit', 'pipe', 'pipe'], + env: { + ...process.env, + PGHOST: process.env.DB_HOST, + PGUSER: process.env.DB_USER, + PGPASSWORD: process.env.DB_PASSWORD, + PGDATABASE: process.env.DB_NAME, + PGPORT: process.env.DB_PORT || '5432' + } }); let output = ''; diff --git a/inventory-server/scripts/import-from-prod.js b/inventory-server/scripts/import-from-prod.js index 67b6678..dd0f239 100644 --- a/inventory-server/scripts/import-from-prod.js +++ b/inventory-server/scripts/import-from-prod.js @@ -19,7 +19,6 @@ const IMPORT_PURCHASE_ORDERS = true; const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false // SSH configuration -// In import-from-prod.js const sshConfig = { ssh: { host: process.env.PROD_SSH_HOST, @@ -31,6 +30,7 @@ const sshConfig = { compress: true, // Enable SSH compression }, prodDbConfig: { + // MySQL config for production host: process.env.PROD_DB_HOST || "localhost", user: process.env.PROD_DB_USER, password: process.env.PROD_DB_PASSWORD, @@ -39,21 +39,16 @@ const sshConfig = { timezone: 'Z', }, localDbConfig: { + // PostgreSQL config for local host: process.env.DB_HOST, user: process.env.DB_USER, password: process.env.DB_PASSWORD, database: process.env.DB_NAME, - multipleStatements: true, - waitForConnections: true, - connectionLimit: 10, - queueLimit: 0, - namedPlaceholders: true, - connectTimeout: 60000, - enableKeepAlive: true, - keepAliveInitialDelay: 10000, - compress: true, - timezone: 'Z', - stringifyObjects: false, + port: process.env.DB_PORT || 5432, + ssl: process.env.DB_SSL === 'true', + connectionTimeoutMillis: 60000, + idleTimeoutMillis: 30000, + max: 10 // connection pool max size } }; @@ -108,7 +103,7 @@ async function main() { SET status = 'cancelled', end_time = NOW(), - duration_seconds = TIMESTAMPDIFF(SECOND, start_time, NOW()), + duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER, error_message = 'Previous import was not completed properly' WHERE status = 'running' `); @@ -118,9 +113,10 @@ async function main() { CREATE TABLE IF NOT EXISTS sync_status ( table_name VARCHAR(50) PRIMARY KEY, last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - last_sync_id BIGINT, - INDEX idx_last_sync (last_sync_timestamp) + last_sync_id 
BIGINT ); + + CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status (last_sync_timestamp); `); // Create import history record for the overall session @@ -134,17 +130,17 @@ async function main() { ) VALUES ( 'all_tables', NOW(), - ?, + $1::boolean, 'running', - JSON_OBJECT( - 'categories_enabled', ?, - 'products_enabled', ?, - 'orders_enabled', ?, - 'purchase_orders_enabled', ? + jsonb_build_object( + 'categories_enabled', $2::boolean, + 'products_enabled', $3::boolean, + 'orders_enabled', $4::boolean, + 'purchase_orders_enabled', $5::boolean ) - ) + ) RETURNING id `, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]); - importHistoryId = historyResult.insertId; + importHistoryId = historyResult.rows[0].id; const results = { categories: null, @@ -162,8 +158,8 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; console.log('Categories import result:', results.categories); - totalRecordsAdded += results.categories?.recordsAdded || 0; - totalRecordsUpdated += results.categories?.recordsUpdated || 0; + totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0); + totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0); } if (IMPORT_PRODUCTS) { @@ -171,8 +167,8 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; console.log('Products import result:', results.products); - totalRecordsAdded += results.products?.recordsAdded || 0; - totalRecordsUpdated += results.products?.recordsUpdated || 0; + totalRecordsAdded += parseInt(results.products?.recordsAdded || 0); + totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0); } if (IMPORT_ORDERS) { @@ -180,8 +176,8 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; console.log('Orders import result:', results.orders); - totalRecordsAdded += results.orders?.recordsAdded || 0; - totalRecordsUpdated += results.orders?.recordsUpdated || 0; + totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0); + totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0); } if (IMPORT_PURCHASE_ORDERS) { @@ -189,8 +185,8 @@ async function main() { if (isImportCancelled) throw new Error("Import cancelled"); completedSteps++; console.log('Purchase orders import result:', results.purchaseOrders); - totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0; - totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0; + totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0); + totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0); } const endTime = Date.now(); @@ -201,21 +197,21 @@ async function main() { UPDATE import_history SET end_time = NOW(), - duration_seconds = ?, - records_added = ?, - records_updated = ?, + duration_seconds = $1, + records_added = $2, + records_updated = $3, status = 'completed', - additional_info = JSON_OBJECT( - 'categories_enabled', ?, - 'products_enabled', ?, - 'orders_enabled', ?, - 'purchase_orders_enabled', ?, - 'categories_result', CAST(? AS JSON), - 'products_result', CAST(? AS JSON), - 'orders_result', CAST(? AS JSON), - 'purchase_orders_result', CAST(? 
AS JSON) + additional_info = jsonb_build_object( + 'categories_enabled', $4::boolean, + 'products_enabled', $5::boolean, + 'orders_enabled', $6::boolean, + 'purchase_orders_enabled', $7::boolean, + 'categories_result', COALESCE($8::jsonb, 'null'::jsonb), + 'products_result', COALESCE($9::jsonb, 'null'::jsonb), + 'orders_result', COALESCE($10::jsonb, 'null'::jsonb), + 'purchase_orders_result', COALESCE($11::jsonb, 'null'::jsonb) ) - WHERE id = ? + WHERE id = $12 `, [ totalElapsedSeconds, totalRecordsAdded, @@ -259,10 +255,10 @@ async function main() { UPDATE import_history SET end_time = NOW(), - duration_seconds = ?, - status = ?, - error_message = ? - WHERE id = ? + duration_seconds = $1, + status = $2, + error_message = $3 + WHERE id = $4 `, [totalElapsedSeconds, error.message === "Import cancelled" ? 'cancelled' : 'failed', error.message, importHistoryId]); } @@ -288,16 +284,23 @@ async function main() { throw error; } finally { if (connections) { - await closeConnections(connections); + await closeConnections(connections).catch(err => { + console.error("Error closing connections:", err); + }); } } } // Run the import only if this is the main module if (require.main === module) { - main().catch((error) => { + main().then((results) => { + console.log('Import completed successfully:', results); + // Force exit after a small delay to ensure all logs are written + setTimeout(() => process.exit(0), 500); + }).catch((error) => { console.error("Unhandled error in main process:", error); - process.exit(1); + // Force exit with error code after a small delay + setTimeout(() => process.exit(1), 500); }); } diff --git a/inventory-server/scripts/import/categories.js b/inventory-server/scripts/import/categories.js index 7dffc3c..2634348 100644 --- a/inventory-server/scripts/import/categories.js +++ b/inventory-server/scripts/import/categories.js @@ -9,170 +9,206 @@ async function importCategories(prodConnection, localConnection) { const startTime = Date.now(); const typeOrder = [10, 20, 11, 21, 12, 13]; let totalInserted = 0; + let totalUpdated = 0; let skippedCategories = []; try { - // Process each type in order with its own query + // Start a single transaction for the entire import + await localConnection.query('BEGIN'); + + // Process each type in order with its own savepoint for (const type of typeOrder) { - const [categories] = await prodConnection.query( - ` - SELECT - pc.cat_id, - pc.name, - pc.type, - CASE - WHEN pc.type IN (10, 20) THEN NULL -- Top level categories should have no parent - WHEN pc.master_cat_id IS NULL THEN NULL - ELSE pc.master_cat_id - END as parent_id, - pc.combined_name as description - FROM product_categories pc - WHERE pc.type = ? 
- ORDER BY pc.cat_id - `, - [type] - ); + try { + // Create a savepoint for this type + await localConnection.query(`SAVEPOINT category_type_${type}`); - if (categories.length === 0) continue; - - console.log(`\nProcessing ${categories.length} type ${type} categories`); - if (type === 10) { - console.log("Type 10 categories:", JSON.stringify(categories, null, 2)); - } - - // For types that can have parents (11, 21, 12, 13), verify parent existence - let categoriesToInsert = categories; - if (![10, 20].includes(type)) { - // Get all parent IDs - const parentIds = [ - ...new Set( - categories.map((c) => c.parent_id).filter((id) => id !== null) - ), - ]; - - // Check which parents exist - const [existingParents] = await localConnection.query( - "SELECT cat_id FROM categories WHERE cat_id IN (?)", - [parentIds] - ); - const existingParentIds = new Set(existingParents.map((p) => p.cat_id)); - - // Filter categories and track skipped ones - categoriesToInsert = categories.filter( - (cat) => - cat.parent_id === null || existingParentIds.has(cat.parent_id) - ); - const invalidCategories = categories.filter( - (cat) => - cat.parent_id !== null && !existingParentIds.has(cat.parent_id) + // Production query remains MySQL compatible + const [categories] = await prodConnection.query( + ` + SELECT + pc.cat_id, + pc.name, + pc.type, + CASE + WHEN pc.type IN (10, 20) THEN NULL -- Top level categories should have no parent + WHEN pc.master_cat_id IS NULL THEN NULL + ELSE pc.master_cat_id + END as parent_id, + pc.combined_name as description + FROM product_categories pc + WHERE pc.type = ? + ORDER BY pc.cat_id + `, + [type] ); - if (invalidCategories.length > 0) { - const skippedInfo = invalidCategories.map((c) => ({ - id: c.cat_id, - name: c.name, - type: c.type, - missing_parent: c.parent_id, - })); - skippedCategories.push(...skippedInfo); + if (categories.length === 0) { + await localConnection.query(`RELEASE SAVEPOINT category_type_${type}`); + continue; + } - console.log( - "\nSkipping categories with missing parents:", - invalidCategories - .map( - (c) => - `${c.cat_id} - ${c.name} (missing parent: ${c.parent_id})` - ) - .join("\n") - ); + console.log(`\nProcessing ${categories.length} type ${type} categories`); + if (type === 10) { + console.log("Type 10 categories:", JSON.stringify(categories, null, 2)); + } + + // For types that can have parents (11, 21, 12, 13), verify parent existence + let categoriesToInsert = categories; + if (![10, 20].includes(type)) { + // Get all parent IDs + const parentIds = [ + ...new Set( + categories + .filter(c => c && c.parent_id !== null) + .map(c => c.parent_id) + ), + ]; + + console.log(`Processing ${categories.length} type ${type} categories with ${parentIds.length} unique parent IDs`); + console.log('Parent IDs:', parentIds); + + // No need to check for parent existence - we trust they exist since they were just inserted + categoriesToInsert = categories; } if (categoriesToInsert.length === 0) { console.log( - `No valid categories of type ${type} to insert - all had missing parents` + `No valid categories of type ${type} to insert` ); + await localConnection.query(`RELEASE SAVEPOINT category_type_${type}`); continue; } + + console.log( + `Inserting ${categoriesToInsert.length} type ${type} categories` + ); + + // PostgreSQL upsert query with parameterized values + const values = categoriesToInsert.flatMap((cat) => [ + cat.cat_id, + cat.name, + cat.type, + cat.parent_id, + cat.description, + 'active', + new Date(), + new Date() + ]); + + console.log('Attempting 
to insert/update with values:', JSON.stringify(values, null, 2)); + + const placeholders = categoriesToInsert + .map((_, i) => `($${i * 8 + 1}, $${i * 8 + 2}, $${i * 8 + 3}, $${i * 8 + 4}, $${i * 8 + 5}, $${i * 8 + 6}, $${i * 8 + 7}, $${i * 8 + 8})`) + .join(','); + + console.log('Using placeholders:', placeholders); + + // Insert categories with ON CONFLICT clause for PostgreSQL + const query = ` + WITH inserted_categories AS ( + INSERT INTO categories ( + cat_id, name, type, parent_id, description, status, created_at, updated_at + ) + VALUES ${placeholders} + ON CONFLICT (cat_id) DO UPDATE SET + name = EXCLUDED.name, + type = EXCLUDED.type, + parent_id = EXCLUDED.parent_id, + description = EXCLUDED.description, + status = EXCLUDED.status, + updated_at = EXCLUDED.updated_at + RETURNING + cat_id, + CASE + WHEN xmax = 0 THEN true + ELSE false + END as is_insert + ) + SELECT + COUNT(*) as total, + COUNT(*) FILTER (WHERE is_insert) as inserted, + COUNT(*) FILTER (WHERE NOT is_insert) as updated + FROM inserted_categories`; + + console.log('Executing query:', query); + + const result = await localConnection.query(query, values); + console.log('Query result:', result); + + // Get the first result since query returns an array + const queryResult = Array.isArray(result) ? result[0] : result; + + if (!queryResult || !queryResult.rows || !queryResult.rows[0]) { + console.error('Query failed to return results. Result:', queryResult); + throw new Error('Query did not return expected results'); + } + + const total = parseInt(queryResult.rows[0].total) || 0; + const inserted = parseInt(queryResult.rows[0].inserted) || 0; + const updated = parseInt(queryResult.rows[0].updated) || 0; + + console.log(`Total: ${total}, Inserted: ${inserted}, Updated: ${updated}`); + + totalInserted += inserted; + totalUpdated += updated; + + // Release the savepoint for this type + await localConnection.query(`RELEASE SAVEPOINT category_type_${type}`); + + outputProgress({ + status: "running", + operation: "Categories import", + message: `Imported ${inserted} (updated ${updated}) categories of type ${type}`, + current: totalInserted + totalUpdated, + total: categories.length, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), + }); + } catch (error) { + // Rollback to the savepoint for this type + await localConnection.query(`ROLLBACK TO SAVEPOINT category_type_${type}`); + throw error; } - - console.log( - `Inserting ${categoriesToInsert.length} type ${type} categories` - ); - - const placeholders = categoriesToInsert - .map(() => "(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)") - .join(","); - - const values = categoriesToInsert.flatMap((cat) => [ - cat.cat_id, - cat.name, - cat.type, - cat.parent_id, - cat.description, - "active", - ]); - - // Insert categories and create relationships in one query to avoid race conditions - await localConnection.query( - ` - INSERT INTO categories (cat_id, name, type, parent_id, description, status, created_at, updated_at) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - name = VALUES(name), - type = VALUES(type), - parent_id = VALUES(parent_id), - description = VALUES(description), - status = VALUES(status), - updated_at = CURRENT_TIMESTAMP - `, - values - ); - - totalInserted += categoriesToInsert.length; - outputProgress({ - status: "running", - operation: "Categories import", - current: totalInserted, - total: totalInserted, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000), - }); } - // After all imports, if we skipped any categories, throw 
an error - if (skippedCategories.length > 0) { - const error = new Error( - "Categories import completed with errors - some categories were skipped due to missing parents" - ); - error.skippedCategories = skippedCategories; - throw error; - } + // Commit the entire transaction - we'll do this even if we have skipped categories + await localConnection.query('COMMIT'); outputProgress({ status: "complete", operation: "Categories import completed", - current: totalInserted, - total: totalInserted, + current: totalInserted + totalUpdated, + total: totalInserted + totalUpdated, duration: formatElapsedTime((Date.now() - startTime) / 1000), + warnings: skippedCategories.length > 0 ? { + message: "Some categories were skipped due to missing parents", + skippedCategories + } : undefined }); return { status: "complete", - totalImported: totalInserted + recordsAdded: totalInserted, + recordsUpdated: totalUpdated, + totalRecords: totalInserted + totalUpdated, + warnings: skippedCategories.length > 0 ? { + message: "Some categories were skipped due to missing parents", + skippedCategories + } : undefined }; } catch (error) { console.error("Error importing categories:", error); - if (error.skippedCategories) { - console.error( - "Skipped categories:", - JSON.stringify(error.skippedCategories, null, 2) - ); + + // Only rollback if we haven't committed yet + try { + await localConnection.query('ROLLBACK'); + } catch (rollbackError) { + console.error("Error during rollback:", rollbackError); } outputProgress({ status: "error", operation: "Categories import failed", - error: error.message, - skippedCategories: error.skippedCategories + error: error.message }); throw error; diff --git a/inventory-server/scripts/import/orders.js b/inventory-server/scripts/import/orders.js index 450c0d2..7608bd7 100644 --- a/inventory-server/scripts/import/orders.js +++ b/inventory-server/scripts/import/orders.js @@ -2,7 +2,7 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products'); /** - * Imports orders from a production MySQL database to a local MySQL database. + * Imports orders from a production MySQL database to a local PostgreSQL database. * It can run in two modes: * 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time. * 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp. 
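Two PostgreSQL idioms recur throughout the rewritten importers in this patch and are easier to see in isolation: the `(xmax = 0)` trick for counting inserts versus updates inside an upsert, and the `sync_status` upsert that drives incremental mode. The following is a minimal, self-contained sketch of both, not project code; it assumes a reachable PostgreSQL instance configured via the standard PG* environment variables, and the table name `demo_kv` is illustrative only.

// upsert-demo.js -- sketch of two idioms used by this patch (assumptions noted inline)
const { Pool } = require('pg');
const pool = new Pool(); // reads PGHOST, PGUSER, PGPASSWORD, PGDATABASE, PGPORT

async function main() {
  // demo_kv is a stand-in table; sync_status mirrors (in simplified form)
  // the table created in import-from-prod.js.
  await pool.query(`
    CREATE TABLE IF NOT EXISTS demo_kv (k TEXT PRIMARY KEY, v TEXT);
    CREATE TABLE IF NOT EXISTS sync_status (
      table_name VARCHAR(50) PRIMARY KEY,
      last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
    );
  `);

  // Idiom 1: upsert a batch and count inserts vs. updates in one round trip.
  // xmax is 0 on a freshly inserted row version and nonzero when the row came
  // from the DO UPDATE branch -- a widely used, if officially undocumented, trick.
  const { rows } = await pool.query(`
    WITH upserted AS (
      INSERT INTO demo_kv (k, v)
      VALUES ($1, $2), ($3, $4)
      ON CONFLICT (k) DO UPDATE SET v = EXCLUDED.v
      RETURNING (xmax = 0) AS inserted
    )
    SELECT
      COUNT(*) FILTER (WHERE inserted)     AS inserted,
      COUNT(*) FILTER (WHERE NOT inserted) AS updated
    FROM upserted
  `, ['a', '1', 'b', '2']);

  // COUNT(*) is a bigint, which node-postgres returns as a string -- parse it
  // before doing arithmetic (the same caveat applies in the importers below).
  const inserted = parseInt(rows[0].inserted, 10);
  const updated = parseInt(rows[0].updated, 10);
  console.log({ inserted, updated });

  // Idiom 2: record a successful incremental sync. ON CONFLICT ... DO UPDATE
  // is the PostgreSQL replacement for the MySQL ON DUPLICATE KEY UPDATE that
  // this patch removes.
  await pool.query(`
    INSERT INTO sync_status (table_name, last_sync_timestamp)
    VALUES ('demo_kv', NOW())
    ON CONFLICT (table_name) DO UPDATE SET last_sync_timestamp = NOW()
  `);

  await pool.end();
}

main().catch((err) => { console.error(err); process.exit(1); });

The same WITH ... RETURNING (xmax = 0) shape appears in the categories.js hunk above and again in the per-batch order inserts below.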
@@ -23,93 +23,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = let importedCount = 0; let totalOrderItems = 0; let totalUniqueOrders = 0; - - // Add a cumulative counter for processed orders before the loop let cumulativeProcessedOrders = 0; try { - // Clean up any existing temp tables first - await localConnection.query(` - DROP TEMPORARY TABLE IF EXISTS temp_order_items; - DROP TEMPORARY TABLE IF EXISTS temp_order_meta; - DROP TEMPORARY TABLE IF EXISTS temp_order_discounts; - DROP TEMPORARY TABLE IF EXISTS temp_order_taxes; - DROP TEMPORARY TABLE IF EXISTS temp_order_costs; - `); - - // Create all temp tables with correct schema - await localConnection.query(` - CREATE TEMPORARY TABLE temp_order_items ( - order_id INT UNSIGNED NOT NULL, - pid INT UNSIGNED NOT NULL, - SKU VARCHAR(50) NOT NULL, - price DECIMAL(10,2) NOT NULL, - quantity INT NOT NULL, - base_discount DECIMAL(10,2) DEFAULT 0, - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8; - `); - - await localConnection.query(` - CREATE TEMPORARY TABLE temp_order_meta ( - order_id INT UNSIGNED NOT NULL, - date DATE NOT NULL, - customer VARCHAR(100) NOT NULL, - customer_name VARCHAR(150) NOT NULL, - status INT, - canceled TINYINT(1), - summary_discount DECIMAL(10,2) DEFAULT 0.00, - summary_subtotal DECIMAL(10,2) DEFAULT 0.00, - PRIMARY KEY (order_id) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8; - `); - - await localConnection.query(` - CREATE TEMPORARY TABLE temp_order_discounts ( - order_id INT UNSIGNED NOT NULL, - pid INT UNSIGNED NOT NULL, - discount DECIMAL(10,2) NOT NULL, - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8; - `); - - await localConnection.query(` - CREATE TEMPORARY TABLE temp_order_taxes ( - order_id INT UNSIGNED NOT NULL, - pid INT UNSIGNED NOT NULL, - tax DECIMAL(10,2) NOT NULL, - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8; - `); - - await localConnection.query(` - CREATE TEMPORARY TABLE temp_order_costs ( - order_id INT UNSIGNED NOT NULL, - pid INT UNSIGNED NOT NULL, - costeach DECIMAL(10,3) DEFAULT 0.000, - PRIMARY KEY (order_id, pid) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8; - `); - - // Get column names from the local table - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'orders' - AND COLUMN_NAME != 'updated' -- Exclude the updated column - ORDER BY ORDINAL_POSITION - `); - const columnNames = columns.map(col => col.COLUMN_NAME); - // Get last sync info const [syncInfo] = await localConnection.query( "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'" ); - const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01'; console.log('Orders: Using last sync time:', lastSyncTime); - // First get count of order items + // First get count of order items - Keep MySQL compatible for production const [[{ total }]] = await prodConnection.query(` SELECT COUNT(*) as total FROM order_items oi @@ -141,12 +66,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = totalOrderItems = total; console.log('Orders: Found changes:', totalOrderItems); - // Get order items in batches + // Get order items - Keep MySQL compatible for production + console.log('Orders: Starting MySQL query...'); const [orderItems] = await prodConnection.query(` SELECT oi.order_id, - oi.prod_pid as pid, - oi.prod_itemnumber as SKU, + oi.prod_pid, + 
COALESCE(NULLIF(TRIM(oi.prod_itemnumber), ''), 'NO-SKU') as SKU, oi.prod_price as price, oi.qty_ordered as quantity, COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount, @@ -177,24 +103,78 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = ` : ''} `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []); - console.log('Orders: Processing', orderItems.length, 'order items'); + console.log('Orders: Found', orderItems.length, 'order items to process'); + + // Create tables in PostgreSQL for debugging + await localConnection.query(` + DROP TABLE IF EXISTS debug_order_items; + DROP TABLE IF EXISTS debug_order_meta; + DROP TABLE IF EXISTS debug_order_discounts; + DROP TABLE IF EXISTS debug_order_taxes; + DROP TABLE IF EXISTS debug_order_costs; + + CREATE TABLE debug_order_items ( + order_id INTEGER NOT NULL, + pid INTEGER NOT NULL, + SKU VARCHAR(50) NOT NULL, + price DECIMAL(10,2) NOT NULL, + quantity INTEGER NOT NULL, + base_discount DECIMAL(10,2) DEFAULT 0, + PRIMARY KEY (order_id, pid) + ); + + CREATE TABLE debug_order_meta ( + order_id INTEGER NOT NULL, + date DATE NOT NULL, + customer VARCHAR(100) NOT NULL, + customer_name VARCHAR(150) NOT NULL, + status INTEGER, + canceled BOOLEAN, + summary_discount DECIMAL(10,2) DEFAULT 0.00, + summary_subtotal DECIMAL(10,2) DEFAULT 0.00, + PRIMARY KEY (order_id) + ); + + CREATE TABLE debug_order_discounts ( + order_id INTEGER NOT NULL, + pid INTEGER NOT NULL, + discount DECIMAL(10,2) NOT NULL, + PRIMARY KEY (order_id, pid) + ); + + CREATE TABLE debug_order_taxes ( + order_id INTEGER NOT NULL, + pid INTEGER NOT NULL, + tax DECIMAL(10,2) NOT NULL, + PRIMARY KEY (order_id, pid) + ); + + CREATE TABLE debug_order_costs ( + order_id INTEGER NOT NULL, + pid INTEGER NOT NULL, + costeach DECIMAL(10,3) DEFAULT 0.000, + PRIMARY KEY (order_id, pid) + ); + `); // Insert order items in batches for (let i = 0; i < orderItems.length; i += 5000) { const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length)); - const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(","); + const placeholders = batch.map((_, idx) => + `($${idx * 6 + 1}, $${idx * 6 + 2}, $${idx * 6 + 3}, $${idx * 6 + 4}, $${idx * 6 + 5}, $${idx * 6 + 6})` + ).join(","); const values = batch.flatMap(item => [ - item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount + item.order_id, item.prod_pid, item.SKU, item.price, item.quantity, item.base_discount ]); await localConnection.query(` - INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount) + INSERT INTO debug_order_items (order_id, pid, SKU, price, quantity, base_discount) VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - SKU = VALUES(SKU), - price = VALUES(price), - quantity = VALUES(quantity), - base_discount = VALUES(base_discount) + ON CONFLICT (order_id, pid) DO UPDATE SET + SKU = EXCLUDED.SKU, + price = EXCLUDED.price, + quantity = EXCLUDED.quantity, + base_discount = EXCLUDED.base_discount `, values); processedCount = i + batch.length; @@ -203,24 +183,26 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = operation: "Orders import", message: `Loading order items: ${processedCount} of ${totalOrderItems}`, current: processedCount, - total: totalOrderItems + total: totalOrderItems, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), + remaining: estimateRemaining(startTime, processedCount, totalOrderItems), + rate: calculateRate(startTime, processedCount) }); } // Get unique 
order IDs const orderIds = [...new Set(orderItems.map(item => item.order_id))]; totalUniqueOrders = orderIds.length; - console.log('Total unique order IDs:', totalUniqueOrders); + console.log('Orders: Processing', totalUniqueOrders, 'unique orders'); // Reset processed count for order processing phase processedCount = 0; - // Get order metadata in batches - for (let i = 0; i < orderIds.length; i += 5000) { - const batchIds = orderIds.slice(i, i + 5000); - console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`); - console.log('Sample of batch IDs:', batchIds.slice(0, 5)); - + // Process metadata, discounts, taxes, and costs in parallel + const METADATA_BATCH_SIZE = 2000; + const PG_BATCH_SIZE = 200; + + const processMetadataBatch = async (batchIds) => { const [orders] = await prodConnection.query(` SELECT o.order_id, @@ -235,64 +217,46 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = LEFT JOIN users u ON o.order_cid = u.cid WHERE o.order_id IN (?) `, [batchIds]); - - console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`); - const duplicates = orders.filter((order, index, self) => - self.findIndex(o => o.order_id === order.order_id) !== index - ); - if (duplicates.length > 0) { - console.log('Found duplicates:', duplicates); + + // Process in sub-batches for PostgreSQL + for (let j = 0; j < orders.length; j += PG_BATCH_SIZE) { + const subBatch = orders.slice(j, j + PG_BATCH_SIZE); + if (subBatch.length === 0) continue; + + const placeholders = subBatch.map((_, idx) => + `($${idx * 8 + 1}, $${idx * 8 + 2}, $${idx * 8 + 3}, $${idx * 8 + 4}, $${idx * 8 + 5}, $${idx * 8 + 6}, $${idx * 8 + 7}, $${idx * 8 + 8})` + ).join(","); + + const values = subBatch.flatMap(order => [ + order.order_id, + order.date, + order.customer, + order.customer_name || '', + order.status, + order.canceled, + order.summary_discount || 0, + order.summary_subtotal || 0 + ]); + + await localConnection.query(` + INSERT INTO debug_order_meta ( + order_id, date, customer, customer_name, status, canceled, + summary_discount, summary_subtotal + ) + VALUES ${placeholders} + ON CONFLICT (order_id) DO UPDATE SET + date = EXCLUDED.date, + customer = EXCLUDED.customer, + customer_name = EXCLUDED.customer_name, + status = EXCLUDED.status, + canceled = EXCLUDED.canceled, + summary_discount = EXCLUDED.summary_discount, + summary_subtotal = EXCLUDED.summary_subtotal + `, values); } + }; - const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?, ?, ?)").join(","); - const values = orders.flatMap(order => [ - order.order_id, - order.date, - order.customer, - order.customer_name, - order.status, - order.canceled, - order.summary_discount, - order.summary_subtotal - ]); - - await localConnection.query(` - INSERT INTO temp_order_meta ( - order_id, - date, - customer, - customer_name, - status, - canceled, - summary_discount, - summary_subtotal - ) VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - date = VALUES(date), - customer = VALUES(customer), - customer_name = VALUES(customer_name), - status = VALUES(status), - canceled = VALUES(canceled), - summary_discount = VALUES(summary_discount), - summary_subtotal = VALUES(summary_subtotal) - `, values); - - processedCount = i + orders.length; - outputProgress({ - status: "running", - operation: "Orders import", - message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`, - current: processedCount, - total: totalUniqueOrders - }); - } - - // Reset processed count for final phase - processedCount = 0; - - 
// Get promotional discounts in batches - for (let i = 0; i < orderIds.length; i += 5000) { - const batchIds = orderIds.slice(i, i + 5000); + const processDiscountsBatch = async (batchIds) => { const [discounts] = await prodConnection.query(` SELECT order_id, pid, SUM(amount) as discount FROM order_discount_items @@ -300,313 +264,296 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate = GROUP BY order_id, pid `, [batchIds]); - if (discounts.length > 0) { - const placeholders = discounts.map(() => "(?, ?, ?)").join(","); - const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]); + if (discounts.length === 0) return; + + for (let j = 0; j < discounts.length; j += PG_BATCH_SIZE) { + const subBatch = discounts.slice(j, j + PG_BATCH_SIZE); + if (subBatch.length === 0) continue; + + const placeholders = subBatch.map((_, idx) => + `($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})` + ).join(","); + + const values = subBatch.flatMap(d => [ + d.order_id, + d.pid, + d.discount || 0 + ]); await localConnection.query(` - INSERT INTO temp_order_discounts VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - discount = VALUES(discount) + INSERT INTO debug_order_discounts (order_id, pid, discount) + VALUES ${placeholders} + ON CONFLICT (order_id, pid) DO UPDATE SET + discount = EXCLUDED.discount `, values); } - } + }; - // Get tax information in batches - for (let i = 0; i < orderIds.length; i += 5000) { - const batchIds = orderIds.slice(i, i + 5000); + const processTaxesBatch = async (batchIds) => { + // Optimized tax query to avoid subquery const [taxes] = await prodConnection.query(` - SELECT DISTINCT - oti.order_id, - otip.pid, - otip.item_taxes_to_collect as tax - FROM order_tax_info oti - JOIN ( - SELECT order_id, MAX(stamp) as max_stamp + SELECT oti.order_id, otip.pid, otip.item_taxes_to_collect as tax + FROM ( + SELECT order_id, MAX(taxinfo_id) as latest_taxinfo_id FROM order_tax_info WHERE order_id IN (?) GROUP BY order_id - ) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp + ) latest_info + JOIN order_tax_info oti ON oti.order_id = latest_info.order_id + AND oti.taxinfo_id = latest_info.latest_taxinfo_id JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id `, [batchIds]); - if (taxes.length > 0) { - // Remove any duplicates before inserting - const uniqueTaxes = new Map(); - taxes.forEach(t => { - const key = `${t.order_id}-${t.pid}`; - uniqueTaxes.set(key, t); - }); + if (taxes.length === 0) return; - const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]); - if (values.length > 0) { - const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(","); - await localConnection.query(` - INSERT INTO temp_order_taxes VALUES ${placeholders} - ON DUPLICATE KEY UPDATE tax = VALUES(tax) - `, values); - } - } - } + for (let j = 0; j < taxes.length; j += PG_BATCH_SIZE) { + const subBatch = taxes.slice(j, j + PG_BATCH_SIZE); + if (subBatch.length === 0) continue; - // Get costeach values in batches - for (let i = 0; i < orderIds.length; i += 5000) { - const batchIds = orderIds.slice(i, i + 5000); - const [costs] = await prodConnection.query(` - SELECT - oc.orderid as order_id, - oc.pid, - COALESCE( - oc.costeach, - (SELECT pi.costeach - FROM product_inventory pi - WHERE pi.pid = oc.pid - AND pi.daterec <= o.date_placed - ORDER BY pi.daterec DESC LIMIT 1) - ) as costeach - FROM order_costs oc - JOIN _order o ON oc.orderid = o.order_id - WHERE oc.orderid IN (?) 
- `, [batchIds]); + const placeholders = subBatch.map((_, idx) => + `($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})` + ).join(","); + + const values = subBatch.flatMap(t => [ + t.order_id, + t.pid, + t.tax || 0 + ]); - if (costs.length > 0) { - const placeholders = costs.map(() => '(?, ?, ?)').join(","); - const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach || 0]); await localConnection.query(` - INSERT INTO temp_order_costs (order_id, pid, costeach) + INSERT INTO debug_order_taxes (order_id, pid, tax) VALUES ${placeholders} - ON DUPLICATE KEY UPDATE costeach = VALUES(costeach) + ON CONFLICT (order_id, pid) DO UPDATE SET + tax = EXCLUDED.tax `, values); } - } + }; - // Now combine all the data and insert into orders table - // Pre-check all products at once instead of per batch - const allOrderPids = [...new Set(orderItems.map(item => item.pid))]; - const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [allOrderPids] - ) : [[]]; - const existingPids = new Set(existingProducts.map(p => p.pid)); - - // Process in larger batches - for (let i = 0; i < orderIds.length; i += 5000) { - const batchIds = orderIds.slice(i, i + 5000); - - // Get combined data for this batch - const [orders] = await localConnection.query(` + const processCostsBatch = async (batchIds) => { + const [costs] = await prodConnection.query(` SELECT - oi.order_id as order_number, - oi.pid, - oi.SKU, - om.date, - oi.price, - oi.quantity, - oi.base_discount + COALESCE(od.discount, 0) + - CASE - WHEN om.summary_discount > 0 THEN - ROUND((om.summary_discount * (oi.price * oi.quantity)) / - NULLIF(om.summary_subtotal, 0), 2) - ELSE 0 - END as discount, - COALESCE(ot.tax, 0) as tax, - 0 as tax_included, - 0 as shipping, - om.customer, - om.customer_name, - om.status, - om.canceled, - COALESCE(tc.costeach, 0) as costeach - FROM temp_order_items oi - JOIN temp_order_meta om ON oi.order_id = om.order_id - LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid - LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid - LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid - WHERE oi.order_id IN (?) + oc.orderid as order_id, + oc.pid, + oc.costeach + FROM order_costs oc + WHERE oc.orderid IN (?) + AND oc.pending = 0 `, [batchIds]); - // Filter orders and track missing products - do this in a single pass - const validOrders = []; - const values = []; - const processedOrderItems = new Set(); // Track unique order items - const processedOrders = new Set(); // Track unique orders - - for (const order of orders) { - if (!existingPids.has(order.pid)) { - missingProducts.add(order.pid); - skippedOrders.add(order.order_number); - continue; - } - validOrders.push(order); - values.push(...columnNames.map(col => order[col] ?? 
null)); - processedOrderItems.add(`${order.order_number}-${order.pid}`); - processedOrders.add(order.order_number); - } + if (costs.length === 0) return; - if (validOrders.length > 0) { - // Pre-compute the placeholders string once - const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`; - const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(","); + for (let j = 0; j < costs.length; j += PG_BATCH_SIZE) { + const subBatch = costs.slice(j, j + PG_BATCH_SIZE); + if (subBatch.length === 0) continue; + + const placeholders = subBatch.map((_, idx) => + `($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})` + ).join(","); - const result = await localConnection.query(` - INSERT INTO orders (${columnNames.join(",")}) + const values = subBatch.flatMap(c => [ + c.order_id, + c.pid, + c.costeach || 0 + ]); + + await localConnection.query(` + INSERT INTO debug_order_costs (order_id, pid, costeach) VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - SKU = VALUES(SKU), - date = VALUES(date), - price = VALUES(price), - quantity = VALUES(quantity), - discount = VALUES(discount), - tax = VALUES(tax), - tax_included = VALUES(tax_included), - shipping = VALUES(shipping), - customer = VALUES(customer), - customer_name = VALUES(customer_name), - status = VALUES(status), - canceled = VALUES(canceled), - costeach = VALUES(costeach) - `, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat()); - - const affectedRows = result[0].affectedRows; - const updates = Math.floor(affectedRows / 2); - const inserts = affectedRows - (updates * 2); - - recordsAdded += inserts; - recordsUpdated += updates; - importedCount += processedOrderItems.size; // Count unique order items processed + ON CONFLICT (order_id, pid) DO UPDATE SET + costeach = EXCLUDED.costeach + `, values); } + }; - // Update progress based on unique orders processed - cumulativeProcessedOrders += processedOrders.size; + // Process all data types in parallel for each batch + for (let i = 0; i < orderIds.length; i += METADATA_BATCH_SIZE) { + const batchIds = orderIds.slice(i, i + METADATA_BATCH_SIZE); + + await Promise.all([ + processMetadataBatch(batchIds), + processDiscountsBatch(batchIds), + processTaxesBatch(batchIds), + processCostsBatch(batchIds) + ]); + + processedCount = i + batchIds.length; outputProgress({ status: "running", operation: "Orders import", - message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`, - current: cumulativeProcessedOrders, + message: `Loading order data: ${processedCount} of ${totalUniqueOrders}`, + current: processedCount, total: totalUniqueOrders, elapsed: formatElapsedTime((Date.now() - startTime) / 1000), - remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders), - rate: calculateRate(startTime, cumulativeProcessedOrders) + remaining: estimateRemaining(startTime, processedCount, totalUniqueOrders), + rate: calculateRate(startTime, processedCount) }); } - // Now try to import any orders that were skipped due to missing products - if (skippedOrders.size > 0) { - try { - outputProgress({ - status: "running", - operation: "Orders import", - message: `Retrying import of ${skippedOrders.size} orders with previously missing products`, - }); + // Pre-check all products at once + const allOrderPids = [...new Set(orderItems.map(item => item.prod_pid))]; + console.log('Orders: Checking', allOrderPids.length, 'unique products'); + + const [existingProducts] = allOrderPids.length > 0 ? 
await localConnection.query( + "SELECT pid FROM products WHERE pid = ANY($1::bigint[])", + [allOrderPids] + ) : [[]]; + + const existingPids = new Set(existingProducts.rows.map(p => p.pid)); + + // Process in smaller batches + for (let i = 0; i < orderIds.length; i += 1000) { + const batchIds = orderIds.slice(i, i + 1000); - // Get the orders that were skipped - const [skippedProdOrders] = await localConnection.query(` - SELECT DISTINCT + // Get combined data for this batch in sub-batches + const PG_BATCH_SIZE = 100; // Process 100 records at a time + for (let j = 0; j < batchIds.length; j += PG_BATCH_SIZE) { + const subBatchIds = batchIds.slice(j, j + PG_BATCH_SIZE); + + const [orders] = await localConnection.query(` + WITH order_totals AS ( + SELECT + oi.order_id, + oi.pid, + SUM(COALESCE(od.discount, 0)) as promo_discount, + COALESCE(ot.tax, 0) as total_tax, + COALESCE(oi.price * 0.5, 0) as costeach + FROM debug_order_items oi + LEFT JOIN debug_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid + LEFT JOIN debug_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid + GROUP BY oi.order_id, oi.pid, ot.tax + ) + SELECT oi.order_id as order_number, - oi.pid, - oi.SKU, + oi.pid::bigint as pid, + oi.SKU as sku, om.date, oi.price, oi.quantity, - oi.base_discount + COALESCE(od.discount, 0) + - CASE - WHEN o.summary_discount > 0 THEN - ROUND((o.summary_discount * (oi.price * oi.quantity)) / - NULLIF(o.summary_subtotal, 0), 2) + (oi.base_discount + + COALESCE(ot.promo_discount, 0) + + CASE + WHEN om.summary_discount > 0 AND om.summary_subtotal > 0 THEN + ROUND((om.summary_discount * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 2) ELSE 0 - END as discount, - COALESCE(ot.tax, 0) as tax, - 0 as tax_included, + END)::DECIMAL(10,2) as discount, + COALESCE(ot.total_tax, 0)::DECIMAL(10,2) as tax, + false as tax_included, 0 as shipping, om.customer, om.customer_name, om.status, om.canceled, - COALESCE(tc.costeach, 0) as costeach - FROM temp_order_items oi - JOIN temp_order_meta om ON oi.order_id = om.order_id - LEFT JOIN _order o ON oi.order_id = o.order_id - LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid - LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid - LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid - WHERE oi.order_id IN (?) - `, [Array.from(skippedOrders)]); + COALESCE(ot.costeach, oi.price * 0.5)::DECIMAL(10,3) as costeach + FROM ( + SELECT DISTINCT ON (order_id, pid) + order_id, pid, SKU, price, quantity, base_discount + FROM debug_order_items + WHERE order_id = ANY($1) + ORDER BY order_id, pid + ) oi + JOIN debug_order_meta om ON oi.order_id = om.order_id + LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid + ORDER BY oi.order_id, oi.pid + `, [subBatchIds]); - // Check which products exist now - const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))]; - const [existingProducts] = skippedPids.length > 0 ? 
await localConnection.query( - "SELECT pid FROM products WHERE pid IN (?)", - [skippedPids] - ) : [[]]; - const existingPids = new Set(existingProducts.map(p => p.pid)); - - // Filter orders that can now be imported - const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid)); - const retryOrderItems = new Set(); // Track unique order items in retry - - if (validOrders.length > 0) { - const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(","); - const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat(); - - const result = await localConnection.query(` - INSERT INTO orders (${columnNames.join(", ")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE - SKU = VALUES(SKU), - date = VALUES(date), - price = VALUES(price), - quantity = VALUES(quantity), - discount = VALUES(discount), - tax = VALUES(tax), - tax_included = VALUES(tax_included), - shipping = VALUES(shipping), - customer = VALUES(customer), - customer_name = VALUES(customer_name), - status = VALUES(status), - canceled = VALUES(canceled), - costeach = VALUES(costeach) - `, values); - - const affectedRows = result[0].affectedRows; - const updates = Math.floor(affectedRows / 2); - const inserts = affectedRows - (updates * 2); - - // Track unique order items - validOrders.forEach(order => { - retryOrderItems.add(`${order.order_number}-${order.pid}`); - }); - - outputProgress({ - status: "running", - operation: "Orders import", - message: `Successfully imported ${retryOrderItems.size} previously skipped order items`, - }); - - // Update the main counters - recordsAdded += inserts; - recordsUpdated += updates; - importedCount += retryOrderItems.size; + // Filter orders and track missing products + const validOrders = []; + const processedOrderItems = new Set(); + const processedOrders = new Set(); + + for (const order of orders.rows) { + if (!existingPids.has(order.pid)) { + missingProducts.add(order.pid); + skippedOrders.add(order.order_number); + continue; + } + validOrders.push(order); + processedOrderItems.add(`${order.order_number}-${order.pid}`); + processedOrders.add(order.order_number); } - } catch (error) { - console.warn('Warning: Failed to retry skipped orders:', error.message); - console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`); + + // Process valid orders in smaller sub-batches + const FINAL_BATCH_SIZE = 50; + for (let k = 0; k < validOrders.length; k += FINAL_BATCH_SIZE) { + const subBatch = validOrders.slice(k, k + FINAL_BATCH_SIZE); + + const placeholders = subBatch.map((_, idx) => { + const base = idx * 14; // 14 columns (removed updated) + return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14})`; + }).join(','); + + const batchValues = subBatch.flatMap(o => [ + o.order_number, + o.pid, + o.sku || 'NO-SKU', + o.date, + o.price, + o.quantity, + o.discount, + o.tax, + o.tax_included, + o.shipping, + o.customer, + o.customer_name, + o.status, + o.canceled + ]); + + const [result] = await localConnection.query(` + WITH inserted_orders AS ( + INSERT INTO orders ( + order_number, pid, sku, date, price, quantity, discount, + tax, tax_included, shipping, customer, customer_name, + status, canceled + ) + VALUES ${placeholders} + ON CONFLICT (order_number, pid) DO UPDATE SET + sku = EXCLUDED.sku, + date = EXCLUDED.date, + price = 
EXCLUDED.price,
+            quantity = EXCLUDED.quantity,
+            discount = EXCLUDED.discount,
+            tax = EXCLUDED.tax,
+            tax_included = EXCLUDED.tax_included,
+            shipping = EXCLUDED.shipping,
+            customer = EXCLUDED.customer,
+            customer_name = EXCLUDED.customer_name,
+            status = EXCLUDED.status,
+            canceled = EXCLUDED.canceled
+            RETURNING xmax = 0 as inserted
+          )
+          SELECT
+            COUNT(*) FILTER (WHERE inserted) as inserted,
+            COUNT(*) FILTER (WHERE NOT inserted) as updated
+          FROM inserted_orders
+        `, batchValues);
+
+        // xmax = 0 marks rows created by this statement, splitting inserts from
+        // updates; COUNT(*) comes back from pg as a string, so coerce before adding
+        const { inserted, updated } = result.rows[0];
+        recordsAdded += parseInt(inserted, 10) || 0;
+        recordsUpdated += parseInt(updated, 10) || 0;
+        importedCount += subBatch.length;
+      }
+
+      cumulativeProcessedOrders += processedOrders.size;
+      outputProgress({
+        status: "running",
+        operation: "Orders import",
+        message: `Importing orders: ${cumulativeProcessedOrders} of ${totalUniqueOrders}`,
+        current: cumulativeProcessedOrders,
+        total: totalUniqueOrders,
+        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+        remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
+        rate: calculateRate(startTime, cumulativeProcessedOrders)
+      });
    }
  }

-    // Clean up temporary tables after ALL processing is complete
-    await localConnection.query(`
-      DROP TEMPORARY TABLE IF EXISTS temp_order_items;
-      DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
-      DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
-      DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
-      DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
-    `);
-
-    // Only update sync status if we get here (no errors thrown)
+    // Update sync status
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('orders', NOW())
-      ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
+      ON CONFLICT (table_name) DO UPDATE SET
+        last_sync_timestamp = NOW()
    `);

    return {
diff --git a/inventory-server/scripts/import/products.js b/inventory-server/scripts/import/products.js
index e92396b..8e0dc8f 100644
--- a/inventory-server/scripts/import/products.js
+++ b/inventory-server/scripts/import/products.js
@@ -1,4 +1,7 @@
 const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
+const BATCH_SIZE = 100; // Smaller batch size for better progress tracking
+const MAX_RETRIES = 3;
+const RETRY_DELAY = 5000; // 5 seconds

 // Utility functions
 const imageUrlBase = 'https://sbing.com/i/products/0000/';
@@ -15,75 +18,314 @@ const getImageUrls = (pid, iid = 1) => {
   };
 };

-async function setupAndCleanupTempTables(connection, operation = 'setup') {
-  if (operation === 'setup') {
-    await connection.query(`
-      CREATE TEMPORARY TABLE IF NOT EXISTS temp_products (
-        pid BIGINT NOT NULL,
-        title VARCHAR(255),
-        description TEXT,
-        SKU VARCHAR(50),
-        stock_quantity INT DEFAULT 0,
-        pending_qty INT DEFAULT 0,
-        preorder_count INT DEFAULT 0,
-        notions_inv_count INT DEFAULT 0,
-        price DECIMAL(10,3) NOT NULL DEFAULT 0,
-        regular_price DECIMAL(10,3) NOT NULL DEFAULT 0,
-        cost_price DECIMAL(10,3),
-        vendor VARCHAR(100),
-        vendor_reference VARCHAR(100),
-        notions_reference VARCHAR(100),
-        brand VARCHAR(100),
-        line VARCHAR(100),
-        subline VARCHAR(100),
-        artist VARCHAR(100),
-        category_ids TEXT,
-        created_at DATETIME,
-        first_received DATETIME,
-        landing_cost_price DECIMAL(10,3),
-        barcode VARCHAR(50),
-        harmonized_tariff_code VARCHAR(50),
-        updated_at DATETIME,
-        visible BOOLEAN,
-        replenishable BOOLEAN,
-        permalink VARCHAR(255),
-        moq DECIMAL(10,3),
-        rating DECIMAL(10,2),
-        reviews INT,
-        weight DECIMAL(10,3),
-        length 
DECIMAL(10,3), - width DECIMAL(10,3), - height DECIMAL(10,3), - country_of_origin VARCHAR(100), - location VARCHAR(100), - total_sold INT, - baskets INT, - notifies INT, - date_last_sold DATETIME, - needs_update BOOLEAN DEFAULT TRUE, - PRIMARY KEY (pid), - INDEX idx_needs_update (needs_update) - ) ENGINE=InnoDB; - `); - } else { - await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_products;'); +// Add helper function for retrying operations +async function withRetry(operation, errorMessage) { + let lastError; + for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) { + try { + return await operation(); + } catch (error) { + lastError = error; + console.error(`${errorMessage} (Attempt ${attempt}/${MAX_RETRIES}):`, error); + if (attempt < MAX_RETRIES) { + await new Promise(resolve => setTimeout(resolve, RETRY_DELAY)); + } + } + } + throw lastError; +} + +// Add helper function at the top of the file +function validateDate(mysqlDate) { + if (!mysqlDate || mysqlDate === '0000-00-00' || mysqlDate === '0000-00-00 00:00:00') { + return null; + } + // Check if the date is valid + const date = new Date(mysqlDate); + return isNaN(date.getTime()) ? null : mysqlDate; +} + +async function setupTemporaryTables(connection) { + // Drop the table if it exists + await connection.query('DROP TABLE IF EXISTS temp_products'); + + // Create the temporary table + await connection.query(` + CREATE TEMP TABLE temp_products ( + pid BIGINT NOT NULL, + title VARCHAR(255), + description TEXT, + sku VARCHAR(50), + stock_quantity INTEGER DEFAULT 0, + preorder_count INTEGER DEFAULT 0, + notions_inv_count INTEGER DEFAULT 0, + price DECIMAL(10,3) NOT NULL DEFAULT 0, + regular_price DECIMAL(10,3) NOT NULL DEFAULT 0, + cost_price DECIMAL(10,3), + vendor VARCHAR(100), + vendor_reference VARCHAR(100), + notions_reference VARCHAR(100), + brand VARCHAR(100), + line VARCHAR(100), + subline VARCHAR(100), + artist VARCHAR(100), + categories TEXT, + created_at TIMESTAMP, + first_received TIMESTAMP, + landing_cost_price DECIMAL(10,3), + barcode VARCHAR(50), + harmonized_tariff_code VARCHAR(50), + updated_at TIMESTAMP, + visible BOOLEAN, + managing_stock BOOLEAN DEFAULT true, + replenishable BOOLEAN, + permalink VARCHAR(255), + moq INTEGER DEFAULT 1, + uom INTEGER DEFAULT 1, + rating DECIMAL(10,2), + reviews INTEGER, + weight DECIMAL(10,3), + length DECIMAL(10,3), + width DECIMAL(10,3), + height DECIMAL(10,3), + country_of_origin VARCHAR(100), + location VARCHAR(100), + total_sold INTEGER, + baskets INTEGER, + notifies INTEGER, + date_last_sold TIMESTAMP, + image VARCHAR(255), + image_175 VARCHAR(255), + image_full VARCHAR(255), + options TEXT, + tags TEXT, + needs_update BOOLEAN DEFAULT TRUE, + PRIMARY KEY (pid) + )`); + + // Create the index + await connection.query('CREATE INDEX idx_temp_products_needs_update ON temp_products (needs_update)'); +} + +async function cleanupTemporaryTables(connection) { + await connection.query('DROP TABLE IF EXISTS temp_products'); +} + +async function importMissingProducts(prodConnection, localConnection, missingPids) { + if (!missingPids || missingPids.length === 0) { + return { + status: "complete", + recordsAdded: 0, + message: "No missing products to import" + }; + } + + try { + // Setup temporary tables + await setupTemporaryTables(localConnection); + + // Get product data from production - Keep MySQL compatible + const [prodData] = await prodConnection.query(` + SELECT + p.pid, + p.description AS title, + p.notes AS description, + p.itemnumber AS sku, + p.date_created, + p.datein AS 
first_received, + p.location, + p.upc AS barcode, + p.harmonized_tariff_code, + p.stamp AS updated_at, + CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, + CASE + WHEN p.reorder < 0 THEN 0 + WHEN ( + (COALESCE(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)) + OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(CURRENT_TIMESTAMP, INTERVAL 5 YEAR)) + OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(CURRENT_TIMESTAMP, INTERVAL 5 YEAR)) + ) THEN 0 + ELSE 1 + END AS replenishable, + COALESCE(si.available_local, 0) as stock_quantity, + 0 as pending_qty, + COALESCE(ci.onpreorder, 0) as preorder_count, + COALESCE(pnb.inventory, 0) as notions_inv_count, + COALESCE(pcp.price_each, 0) as price, + COALESCE(p.sellingprice, 0) AS regular_price, + CASE + WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0) + THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0) + ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1) + END AS cost_price, + NULL as landing_cost_price, + s.companyname AS vendor, + CASE + WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber + ELSE sid.supplier_itemnumber + END AS vendor_reference, + sid.notions_itemnumber AS notions_reference, + CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink, + pc1.name AS brand, + pc2.name AS line, + pc3.name AS subline, + pc4.name AS artist, + COALESCE(CASE + WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit + ELSE sid.supplier_qty_per_unit + END, sid.notions_qty_per_unit) AS moq, + p.rating, + p.rating_votes AS reviews, + p.weight, + p.length, + p.width, + p.height, + p.country_of_origin, + (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, + (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, + p.totalsold AS total_sold, + pls.date_sold as date_last_sold, + GROUP_CONCAT(DISTINCT CASE + WHEN pc.cat_id IS NOT NULL + AND pc.type IN (10, 20, 11, 21, 12, 13) + AND pci.cat_id NOT IN (16, 17) + THEN pci.cat_id + END) as category_ids + FROM products p + LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 + LEFT JOIN current_inventory ci ON p.pid = ci.pid + LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid + LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 + LEFT JOIN supplier_item_data sid ON p.pid = sid.pid + LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid + LEFT JOIN product_category_index pci ON p.pid = pci.pid + LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id + LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id + LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id + LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id + LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id + LEFT JOIN product_last_sold pls ON p.pid = pls.pid + WHERE p.pid IN (?) 
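+      -- NOTE: "?" receives the missingPids array (mysql2-style array expansion,
+      -- the same pattern the other prodConnection queries rely on); this statement
+      -- runs against production MySQL, so it must stay MySQL-compatible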
+      GROUP BY p.pid
+    `, [missingPids]);
+
+    if (!prodData || prodData.length === 0) {
+      return {
+        status: "complete",
+        recordsAdded: 0,
+        message: "No products found in production database"
+      };
+    }
+
+    // Process in batches
+    let recordsAdded = 0;
+    for (let i = 0; i < prodData.length; i += BATCH_SIZE) {
+      const batch = prodData.slice(i, i + BATCH_SIZE);
+
+      const placeholders = batch.map((_, idx) => {
+        const base = idx * 47; // 47 columns
+        return `(${Array.from({ length: 47 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
+      }).join(',');
+
+      const values = batch.flatMap(row => {
+        const imageUrls = getImageUrls(row.pid);
+        return [
+          row.pid,
+          row.title,
+          row.description,
+          row.sku || '', // the production query aliases p.itemnumber AS sku
+          row.stock_quantity > 5000 ? 0 : Math.max(0, row.stock_quantity),
+          row.preorder_count,
+          row.notions_inv_count,
+          row.price,
+          row.regular_price,
+          row.cost_price,
+          row.vendor,
+          row.vendor_reference,
+          row.notions_reference,
+          row.brand,
+          row.line,
+          row.subline,
+          row.artist,
+          row.category_ids,
+          validateDate(row.date_created),
+          validateDate(row.first_received),
+          row.landing_cost_price,
+          row.barcode,
+          row.harmonized_tariff_code,
+          validateDate(row.updated_at),
+          row.visible,
+          true,
+          row.replenishable,
+          row.permalink,
+          Math.max(1, Math.round(row.moq || 1)),
+          1,
+          row.rating,
+          row.reviews,
+          row.weight,
+          row.length,
+          row.width,
+          row.height,
+          row.country_of_origin,
+          row.location,
+          row.total_sold,
+          row.baskets,
+          row.notifies,
+          validateDate(row.date_last_sold),
+          imageUrls.image,
+          imageUrls.image_175,
+          imageUrls.image_full,
+          null,
+          null
+        ];
+      });
+
+      const [result] = await localConnection.query(`
+        WITH inserted_products AS (
+          INSERT INTO products (
+            pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
+            price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
+            brand, line, subline, artist, categories, created_at, first_received,
+            landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
+            managing_stock, replenishable, permalink, moq, uom, rating, reviews,
+            weight, length, width, height, country_of_origin, location, total_sold,
+            baskets, notifies, date_last_sold, image, image_175, image_full, options, tags
+          )
+          VALUES ${placeholders}
+          ON CONFLICT (pid) DO NOTHING
+          RETURNING pid
+        )
+        SELECT COUNT(*) as inserted FROM inserted_products
+      `, values);
+
+      recordsAdded += parseInt(result.rows[0].inserted, 10) || 0;
+    }
+
+    return {
+      status: "complete",
+      recordsAdded,
+      message: `Successfully imported ${recordsAdded} missing products`
+    };
+  } catch (error) {
+    console.error('Error importing missing products:', error);
+    throw error;
+  }
+}

-async function materializeCalculations(prodConnection, localConnection, incrementalUpdate = true, lastSyncTime = '1970-01-01') {
+async function materializeCalculations(prodConnection, localConnection, incrementalUpdate = true, lastSyncTime = '1970-01-01', startTime = Date.now()) {
   outputProgress({
     status: "running",
     operation: "Products import",
     message: "Fetching product data from production"
   });

-  // Get all product data in a single optimized query
+  // Get all product data in a single optimized query - Keep MySQL compatible
   const [prodData] = await prodConnection.query(`
     SELECT
       p.pid,
       p.description AS title,
       p.notes AS description,
-      p.itemnumber AS SKU,
+      p.itemnumber AS sku,
       p.date_created,
       p.datein AS first_received,
       p.location,
@@ -94,42 +336,14 @@ async function materializeCalculations(prodConnection, localConnection, incremen
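+      -- replenishable: reorder < 0 marks a discontinued item; anything with no
+      -- sale, receipt, or refill activity in the last five years is likewise
+      -- treated as not worth reordering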
       CASE
         WHEN p.reorder < 0 THEN 0
         WHEN (
-          (IFNULL(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURDATE(), INTERVAL 5 YEAR))
-          OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
-          OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
+          (COALESCE(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR))
+          OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(CURRENT_TIMESTAMP, INTERVAL 5 YEAR))
+          OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(CURRENT_TIMESTAMP, INTERVAL 5 YEAR))
         ) THEN 0
         ELSE 1
       END AS replenishable,
-      COALESCE(si.available_local, 0) - COALESCE(
-        (SELECT SUM(oi.qty_ordered - oi.qty_placed)
-         FROM order_items oi
-         JOIN _order o ON oi.order_id = o.order_id
-         WHERE oi.prod_pid = p.pid
-           AND o.date_placed != '0000-00-00 00:00:00'
-           AND o.date_shipped = '0000-00-00 00:00:00'
-           AND oi.pick_finished = 0
-           AND oi.qty_back = 0
-           AND o.order_status != 15
-           AND o.order_status < 90
-           AND oi.qty_ordered >= oi.qty_placed
-           AND oi.qty_ordered > 0
-        ), 0
-      ) as stock_quantity,
-      COALESCE(
-        (SELECT SUM(oi.qty_ordered - oi.qty_placed)
-         FROM order_items oi
-         JOIN _order o ON oi.order_id = o.order_id
-         WHERE oi.prod_pid = p.pid
-           AND o.date_placed != '0000-00-00 00:00:00'
-           AND o.date_shipped = '0000-00-00 00:00:00'
-           AND oi.pick_finished = 0
-           AND oi.qty_back = 0
-           AND o.order_status != 15
-           AND o.order_status < 90
-           AND oi.qty_ordered >= oi.qty_placed
-           AND oi.qty_ordered > 0
-        ), 0
-      ) as pending_qty,
+      COALESCE(si.available_local, 0) as stock_quantity,
+      0 as pending_qty,
       COALESCE(ci.onpreorder, 0) as preorder_count,
       COALESCE(pnb.inventory, 0) as notions_inv_count,
       COALESCE(pcp.price_each, 0) as price,
@@ -203,121 +417,138 @@ async function materializeCalculations(prodConnection, localConnection, incremen
   });

   // Insert all product data into temp table in batches
-  for (let i = 0; i < prodData.length; i += 1000) {
-    const batch = prodData.slice(i, i + 1000);
-    const values = batch.map(row => [
-      row.pid,
-      row.title,
-      row.description,
-      row.SKU,
-      // Set stock quantity to 0 if it's over 5000
-      row.stock_quantity > 5000 ? 0 : Math.max(0, row.stock_quantity),
-      row.pending_qty,
-      row.preorder_count,
-      row.notions_inv_count,
-      row.price,
-      row.regular_price,
-      row.cost_price,
-      row.vendor,
-      row.vendor_reference,
-      row.notions_reference,
-      row.brand,
-      row.line,
-      row.subline,
-      row.artist,
-      row.category_ids,
-      row.date_created, // map to created_at
-      row.first_received,
-      row.landing_cost_price,
-      row.barcode,
-      row.harmonized_tariff_code,
-      row.updated_at,
-      row.visible,
-      row.replenishable,
-      row.permalink,
-      row.moq,
-      row.rating ? Number(row.rating).toFixed(2) : null,
-      row.reviews,
-      row.weight,
-      row.length,
-      row.width,
-      row.height,
-      row.country_of_origin,
-      row.location,
-      row.total_sold,
-      row.baskets,
-      row.notifies,
-      row.date_last_sold,
-      true // Mark as needing update
-    ]);
+  for (let i = 0; i < prodData.length; i += BATCH_SIZE) {
+    const batch = prodData.slice(i, Math.min(i + BATCH_SIZE, prodData.length));
+
+    await withRetry(async () => {
+      const placeholders = batch.map((_, idx) => {
+        const base = idx * 47; // 47 columns
+        return `(${Array.from({ length: 47 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
+      }).join(',');
+
+      const values = batch.flatMap(row => {
+        const imageUrls = getImageUrls(row.pid);
+        return [
+          row.pid,
+          row.title,
+          row.description,
+          row.sku || '', // aliased from p.itemnumber in the SELECT above
+          row.stock_quantity > 5000 ? 
0 : Math.max(0, row.stock_quantity), + row.preorder_count, + row.notions_inv_count, + row.price, + row.regular_price, + row.cost_price, + row.vendor, + row.vendor_reference, + row.notions_reference, + row.brand, + row.line, + row.subline, + row.artist, + row.category_ids, + validateDate(row.date_created), + validateDate(row.first_received), + row.landing_cost_price, + row.barcode, + row.harmonized_tariff_code, + validateDate(row.updated_at), + row.visible, + true, + row.replenishable, + row.permalink, + Math.max(1, Math.round(row.moq || 1)), + 1, + row.rating, + row.reviews, + row.weight, + row.length, + row.width, + row.height, + row.country_of_origin, + row.location, + row.total_sold, + row.baskets, + row.notifies, + validateDate(row.date_last_sold), + imageUrls.image, + imageUrls.image_175, + imageUrls.image_full, + null, + null + ]; + }); - if (values.length > 0) { await localConnection.query(` INSERT INTO temp_products ( - pid, title, description, SKU, - stock_quantity, pending_qty, preorder_count, notions_inv_count, - price, regular_price, cost_price, - vendor, vendor_reference, notions_reference, - brand, line, subline, artist, - category_ids, created_at, first_received, - landing_cost_price, barcode, harmonized_tariff_code, - updated_at, visible, replenishable, permalink, - moq, rating, reviews, weight, length, width, - height, country_of_origin, location, total_sold, - baskets, notifies, date_last_sold, needs_update - ) - VALUES ? - ON DUPLICATE KEY UPDATE - title = VALUES(title), - description = VALUES(description), - SKU = VALUES(SKU), - stock_quantity = VALUES(stock_quantity), - pending_qty = VALUES(pending_qty), - preorder_count = VALUES(preorder_count), - notions_inv_count = VALUES(notions_inv_count), - price = VALUES(price), - regular_price = VALUES(regular_price), - cost_price = VALUES(cost_price), - vendor = VALUES(vendor), - vendor_reference = VALUES(vendor_reference), - notions_reference = VALUES(notions_reference), - brand = VALUES(brand), - line = VALUES(line), - subline = VALUES(subline), - artist = VALUES(artist), - category_ids = VALUES(category_ids), - created_at = VALUES(created_at), - first_received = VALUES(first_received), - landing_cost_price = VALUES(landing_cost_price), - barcode = VALUES(barcode), - harmonized_tariff_code = VALUES(harmonized_tariff_code), - updated_at = VALUES(updated_at), - visible = VALUES(visible), - replenishable = VALUES(replenishable), - permalink = VALUES(permalink), - moq = VALUES(moq), - rating = VALUES(rating), - reviews = VALUES(reviews), - weight = VALUES(weight), - length = VALUES(length), - width = VALUES(width), - height = VALUES(height), - country_of_origin = VALUES(country_of_origin), - location = VALUES(location), - total_sold = VALUES(total_sold), - baskets = VALUES(baskets), - notifies = VALUES(notifies), - date_last_sold = VALUES(date_last_sold), - needs_update = TRUE - `, [values]); - } + pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count, + price, regular_price, cost_price, vendor, vendor_reference, notions_reference, + brand, line, subline, artist, categories, created_at, first_received, + landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible, + managing_stock, replenishable, permalink, moq, uom, rating, reviews, + weight, length, width, height, country_of_origin, location, total_sold, + baskets, notifies, date_last_sold, image, image_175, image_full, options, tags + ) VALUES ${placeholders} + ON CONFLICT (pid) DO UPDATE SET + title = EXCLUDED.title, + description = 
EXCLUDED.description, + sku = EXCLUDED.sku, + stock_quantity = EXCLUDED.stock_quantity, + preorder_count = EXCLUDED.preorder_count, + notions_inv_count = EXCLUDED.notions_inv_count, + price = EXCLUDED.price, + regular_price = EXCLUDED.regular_price, + cost_price = EXCLUDED.cost_price, + vendor = EXCLUDED.vendor, + vendor_reference = EXCLUDED.vendor_reference, + notions_reference = EXCLUDED.notions_reference, + brand = EXCLUDED.brand, + line = EXCLUDED.line, + subline = EXCLUDED.subline, + artist = EXCLUDED.artist, + created_at = EXCLUDED.created_at, + first_received = EXCLUDED.first_received, + landing_cost_price = EXCLUDED.landing_cost_price, + barcode = EXCLUDED.barcode, + harmonized_tariff_code = EXCLUDED.harmonized_tariff_code, + updated_at = EXCLUDED.updated_at, + visible = EXCLUDED.visible, + managing_stock = EXCLUDED.managing_stock, + replenishable = EXCLUDED.replenishable, + permalink = EXCLUDED.permalink, + moq = EXCLUDED.moq, + uom = EXCLUDED.uom, + rating = EXCLUDED.rating, + reviews = EXCLUDED.reviews, + weight = EXCLUDED.weight, + length = EXCLUDED.length, + width = EXCLUDED.width, + height = EXCLUDED.height, + country_of_origin = EXCLUDED.country_of_origin, + location = EXCLUDED.location, + total_sold = EXCLUDED.total_sold, + baskets = EXCLUDED.baskets, + notifies = EXCLUDED.notifies, + date_last_sold = EXCLUDED.date_last_sold, + image = EXCLUDED.image, + image_175 = EXCLUDED.image_175, + image_full = EXCLUDED.image_full, + options = EXCLUDED.options, + tags = EXCLUDED.tags + RETURNING + xmax = 0 as inserted + `, values); + }, `Error inserting batch ${i} to ${i + batch.length}`); outputProgress({ status: "running", operation: "Products import", - message: `Processed ${Math.min(i + 1000, prodData.length)} of ${prodData.length} product records`, + message: `Imported ${i + batch.length} of ${prodData.length} products`, current: i + batch.length, - total: prodData.length + total: prodData.length, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), + remaining: estimateRemaining(startTime, i + batch.length, prodData.length), + rate: calculateRate(startTime, i + batch.length) }); } @@ -330,473 +561,283 @@ async function materializeCalculations(prodConnection, localConnection, incremen async function importProducts(prodConnection, localConnection, incrementalUpdate = true) { const startTime = Date.now(); - let recordsAdded = 0; - let recordsUpdated = 0; + let lastSyncTime = '1970-01-01'; try { - // Get column names first - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'products' - AND COLUMN_NAME != 'updated' -- Exclude the updated column - ORDER BY ORDINAL_POSITION - `); - const columnNames = columns.map(col => col.COLUMN_NAME); - - // Get last sync info - const [syncInfo] = await localConnection.query( - "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'products'" - ); - const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; - - console.log('Products: Using last sync time:', lastSyncTime); - - // Setup temporary tables - await setupAndCleanupTempTables(localConnection, 'setup'); - - // Materialize calculations - this will populate temp_products - await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime); - - // Get actual count from temp table - only count products that need updates - const [[{ actualTotal }]] = await localConnection.query(` - SELECT COUNT(DISTINCT pid) as actualTotal - FROM temp_products - WHERE 
needs_update = 1 - `); - - console.log('Products: Found changes:', actualTotal); - - // Process in batches - const BATCH_SIZE = 5000; - let processed = 0; - - while (processed < actualTotal) { - const [batch] = await localConnection.query(` - SELECT * FROM temp_products - WHERE needs_update = 1 - LIMIT ? OFFSET ? - `, [BATCH_SIZE, processed]); - - if (!batch || batch.length === 0) break; - - // Add image URLs - batch.forEach(row => { - const urls = getImageUrls(row.pid); - row.image = urls.image; - row.image_175 = urls.image_175; - row.image_full = urls.image_full; - }); - - if (batch.length > 0) { - // Get existing products in one query - const [existingProducts] = await localConnection.query( - `SELECT ${columnNames.join(',')} FROM products WHERE pid IN (?)`, - [batch.map(p => p.pid)] - ); - const existingPidsMap = new Map(existingProducts.map(p => [p.pid, p])); - - // Split into inserts and updates - const insertsAndUpdates = batch.reduce((acc, product) => { - if (existingPidsMap.has(product.pid)) { - const existing = existingPidsMap.get(product.pid); - // Check if any values are different - const hasChanges = columnNames.some(col => { - const newVal = product[col] ?? null; - const oldVal = existing[col] ?? null; - if (col === "managing_stock") return false; // Skip this as it's always 1 - if (typeof newVal === 'number' && typeof oldVal === 'number') { - return Math.abs(newVal - oldVal) > 0.00001; - } - return newVal !== oldVal; - }); - - if (hasChanges) { - acc.updates.push(product); - } - } else { - acc.inserts.push(product); - } - return acc; - }, { inserts: [], updates: [] }); - - // Process inserts - if (insertsAndUpdates.inserts.length > 0) { - const insertValues = insertsAndUpdates.inserts.map(product => - columnNames.map(col => { - const val = product[col] ?? null; - if (col === "managing_stock") return 1; - return val; - }) - ); - - const insertPlaceholders = insertsAndUpdates.inserts - .map(() => `(${Array(columnNames.length).fill('?').join(',')})`) - .join(','); - - const insertResult = await localConnection.query(` - INSERT INTO products (${columnNames.join(',')}) - VALUES ${insertPlaceholders} - `, insertValues.flat()); - - recordsAdded += insertResult[0].affectedRows; - } - - // Process updates - if (insertsAndUpdates.updates.length > 0) { - const updateValues = insertsAndUpdates.updates.map(product => - columnNames.map(col => { - const val = product[col] ?? 
null; - if (col === "managing_stock") return 1; - return val; - }) - ); - - const updatePlaceholders = insertsAndUpdates.updates - .map(() => `(${Array(columnNames.length).fill('?').join(',')})`) - .join(','); - - const updateResult = await localConnection.query(` - INSERT INTO products (${columnNames.join(',')}) - VALUES ${updatePlaceholders} - ON DUPLICATE KEY UPDATE - ${columnNames - .filter(col => col !== 'pid') - .map(col => `${col} = VALUES(${col})`) - .join(',')}; - `, updateValues.flat()); - - recordsUpdated += insertsAndUpdates.updates.length; - } - - // Process category relationships - if (batch.some(p => p.category_ids)) { - // First get all valid categories - const allCategoryIds = [...new Set( - batch - .filter(p => p.category_ids) - .flatMap(product => - product.category_ids - .split(',') - .map(id => id.trim()) - .filter(id => id) - .map(Number) - .filter(id => !isNaN(id)) - ) - )]; - - // Verify categories exist and get their hierarchy - const [categories] = await localConnection.query(` - WITH RECURSIVE category_hierarchy AS ( - SELECT - cat_id, - parent_id, - type, - 1 as level, - CAST(cat_id AS CHAR(200)) as path - FROM categories - WHERE cat_id IN (?) - UNION ALL - SELECT - c.cat_id, - c.parent_id, - c.type, - ch.level + 1, - CONCAT(ch.path, ',', c.cat_id) - FROM categories c - JOIN category_hierarchy ch ON c.parent_id = ch.cat_id - WHERE ch.level < 10 -- Prevent infinite recursion - ) - SELECT - h.cat_id, - h.parent_id, - h.type, - h.path, - h.level - FROM ( - SELECT DISTINCT cat_id, parent_id, type, path, level - FROM category_hierarchy - WHERE cat_id IN (?) - ) h - ORDER BY h.level DESC - `, [allCategoryIds, allCategoryIds]); - - const validCategories = new Map(categories.map(c => [c.cat_id, c])); - const validCategoryIds = new Set(categories.map(c => c.cat_id)); - - // Build category relationships ensuring proper hierarchy - const categoryRelationships = []; - batch - .filter(p => p.category_ids) - .forEach(product => { - const productCategories = product.category_ids - .split(',') - .map(id => id.trim()) - .filter(id => id) - .map(Number) - .filter(id => !isNaN(id)) - .filter(id => validCategoryIds.has(id)) - .map(id => validCategories.get(id)) - .sort((a, b) => a.type - b.type); // Sort by type to ensure proper hierarchy - - // Only add relationships that maintain proper hierarchy - productCategories.forEach(category => { - if (category.path.split(',').every(parentId => - validCategoryIds.has(Number(parentId)) - )) { - categoryRelationships.push([category.cat_id, product.pid]); - } - }); - }); - - if (categoryRelationships.length > 0) { - // First remove any existing relationships that will be replaced - await localConnection.query(` - DELETE FROM product_categories - WHERE pid IN (?) AND cat_id IN (?) 
- `, [ - [...new Set(categoryRelationships.map(([_, pid]) => pid))], - [...new Set(categoryRelationships.map(([catId, _]) => catId))] - ]); - - // Then insert the new relationships - const placeholders = categoryRelationships - .map(() => "(?, ?)") - .join(","); - - await localConnection.query(` - INSERT INTO product_categories (cat_id, pid) - VALUES ${placeholders} - `, categoryRelationships.flat()); - } - } - } - - processed += batch.length; - - outputProgress({ - status: "running", - operation: "Products import", - message: `Processed ${processed} of ${actualTotal} products`, - current: processed, - total: actualTotal, - elapsed: formatElapsedTime((Date.now() - startTime) / 1000), - remaining: estimateRemaining(startTime, processed, actualTotal), - rate: calculateRate(startTime, processed) - }); - } - - // Drop temporary tables - await setupAndCleanupTempTables(localConnection, 'cleanup'); - - // Only update sync status if we get here (no errors thrown) - await localConnection.query(` - INSERT INTO sync_status (table_name, last_sync_timestamp) - VALUES ('products', NOW()) - ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW() - `); - - return { - status: "complete", - totalImported: actualTotal, - recordsAdded: recordsAdded || 0, - recordsUpdated: recordsUpdated || 0, - incrementalUpdate, - lastSyncTime - }; - } catch (error) { - throw error; - } -} - -async function importMissingProducts(prodConnection, localConnection, missingPids) { - try { - // Get column names first - const [columns] = await localConnection.query(` - SELECT COLUMN_NAME - FROM INFORMATION_SCHEMA.COLUMNS - WHERE TABLE_NAME = 'products' - AND COLUMN_NAME != 'updated' -- Exclude the updated column - ORDER BY ORDINAL_POSITION - `); - const columnNames = columns.map((col) => col.COLUMN_NAME); - - // Get the missing products with all their data in one optimized query - const [products] = await prodConnection.query(` - SELECT - p.pid, - p.description AS title, - p.notes AS description, - p.itemnumber AS SKU, - p.date_created, - p.datein AS first_received, - p.location, - p.upc AS barcode, - p.harmonized_tariff_code, - p.stamp AS updated_at, - CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible, - CASE - WHEN p.reorder < 0 THEN 0 - WHEN ( - (IFNULL(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURDATE(), INTERVAL 5 YEAR)) - OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) - OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR)) - ) THEN 0 - ELSE 1 - END AS replenishable, - COALESCE(si.available_local, 0) as stock_quantity, - COALESCE( - (SELECT SUM(oi.qty_ordered - oi.qty_placed) - FROM order_items oi - JOIN _order o ON oi.order_id = o.order_id - WHERE oi.prod_pid = p.pid - AND o.date_placed != '0000-00-00 00:00:00' - AND o.date_shipped = '0000-00-00 00:00:00' - AND oi.pick_finished = 0 - AND oi.qty_back = 0 - AND o.order_status != 15 - AND o.order_status < 90 - AND oi.qty_ordered >= oi.qty_placed - AND oi.qty_ordered > 0 - ), 0 - ) as pending_qty, - COALESCE(ci.onpreorder, 0) as preorder_count, - COALESCE(pnb.inventory, 0) as notions_inv_count, - COALESCE(pcp.price_each, 0) as price, - COALESCE(p.sellingprice, 0) AS regular_price, - CASE - WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0) - THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0) - ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1) - END 
AS cost_price, - NULL AS landing_cost_price, - p.rating, - p.rating_votes AS reviews, - p.weight, - p.length, - p.width, - p.height, - (SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets, - (SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies, - p.totalsold AS total_sold, - p.country_of_origin, - pls.date_sold as date_last_sold, - GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids - FROM products p - LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0 - LEFT JOIN supplier_item_data sid ON p.pid = sid.pid - LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid - LEFT JOIN product_category_index pci ON p.pid = pci.pid - LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id - AND pc.type IN (10, 20, 11, 21, 12, 13) - AND pci.cat_id NOT IN (16, 17) - LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id - LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id - LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id - LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id - LEFT JOIN product_last_sold pls ON p.pid = pls.pid - LEFT JOIN current_inventory ci ON p.pid = ci.pid - LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1 - LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid - WHERE p.pid IN (?) - GROUP BY p.pid - `, [missingPids]); - - // Add image URLs - products.forEach(product => { - const urls = getImageUrls(product.pid); - product.image = urls.image; - product.image_175 = urls.image_175; - product.image_full = urls.image_full; - }); - - let recordsAdded = 0; - let recordsUpdated = 0; - - if (products.length > 0) { - // Map values in the same order as columns - const productValues = products.flatMap(product => - columnNames.map(col => { - const val = product[col] ?? 
null; - if (col === "managing_stock") return 1; - if (typeof val === "number") return val || 0; - return val; - }) + // Get last sync time if doing incremental update + if (incrementalUpdate) { + const [syncResult] = await localConnection.query( + "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'products'" ); - - // Generate placeholders for all products - const placeholders = products - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - - // Build and execute the query - const query = ` - INSERT INTO products (${columnNames.join(",")}) - VALUES ${placeholders} - ON DUPLICATE KEY UPDATE ${columnNames - .filter((col) => col !== "pid") - .map((col) => `${col} = VALUES(${col})`) - .join(",")}; - `; - - const result = await localConnection.query(query, productValues); - recordsAdded = result.affectedRows - result.changedRows; - recordsUpdated = result.changedRows; - - // Handle category relationships if any - const categoryRelationships = []; - products.forEach(product => { - if (product.category_ids) { - const catIds = product.category_ids - .split(",") - .map(id => id.trim()) - .filter(id => id) - .map(Number); - catIds.forEach(catId => { - if (catId) categoryRelationships.push([catId, product.pid]); - }); - } - }); - - if (categoryRelationships.length > 0) { - // Verify categories exist before inserting relationships - const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))]; - const [existingCats] = await localConnection.query( - "SELECT cat_id FROM categories WHERE cat_id IN (?)", - [uniqueCatIds] - ); - const existingCatIds = new Set(existingCats.map(c => c.cat_id)); - - // Filter relationships to only include existing categories - const validRelationships = categoryRelationships.filter(([catId]) => - existingCatIds.has(catId) - ); - - if (validRelationships.length > 0) { - const catPlaceholders = validRelationships - .map(() => "(?, ?)") - .join(","); - await localConnection.query( - `INSERT IGNORE INTO product_categories (cat_id, pid) - VALUES ${catPlaceholders}`, - validRelationships.flat() - ); - } + if (syncResult.rows.length > 0) { + lastSyncTime = syncResult.rows[0].last_sync_timestamp; } } - return { - status: "complete", - totalImported: products.length, - recordsAdded, - recordsUpdated - }; + // Start a transaction to ensure temporary tables persist + await localConnection.beginTransaction(); + + try { + // Setup temporary tables + await setupTemporaryTables(localConnection); + + // Materialize calculations into temp table + await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime, startTime); + + // Get the list of products that need updating + const [products] = await localConnection.query(` + SELECT + t.pid, + t.title, + t.description, + t.sku, + t.stock_quantity, + t.preorder_count, + t.notions_inv_count, + t.price, + t.regular_price, + t.cost_price, + t.vendor, + t.vendor_reference, + t.notions_reference, + t.brand, + t.line, + t.subline, + t.artist, + t.categories, + t.created_at, + t.first_received, + t.landing_cost_price, + t.barcode, + t.harmonized_tariff_code, + t.updated_at, + t.visible, + t.managing_stock, + t.replenishable, + t.permalink, + t.moq, + t.rating, + t.reviews, + t.weight, + t.length, + t.width, + t.height, + t.country_of_origin, + t.location, + t.total_sold, + t.baskets, + t.notifies, + t.date_last_sold, + t.image, + t.image_175, + t.image_full, + t.options, + t.tags + FROM temp_products t + WHERE t.needs_update = true + `); + + // Process 
products in batches + let recordsAdded = 0; + let recordsUpdated = 0; + + for (let i = 0; i < products.rows.length; i += BATCH_SIZE) { + const batch = products.rows.slice(i, i + BATCH_SIZE); + + const placeholders = batch.map((_, idx) => { + const base = idx * 47; // 47 columns + return `(${Array.from({ length: 47 }, (_, i) => `$${base + i + 1}`).join(', ')})`; + }).join(','); + + const values = batch.flatMap(row => { + const imageUrls = getImageUrls(row.pid); + return [ + row.pid, + row.title, + row.description, + row.sku || '', + row.stock_quantity > 5000 ? 0 : Math.max(0, row.stock_quantity), + row.preorder_count, + row.notions_inv_count, + row.price, + row.regular_price, + row.cost_price, + row.vendor, + row.vendor_reference, + row.notions_reference, + row.brand, + row.line, + row.subline, + row.artist, + row.categories, + validateDate(row.created_at), + validateDate(row.first_received), + row.landing_cost_price, + row.barcode, + row.harmonized_tariff_code, + validateDate(row.updated_at), + row.visible, + row.managing_stock, + row.replenishable, + row.permalink, + row.moq, + 1, + row.rating, + row.reviews, + row.weight, + row.length, + row.width, + row.height, + row.country_of_origin, + row.location, + row.total_sold, + row.baskets, + row.notifies, + validateDate(row.date_last_sold), + imageUrls.image, + imageUrls.image_175, + imageUrls.image_full, + row.options, + row.tags + ]; + }); + + const [result] = await localConnection.query(` + WITH upserted AS ( + INSERT INTO products ( + pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count, + price, regular_price, cost_price, vendor, vendor_reference, notions_reference, + brand, line, subline, artist, categories, created_at, first_received, + landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible, + managing_stock, replenishable, permalink, moq, uom, rating, reviews, + weight, length, width, height, country_of_origin, location, total_sold, + baskets, notifies, date_last_sold, image, image_175, image_full, options, tags + ) + VALUES ${placeholders} + ON CONFLICT (pid) DO UPDATE SET + title = EXCLUDED.title, + description = EXCLUDED.description, + sku = EXCLUDED.sku, + stock_quantity = EXCLUDED.stock_quantity, + preorder_count = EXCLUDED.preorder_count, + notions_inv_count = EXCLUDED.notions_inv_count, + price = EXCLUDED.price, + regular_price = EXCLUDED.regular_price, + cost_price = EXCLUDED.cost_price, + vendor = EXCLUDED.vendor, + vendor_reference = EXCLUDED.vendor_reference, + notions_reference = EXCLUDED.notions_reference, + brand = EXCLUDED.brand, + line = EXCLUDED.line, + subline = EXCLUDED.subline, + artist = EXCLUDED.artist, + created_at = EXCLUDED.created_at, + first_received = EXCLUDED.first_received, + landing_cost_price = EXCLUDED.landing_cost_price, + barcode = EXCLUDED.barcode, + harmonized_tariff_code = EXCLUDED.harmonized_tariff_code, + updated_at = EXCLUDED.updated_at, + visible = EXCLUDED.visible, + managing_stock = EXCLUDED.managing_stock, + replenishable = EXCLUDED.replenishable, + permalink = EXCLUDED.permalink, + moq = EXCLUDED.moq, + uom = EXCLUDED.uom, + rating = EXCLUDED.rating, + reviews = EXCLUDED.reviews, + weight = EXCLUDED.weight, + length = EXCLUDED.length, + width = EXCLUDED.width, + height = EXCLUDED.height, + country_of_origin = EXCLUDED.country_of_origin, + location = EXCLUDED.location, + total_sold = EXCLUDED.total_sold, + baskets = EXCLUDED.baskets, + notifies = EXCLUDED.notifies, + date_last_sold = EXCLUDED.date_last_sold, + image = EXCLUDED.image, + image_175 
= EXCLUDED.image_175, + image_full = EXCLUDED.image_full, + options = EXCLUDED.options, + tags = EXCLUDED.tags + RETURNING + xmax = 0 as inserted + ) + SELECT + COUNT(*) FILTER (WHERE inserted) as inserted, + COUNT(*) FILTER (WHERE NOT inserted) as updated + FROM upserted + `, values); + + recordsAdded += parseInt(result.rows[0].inserted, 10) || 0; + recordsUpdated += parseInt(result.rows[0].updated, 10) || 0; + + // Process category relationships for each product in the batch + for (const row of batch) { + if (row.categories) { + const categoryIds = row.categories.split(',').filter(id => id && id.trim()); + if (categoryIds.length > 0) { + const catPlaceholders = categoryIds.map((_, idx) => + `($${idx * 2 + 1}, $${idx * 2 + 2})` + ).join(','); + const catValues = categoryIds.flatMap(catId => [row.pid, parseInt(catId.trim(), 10)]); + + // First delete existing relationships for this product + await localConnection.query( + 'DELETE FROM product_categories WHERE pid = $1', + [row.pid] + ); + + // Then insert the new relationships + await localConnection.query(` + INSERT INTO product_categories (pid, cat_id) + VALUES ${catPlaceholders} + ON CONFLICT (pid, cat_id) DO NOTHING + `, catValues); + } + } + } + + outputProgress({ + status: "running", + operation: "Products import", + message: `Processing products: ${i + batch.length} of ${products.rows.length}`, + current: i + batch.length, + total: products.rows.length, + elapsed: formatElapsedTime((Date.now() - startTime) / 1000), + remaining: estimateRemaining(startTime, i + batch.length, products.rows.length), + rate: calculateRate(startTime, i + batch.length) + }); + } + + // Cleanup temporary tables + await cleanupTemporaryTables(localConnection); + + // Commit the transaction + await localConnection.commit(); + + return { + status: 'complete', + recordsAdded, + recordsUpdated, + totalRecords: products.rows.length, + duration: formatElapsedTime(Date.now() - startTime) + }; + } catch (error) { + // Rollback on error + await localConnection.rollback(); + throw error; + } } catch (error) { + console.error('Error in importProducts:', error); throw error; } } module.exports = { importProducts, - importMissingProducts + importMissingProducts, + setupTemporaryTables, + cleanupTemporaryTables, + materializeCalculations }; \ No newline at end of file diff --git a/inventory-server/scripts/import/purchase-orders.js b/inventory-server/scripts/import/purchase-orders.js index b2fbaeb..e450118 100644 --- a/inventory-server/scripts/import/purchase-orders.js +++ b/inventory-server/scripts/import/purchase-orders.js @@ -10,22 +10,42 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const [syncInfo] = await localConnection.query( "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'" ); - const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01'; + const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01'; console.log('Purchase Orders: Using last sync time:', lastSyncTime); - // Insert temporary table creation query for purchase orders + // Create temporary tables with PostgreSQL syntax await localConnection.query(` - CREATE TABLE IF NOT EXISTS temp_purchase_orders ( - po_id INT UNSIGNED NOT NULL, - pid INT UNSIGNED NOT NULL, + DROP TABLE IF EXISTS temp_purchase_orders; + DROP TABLE IF EXISTS temp_po_receivings; + + CREATE TEMP TABLE temp_purchase_orders ( + po_id INTEGER NOT NULL, + pid INTEGER NOT NULL, + sku VARCHAR(50), + name VARCHAR(255), vendor VARCHAR(255), - 
date DATE, - expected_date DATE, - status INT, + date TIMESTAMP WITH TIME ZONE, + expected_date TIMESTAMP WITH TIME ZONE, + status INTEGER, notes TEXT, + ordered INTEGER, + cost_price DECIMAL(10,3), PRIMARY KEY (po_id, pid) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + ); + + CREATE TEMP TABLE temp_po_receivings ( + po_id INTEGER, + pid INTEGER NOT NULL, + receiving_id INTEGER NOT NULL, + qty_each INTEGER, + cost_each DECIMAL(10,3), + received_date TIMESTAMP WITH TIME ZONE, + received_by INTEGER, + received_by_name VARCHAR(255), + is_alt_po INTEGER, + PRIMARY KEY (receiving_id, pid) + ); `); outputProgress({ @@ -33,8 +53,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental status: "running", }); - // Get column names first - const [columns] = await localConnection.query(` + // Get column names - Keep MySQL compatible for production + const [columns] = await prodConnection.query(` SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'purchase_orders' @@ -60,7 +80,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []; - // First get all relevant PO IDs with basic info + // First get all relevant PO IDs with basic info - Keep MySQL compatible for production const [[{ total }]] = await prodConnection.query(` SELECT COUNT(*) as total FROM ( @@ -99,6 +119,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental console.log('Purchase Orders: Found changes:', total); + // Get PO list - Keep MySQL compatible for production const [poList] = await prodConnection.query(` SELECT DISTINCT COALESCE(p.po_id, r.receiving_id) as po_id, @@ -109,12 +130,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental ) as vendor, CASE WHEN p.po_id IS NOT NULL THEN - DATE(COALESCE( + COALESCE( NULLIF(p.date_ordered, '0000-00-00 00:00:00'), p.date_created - )) + ) WHEN r.receiving_id IS NOT NULL THEN - DATE(r.date_created) + r.date_created END as date, CASE WHEN p.date_estin = '0000-00-00' THEN NULL @@ -185,14 +206,14 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental const batch = poList.slice(i, Math.min(i + BATCH_SIZE, poList.length)); const poIds = batch.map(po => po.po_id); - // Get all products for these POs in one query + // Get all products for these POs in one query - Keep MySQL compatible for production const [poProducts] = await prodConnection.query(` SELECT pop.po_id, pop.pid, pr.itemnumber as sku, pr.description as name, - pop.cost_each, + pop.cost_each as cost_price, pop.qty_each as ordered FROM po_products pop USE INDEX (PRIMARY) @@ -232,317 +253,397 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental ORDER BY r.po_id, rp.pid, rp.received_date `, [batchPoIds, productPids]); - // Create maps for this sub-batch - const poProductMap = new Map(); - productBatch.forEach(product => { - const key = `${product.po_id}-${product.pid}`; - poProductMap.set(key, product); - }); - - const receivingMap = new Map(); - const altReceivingMap = new Map(); - const noPOReceivingMap = new Map(); - - receivings.forEach(receiving => { - const key = `${receiving.po_id}-${receiving.pid}`; - if (receiving.is_alt_po === 2) { - // No PO - if (!noPOReceivingMap.has(receiving.pid)) { - noPOReceivingMap.set(receiving.pid, []); - } - noPOReceivingMap.get(receiving.pid).push(receiving); - } else if (receiving.is_alt_po 
=== 1) { - // Different PO - if (!altReceivingMap.has(receiving.pid)) { - altReceivingMap.set(receiving.pid, []); - } - altReceivingMap.get(receiving.pid).push(receiving); - } else { - // Original PO - if (!receivingMap.has(key)) { - receivingMap.set(key, []); - } - receivingMap.get(key).push(receiving); - } - }); - - // Verify PIDs exist - const [existingPids] = await localConnection.query( - 'SELECT pid FROM products WHERE pid IN (?)', - [productPids] - ); - const validPids = new Set(existingPids.map(p => p.pid)); - - // First check which PO lines already exist and get their current values - const poLines = Array.from(poProductMap.values()) - .filter(p => validPids.has(p.pid)) - .map(p => [p.po_id, p.pid]); - - const [existingPOs] = await localConnection.query( - `SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`, - poLines.flat() - ); - const existingPOMap = new Map( - existingPOs.map(po => [`${po.po_id}-${po.pid}`, po]) - ); - - // Split into inserts and updates - const insertsAndUpdates = { inserts: [], updates: [] }; - let batchProcessed = 0; - - for (const po of batch) { - const poProducts = Array.from(poProductMap.values()) - .filter(p => p.po_id === po.po_id && validPids.has(p.pid)); - - for (const product of poProducts) { - const key = `${po.po_id}-${product.pid}`; - const receivingHistory = receivingMap.get(key) || []; - const altReceivingHistory = altReceivingMap.get(product.pid) || []; - const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || []; + // Insert receivings into temp table + if (receivings.length > 0) { + // Process in smaller chunks to avoid parameter limits + const CHUNK_SIZE = 100; // Reduce chunk size to avoid parameter limits + for (let i = 0; i < receivings.length; i += CHUNK_SIZE) { + const chunk = receivings.slice(i, Math.min(i + CHUNK_SIZE, receivings.length)); - // Combine all receivings and sort by date - const allReceivings = [ - ...receivingHistory.map(r => ({ ...r, type: 'original' })), - ...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })), - ...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' })) - ].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31')); - - // Split receivings into original PO and others - const originalPOReceivings = allReceivings.filter(r => r.type === 'original'); - const otherReceivings = allReceivings.filter(r => r.type !== 'original'); - - // Track FIFO fulfillment - let remainingToFulfill = product.ordered; - const fulfillmentTracking = []; - let totalReceived = 0; - let actualCost = null; // Will store the cost of the first receiving that fulfills this PO - let firstFulfillmentReceiving = null; - let lastFulfillmentReceiving = null; - - for (const receiving of allReceivings) { - // Convert quantities to base units using supplier data - const baseQtyReceived = receiving.qty_each * ( - receiving.type === 'original' ? 
1 : - Math.max(1, product.supplier_qty_per_unit || 1) - ); - const qtyToApply = Math.min(remainingToFulfill, baseQtyReceived); - - if (qtyToApply > 0) { - // If this is the first receiving being applied, use its cost - if (actualCost === null && receiving.cost_each > 0) { - actualCost = receiving.cost_each; - firstFulfillmentReceiving = receiving; - } - lastFulfillmentReceiving = receiving; - fulfillmentTracking.push({ - receiving_id: receiving.receiving_id, - qty_applied: qtyToApply, - qty_total: baseQtyReceived, - cost: receiving.cost_each || actualCost || product.cost_each, - date: receiving.received_date, - received_by: receiving.received_by, - received_by_name: receiving.received_by_name || 'Unknown', - type: receiving.type, - remaining_qty: baseQtyReceived - qtyToApply - }); - remainingToFulfill -= qtyToApply; - } else { - // Track excess receivings - fulfillmentTracking.push({ - receiving_id: receiving.receiving_id, - qty_applied: 0, - qty_total: baseQtyReceived, - cost: receiving.cost_each || actualCost || product.cost_each, - date: receiving.received_date, - received_by: receiving.received_by, - received_by_name: receiving.received_by_name || 'Unknown', - type: receiving.type, - is_excess: true - }); - } - totalReceived += baseQtyReceived; - } - - const receiving_status = !totalReceived ? 1 : // created - remainingToFulfill > 0 ? 30 : // partial - 40; // full - - function formatDate(dateStr) { - if (!dateStr) return null; - if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null; - if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null; - try { - const date = new Date(dateStr); - if (isNaN(date.getTime())) return null; - if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null; - return date.toISOString().split('T')[0]; - } catch (e) { - return null; - } - } - - const rowValues = columnNames.map(col => { - switch (col) { - case 'po_id': return po.po_id; - case 'vendor': return po.vendor; - case 'date': return formatDate(po.date); - case 'expected_date': return formatDate(po.expected_date); - case 'pid': return product.pid; - case 'sku': return product.sku; - case 'name': return product.name; - case 'cost_price': return actualCost || product.cost_each; - case 'po_cost_price': return product.cost_each; - case 'status': return po.status; - case 'notes': return po.notes; - case 'long_note': return po.long_note; - case 'ordered': return product.ordered; - case 'received': return totalReceived; - case 'unfulfilled': return remainingToFulfill; - case 'excess_received': return Math.max(0, totalReceived - product.ordered); - case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date); - case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date); - case 'received_by': return firstFulfillmentReceiving?.received_by_name || null; - case 'receiving_status': return receiving_status; - case 'receiving_history': return JSON.stringify({ - fulfillment: fulfillmentTracking, - ordered_qty: product.ordered, - total_received: totalReceived, - remaining_unfulfilled: remainingToFulfill, - excess_received: Math.max(0, totalReceived - product.ordered), - po_cost: product.cost_each, - actual_cost: actualCost || product.cost_each - }); - default: return null; - } + const values = []; + const placeholders = []; + + chunk.forEach((r, idx) => { + values.push( + r.po_id, + r.pid, + r.receiving_id, + r.qty_each, + r.cost_each, + r.received_date, + r.received_by, + r.received_by_name || null, + 
r.is_alt_po + ); + + const offset = idx * 9; + placeholders.push(`($${offset + 1}, $${offset + 2}, $${offset + 3}, $${offset + 4}, $${offset + 5}, $${offset + 6}, $${offset + 7}, $${offset + 8}, $${offset + 9})`); }); - if (existingPOMap.has(key)) { - const existing = existingPOMap.get(key); - // Check if any values are different - const hasChanges = columnNames.some(col => { - const newVal = rowValues[columnNames.indexOf(col)]; - const oldVal = existing[col] ?? null; - // Special handling for numbers to avoid type coercion issues - if (typeof newVal === 'number' && typeof oldVal === 'number') { - return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences - } - // Special handling for receiving_history - parse and compare - if (col === 'receiving_history') { - const newHistory = JSON.parse(newVal || '{}'); - const oldHistory = JSON.parse(oldVal || '{}'); - return JSON.stringify(newHistory) !== JSON.stringify(oldHistory); - } - return newVal !== oldVal; - }); - - if (hasChanges) { - insertsAndUpdates.updates.push({ - po_id: po.po_id, - pid: product.pid, - values: rowValues - }); - } - } else { - insertsAndUpdates.inserts.push({ - po_id: po.po_id, - pid: product.pid, - values: rowValues - }); - } - batchProcessed++; + await localConnection.query(` + INSERT INTO temp_po_receivings ( + po_id, pid, receiving_id, qty_each, cost_each, received_date, + received_by, received_by_name, is_alt_po + ) + VALUES ${placeholders.join(',')} + ON CONFLICT (receiving_id, pid) DO UPDATE SET + po_id = EXCLUDED.po_id, + qty_each = EXCLUDED.qty_each, + cost_each = EXCLUDED.cost_each, + received_date = EXCLUDED.received_date, + received_by = EXCLUDED.received_by, + received_by_name = EXCLUDED.received_by_name, + is_alt_po = EXCLUDED.is_alt_po + `, values); } } - // Handle inserts - if (insertsAndUpdates.inserts.length > 0) { - const insertPlaceholders = insertsAndUpdates.inserts - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - - const insertResult = await localConnection.query(` - INSERT INTO purchase_orders (${columnNames.join(",")}) - VALUES ${insertPlaceholders} - `, insertsAndUpdates.inserts.map(i => i.values).flat()); - - const affectedRows = insertResult[0].affectedRows; - // For an upsert, MySQL counts rows twice for updates - // So if affectedRows is odd, we have (updates * 2 + inserts) - const updates = Math.floor(affectedRows / 2); - const inserts = affectedRows - (updates * 2); + // Process each PO product in chunks + const PRODUCT_CHUNK_SIZE = 100; + for (let i = 0; i < productBatch.length; i += PRODUCT_CHUNK_SIZE) { + const chunk = productBatch.slice(i, Math.min(i + PRODUCT_CHUNK_SIZE, productBatch.length)); + const values = []; + const placeholders = []; - recordsAdded += inserts; - recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates - processed += batchProcessed; - } - - // Handle updates - now we know these actually have changes - if (insertsAndUpdates.updates.length > 0) { - const updatePlaceholders = insertsAndUpdates.updates - .map(() => `(${Array(columnNames.length).fill("?").join(",")})`) - .join(","); - - const updateResult = await localConnection.query(` - INSERT INTO purchase_orders (${columnNames.join(",")}) - VALUES ${updatePlaceholders} - ON DUPLICATE KEY UPDATE ${columnNames - .filter((col) => col !== "po_id" && col !== "pid") - .map((col) => `${col} = VALUES(${col})`) - .join(",")}; - `, insertsAndUpdates.updates.map(u => u.values).flat()); - - const affectedRows = 
updateResult[0].affectedRows;
-      // For an upsert, MySQL counts rows twice for updates
-      // So if affectedRows is odd, we have (updates * 2 + inserts)
-      const updates = Math.floor(affectedRows / 2);
-      const inserts = affectedRows - (updates * 2);
-
-      recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
-      processed += batchProcessed;
-    }
-
-    // Update progress based on time interval
-    const now = Date.now();
-    if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) {
-      outputProgress({
-        status: "running",
-        operation: "Purchase orders import",
-        current: processed,
-        total: totalItems,
-        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
-        remaining: estimateRemaining(startTime, processed, totalItems),
-        rate: calculateRate(startTime, processed)
+        chunk.forEach((product) => {
+          const po = batch.find(p => p.po_id === product.po_id);
+          if (!po) return;
+
+          // Derive the placeholder offset from the values already queued so a
+          // skipped product (no matching PO) cannot desynchronize the
+          // placeholder numbering from the flat values array
+          const offset = values.length;
+
+          values.push(
+            product.po_id,
+            product.pid,
+            product.sku,
+            product.name,
+            po.vendor,
+            po.date,
+            po.expected_date,
+            po.status,
+            po.notes || po.long_note,
+            product.ordered,
+            product.cost_price
+          );
+
+          // 11 columns per row
+          placeholders.push(`($${offset + 1}, $${offset + 2}, $${offset + 3}, $${offset + 4}, $${offset + 5}, $${offset + 6}, $${offset + 7}, $${offset + 8}, $${offset + 9}, $${offset + 10}, $${offset + 11})`);
        });
-      lastProgressUpdate = now;
+
+        if (placeholders.length > 0) {
+          await localConnection.query(`
+            INSERT INTO temp_purchase_orders (
+              po_id, pid, sku, name, vendor, date, expected_date,
+              status, notes, ordered, cost_price
+            )
+            VALUES ${placeholders.join(',')}
+            ON CONFLICT (po_id, pid) DO UPDATE SET
+              sku = EXCLUDED.sku,
+              name = EXCLUDED.name,
+              vendor = EXCLUDED.vendor,
+              date = EXCLUDED.date,
+              expected_date = EXCLUDED.expected_date,
+              status = EXCLUDED.status,
+              notes = EXCLUDED.notes,
+              ordered = EXCLUDED.ordered,
+              cost_price = EXCLUDED.cost_price
+          `, values);
+        }
+
+        processed += chunk.length;
+
+        // Update progress based on time interval
+        const now = Date.now();
+        if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) {
+          outputProgress({
+            status: "running",
+            operation: "Purchase orders import",
+            current: processed,
+            total: totalItems,
+            elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+            remaining: estimateRemaining(startTime, processed, totalItems),
+            rate: calculateRate(startTime, processed)
+          });
+          lastProgressUpdate = now;
+        }
      }
    }
  }
-    // Only update sync status if we get here (no errors thrown)
+    // Insert final data into purchase_orders table in chunks
+    const FINAL_CHUNK_SIZE = 1000;
+    let totalProcessed = 0;
+    // The connection wrapper returns pg results MySQL-style as [result], so
+    // destructure before reading .rows
+    const [totalPosResult] = await localConnection.query('SELECT COUNT(*) as total_pos FROM temp_purchase_orders');
+    const total_pos = parseInt(totalPosResult.rows?.[0]?.total_pos || '0', 10);
+
+    outputProgress({
+      status: "running",
+      operation: "Purchase orders final import",
+      message: `Processing ${total_pos} purchase orders for final import`,
+      current: 0,
+      total: total_pos
+    });
+
+    // Process in chunks using cursor-based (keyset) pagination on (po_id, pid)
+    let lastPoId = 0;
+    let lastPid = 0;
+    let recordsAdded = 0;
+    let recordsUpdated = 0;
+
+    while (true) {
+      console.log('Fetching next chunk with lastPoId:', lastPoId, 'lastPid:', lastPid);
+      const [chunkResult] = await localConnection.query(`
+        SELECT po_id, pid FROM temp_purchase_orders
+        WHERE (po_id, pid) > ($1, $2)
+        ORDER BY po_id, pid
+        LIMIT $3
+      `, [lastPoId, lastPid, FINAL_CHUNK_SIZE]);
+
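+      // Defensive guard: the wrapper should always return a result with .rows;
+      // stop the loop rather than spin forever if it does not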
+      if (!chunkResult?.rows) {
+        console.error('No rows returned from chunk query:', chunkResult);
+        break;
+      }
+
+      const chunk = chunkResult.rows;
+      console.log('Got chunk of size:', chunk.length);
+      if (chunk.length === 0) break;
+
+      const [result] = await localConnection.query(`
+        WITH inserted_pos AS (
+          INSERT INTO purchase_orders (
+            po_id, pid, sku, name, cost_price, po_cost_price,
+            vendor, date, expected_date, status, notes,
+            ordered, received, receiving_status,
+            received_date, last_received_date, received_by,
+            receiving_history
+          )
+          SELECT
+            po.po_id,
+            po.pid,
+            po.sku,
+            po.name,
+            COALESCE(
+              (
+                SELECT cost_each
+                FROM temp_po_receivings r2
+                WHERE r2.pid = po.pid
+                  AND r2.po_id = po.po_id
+                  AND r2.is_alt_po = 0
+                  AND r2.cost_each > 0
+                ORDER BY r2.received_date
+                LIMIT 1
+              ),
+              po.cost_price
+            ) as cost_price,
+            po.cost_price as po_cost_price,
+            po.vendor,
+            po.date,
+            po.expected_date,
+            po.status,
+            po.notes,
+            po.ordered,
+            COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0) as received,
+            CASE
+              WHEN COUNT(r.receiving_id) = 0 THEN 1 -- created
+              WHEN SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END) < po.ordered THEN 30 -- partial
+              ELSE 40 -- full
+            END as receiving_status,
+            MIN(CASE WHEN r.is_alt_po = 0 THEN r.received_date END) as received_date,
+            MAX(CASE WHEN r.is_alt_po = 0 THEN r.received_date END) as last_received_date,
+            (
+              SELECT r2.received_by_name
+              FROM temp_po_receivings r2
+              WHERE r2.pid = po.pid
+                AND r2.is_alt_po = 0
+              ORDER BY r2.received_date
+              LIMIT 1
+            ) as received_by,
+            jsonb_build_object(
+              'ordered_qty', po.ordered,
+              'total_received', COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0),
+              'remaining_unfulfilled', GREATEST(0, po.ordered - COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0)),
+              'excess_received', GREATEST(0, COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0) - po.ordered),
+              'po_cost', po.cost_price,
+              'actual_cost', COALESCE(
+                (
+                  SELECT cost_each
+                  FROM temp_po_receivings r2
+                  WHERE r2.pid = po.pid
+                    AND r2.is_alt_po = 0
+                    AND r2.cost_each > 0
+                  ORDER BY r2.received_date
+                  LIMIT 1
+                ),
+                po.cost_price
+              ),
+              'fulfillment', (
+                SELECT jsonb_agg(
+                  jsonb_build_object(
+                    'receiving_id', r2.receiving_id,
+                    'qty_applied', CASE
+                      WHEN r2.running_total <= po.ordered THEN r2.qty_each
+                      WHEN r2.running_total - r2.qty_each < po.ordered THEN po.ordered - (r2.running_total - r2.qty_each)
+                      ELSE 0
+                    END,
+                    'qty_total', r2.qty_each,
+                    'cost', r2.cost_each,
+                    'date', r2.received_date,
+                    'received_by', r2.received_by,
+                    'received_by_name', r2.received_by_name,
+                    'type', CASE r2.is_alt_po
+                      WHEN 0 THEN 'original'
+                      WHEN 1 THEN 'alternate'
+                      ELSE 'no_po'
+                    END,
+                    'remaining_qty', CASE
+                      WHEN r2.running_total <= po.ordered THEN 0
+                      WHEN r2.running_total - r2.qty_each < po.ordered THEN r2.running_total - po.ordered
+                      ELSE r2.qty_each
+                    END,
+                    'is_excess', r2.running_total > po.ordered
+                  )
+                  ORDER BY r2.received_date
+                )
+                FROM (
+                  SELECT
+                    r2.*,
+                    SUM(r2.qty_each) OVER (
+                      PARTITION BY r2.pid
+                      ORDER BY r2.received_date
+                      ROWS UNBOUNDED PRECEDING
+                    ) as running_total
+                  FROM temp_po_receivings r2
+                  WHERE r2.pid = po.pid
+                ) r2
+              ),
+              'alternate_po_receivings', (
+                SELECT jsonb_agg(
+                  jsonb_build_object(
+                    'receiving_id', r2.receiving_id,
+                    'qty', r2.qty_each,
+                    'cost', r2.cost_each,
+                    'date', r2.received_date,
+                    'received_by', r2.received_by,
+                    'received_by_name', r2.received_by_name
+                  )
+                  ORDER BY r2.received_date
+                )
+                FROM temp_po_receivings r2
+                WHERE r2.pid = po.pid AND r2.is_alt_po = 1
+              ),
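+              -- receivings that arrived with no PO reference at all (is_alt_po = 2)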
+              'no_po_receivings', (
+                SELECT jsonb_agg(
+                  jsonb_build_object(
+                    'receiving_id', r2.receiving_id,
+                    'qty', r2.qty_each,
+                    'cost', r2.cost_each,
+                    'date', r2.received_date,
+                    'received_by', r2.received_by,
+                    'received_by_name', r2.received_by_name
+                  )
+                  ORDER BY r2.received_date
+                )
+                FROM temp_po_receivings r2
+                WHERE r2.pid = po.pid AND r2.is_alt_po = 2
+              )
+            ) as receiving_history
+          FROM temp_purchase_orders po
+          LEFT JOIN temp_po_receivings r ON po.pid = r.pid
+          WHERE (po.po_id, po.pid) IN (
+            -- alias the two unnested arrays so their columns can be referenced by name
+            SELECT po_id, pid FROM UNNEST($1::int[], $2::int[]) AS t(po_id, pid)
+          )
+          GROUP BY po.po_id, po.pid, po.sku, po.name, po.vendor, po.date,
+            po.expected_date, po.status, po.notes, po.ordered, po.cost_price
+          ON CONFLICT (po_id, pid) DO UPDATE SET
+            vendor = EXCLUDED.vendor,
+            date = EXCLUDED.date,
+            expected_date = EXCLUDED.expected_date,
+            status = EXCLUDED.status,
+            notes = EXCLUDED.notes,
+            ordered = EXCLUDED.ordered,
+            received = EXCLUDED.received,
+            receiving_status = EXCLUDED.receiving_status,
+            received_date = EXCLUDED.received_date,
+            last_received_date = EXCLUDED.last_received_date,
+            received_by = EXCLUDED.received_by,
+            receiving_history = EXCLUDED.receiving_history,
+            cost_price = EXCLUDED.cost_price,
+            po_cost_price = EXCLUDED.po_cost_price
+          RETURNING xmax
+        )
+        SELECT
+          COUNT(*) FILTER (WHERE xmax = 0) as inserted,
+          COUNT(*) FILTER (WHERE xmax <> 0) as updated
+        FROM inserted_pos
+      `, [
+        chunk.map(r => r.po_id),
+        chunk.map(r => r.pid)
+      ]);
+
+      // Add debug logging
+      console.log('Insert result:', result?.rows?.[0]);
+
+      // Handle the result properly for PostgreSQL with more defensive coding
+      const resultRow = result?.rows?.[0] || {};
+      const insertCount = parseInt(resultRow.inserted || '0', 10);
+      const updateCount = parseInt(resultRow.updated || '0', 10);
+
+      recordsAdded += insertCount;
+      recordsUpdated += updateCount;
+      totalProcessed += chunk.length;
+
+      // Update progress
+      outputProgress({
+        status: "running",
+        operation: "Purchase orders final import",
+        message: `Processed ${totalProcessed} of ${total_pos} purchase orders`,
+        current: totalProcessed,
+        total: total_pos,
+        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
+        remaining: estimateRemaining(startTime, totalProcessed, total_pos),
+        rate: calculateRate(startTime, totalProcessed)
+      });
+
+      // Update last processed IDs for next chunk with safety check
+      if (chunk.length > 0) {
+        const lastItem = chunk[chunk.length - 1];
+        if (lastItem) {
+          lastPoId = lastItem.po_id;
+          lastPid = lastItem.pid;
+        }
+      }
+    }
+
+    // Update sync status
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('purchase_orders', NOW())
-      ON DUPLICATE KEY UPDATE
-        last_sync_timestamp = NOW(),
-        last_sync_id = LAST_INSERT_ID(last_sync_id)
+      ON CONFLICT (table_name) DO UPDATE SET
+        last_sync_timestamp = NOW()
+    `);
+
+    // Clean up temporary tables
+    await localConnection.query(`
+      DROP TABLE IF EXISTS temp_purchase_orders;
+      DROP TABLE IF EXISTS temp_po_receivings;
    `);

    return {
      status: "complete",
-      totalImported: totalItems,
-      recordsAdded: recordsAdded || 0,
-      recordsUpdated: recordsUpdated || 0,
-      incrementalUpdate,
-      lastSyncTime
+      recordsAdded,
+      recordsUpdated,
+      totalRecords: processed
    };
  } catch (error) {
-    outputProgress({
-      operation: `${incrementalUpdate ? 
'Incremental' : 'Full'} purchase orders import failed`, - status: "error", - error: error.message, - }); + console.error("Error during purchase orders import:", error); + // Attempt cleanup on error + try { + await localConnection.query(` + DROP TABLE IF EXISTS temp_purchase_orders; + DROP TABLE IF EXISTS temp_po_receivings; + `); + } catch (cleanupError) { + console.error('Error during cleanup:', cleanupError); + } throw error; } } -module.exports = importPurchaseOrders; \ No newline at end of file +module.exports = importPurchaseOrders; \ No newline at end of file diff --git a/inventory-server/scripts/import/utils.js b/inventory-server/scripts/import/utils.js index 12d8a21..b7d888d 100644 --- a/inventory-server/scripts/import/utils.js +++ b/inventory-server/scripts/import/utils.js @@ -1,5 +1,6 @@ const mysql = require("mysql2/promise"); const { Client } = require("ssh2"); +const { Pool } = require('pg'); const dotenv = require("dotenv"); const path = require("path"); @@ -41,23 +42,90 @@ async function setupSshTunnel(sshConfig) { async function setupConnections(sshConfig) { const tunnel = await setupSshTunnel(sshConfig); + // Setup MySQL connection for production const prodConnection = await mysql.createConnection({ ...sshConfig.prodDbConfig, stream: tunnel.stream, }); - const localConnection = await mysql.createPool({ - ...sshConfig.localDbConfig, - waitForConnections: true, - connectionLimit: 10, - queueLimit: 0 - }); + // Setup PostgreSQL connection pool for local + const localPool = new Pool(sshConfig.localDbConfig); - return { - ssh: tunnel.ssh, - prodConnection, - localConnection + // Test the PostgreSQL connection + try { + const client = await localPool.connect(); + await client.query('SELECT NOW()'); + client.release(); + console.log('PostgreSQL connection successful'); + } catch (err) { + console.error('PostgreSQL connection error:', err); + throw err; + } + + // Create a wrapper for the PostgreSQL pool to match MySQL interface + const localConnection = { + _client: null, + _transactionActive: false, + + query: async (text, params) => { + // If we're not in a transaction, use the pool directly + if (!localConnection._transactionActive) { + const client = await localPool.connect(); + try { + const result = await client.query(text, params); + return [result]; + } finally { + client.release(); + } + } + + // If we're in a transaction, use the dedicated client + if (!localConnection._client) { + throw new Error('No active transaction client'); + } + const result = await localConnection._client.query(text, params); + return [result]; + }, + + beginTransaction: async () => { + if (localConnection._transactionActive) { + throw new Error('Transaction already active'); + } + localConnection._client = await localPool.connect(); + await localConnection._client.query('BEGIN'); + localConnection._transactionActive = true; + }, + + commit: async () => { + if (!localConnection._transactionActive) { + throw new Error('No active transaction to commit'); + } + await localConnection._client.query('COMMIT'); + localConnection._client.release(); + localConnection._client = null; + localConnection._transactionActive = false; + }, + + rollback: async () => { + if (!localConnection._transactionActive) { + throw new Error('No active transaction to rollback'); + } + await localConnection._client.query('ROLLBACK'); + localConnection._client.release(); + localConnection._client = null; + localConnection._transactionActive = false; + }, + + end: async () => { + if (localConnection._client) { + 
localConnection._client.release(); + localConnection._client = null; + } + await localPool.end(); + } }; + + return { prodConnection, localConnection, tunnel }; } // Helper function to close connections diff --git a/inventory-server/scripts/reset-db.js b/inventory-server/scripts/reset-db.js index b7bea1a..44673e9 100644 --- a/inventory-server/scripts/reset-db.js +++ b/inventory-server/scripts/reset-db.js @@ -1,4 +1,4 @@ -const mysql = require('mysql2/promise'); +const { Client } = require('pg'); const path = require('path'); const dotenv = require('dotenv'); const fs = require('fs'); @@ -10,7 +10,7 @@ const dbConfig = { user: process.env.DB_USER, password: process.env.DB_PASSWORD, database: process.env.DB_NAME, - multipleStatements: true + port: process.env.DB_PORT || 5432 }; // Helper function to output progress in JSON format @@ -54,14 +54,44 @@ function splitSQLStatements(sql) { let currentStatement = ''; let inString = false; let stringChar = ''; + let inDollarQuote = false; + let dollarQuoteTag = ''; // Process character by character for (let i = 0; i < sql.length; i++) { const char = sql[i]; const nextChar = sql[i + 1] || ''; - // Handle string literals - if ((char === "'" || char === '"') && sql[i - 1] !== '\\') { + // Handle dollar quotes + if (char === '$' && !inString) { + // Look ahead to find the dollar quote tag + let tag = '$'; + let j = i + 1; + while (j < sql.length && sql[j] !== '$') { + tag += sql[j]; + j++; + } + tag += '$'; + + if (j < sql.length) { // Found closing $ + if (!inDollarQuote) { + inDollarQuote = true; + dollarQuoteTag = tag; + currentStatement += tag; + i = j; + continue; + } else if (sql.substring(i, j + 1) === dollarQuoteTag) { + inDollarQuote = false; + dollarQuoteTag = ''; + currentStatement += tag; + i = j; + continue; + } + } + } + + // Handle string literals (only if not in dollar quote) + if (!inDollarQuote && (char === "'" || char === '"') && sql[i - 1] !== '\\') { if (!inString) { inString = true; stringChar = char; @@ -70,23 +100,25 @@ function splitSQLStatements(sql) { } } - // Handle comments - if (!inString && char === '-' && nextChar === '-') { - // Skip to end of line - while (i < sql.length && sql[i] !== '\n') i++; - continue; + // Handle comments (only if not in string or dollar quote) + if (!inString && !inDollarQuote) { + if (char === '-' && nextChar === '-') { + // Skip to end of line + while (i < sql.length && sql[i] !== '\n') i++; + continue; + } + + if (char === '/' && nextChar === '*') { + // Skip until closing */ + i += 2; + while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++; + i++; // Skip the closing / + continue; + } } - if (!inString && char === '/' && nextChar === '*') { - // Skip until closing */ - i += 2; - while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++; - i++; // Skip the closing / - continue; - } - - // Handle statement boundaries - if (!inString && char === ';') { + // Handle statement boundaries (only if not in string or dollar quote) + if (!inString && !inDollarQuote && char === ';') { if (currentStatement.trim()) { statements.push(currentStatement.trim()); } @@ -120,30 +152,26 @@ async function resetDatabase() { } }); - const connection = await mysql.createConnection(dbConfig); + const client = new Client(dbConfig); + await client.connect(); try { - // Check MySQL privileges + // Check PostgreSQL version and user outputProgress({ - operation: 'Checking privileges', - message: 'Verifying MySQL user privileges...' 
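+      // Log server version, user and database up front so permission problems during a reset are easy to diagnose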
+ operation: 'Checking database', + message: 'Verifying PostgreSQL version and user privileges...' }); - const [grants] = await connection.query('SHOW GRANTS'); - outputProgress({ - operation: 'User privileges', - message: { - grants: grants.map(g => Object.values(g)[0]) - } - }); - - // Enable warnings as errors - await connection.query('SET SESSION sql_notes = 1'); + const versionResult = await client.query('SELECT version()'); + const userResult = await client.query('SELECT current_user, current_database()'); - // Log database config (without sensitive info) outputProgress({ - operation: 'Database config', - message: `Using database: ${dbConfig.database} on host: ${dbConfig.host}` + operation: 'Database info', + message: { + version: versionResult.rows[0].version, + user: userResult.rows[0].current_user, + database: userResult.rows[0].current_database + } }); // Get list of all tables in the current database @@ -152,14 +180,14 @@ async function resetDatabase() { message: 'Retrieving all table names...' }); - const [tables] = await connection.query(` - SELECT GROUP_CONCAT(table_name) as tables - FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name NOT IN ('users', 'import_history', 'calculate_history') + const tablesResult = await client.query(` + SELECT string_agg(tablename, ', ') as tables + FROM pg_tables + WHERE schemaname = 'public' + AND tablename NOT IN ('users', 'calculate_history', 'import_history'); `); - if (!tables[0].tables) { + if (!tablesResult.rows[0].tables) { outputProgress({ operation: 'No tables found', message: 'Database is already empty' @@ -170,20 +198,73 @@ async function resetDatabase() { message: 'Dropping all existing tables...' }); - await connection.query('SET FOREIGN_KEY_CHECKS = 0'); - const dropQuery = ` - DROP TABLE IF EXISTS - ${tables[0].tables - .split(',') - .filter(table => !['users', 'calculate_history'].includes(table)) - .map(table => '`' + table + '`') - .join(', ')} - `; - await connection.query(dropQuery); - await connection.query('SET FOREIGN_KEY_CHECKS = 1'); + // Disable triggers/foreign key checks + await client.query('SET session_replication_role = \'replica\';'); + + // Drop all tables except users + const tables = tablesResult.rows[0].tables.split(', '); + for (const table of tables) { + if (!['users'].includes(table)) { + await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`); + } + } + + // Only drop types if we're not preserving history tables + const historyTablesExist = await client.query(` + SELECT EXISTS ( + SELECT FROM pg_tables + WHERE schemaname = 'public' + AND tablename IN ('calculate_history', 'import_history') + ); + `); + + if (!historyTablesExist.rows[0].exists) { + await client.query('DROP TYPE IF EXISTS calculation_status CASCADE;'); + await client.query('DROP TYPE IF EXISTS module_name CASCADE;'); + } + + // Re-enable triggers/foreign key checks + await client.query('SET session_replication_role = \'origin\';'); } - // Read and execute main schema (core tables) + // Create enum types if they don't exist + outputProgress({ + operation: 'Creating enum types', + message: 'Setting up required enum types...' 
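+      // The CREATE TYPE statements below are guarded by pg_type existence checks, so re-running the reset stays idempotent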
+ }); + + // Check if types exist before creating + const typesExist = await client.query(` + SELECT EXISTS ( + SELECT 1 FROM pg_type + WHERE typname = 'calculation_status' + ) as calc_status_exists, + EXISTS ( + SELECT 1 FROM pg_type + WHERE typname = 'module_name' + ) as module_name_exists; + `); + + if (!typesExist.rows[0].calc_status_exists) { + await client.query(`CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled')`); + } + + if (!typesExist.rows[0].module_name_exists) { + await client.query(` + CREATE TYPE module_name AS ENUM ( + 'product_metrics', + 'time_aggregates', + 'financial_metrics', + 'vendor_metrics', + 'category_metrics', + 'brand_metrics', + 'sales_forecasts', + 'abc_classification' + ) + `); + } + + // Read and execute main schema first (core tables) outputProgress({ operation: 'Running database setup', message: 'Creating core tables...' @@ -223,35 +304,24 @@ async function resetDatabase() { for (let i = 0; i < statements.length; i++) { const stmt = statements[i]; try { - const [result, fields] = await connection.query(stmt); - - // Check for warnings - const [warnings] = await connection.query('SHOW WARNINGS'); - if (warnings && warnings.length > 0) { - outputProgress({ - status: 'warning', - operation: 'SQL Warning', - statement: i + 1, - warnings: warnings - }); - } + const result = await client.query(stmt); // Verify if table was created (if this was a CREATE TABLE statement) if (stmt.trim().toLowerCase().startsWith('create table')) { - const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1]; + const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1]; if (tableName) { - const [tableExists] = await connection.query(` + const tableExists = await client.query(` SELECT COUNT(*) as count FROM information_schema.tables - WHERE table_schema = DATABASE() - AND table_name = ? + WHERE table_schema = 'public' + AND table_name = $1 `, [tableName]); outputProgress({ operation: 'Table Creation Verification', message: { table: tableName, - exists: tableExists[0].count > 0 + exists: tableExists.rows[0].count > 0 } }); } @@ -263,7 +333,7 @@ async function resetDatabase() { statement: i + 1, total: statements.length, preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' 
: ''), - affectedRows: result.affectedRows + rowCount: result.rowCount } }); } catch (sqlError) { @@ -271,8 +341,6 @@ async function resetDatabase() { status: 'error', operation: 'SQL Error', error: sqlError.message, - sqlState: sqlError.sqlState, - errno: sqlError.errno, statement: stmt, statementNumber: i + 1 }); @@ -280,66 +348,12 @@ async function resetDatabase() { } } - // List all tables in the database after schema execution - outputProgress({ - operation: 'Debug database', - message: { - currentDatabase: (await connection.query('SELECT DATABASE() as db'))[0][0].db - } - }); - - const [allTables] = await connection.query(` - SELECT - table_schema, - table_name, - engine, - create_time, - table_rows + // Verify core tables were created + const existingTables = (await client.query(` + SELECT table_name FROM information_schema.tables - WHERE table_schema = DATABASE() - `); - - if (allTables.length === 0) { - outputProgress({ - operation: 'Warning', - message: 'No tables found in database after schema execution' - }); - } else { - outputProgress({ - operation: 'Tables after schema execution', - message: { - count: allTables.length, - tables: allTables.map(t => ({ - schema: t.table_schema, - name: t.table_name, - engine: t.engine, - created: t.create_time, - rows: t.table_rows - })) - } - }); - } - - // Also check table status - const [tableStatus] = await connection.query('SHOW TABLE STATUS'); - outputProgress({ - operation: 'Table Status', - message: { - tables: tableStatus.map(t => ({ - name: t.Name, - engine: t.Engine, - version: t.Version, - rowFormat: t.Row_format, - rows: t.Rows, - createTime: t.Create_time, - updateTime: t.Update_time - })) - } - }); - - // Verify core tables were created using SHOW TABLES - const [showTables] = await connection.query('SHOW TABLES'); - const existingTables = showTables.map(t => Object.values(t)[0]); + WHERE table_schema = 'public' + `)).rows.map(t => t.table_name); outputProgress({ operation: 'Core tables verification', @@ -359,22 +373,12 @@ async function resetDatabase() { ); } - // Verify all core tables use InnoDB - const [engineStatus] = await connection.query('SHOW TABLE STATUS WHERE Name IN (?)', [CORE_TABLES]); - const nonInnoDBTables = engineStatus.filter(t => t.Engine !== 'InnoDB'); - - if (nonInnoDBTables.length > 0) { - throw new Error( - `Tables using non-InnoDB engine: ${nonInnoDBTables.map(t => t.Name).join(', ')}` - ); - } - outputProgress({ operation: 'Core tables created', message: `Successfully created tables: ${CORE_TABLES.join(', ')}` }); - // Read and execute config schema + // Now read and execute config schema (since core tables exist) outputProgress({ operation: 'Running config setup', message: 'Creating configuration tables...' @@ -400,18 +404,7 @@ async function resetDatabase() { for (let i = 0; i < configStatements.length; i++) { const stmt = configStatements[i]; try { - const [result, fields] = await connection.query(stmt); - - // Check for warnings - const [warnings] = await connection.query('SHOW WARNINGS'); - if (warnings && warnings.length > 0) { - outputProgress({ - status: 'warning', - operation: 'Config SQL Warning', - statement: i + 1, - warnings: warnings - }); - } + const result = await client.query(stmt); outputProgress({ operation: 'Config SQL Progress', @@ -419,7 +412,7 @@ async function resetDatabase() { statement: i + 1, total: configStatements.length, preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' 
: ''), - affectedRows: result.affectedRows + rowCount: result.rowCount } }); } catch (sqlError) { @@ -427,8 +420,6 @@ async function resetDatabase() { status: 'error', operation: 'Config SQL Error', error: sqlError.message, - sqlState: sqlError.sqlState, - errno: sqlError.errno, statement: stmt, statementNumber: i + 1 }); @@ -436,33 +427,6 @@ async function resetDatabase() { } } - // Verify config tables were created - const [showConfigTables] = await connection.query('SHOW TABLES'); - const existingConfigTables = showConfigTables.map(t => Object.values(t)[0]); - - outputProgress({ - operation: 'Config tables verification', - message: { - found: existingConfigTables, - expected: CONFIG_TABLES - } - }); - - const missingConfigTables = CONFIG_TABLES.filter( - t => !existingConfigTables.includes(t) - ); - - if (missingConfigTables.length > 0) { - throw new Error( - `Failed to create config tables: ${missingConfigTables.join(', ')}` - ); - } - - outputProgress({ - operation: 'Config tables created', - message: `Successfully created tables: ${CONFIG_TABLES.join(', ')}` - }); - // Read and execute metrics schema (metrics tables) outputProgress({ operation: 'Running metrics setup', @@ -489,18 +453,7 @@ async function resetDatabase() { for (let i = 0; i < metricsStatements.length; i++) { const stmt = metricsStatements[i]; try { - const [result, fields] = await connection.query(stmt); - - // Check for warnings - const [warnings] = await connection.query('SHOW WARNINGS'); - if (warnings && warnings.length > 0) { - outputProgress({ - status: 'warning', - operation: 'Metrics SQL Warning', - statement: i + 1, - warnings: warnings - }); - } + const result = await client.query(stmt); outputProgress({ operation: 'Metrics SQL Progress', @@ -508,7 +461,7 @@ async function resetDatabase() { statement: i + 1, total: metricsStatements.length, preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' 
: ''), - affectedRows: result.affectedRows + rowCount: result.rowCount } }); } catch (sqlError) { @@ -516,8 +469,6 @@ async function resetDatabase() { status: 'error', operation: 'Metrics SQL Error', error: sqlError.message, - sqlState: sqlError.sqlState, - errno: sqlError.errno, statement: stmt, statementNumber: i + 1 }); @@ -539,7 +490,7 @@ async function resetDatabase() { }); process.exit(1); } finally { - await connection.end(); + await client.end(); } } diff --git a/inventory-server/scripts/reset-metrics.js b/inventory-server/scripts/reset-metrics.js index fcbb9a9..fd4220c 100644 --- a/inventory-server/scripts/reset-metrics.js +++ b/inventory-server/scripts/reset-metrics.js @@ -1,4 +1,4 @@ -const mysql = require('mysql2/promise'); +const { Client } = require('pg'); const path = require('path'); const fs = require('fs'); require('dotenv').config({ path: path.resolve(__dirname, '../.env') }); @@ -8,7 +8,7 @@ const dbConfig = { user: process.env.DB_USER, password: process.env.DB_PASSWORD, database: process.env.DB_NAME, - multipleStatements: true + port: process.env.DB_PORT || 5432 }; function outputProgress(data) { @@ -34,8 +34,8 @@ const METRICS_TABLES = [ 'sales_forecasts', 'temp_purchase_metrics', 'temp_sales_metrics', - 'vendor_metrics', //before vendor_details for foreign key - 'vendor_time_metrics', //before vendor_details for foreign key + 'vendor_metrics', + 'vendor_time_metrics', 'vendor_details' ]; @@ -90,31 +90,31 @@ function splitSQLStatements(sql) { } async function resetMetrics() { - let connection; + let client; try { outputProgress({ operation: 'Starting metrics reset', message: 'Connecting to database...' }); - connection = await mysql.createConnection(dbConfig); - await connection.beginTransaction(); + client = new Client(dbConfig); + await client.connect(); // First verify current state - const [initialTables] = await connection.query(` - SELECT TABLE_NAME as name - FROM information_schema.tables - WHERE TABLE_SCHEMA = DATABASE() - AND TABLE_NAME IN (?) + const initialTables = await client.query(` + SELECT tablename as name + FROM pg_tables + WHERE schemaname = 'public' + AND tablename = ANY($1) `, [METRICS_TABLES]); outputProgress({ operation: 'Initial state', - message: `Found ${initialTables.length} existing metrics tables: ${initialTables.map(t => t.name).join(', ')}` + message: `Found ${initialTables.rows.length} existing metrics tables: ${initialTables.rows.map(t => t.name).join(', ')}` }); // Disable foreign key checks at the start - await connection.query('SET FOREIGN_KEY_CHECKS = 0'); + await client.query('SET session_replication_role = \'replica\''); // Drop all metrics tables in reverse order to handle dependencies outputProgress({ @@ -124,17 +124,17 @@ async function resetMetrics() { for (const table of [...METRICS_TABLES].reverse()) { try { - await connection.query(`DROP TABLE IF EXISTS ${table}`); + await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`); // Verify the table was actually dropped - const [checkDrop] = await connection.query(` + const checkDrop = await client.query(` SELECT COUNT(*) as count - FROM information_schema.tables - WHERE TABLE_SCHEMA = DATABASE() - AND TABLE_NAME = ? 
+ FROM pg_tables + WHERE schemaname = 'public' + AND tablename = $1 `, [table]); - if (checkDrop[0].count > 0) { + if (parseInt(checkDrop.rows[0].count) > 0) { throw new Error(`Failed to drop table ${table} - table still exists`); } @@ -153,15 +153,15 @@ async function resetMetrics() { } // Verify all tables were dropped - const [afterDrop] = await connection.query(` - SELECT TABLE_NAME as name - FROM information_schema.tables - WHERE TABLE_SCHEMA = DATABASE() - AND TABLE_NAME IN (?) + const afterDrop = await client.query(` + SELECT tablename as name + FROM pg_tables + WHERE schemaname = 'public' + AND tablename = ANY($1) `, [METRICS_TABLES]); - if (afterDrop.length > 0) { - throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.map(t => t.name).join(', ')}`); + if (afterDrop.rows.length > 0) { + throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.rows.map(t => t.name).join(', ')}`); } // Read metrics schema @@ -187,39 +187,26 @@ async function resetMetrics() { for (let i = 0; i < statements.length; i++) { const stmt = statements[i]; try { - await connection.query(stmt); - - // Check for warnings - const [warnings] = await connection.query('SHOW WARNINGS'); - if (warnings && warnings.length > 0) { - outputProgress({ - status: 'warning', - operation: 'SQL Warning', - message: { - statement: i + 1, - warnings: warnings - } - }); - } + const result = await client.query(stmt); // If this is a CREATE TABLE statement, verify the table was created if (stmt.trim().toLowerCase().startsWith('create table')) { - const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1]; + const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1]; if (tableName) { - const [checkCreate] = await connection.query(` - SELECT TABLE_NAME as name, CREATE_TIME as created - FROM information_schema.tables - WHERE TABLE_SCHEMA = DATABASE() - AND TABLE_NAME = ? + const checkCreate = await client.query(` + SELECT tablename as name + FROM pg_tables + WHERE schemaname = 'public' + AND tablename = $1 `, [tableName]); - if (checkCreate.length === 0) { + if (checkCreate.rows.length === 0) { throw new Error(`Failed to create table ${tableName} - table does not exist after CREATE statement`); } outputProgress({ operation: 'Table created', - message: `Successfully created table: ${tableName} at ${checkCreate[0].created}` + message: `Successfully created table: ${tableName}` }); } } @@ -229,7 +216,8 @@ async function resetMetrics() { message: { statement: i + 1, total: statements.length, - preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '') + preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''), + rowCount: result.rowCount } }); } catch (sqlError) { @@ -238,8 +226,6 @@ async function resetMetrics() { operation: 'SQL Error', message: { error: sqlError.message, - sqlState: sqlError.sqlState, - errno: sqlError.errno, statement: stmt, statementNumber: i + 1 } @@ -249,7 +235,7 @@ async function resetMetrics() { } // Re-enable foreign key checks after all tables are created - await connection.query('SET FOREIGN_KEY_CHECKS = 1'); + await client.query('SET session_replication_role = \'origin\''); // Verify metrics tables were created outputProgress({ @@ -257,37 +243,36 @@ async function resetMetrics() { message: 'Checking all metrics tables were created...' 
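+      // Any expected table missing from pg_tables aborts the reset with an error below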
});

-    const [metricsTablesResult] = await connection.query(`
-      SELECT
-        TABLE_NAME as name,
-        TABLE_ROWS as \`rows\`,
-        CREATE_TIME as created
-      FROM information_schema.tables
-      WHERE TABLE_SCHEMA = DATABASE()
-      AND TABLE_NAME IN (?)
+    const metricsTablesResult = await client.query(`
+      SELECT tablename as name
+      FROM pg_tables
+      WHERE schemaname = 'public'
+      AND tablename = ANY($1)
    `, [METRICS_TABLES]);

    outputProgress({
      operation: 'Tables found',
-      message: `Found ${metricsTablesResult.length} tables: ${metricsTablesResult.map(t =>
-        `${t.name} (created: ${t.created})`
-      ).join(', ')}`
+      message: `Found ${metricsTablesResult.rows.length} tables: ${metricsTablesResult.rows.map(t => t.name).join(', ')}`
    });

-    const existingMetricsTables = metricsTablesResult.map(t => t.name);
+    const existingMetricsTables = metricsTablesResult.rows.map(t => t.name);
    const missingMetricsTables = METRICS_TABLES.filter(t => !existingMetricsTables.includes(t));

    if (missingMetricsTables.length > 0) {
      // Do one final check of the actual tables
-      const [finalCheck] = await connection.query('SHOW TABLES');
+      const finalCheck = await client.query(`
+        SELECT tablename as name
+        FROM pg_tables
+        WHERE schemaname = 'public'
+      `);
      outputProgress({
        operation: 'Final table check',
-        message: `All database tables: ${finalCheck.map(t => Object.values(t)[0]).join(', ')}`
+        message: `All database tables: ${finalCheck.rows.map(t => t.name).join(', ')}`
      });
      throw new Error(`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`);
    }

-    await connection.commit();
+    await client.query('COMMIT');

    outputProgress({
      status: 'complete',
@@ -302,17 +287,17 @@
      stack: error.stack
    });

-    if (connection) {
-      await connection.rollback();
+    if (client) {
+      await client.query('ROLLBACK');
      // Make sure to re-enable foreign key checks even if there's an error
-      await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {});
+      await client.query('SET session_replication_role = \'origin\'').catch(() => {});
    }
    throw error;
  } finally {
    if (client) {
      // One final attempt to ensure foreign key checks are enabled
-      await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {});
-      await connection.end();
+      await client.query('SET session_replication_role = \'origin\'').catch(() => {});
+      await client.end();
    }
  }
}
diff --git a/inventory-server/src/routes/ai-validation.js b/inventory-server/src/routes/ai-validation.js
index 8afaff9..fd627e4 100644
--- a/inventory-server/src/routes/ai-validation.js
+++ b/inventory-server/src/routes/ai-validation.js
@@ -3,11 +3,20 @@ const router = express.Router();
 const OpenAI = require('openai');
 const fs = require('fs').promises;
 const path = require('path');
+const dotenv = require('dotenv');
+
+// Ensure environment variables are loaded
+dotenv.config({ path: path.join(__dirname, '../../.env') });
+
+// Warn before constructing the client: the OpenAI SDK throws at startup when
+// no API key is available, so a check placed after the constructor never runs
+if (!process.env.OPENAI_API_KEY) {
+  console.error('Warning: OPENAI_API_KEY is not set in environment variables');
+}
+
+// Initialize OpenAI client
 const openai = new OpenAI({
   apiKey: process.env.OPENAI_API_KEY
 });

 // Cache configuration
 const CACHE_TTL = 60 * 60 * 1000; // 1 hour in milliseconds
diff --git a/inventory-server/src/routes/analytics.js b/inventory-server/src/routes/analytics.js
index 351cc63..1553227 100644
--- a/inventory-server/src/routes/analytics.js
+++ b/inventory-server/src/routes/analytics.js
@@ -6,24 +6,24 @@ router.get('/stats', async (req, res) => {
   try {
     const pool = req.app.locals.pool;

-    const [results] = await 
pool.query(` + const { rows: [results] } = await pool.query(` SELECT COALESCE( ROUND( (SUM(o.price * o.quantity - p.cost_price * o.quantity) / - NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 + NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1 ), 0 ) as profitMargin, COALESCE( ROUND( - (AVG(p.price / NULLIF(p.cost_price, 0) - 1) * 100), 1 + (AVG(p.price / NULLIF(p.cost_price, 0) - 1) * 100)::numeric, 1 ), 0 ) as averageMarkup, COALESCE( ROUND( - SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0), 2 + (SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0))::numeric, 2 ), 0 ) as stockTurnoverRate, @@ -31,23 +31,23 @@ router.get('/stats', async (req, res) => { COALESCE(COUNT(DISTINCT p.categories), 0) as categoryCount, COALESCE( ROUND( - AVG(o.price * o.quantity), 2 + AVG(o.price * o.quantity)::numeric, 2 ), 0 ) as averageOrderValue FROM products p LEFT JOIN orders o ON p.pid = o.pid - WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + WHERE o.date >= CURRENT_DATE - INTERVAL '30 days' `); // Ensure all values are numbers const stats = { - profitMargin: Number(results[0].profitMargin) || 0, - averageMarkup: Number(results[0].averageMarkup) || 0, - stockTurnoverRate: Number(results[0].stockTurnoverRate) || 0, - vendorCount: Number(results[0].vendorCount) || 0, - categoryCount: Number(results[0].categoryCount) || 0, - averageOrderValue: Number(results[0].averageOrderValue) || 0 + profitMargin: Number(results.profitmargin) || 0, + averageMarkup: Number(results.averagemarkup) || 0, + stockTurnoverRate: Number(results.stockturnoverrate) || 0, + vendorCount: Number(results.vendorcount) || 0, + categoryCount: Number(results.categorycount) || 0, + averageOrderValue: Number(results.averageordervalue) || 0 }; res.json(stats); @@ -63,13 +63,13 @@ router.get('/profit', async (req, res) => { const pool = req.app.locals.pool; // Get profit margins by category with full path - const [byCategory] = await pool.query(` + const { rows: byCategory } = await pool.query(` WITH RECURSIVE category_path AS ( SELECT c.cat_id, c.name, c.parent_id, - CAST(c.name AS CHAR(1000)) as path + c.name::text as path FROM categories c WHERE c.parent_id IS NULL @@ -79,7 +79,7 @@ router.get('/profit', async (req, res) => { c.cat_id, c.name, c.parent_id, - CONCAT(cp.path, ' > ', c.name) + cp.path || ' > ' || c.name FROM categories c JOIN category_path cp ON c.parent_id = cp.cat_id ) @@ -88,53 +88,46 @@ router.get('/profit', async (req, res) => { cp.path as categoryPath, ROUND( (SUM(o.price * o.quantity - p.cost_price * o.quantity) / - NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 + NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1 ) as profitMargin, - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue, - CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost + ROUND(SUM(o.price * o.quantity)::numeric, 3) as revenue, + ROUND(SUM(p.cost_price * o.quantity)::numeric, 3) as cost FROM products p LEFT JOIN orders o ON p.pid = o.pid JOIN product_categories pc ON p.pid = pc.pid JOIN categories c ON pc.cat_id = c.cat_id JOIN category_path cp ON c.cat_id = cp.cat_id - WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + WHERE o.date >= CURRENT_DATE - INTERVAL '30 days' GROUP BY c.name, cp.path ORDER BY profitMargin DESC LIMIT 10 `); // Get profit margin trend over time - const [overTime] = await pool.query(` + const { rows: overTime } = await pool.query(` SELECT - formatted_date as date, + to_char(o.date, 'YYYY-MM-DD') as date, ROUND( (SUM(o.price * o.quantity - p.cost_price * o.quantity) / - NULLIF(SUM(o.price 
* o.quantity), 0)) * 100, 1 + NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1 ) as profitMargin, - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue, - CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost + ROUND(SUM(o.price * o.quantity)::numeric, 3) as revenue, + ROUND(SUM(p.cost_price * o.quantity)::numeric, 3) as cost FROM products p LEFT JOIN orders o ON p.pid = o.pid - CROSS JOIN ( - SELECT DATE_FORMAT(o.date, '%Y-%m-%d') as formatted_date - FROM orders o - WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) - GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d') - ) dates - WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) - AND DATE_FORMAT(o.date, '%Y-%m-%d') = dates.formatted_date - GROUP BY formatted_date - ORDER BY formatted_date + WHERE o.date >= CURRENT_DATE - INTERVAL '30 days' + GROUP BY to_char(o.date, 'YYYY-MM-DD') + ORDER BY date `); // Get top performing products with category paths - const [topProducts] = await pool.query(` + const { rows: topProducts } = await pool.query(` WITH RECURSIVE category_path AS ( SELECT c.cat_id, c.name, c.parent_id, - CAST(c.name AS CHAR(1000)) as path + c.name::text as path FROM categories c WHERE c.parent_id IS NULL @@ -144,7 +137,7 @@ router.get('/profit', async (req, res) => { c.cat_id, c.name, c.parent_id, - CONCAT(cp.path, ' > ', c.name) + cp.path || ' > ' || c.name FROM categories c JOIN category_path cp ON c.parent_id = cp.cat_id ) @@ -154,18 +147,18 @@ router.get('/profit', async (req, res) => { cp.path as categoryPath, ROUND( (SUM(o.price * o.quantity - p.cost_price * o.quantity) / - NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 + NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1 ) as profitMargin, - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue, - CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost + ROUND(SUM(o.price * o.quantity)::numeric, 3) as revenue, + ROUND(SUM(p.cost_price * o.quantity)::numeric, 3) as cost FROM products p LEFT JOIN orders o ON p.pid = o.pid JOIN product_categories pc ON p.pid = pc.pid JOIN categories c ON pc.cat_id = c.cat_id JOIN category_path cp ON c.cat_id = cp.cat_id - WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + WHERE o.date >= CURRENT_DATE - INTERVAL '30 days' GROUP BY p.pid, p.title, c.name, cp.path - HAVING revenue > 0 + HAVING SUM(o.price * o.quantity) > 0 ORDER BY profitMargin DESC LIMIT 10 `); @@ -185,7 +178,7 @@ router.get('/vendors', async (req, res) => { console.log('Fetching vendor performance data...'); // First check if we have any vendors with sales - const [checkData] = await pool.query(` + const { rows: [checkData] } = await pool.query(` SELECT COUNT(DISTINCT p.vendor) as vendor_count, COUNT(DISTINCT o.order_number) as order_count FROM products p @@ -193,39 +186,39 @@ router.get('/vendors', async (req, res) => { WHERE p.vendor IS NOT NULL `); - console.log('Vendor data check:', checkData[0]); + console.log('Vendor data check:', checkData); // Get vendor performance metrics - const [performance] = await pool.query(` + const { rows: performance } = await pool.query(` WITH monthly_sales AS ( SELECT p.vendor, - CAST(SUM(CASE - WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + ROUND(SUM(CASE + WHEN o.date >= CURRENT_DATE - INTERVAL '30 days' THEN o.price * o.quantity ELSE 0 - END) AS DECIMAL(15,3)) as current_month, - CAST(SUM(CASE - WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY) - AND o.date < DATE_SUB(CURDATE(), INTERVAL 30 DAY) + END)::numeric, 3) as current_month, + ROUND(SUM(CASE + WHEN o.date >= CURRENT_DATE - 
INTERVAL '60 days' + AND o.date < CURRENT_DATE - INTERVAL '30 days' THEN o.price * o.quantity ELSE 0 - END) AS DECIMAL(15,3)) as previous_month + END)::numeric, 3) as previous_month FROM products p LEFT JOIN orders o ON p.pid = o.pid WHERE p.vendor IS NOT NULL - AND o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY) + AND o.date >= CURRENT_DATE - INTERVAL '60 days' GROUP BY p.vendor ) SELECT p.vendor, - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as salesVolume, + ROUND(SUM(o.price * o.quantity)::numeric, 3) as salesVolume, COALESCE(ROUND( (SUM(o.price * o.quantity - p.cost_price * o.quantity) / - NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 + NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1 ), 0) as profitMargin, COALESCE(ROUND( - SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0), 1 + (SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0))::numeric, 1 ), 0) as stockTurnover, COUNT(DISTINCT p.pid) as productCount, ROUND( @@ -236,7 +229,7 @@ router.get('/vendors', async (req, res) => { LEFT JOIN orders o ON p.pid = o.pid LEFT JOIN monthly_sales ms ON p.vendor = ms.vendor WHERE p.vendor IS NOT NULL - AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + AND o.date >= CURRENT_DATE - INTERVAL '30 days' GROUP BY p.vendor, ms.current_month, ms.previous_month ORDER BY salesVolume DESC LIMIT 10 @@ -244,45 +237,7 @@ router.get('/vendors', async (req, res) => { console.log('Performance data:', performance); - // Get vendor comparison data - const [comparison] = await pool.query(` - SELECT - p.vendor, - CAST(COALESCE(ROUND(SUM(o.price * o.quantity) / NULLIF(COUNT(DISTINCT p.pid), 0), 2), 0) AS DECIMAL(15,3)) as salesPerProduct, - COALESCE(ROUND(AVG((o.price - p.cost_price) / NULLIF(o.price, 0) * 100), 1), 0) as averageMargin, - COUNT(DISTINCT p.pid) as size - FROM products p - LEFT JOIN orders o ON p.pid = o.pid AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) - WHERE p.vendor IS NOT NULL - GROUP BY p.vendor - ORDER BY salesPerProduct DESC - LIMIT 20 - `); - - console.log('Comparison data:', comparison); - - // Get vendor sales trends - const [trends] = await pool.query(` - SELECT - p.vendor, - DATE_FORMAT(o.date, '%b %Y') as month, - CAST(COALESCE(SUM(o.price * o.quantity), 0) AS DECIMAL(15,3)) as sales - FROM products p - LEFT JOIN orders o ON p.pid = o.pid - WHERE p.vendor IS NOT NULL - AND o.date >= DATE_SUB(CURDATE(), INTERVAL 6 MONTH) - GROUP BY - p.vendor, - DATE_FORMAT(o.date, '%b %Y'), - DATE_FORMAT(o.date, '%Y-%m') - ORDER BY - p.vendor, - DATE_FORMAT(o.date, '%Y-%m') - `); - - console.log('Trends data:', trends); - - res.json({ performance, comparison, trends }); + res.json({ performance }); } catch (error) { console.error('Error fetching vendor performance:', error); res.status(500).json({ error: 'Failed to fetch vendor performance' }); diff --git a/inventory-server/src/routes/categories.js b/inventory-server/src/routes/categories.js index 8bdb7f7..dd8a5b8 100644 --- a/inventory-server/src/routes/categories.js +++ b/inventory-server/src/routes/categories.js @@ -6,7 +6,7 @@ router.get('/', async (req, res) => { const pool = req.app.locals.pool; try { // Get all categories with metrics and hierarchy info - const [categories] = await pool.query(` + const { rows: categories } = await pool.query(` SELECT c.cat_id, c.name, @@ -18,7 +18,7 @@ router.get('/', async (req, res) => { p.type as parent_type, COALESCE(cm.product_count, 0) as product_count, COALESCE(cm.active_products, 0) as active_products, - CAST(COALESCE(cm.total_value, 0) AS DECIMAL(15,3)) as total_value, + 
ROUND(COALESCE(cm.total_value, 0)::numeric, 3) as total_value, COALESCE(cm.avg_margin, 0) as avg_margin, COALESCE(cm.turnover_rate, 0) as turnover_rate, COALESCE(cm.growth_rate, 0) as growth_rate @@ -39,22 +39,22 @@ router.get('/', async (req, res) => { `); // Get overall stats - const [stats] = await pool.query(` + const { rows: [stats] } = await pool.query(` SELECT COUNT(DISTINCT c.cat_id) as totalCategories, COUNT(DISTINCT CASE WHEN c.status = 'active' THEN c.cat_id END) as activeCategories, - CAST(COALESCE(SUM(cm.total_value), 0) AS DECIMAL(15,3)) as totalValue, - COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0)), 1), 0) as avgMargin, - COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0)), 1), 0) as avgGrowth + ROUND(COALESCE(SUM(cm.total_value), 0)::numeric, 3) as totalValue, + COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0))::numeric, 1), 0) as avgMargin, + COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0))::numeric, 1), 0) as avgGrowth FROM categories c LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id `); // Get type counts for filtering - const [typeCounts] = await pool.query(` + const { rows: typeCounts } = await pool.query(` SELECT type, - COUNT(*) as count + COUNT(*)::integer as count FROM categories GROUP BY type ORDER BY type @@ -81,14 +81,14 @@ router.get('/', async (req, res) => { })), typeCounts: typeCounts.map(tc => ({ type: tc.type, - count: parseInt(tc.count) + count: tc.count // Already cast to integer in the query })), stats: { - totalCategories: parseInt(stats[0].totalCategories), - activeCategories: parseInt(stats[0].activeCategories), - totalValue: parseFloat(stats[0].totalValue), - avgMargin: parseFloat(stats[0].avgMargin), - avgGrowth: parseFloat(stats[0].avgGrowth) + totalCategories: parseInt(stats.totalcategories), + activeCategories: parseInt(stats.activecategories), + totalValue: parseFloat(stats.totalvalue), + avgMargin: parseFloat(stats.avgmargin), + avgGrowth: parseFloat(stats.avggrowth) } }); } catch (error) { diff --git a/inventory-server/src/routes/config.js b/inventory-server/src/routes/config.js index 2d7f881..7e730c7 100644 --- a/inventory-server/src/routes/config.js +++ b/inventory-server/src/routes/config.js @@ -13,22 +13,22 @@ router.get('/', async (req, res) => { try { console.log('[Config Route] Fetching configuration values...'); - const [stockThresholds] = await pool.query('SELECT * FROM stock_thresholds WHERE id = 1'); + const { rows: stockThresholds } = await pool.query('SELECT * FROM stock_thresholds WHERE id = 1'); console.log('[Config Route] Stock thresholds:', stockThresholds); - const [leadTimeThresholds] = await pool.query('SELECT * FROM lead_time_thresholds WHERE id = 1'); + const { rows: leadTimeThresholds } = await pool.query('SELECT * FROM lead_time_thresholds WHERE id = 1'); console.log('[Config Route] Lead time thresholds:', leadTimeThresholds); - const [salesVelocityConfig] = await pool.query('SELECT * FROM sales_velocity_config WHERE id = 1'); + const { rows: salesVelocityConfig } = await pool.query('SELECT * FROM sales_velocity_config WHERE id = 1'); console.log('[Config Route] Sales velocity config:', salesVelocityConfig); - const [abcConfig] = await pool.query('SELECT * FROM abc_classification_config WHERE id = 1'); + const { rows: abcConfig } = await pool.query('SELECT * FROM abc_classification_config WHERE id = 1'); console.log('[Config Route] ABC config:', abcConfig); - const [safetyStockConfig] = await pool.query('SELECT * FROM safety_stock_config WHERE id = 1'); + const { rows: safetyStockConfig } = await pool.query('SELECT * FROM 
safety_stock_config WHERE id = 1'); console.log('[Config Route] Safety stock config:', safetyStockConfig); - const [turnoverConfig] = await pool.query('SELECT * FROM turnover_config WHERE id = 1'); + const { rows: turnoverConfig } = await pool.query('SELECT * FROM turnover_config WHERE id = 1'); console.log('[Config Route] Turnover config:', turnoverConfig); const response = { @@ -53,14 +53,14 @@ router.put('/stock-thresholds/:id', async (req, res) => { const pool = req.app.locals.pool; try { const { critical_days, reorder_days, overstock_days, low_stock_threshold, min_reorder_quantity } = req.body; - const [result] = await pool.query( + const { rows } = await pool.query( `UPDATE stock_thresholds - SET critical_days = ?, - reorder_days = ?, - overstock_days = ?, - low_stock_threshold = ?, - min_reorder_quantity = ? - WHERE id = ?`, + SET critical_days = $1, + reorder_days = $2, + overstock_days = $3, + low_stock_threshold = $4, + min_reorder_quantity = $5 + WHERE id = $6`, [critical_days, reorder_days, overstock_days, low_stock_threshold, min_reorder_quantity, req.params.id] ); res.json({ success: true }); @@ -75,12 +75,12 @@ router.put('/lead-time-thresholds/:id', async (req, res) => { const pool = req.app.locals.pool; try { const { target_days, warning_days, critical_days } = req.body; - const [result] = await pool.query( + const { rows } = await pool.query( `UPDATE lead_time_thresholds - SET target_days = ?, - warning_days = ?, - critical_days = ? - WHERE id = ?`, + SET target_days = $1, + warning_days = $2, + critical_days = $3 + WHERE id = $4`, [target_days, warning_days, critical_days, req.params.id] ); res.json({ success: true }); @@ -95,12 +95,12 @@ router.put('/sales-velocity/:id', async (req, res) => { const pool = req.app.locals.pool; try { const { daily_window_days, weekly_window_days, monthly_window_days } = req.body; - const [result] = await pool.query( + const { rows } = await pool.query( `UPDATE sales_velocity_config - SET daily_window_days = ?, - weekly_window_days = ?, - monthly_window_days = ? - WHERE id = ?`, + SET daily_window_days = $1, + weekly_window_days = $2, + monthly_window_days = $3 + WHERE id = $4`, [daily_window_days, weekly_window_days, monthly_window_days, req.params.id] ); res.json({ success: true }); @@ -115,12 +115,12 @@ router.put('/abc-classification/:id', async (req, res) => { const pool = req.app.locals.pool; try { const { a_threshold, b_threshold, classification_period_days } = req.body; - const [result] = await pool.query( + const { rows } = await pool.query( `UPDATE abc_classification_config - SET a_threshold = ?, - b_threshold = ?, - classification_period_days = ? - WHERE id = ?`, + SET a_threshold = $1, + b_threshold = $2, + classification_period_days = $3 + WHERE id = $4`, [a_threshold, b_threshold, classification_period_days, req.params.id] ); res.json({ success: true }); @@ -135,11 +135,11 @@ router.put('/safety-stock/:id', async (req, res) => { const pool = req.app.locals.pool; try { const { coverage_days, service_level } = req.body; - const [result] = await pool.query( + const { rows } = await pool.query( `UPDATE safety_stock_config - SET coverage_days = ?, - service_level = ? 
- WHERE id = ?`, + SET coverage_days = $1, + service_level = $2 + WHERE id = $3`, [coverage_days, service_level, req.params.id] ); res.json({ success: true }); @@ -154,11 +154,11 @@ router.put('/turnover/:id', async (req, res) => { const pool = req.app.locals.pool; try { const { calculation_period_days, target_rate } = req.body; - const [result] = await pool.query( + const { rows } = await pool.query( `UPDATE turnover_config - SET calculation_period_days = ?, - target_rate = ? - WHERE id = ?`, + SET calculation_period_days = $1, + target_rate = $2 + WHERE id = $3`, [calculation_period_days, target_rate, req.params.id] ); res.json({ success: true }); diff --git a/inventory-server/src/routes/csv.js b/inventory-server/src/routes/csv.js index d0dfd06..ce916f9 100644 --- a/inventory-server/src/routes/csv.js +++ b/inventory-server/src/routes/csv.js @@ -750,8 +750,16 @@ router.post('/full-reset', async (req, res) => { router.get('/history/import', async (req, res) => { try { const pool = req.app.locals.pool; - const [rows] = await pool.query(` - SELECT * FROM import_history + const { rows } = await pool.query(` + SELECT + id, + start_time, + end_time, + status, + error_message, + rows_processed::integer, + files_processed::integer + FROM import_history ORDER BY start_time DESC LIMIT 20 `); @@ -766,8 +774,16 @@ router.get('/history/import', async (req, res) => { router.get('/history/calculate', async (req, res) => { try { const pool = req.app.locals.pool; - const [rows] = await pool.query(` - SELECT * FROM calculate_history + const { rows } = await pool.query(` + SELECT + id, + start_time, + end_time, + status, + error_message, + modules_processed::integer, + total_modules::integer + FROM calculate_history ORDER BY start_time DESC LIMIT 20 `); @@ -782,8 +798,10 @@ router.get('/history/calculate', async (req, res) => { router.get('/status/modules', async (req, res) => { try { const pool = req.app.locals.pool; - const [rows] = await pool.query(` - SELECT module_name, last_calculation_timestamp + const { rows } = await pool.query(` + SELECT + module_name, + last_calculation_timestamp::timestamp FROM calculate_status ORDER BY module_name `); @@ -798,8 +816,10 @@ router.get('/status/modules', async (req, res) => { router.get('/status/tables', async (req, res) => { try { const pool = req.app.locals.pool; - const [rows] = await pool.query(` - SELECT table_name, last_sync_timestamp + const { rows } = await pool.query(` + SELECT + table_name, + last_sync_timestamp::timestamp FROM sync_status ORDER BY table_name `); diff --git a/inventory-server/src/routes/dashboard.js b/inventory-server/src/routes/dashboard.js index 4db5db2..845a3ab 100644 --- a/inventory-server/src/routes/dashboard.js +++ b/inventory-server/src/routes/dashboard.js @@ -19,16 +19,15 @@ async function executeQuery(sql, params = []) { router.get('/stock/metrics', async (req, res) => { try { // Get stock metrics - const [rows] = await executeQuery(` + const { rows: [stockMetrics] } = await executeQuery(` SELECT - COALESCE(COUNT(*), 0) as total_products, - COALESCE(COUNT(CASE WHEN stock_quantity > 0 THEN 1 END), 0) as products_in_stock, - COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity END), 0) as total_units, - COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * cost_price END), 0) as total_cost, - COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * price END), 0) as total_retail + COALESCE(COUNT(*), 0)::integer as total_products, + COALESCE(COUNT(CASE WHEN stock_quantity > 0 THEN 1 END), 0)::integer as 
products_in_stock, + COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity END), 0)::integer as total_units, + ROUND(COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * cost_price END), 0)::numeric, 3) as total_cost, + ROUND(COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * price END), 0)::numeric, 3) as total_retail FROM products `); - const stockMetrics = rows[0]; console.log('Raw stockMetrics from database:', stockMetrics); console.log('stockMetrics.total_products:', stockMetrics.total_products); @@ -38,26 +37,26 @@ router.get('/stock/metrics', async (req, res) => { console.log('stockMetrics.total_retail:', stockMetrics.total_retail); // Get brand stock values with Other category - const [brandValues] = await executeQuery(` + const { rows: brandValues } = await executeQuery(` WITH brand_totals AS ( SELECT COALESCE(brand, 'Unbranded') as brand, - COUNT(DISTINCT pid) as variant_count, - COALESCE(SUM(stock_quantity), 0) as stock_units, - CAST(COALESCE(SUM(stock_quantity * cost_price), 0) AS DECIMAL(15,3)) as stock_cost, - CAST(COALESCE(SUM(stock_quantity * price), 0) AS DECIMAL(15,3)) as stock_retail + COUNT(DISTINCT pid)::integer as variant_count, + COALESCE(SUM(stock_quantity), 0)::integer as stock_units, + ROUND(COALESCE(SUM(stock_quantity * cost_price), 0)::numeric, 3) as stock_cost, + ROUND(COALESCE(SUM(stock_quantity * price), 0)::numeric, 3) as stock_retail FROM products WHERE stock_quantity > 0 GROUP BY COALESCE(brand, 'Unbranded') - HAVING stock_cost > 0 + HAVING ROUND(COALESCE(SUM(stock_quantity * cost_price), 0)::numeric, 3) > 0 ), other_brands AS ( SELECT 'Other' as brand, - SUM(variant_count) as variant_count, - SUM(stock_units) as stock_units, - CAST(SUM(stock_cost) AS DECIMAL(15,3)) as stock_cost, - CAST(SUM(stock_retail) AS DECIMAL(15,3)) as stock_retail + SUM(variant_count)::integer as variant_count, + SUM(stock_units)::integer as stock_units, + ROUND(SUM(stock_cost)::numeric, 3) as stock_cost, + ROUND(SUM(stock_retail)::numeric, 3) as stock_retail FROM brand_totals WHERE stock_cost <= 5000 ), @@ -101,51 +100,50 @@ router.get('/stock/metrics', async (req, res) => { // Returns purchase order metrics by vendor router.get('/purchase/metrics', async (req, res) => { try { - const [rows] = await executeQuery(` + const { rows: [poMetrics] } = await executeQuery(` SELECT COALESCE(COUNT(DISTINCT CASE - WHEN po.receiving_status < ${ReceivingStatus.PartialReceived} + WHEN po.receiving_status < $1 THEN po.po_id - END), 0) as active_pos, + END), 0)::integer as active_pos, COALESCE(COUNT(DISTINCT CASE - WHEN po.receiving_status < ${ReceivingStatus.PartialReceived} - AND po.expected_date < CURDATE() + WHEN po.receiving_status < $1 + AND po.expected_date < CURRENT_DATE THEN po.po_id - END), 0) as overdue_pos, + END), 0)::integer as overdue_pos, COALESCE(SUM(CASE - WHEN po.receiving_status < ${ReceivingStatus.PartialReceived} + WHEN po.receiving_status < $1 THEN po.ordered ELSE 0 - END), 0) as total_units, - CAST(COALESCE(SUM(CASE - WHEN po.receiving_status < ${ReceivingStatus.PartialReceived} + END), 0)::integer as total_units, + ROUND(COALESCE(SUM(CASE + WHEN po.receiving_status < $1 THEN po.ordered * po.cost_price ELSE 0 - END), 0) AS DECIMAL(15,3)) as total_cost, - CAST(COALESCE(SUM(CASE - WHEN po.receiving_status < ${ReceivingStatus.PartialReceived} + END), 0)::numeric, 3) as total_cost, + ROUND(COALESCE(SUM(CASE + WHEN po.receiving_status < $1 THEN po.ordered * p.price ELSE 0 - END), 0) AS DECIMAL(15,3)) as total_retail + END), 0)::numeric, 3) as 
total_retail FROM purchase_orders po JOIN products p ON po.pid = p.pid - `); - const poMetrics = rows[0]; + `, [ReceivingStatus.PartialReceived]); - const [vendorOrders] = await executeQuery(` + const { rows: vendorOrders } = await executeQuery(` SELECT po.vendor, - COUNT(DISTINCT po.po_id) as orders, - COALESCE(SUM(po.ordered), 0) as units, - CAST(COALESCE(SUM(po.ordered * po.cost_price), 0) AS DECIMAL(15,3)) as cost, - CAST(COALESCE(SUM(po.ordered * p.price), 0) AS DECIMAL(15,3)) as retail + COUNT(DISTINCT po.po_id)::integer as orders, + COALESCE(SUM(po.ordered), 0)::integer as units, + ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) as cost, + ROUND(COALESCE(SUM(po.ordered * p.price), 0)::numeric, 3) as retail FROM purchase_orders po JOIN products p ON po.pid = p.pid - WHERE po.receiving_status < ${ReceivingStatus.PartialReceived} + WHERE po.receiving_status < $1 GROUP BY po.vendor - HAVING cost > 0 + HAVING ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) > 0 ORDER BY cost DESC - `); + `, [ReceivingStatus.PartialReceived]); // Format response to match PurchaseMetricsData interface const response = { @@ -175,21 +173,21 @@ router.get('/purchase/metrics', async (req, res) => { router.get('/replenishment/metrics', async (req, res) => { try { // Get summary metrics - const [metrics] = await executeQuery(` + const { rows: [metrics] } = await executeQuery(` SELECT - COUNT(DISTINCT p.pid) as products_to_replenish, + COUNT(DISTINCT p.pid)::integer as products_to_replenish, COALESCE(SUM(CASE WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty ELSE pm.reorder_qty - END), 0) as total_units_needed, - CAST(COALESCE(SUM(CASE + END), 0)::integer as total_units_needed, + ROUND(COALESCE(SUM(CASE WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price ELSE pm.reorder_qty * p.cost_price - END), 0) AS DECIMAL(15,3)) as total_cost, - CAST(COALESCE(SUM(CASE + END), 0)::numeric, 3) as total_cost, + ROUND(COALESCE(SUM(CASE WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price ELSE pm.reorder_qty * p.price - END), 0) AS DECIMAL(15,3)) as total_retail + END), 0)::numeric, 3) as total_retail FROM products p JOIN product_metrics pm ON p.pid = pm.pid WHERE p.replenishable = true @@ -199,23 +197,23 @@ router.get('/replenishment/metrics', async (req, res) => { `); // Get top variants to replenish - const [variants] = await executeQuery(` + const { rows: variants } = await executeQuery(` SELECT p.pid, p.title, - p.stock_quantity as current_stock, + p.stock_quantity::integer as current_stock, CASE WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty ELSE pm.reorder_qty - END as replenish_qty, - CAST(CASE + END::integer as replenish_qty, + ROUND(CASE WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price ELSE pm.reorder_qty * p.cost_price - END AS DECIMAL(15,3)) as replenish_cost, - CAST(CASE + END::numeric, 3) as replenish_cost, + ROUND(CASE WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price ELSE pm.reorder_qty * p.price - END AS DECIMAL(15,3)) as replenish_retail, + END::numeric, 3) as replenish_retail, pm.stock_status FROM products p JOIN product_metrics pm ON p.pid = pm.pid @@ -234,10 +232,10 @@ router.get('/replenishment/metrics', async (req, res) => { // Format response const response = { - productsToReplenish: parseInt(metrics[0].products_to_replenish) || 0, - unitsToReplenish: parseInt(metrics[0].total_units_needed) || 0, - 
replenishmentCost: parseFloat(metrics[0].total_cost) || 0, - replenishmentRetail: parseFloat(metrics[0].total_retail) || 0, + productsToReplenish: parseInt(metrics.products_to_replenish) || 0, + unitsToReplenish: parseInt(metrics.total_units_needed) || 0, + replenishmentCost: parseFloat(metrics.total_cost) || 0, + replenishmentRetail: parseFloat(metrics.total_retail) || 0, topVariants: variants.map(v => ({ id: v.pid, title: v.title, diff --git a/inventory-server/src/routes/metrics.js b/inventory-server/src/routes/metrics.js index 4b7d0d2..64d9f2d 100644 --- a/inventory-server/src/routes/metrics.js +++ b/inventory-server/src/routes/metrics.js @@ -5,26 +5,28 @@ const router = express.Router(); router.get('/trends', async (req, res) => { const pool = req.app.locals.pool; try { - const [rows] = await pool.query(` + const { rows } = await pool.query(` WITH MonthlyMetrics AS ( SELECT - DATE(CONCAT(pta.year, '-', LPAD(pta.month, 2, '0'), '-01')) as date, - CAST(COALESCE(SUM(pta.total_revenue), 0) AS DECIMAL(15,3)) as revenue, - CAST(COALESCE(SUM(pta.total_cost), 0) AS DECIMAL(15,3)) as cost, - CAST(COALESCE(SUM(pm.inventory_value), 0) AS DECIMAL(15,3)) as inventory_value, + make_date(pta.year, pta.month, 1) as date, + ROUND(COALESCE(SUM(pta.total_revenue), 0)::numeric, 3) as revenue, + ROUND(COALESCE(SUM(pta.total_cost), 0)::numeric, 3) as cost, + ROUND(COALESCE(SUM(pm.inventory_value), 0)::numeric, 3) as inventory_value, CASE WHEN SUM(pm.inventory_value) > 0 - THEN CAST((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value)) * 100 AS DECIMAL(15,3)) + THEN ROUND((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value) * 100)::numeric, 3) ELSE 0 END as gmroi FROM product_time_aggregates pta JOIN product_metrics pm ON pta.pid = pm.pid - WHERE (pta.year * 100 + pta.month) >= DATE_FORMAT(DATE_SUB(CURDATE(), INTERVAL 12 MONTH), '%Y%m') + WHERE (pta.year * 100 + pta.month) >= + EXTRACT(YEAR FROM CURRENT_DATE - INTERVAL '12 months')::integer * 100 + + EXTRACT(MONTH FROM CURRENT_DATE - INTERVAL '12 months')::integer GROUP BY pta.year, pta.month ORDER BY date ASC ) SELECT - DATE_FORMAT(date, '%b %y') as date, + to_char(date, 'Mon YY') as date, revenue, inventory_value, gmroi diff --git a/inventory-server/src/routes/orders.js b/inventory-server/src/routes/orders.js index d34e87b..b109111 100644 --- a/inventory-server/src/routes/orders.js +++ b/inventory-server/src/routes/orders.js @@ -20,39 +20,46 @@ router.get('/', async (req, res) => { // Build the WHERE clause const conditions = ['o1.canceled = false']; const params = []; + let paramCounter = 1; if (search) { - conditions.push('(o1.order_number LIKE ? 
OR o1.customer LIKE ?)'); - params.push(`%${search}%`, `%${search}%`); + conditions.push(`(o1.order_number ILIKE $${paramCounter} OR o1.customer ILIKE $${paramCounter})`); + params.push(`%${search}%`); + paramCounter++; } if (status !== 'all') { - conditions.push('o1.status = ?'); + conditions.push(`o1.status = $${paramCounter}`); params.push(status); + paramCounter++; } if (fromDate) { - conditions.push('DATE(o1.date) >= DATE(?)'); + conditions.push(`DATE(o1.date) >= DATE($${paramCounter})`); params.push(fromDate.toISOString()); + paramCounter++; } if (toDate) { - conditions.push('DATE(o1.date) <= DATE(?)'); + conditions.push(`DATE(o1.date) <= DATE($${paramCounter})`); params.push(toDate.toISOString()); + paramCounter++; } if (minAmount > 0) { - conditions.push('total_amount >= ?'); + conditions.push(`total_amount >= $${paramCounter}`); params.push(minAmount); + paramCounter++; } if (maxAmount) { - conditions.push('total_amount <= ?'); + conditions.push(`total_amount <= $${paramCounter}`); params.push(maxAmount); + paramCounter++; } // Get total count for pagination - const [countResult] = await pool.query(` + const { rows: [countResult] } = await pool.query(` SELECT COUNT(DISTINCT o1.order_number) as total FROM orders o1 LEFT JOIN ( @@ -63,7 +70,7 @@ router.get('/', async (req, res) => { WHERE ${conditions.join(' AND ')} `, params); - const total = countResult[0].total; + const total = countResult.total; // Get paginated results const query = ` @@ -75,7 +82,7 @@ router.get('/', async (req, res) => { o1.payment_method, o1.shipping_method, COUNT(o2.pid) as items_count, - CAST(SUM(o2.price * o2.quantity) AS DECIMAL(15,3)) as total_amount + ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount FROM orders o1 JOIN orders o2 ON o1.order_number = o2.order_number WHERE ${conditions.join(' AND ')} @@ -91,36 +98,37 @@ router.get('/', async (req, res) => { ? `${sortColumn} ${sortDirection}` : `o1.${sortColumn} ${sortDirection}` } - LIMIT ? OFFSET ? 
+ LIMIT $${paramCounter} OFFSET $${paramCounter + 1} `; - const [rows] = await pool.query(query, [...params, limit, offset]); + params.push(limit, offset); + const { rows } = await pool.query(query, params); // Get order statistics - const [stats] = await pool.query(` + const { rows: [orderStats] } = await pool.query(` WITH CurrentStats AS ( SELECT COUNT(DISTINCT order_number) as total_orders, - CAST(SUM(price * quantity) AS DECIMAL(15,3)) as total_revenue + ROUND(SUM(price * quantity)::numeric, 3) as total_revenue FROM orders WHERE canceled = false - AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days' ), PreviousStats AS ( SELECT COUNT(DISTINCT order_number) as prev_orders, - CAST(SUM(price * quantity) AS DECIMAL(15,3)) as prev_revenue + ROUND(SUM(price * quantity)::numeric, 3) as prev_revenue FROM orders WHERE canceled = false - AND DATE(date) BETWEEN DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND DATE_SUB(CURDATE(), INTERVAL 30 DAY) + AND DATE(date) BETWEEN CURRENT_DATE - INTERVAL '60 days' AND CURRENT_DATE - INTERVAL '30 days' ), OrderValues AS ( SELECT order_number, - CAST(SUM(price * quantity) AS DECIMAL(15,3)) as order_value + ROUND(SUM(price * quantity)::numeric, 3) as order_value FROM orders WHERE canceled = false - AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) + AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days' GROUP BY order_number ) SELECT @@ -128,29 +136,27 @@ router.get('/', async (req, res) => { cs.total_revenue, CASE WHEN ps.prev_orders > 0 - THEN ((cs.total_orders - ps.prev_orders) / ps.prev_orders * 100) + THEN ROUND(((cs.total_orders - ps.prev_orders)::numeric / ps.prev_orders * 100), 1) ELSE 0 END as order_growth, CASE WHEN ps.prev_revenue > 0 - THEN ((cs.total_revenue - ps.prev_revenue) / ps.prev_revenue * 100) + THEN ROUND(((cs.total_revenue - ps.prev_revenue)::numeric / ps.prev_revenue * 100), 1) ELSE 0 END as revenue_growth, CASE WHEN cs.total_orders > 0 - THEN CAST((cs.total_revenue / cs.total_orders) AS DECIMAL(15,3)) + THEN ROUND((cs.total_revenue::numeric / cs.total_orders), 3) ELSE 0 END as average_order_value, CASE WHEN ps.prev_orders > 0 - THEN CAST((ps.prev_revenue / ps.prev_orders) AS DECIMAL(15,3)) + THEN ROUND((ps.prev_revenue::numeric / ps.prev_orders), 3) ELSE 0 END as prev_average_order_value FROM CurrentStats cs CROSS JOIN PreviousStats ps `); - - const orderStats = stats[0]; res.json({ orders: rows.map(row => ({ @@ -189,7 +195,7 @@ router.get('/:orderNumber', async (req, res) => { const pool = req.app.locals.pool; try { // Get order details - const [orderRows] = await pool.query(` + const { rows: orderRows } = await pool.query(` SELECT DISTINCT o1.order_number, o1.customer, @@ -200,10 +206,10 @@ router.get('/:orderNumber', async (req, res) => { o1.shipping_address, o1.billing_address, COUNT(o2.pid) as items_count, - CAST(SUM(o2.price * o2.quantity) AS DECIMAL(15,3)) as total_amount + ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount FROM orders o1 JOIN orders o2 ON o1.order_number = o2.order_number - WHERE o1.order_number = ? 
AND o1.canceled = false + WHERE o1.order_number = $1 AND o1.canceled = false GROUP BY o1.order_number, o1.customer, @@ -220,17 +226,17 @@ router.get('/:orderNumber', async (req, res) => { } // Get order items - const [itemRows] = await pool.query(` + const { rows: itemRows } = await pool.query(` SELECT o.pid, p.title, p.SKU, o.quantity, o.price, - CAST((o.price * o.quantity) AS DECIMAL(15,3)) as total + ROUND((o.price * o.quantity)::numeric, 3) as total FROM orders o JOIN products p ON o.pid = p.pid - WHERE o.order_number = ? AND o.canceled = false + WHERE o.order_number = $1 AND o.canceled = false `, [req.params.orderNumber]); const order = { diff --git a/inventory-server/src/routes/products.js b/inventory-server/src/routes/products.js index 94441ff..e3f9630 100755 --- a/inventory-server/src/routes/products.js +++ b/inventory-server/src/routes/products.js @@ -20,7 +20,7 @@ router.get('/brands', async (req, res) => { const pool = req.app.locals.pool; console.log('Fetching brands from database...'); - const [results] = await pool.query(` + const { rows } = await pool.query(` SELECT DISTINCT COALESCE(p.brand, 'Unbranded') as brand FROM products p JOIN purchase_orders po ON p.pid = po.pid @@ -30,8 +30,8 @@ router.get('/brands', async (req, res) => { ORDER BY COALESCE(p.brand, 'Unbranded') `); - console.log(`Found ${results.length} brands:`, results.slice(0, 3)); - res.json(results.map(r => r.brand)); + console.log(`Found ${rows.length} brands:`, rows.slice(0, 3)); + res.json(rows.map(r => r.brand)); } catch (error) { console.error('Error fetching brands:', error); res.status(500).json({ error: 'Failed to fetch brands' }); @@ -50,6 +50,7 @@ router.get('/', async (req, res) => { const conditions = ['p.visible = true']; const params = []; + let paramCounter = 1; // Add default replenishable filter unless explicitly showing non-replenishable if (req.query.showNonReplenishable !== 'true') { @@ -58,9 +59,10 @@ router.get('/', async (req, res) => { // Handle search filter if (req.query.search) { - conditions.push('(p.title LIKE ? OR p.SKU LIKE ? OR p.barcode LIKE ?)'); + conditions.push(`(p.title ILIKE $${paramCounter} OR p.SKU ILIKE $${paramCounter} OR p.barcode ILIKE $${paramCounter})`); const searchTerm = `%${req.query.search}%`; - params.push(searchTerm, searchTerm, searchTerm); + params.push(searchTerm); + paramCounter++; } // Handle numeric filters with operators @@ -84,61 +86,69 @@ router.get('/', async (req, res) => { if (field) { const operator = req.query[`${key}_operator`] || '='; if (operator === 'between') { - // Handle between operator try { const [min, max] = JSON.parse(value); - conditions.push(`${field} BETWEEN ? 
AND ?`); + conditions.push(`${field} BETWEEN $${paramCounter} AND $${paramCounter + 1}`); params.push(min, max); + paramCounter += 2; } catch (e) { console.error(`Invalid between value for ${key}:`, value); } } else { - // Handle other operators - conditions.push(`${field} ${operator} ?`); + conditions.push(`${field} ${operator} $${paramCounter}`); params.push(parseFloat(value)); + paramCounter++; } } }); // Handle select filters if (req.query.vendor) { - conditions.push('p.vendor = ?'); + conditions.push(`p.vendor = $${paramCounter}`); params.push(req.query.vendor); + paramCounter++; } if (req.query.brand) { - conditions.push('p.brand = ?'); + conditions.push(`p.brand = $${paramCounter}`); params.push(req.query.brand); + paramCounter++; } if (req.query.category) { - conditions.push('p.categories LIKE ?'); + conditions.push(`p.categories ILIKE $${paramCounter}`); params.push(`%${req.query.category}%`); + paramCounter++; } if (req.query.stockStatus && req.query.stockStatus !== 'all') { - conditions.push('pm.stock_status = ?'); + conditions.push(`pm.stock_status = $${paramCounter}`); params.push(req.query.stockStatus); + paramCounter++; } if (req.query.abcClass) { - conditions.push('pm.abc_class = ?'); + conditions.push(`pm.abc_class = $${paramCounter}`); params.push(req.query.abcClass); + paramCounter++; } if (req.query.leadTimeStatus) { - conditions.push('pm.lead_time_status = ?'); + conditions.push(`pm.lead_time_status = $${paramCounter}`); params.push(req.query.leadTimeStatus); + paramCounter++; } if (req.query.replenishable !== undefined) { - conditions.push('p.replenishable = ?'); - params.push(req.query.replenishable === 'true' ? 1 : 0); + conditions.push(`p.replenishable = $${paramCounter}`); + params.push(req.query.replenishable === 'true'); + paramCounter++; } if (req.query.managingStock !== undefined) { - conditions.push('p.managing_stock = ?'); - params.push(req.query.managingStock === 'true' ? 
1 : 0); + conditions.push(`p.managing_stock = $${paramCounter}`); + params.push(req.query.managingStock === 'true'); + paramCounter++; } // Combine all conditions with AND @@ -151,17 +161,17 @@ router.get('/', async (req, res) => { LEFT JOIN product_metrics pm ON p.pid = pm.pid ${whereClause} `; - const [countResult] = await pool.query(countQuery, params); - const total = countResult[0].total; + const { rows: [countResult] } = await pool.query(countQuery, params); + const total = countResult.total; // Get available filters - const [categories] = await pool.query( + const { rows: categories } = await pool.query( 'SELECT name FROM categories ORDER BY name' ); - const [vendors] = await pool.query( - 'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != "" ORDER BY vendor' + const { rows: vendors } = await pool.query( + 'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != \'\' ORDER BY vendor' ); - const [brands] = await pool.query( + const { rows: brands } = await pool.query( 'SELECT DISTINCT COALESCE(brand, \'Unbranded\') as brand FROM products WHERE visible = true ORDER BY brand' ); @@ -173,7 +183,7 @@ router.get('/', async (req, res) => { c.cat_id, c.name, c.parent_id, - CAST(c.name AS CHAR(1000)) as path + CAST(c.name AS text) as path FROM categories c WHERE c.parent_id IS NULL @@ -183,7 +193,7 @@ router.get('/', async (req, res) => { c.cat_id, c.name, c.parent_id, - CONCAT(cp.path, ' > ', c.name) + cp.path || ' > ' || c.name FROM categories c JOIN category_path cp ON c.parent_id = cp.cat_id ), @@ -210,7 +220,6 @@ router.get('/', async (req, res) => { FROM products p ), product_leaf_categories AS ( - -- Find categories that aren't parents to other categories for this product SELECT DISTINCT pc.cat_id FROM product_categories pc WHERE NOT EXISTS ( @@ -224,7 +233,7 @@ router.get('/', async (req, res) => { SELECT p.*, COALESCE(p.brand, 'Unbranded') as brand, - GROUP_CONCAT(DISTINCT CONCAT(c.cat_id, ':', c.name)) as categories, + string_agg(DISTINCT (c.cat_id || ':' || c.name), ',') as categories, pm.daily_sales_avg, pm.weekly_sales_avg, pm.monthly_sales_avg, @@ -247,83 +256,32 @@ router.get('/', async (req, res) => { pm.last_received_date, pm.abc_class, pm.stock_status, - pm.turnover_rate, - pm.current_lead_time, - pm.target_lead_time, - pm.lead_time_status, - pm.reorder_qty, - pm.overstocked_amt, - COALESCE(pm.days_of_inventory / NULLIF(pt.target_days, 0), 0) as stock_coverage_ratio + pm.turnover_rate FROM products p LEFT JOIN product_metrics pm ON p.pid = pm.pid LEFT JOIN product_categories pc ON p.pid = pc.pid LEFT JOIN categories c ON pc.cat_id = c.cat_id - LEFT JOIN product_thresholds pt ON p.pid = pt.pid - JOIN product_leaf_categories plc ON c.cat_id = plc.cat_id - ${whereClause ? 'WHERE ' + whereClause.substring(6) : ''} - GROUP BY p.pid + ${whereClause} + GROUP BY p.pid, pm.pid ORDER BY ${sortColumn} ${sortDirection} - LIMIT ? OFFSET ? + LIMIT $${paramCounter} OFFSET $${paramCounter + 1} `; - - // Add pagination params to the main query params - const queryParams = [...params, limit, offset]; - console.log('Query:', query.replace(/\s+/g, ' ')); - console.log('Params:', queryParams); - - const [rows] = await pool.query(query, queryParams); - - // Transform the results - const products = rows.map(row => ({ - ...row, - categories: row.categories ? row.categories.split(',') : [], - price: parseFloat(row.price), - cost_price: parseFloat(row.cost_price), - landing_cost_price: row.landing_cost_price ? 
parseFloat(row.landing_cost_price) : null, - stock_quantity: parseInt(row.stock_quantity), - daily_sales_avg: parseFloat(row.daily_sales_avg) || 0, - weekly_sales_avg: parseFloat(row.weekly_sales_avg) || 0, - monthly_sales_avg: parseFloat(row.monthly_sales_avg) || 0, - avg_quantity_per_order: parseFloat(row.avg_quantity_per_order) || 0, - number_of_orders: parseInt(row.number_of_orders) || 0, - first_sale_date: row.first_sale_date || null, - last_sale_date: row.last_sale_date || null, - days_of_inventory: parseFloat(row.days_of_inventory) || 0, - weeks_of_inventory: parseFloat(row.weeks_of_inventory) || 0, - reorder_point: parseFloat(row.reorder_point) || 0, - safety_stock: parseFloat(row.safety_stock) || 0, - avg_margin_percent: parseFloat(row.avg_margin_percent) || 0, - total_revenue: parseFloat(row.total_revenue) || 0, - inventory_value: parseFloat(row.inventory_value) || 0, - cost_of_goods_sold: parseFloat(row.cost_of_goods_sold) || 0, - gross_profit: parseFloat(row.gross_profit) || 0, - gmroi: parseFloat(row.gmroi) || 0, - avg_lead_time_days: parseFloat(row.avg_lead_time_days) || 0, - last_purchase_date: row.last_purchase_date || null, - last_received_date: row.last_received_date || null, - abc_class: row.abc_class || null, - stock_status: row.stock_status || null, - turnover_rate: parseFloat(row.turnover_rate) || 0, - current_lead_time: parseFloat(row.current_lead_time) || 0, - target_lead_time: parseFloat(row.target_lead_time) || 0, - lead_time_status: row.lead_time_status || null, - stock_coverage_ratio: parseFloat(row.stock_coverage_ratio) || 0, - reorder_qty: parseInt(row.reorder_qty) || 0, - overstocked_amt: parseInt(row.overstocked_amt) || 0 - })); + + params.push(limit, offset); + const { rows: products } = await pool.query(query, params); res.json({ products, pagination: { total, - currentPage: page, pages: Math.ceil(total / limit), + currentPage: page, limit }, filters: { - categories: categories.map(category => category.name), - vendors: vendors.map(vendor => vendor.vendor), - brands: brands.map(brand => brand.brand) + categories: categories.map(c => c.name), + vendors: vendors.map(v => v.vendor), + brands: brands.map(b => b.brand) } }); } catch (error) { diff --git a/inventory-server/src/routes/purchase-orders.js b/inventory-server/src/routes/purchase-orders.js index 78d1bc8..87a42df 100644 --- a/inventory-server/src/routes/purchase-orders.js +++ b/inventory-server/src/routes/purchase-orders.js @@ -29,40 +29,46 @@ router.get('/', async (req, res) => { let whereClause = '1=1'; const params = []; + let paramCounter = 1; if (search) { - whereClause += ' AND (po.po_id LIKE ? 
OR po.vendor LIKE ?)'; - params.push(`%${search}%`, `%${search}%`); + whereClause += ` AND (po.po_id ILIKE $${paramCounter} OR po.vendor ILIKE $${paramCounter})`; + params.push(`%${search}%`); + paramCounter++; } if (status && status !== 'all') { - whereClause += ' AND po.status = ?'; + whereClause += ` AND po.status = $${paramCounter}`; params.push(Number(status)); + paramCounter++; } if (vendor && vendor !== 'all') { - whereClause += ' AND po.vendor = ?'; + whereClause += ` AND po.vendor = $${paramCounter}`; params.push(vendor); + paramCounter++; } if (startDate) { - whereClause += ' AND po.date >= ?'; + whereClause += ` AND po.date >= $${paramCounter}`; params.push(startDate); + paramCounter++; } if (endDate) { - whereClause += ' AND po.date <= ?'; + whereClause += ` AND po.date <= $${paramCounter}`; params.push(endDate); + paramCounter++; } // Get filtered summary metrics - const [summary] = await pool.query(` + const { rows: [summary] } = await pool.query(` WITH po_totals AS ( SELECT po_id, SUM(ordered) as total_ordered, SUM(received) as total_received, - CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost + ROUND(SUM(ordered * cost_price)::numeric, 3) as total_cost FROM purchase_orders po WHERE ${whereClause} GROUP BY po_id @@ -72,26 +78,26 @@ router.get('/', async (req, res) => { SUM(total_ordered) as total_ordered, SUM(total_received) as total_received, ROUND( - SUM(total_received) / NULLIF(SUM(total_ordered), 0), 3 + (SUM(total_received)::numeric / NULLIF(SUM(total_ordered), 0)), 3 ) as fulfillment_rate, - CAST(SUM(total_cost) AS DECIMAL(15,3)) as total_value, - CAST(AVG(total_cost) AS DECIMAL(15,3)) as avg_cost + ROUND(SUM(total_cost)::numeric, 3) as total_value, + ROUND(AVG(total_cost)::numeric, 3) as avg_cost FROM po_totals `, params); // Get total count for pagination - const [countResult] = await pool.query(` + const { rows: [countResult] } = await pool.query(` SELECT COUNT(DISTINCT po_id) as total FROM purchase_orders po WHERE ${whereClause} `, params); - const total = countResult[0].total; + const total = countResult.total; const offset = (page - 1) * limit; const pages = Math.ceil(total / limit); // Get recent purchase orders - const [orders] = await pool.query(` + const { rows: orders } = await pool.query(` WITH po_totals AS ( SELECT po_id, @@ -101,10 +107,10 @@ router.get('/', async (req, res) => { receiving_status, COUNT(DISTINCT pid) as total_items, SUM(ordered) as total_quantity, - CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost, + ROUND(SUM(ordered * cost_price)::numeric, 3) as total_cost, SUM(received) as total_received, ROUND( - SUM(received) / NULLIF(SUM(ordered), 0), 3 + (SUM(received)::numeric / NULLIF(SUM(ordered), 0)), 3 ) as fulfillment_rate FROM purchase_orders po WHERE ${whereClause} @@ -113,7 +119,7 @@ router.get('/', async (req, res) => { SELECT po_id as id, vendor as vendor_name, - DATE_FORMAT(date, '%Y-%m-%d') as order_date, + to_char(date, 'YYYY-MM-DD') as order_date, status, receiving_status, total_items, @@ -124,21 +130,21 @@ router.get('/', async (req, res) => { FROM po_totals ORDER BY CASE - WHEN ? = 'order_date' THEN date - WHEN ? = 'vendor_name' THEN vendor - WHEN ? = 'total_cost' THEN CAST(total_cost AS DECIMAL(15,3)) - WHEN ? = 'total_received' THEN CAST(total_received AS DECIMAL(15,3)) - WHEN ? = 'total_items' THEN CAST(total_items AS SIGNED) - WHEN ? = 'total_quantity' THEN CAST(total_quantity AS SIGNED) - WHEN ? = 'fulfillment_rate' THEN CAST(fulfillment_rate AS DECIMAL(5,3)) - WHEN ? 
= 'status' THEN status
-           ELSE date END ${sortDirection === 'desc' ? 'DESC' : 'ASC'}
-      LIMIT ? OFFSET ?
-    `, [...params, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, Number(limit), offset]);
+          -- one CASE per column: PostgreSQL requires all branches of a single CASE to share one type
+          WHEN $${paramCounter} = 'order_date' THEN date END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'vendor_name' THEN vendor END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'total_cost' THEN total_cost END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'total_received' THEN total_received END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'total_items' THEN total_items END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'total_quantity' THEN total_quantity END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'fulfillment_rate' THEN fulfillment_rate END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        CASE WHEN $${paramCounter} = 'status' THEN status END ${sortDirection === 'desc' ? 'DESC' : 'ASC'},
+        date ${sortDirection === 'desc' ? 'DESC' : 'ASC'}
+      LIMIT $${paramCounter + 1} OFFSET $${paramCounter + 2}
+    `, [...params, sortColumn, Number(limit), offset]); // Get unique vendors for filter options - const [vendors] = await pool.query(` + const { rows: vendors } = await pool.query(` SELECT DISTINCT vendor FROM purchase_orders WHERE vendor IS NOT NULL AND vendor != '' @@ -146,7 +152,7 @@ `); // Get unique statuses for filter options - const [statuses] = await pool.query(` + const { rows: statuses } = await pool.query(` SELECT DISTINCT status FROM purchase_orders WHERE status IS NOT NULL @@ -169,12 +175,12 @@ // Parse summary metrics const parsedSummary = { - order_count: Number(summary[0].order_count) || 0, - total_ordered: Number(summary[0].total_ordered) || 0, - total_received: Number(summary[0].total_received) || 0, - fulfillment_rate: Number(summary[0].fulfillment_rate) || 0, - total_value: Number(summary[0].total_value) || 0, - avg_cost: Number(summary[0].avg_cost) || 0 + order_count: Number(summary.order_count) || 0, + total_ordered: Number(summary.total_ordered) || 0, + total_received: Number(summary.total_received) || 0, + fulfillment_rate: Number(summary.fulfillment_rate) || 0, + total_value: Number(summary.total_value) || 0, + avg_cost: Number(summary.avg_cost) || 0 }; res.json({ @@ -202,7 +208,7 @@ try { const pool = req.app.locals.pool; - const [metrics] = await pool.query(` + const { rows: metrics } = await pool.query(` WITH delivery_metrics AS ( SELECT vendor, @@ -213,7 +219,7 @@ CASE WHEN status >= ${STATUS.RECEIVING_STARTED} AND receiving_status >= ${RECEIVING_STATUS.PARTIAL_RECEIVED} AND received_date IS NOT NULL AND date IS NOT NULL - THEN DATEDIFF(received_date, date) + THEN (received_date - date)::integer ELSE NULL END as delivery_days FROM purchase_orders @@ -226,18 +232,18 @@ SUM(ordered) as total_ordered, SUM(received) as total_received, ROUND( - SUM(received) / NULLIF(SUM(ordered), 0), 3 + (SUM(received)::numeric / NULLIF(SUM(ordered), 0)), 3 ) as fulfillment_rate, - CAST(ROUND( - SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2 - ) AS DECIMAL(15,3)) as avg_unit_cost, - CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend, ROUND( - AVG(NULLIF(delivery_days, 0)), 1 + (SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2 + ) as avg_unit_cost, + ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend, + ROUND( + AVG(NULLIF(delivery_days, 0))::numeric, 1 ) as avg_delivery_days FROM delivery_metrics GROUP BY vendor - HAVING total_orders > 0 + HAVING COUNT(DISTINCT po_id) > 0 ORDER BY total_spend DESC `); @@ -251,7 +257,7 @@ 
fulfillment_rate: Number(vendor.fulfillment_rate) || 0, avg_unit_cost: Number(vendor.avg_unit_cost) || 0, total_spend: Number(vendor.total_spend) || 0, - avg_delivery_days: vendor.avg_delivery_days === null ? null : Number(vendor.avg_delivery_days) + avg_delivery_days: Number(vendor.avg_delivery_days) || 0 })); res.json(parsedMetrics); diff --git a/inventory-server/src/routes/templates.ts b/inventory-server/src/routes/templates.js similarity index 96% rename from inventory-server/src/routes/templates.ts rename to inventory-server/src/routes/templates.js index 91d6442..6deb3cf 100644 --- a/inventory-server/src/routes/templates.ts +++ b/inventory-server/src/routes/templates.js @@ -1,11 +1,11 @@ -import { Router } from 'express'; -import { Pool } from 'pg'; -import dotenv from 'dotenv'; -import path from 'path'; +const express = require('express'); +const { Pool } = require('pg'); +const dotenv = require('dotenv'); +const path = require('path'); dotenv.config({ path: path.join(__dirname, "../../.env") }); -const router = Router(); +const router = express.Router(); // Initialize PostgreSQL connection pool const pool = new Pool({ @@ -261,7 +261,7 @@ router.delete('/:id', async (req, res) => { }); // Error handling middleware -router.use((err: Error, req: any, res: any, next: any) => { +router.use((err, req, res, next) => { console.error('Template route error:', err); res.status(500).json({ error: 'Internal server error', @@ -269,4 +269,4 @@ router.use((err: Error, req: any, res: any, next: any) => { }); }); -export default router; \ No newline at end of file +module.exports = router; \ No newline at end of file diff --git a/inventory-server/src/routes/vendors.js b/inventory-server/src/routes/vendors.js index 9ecbab1..caa7bb9 100644 --- a/inventory-server/src/routes/vendors.js +++ b/inventory-server/src/routes/vendors.js @@ -6,7 +6,7 @@ router.get('/', async (req, res) => { const pool = req.app.locals.pool; try { // Get all vendors with metrics - const [vendors] = await pool.query(` + const { rows: vendors } = await pool.query(` SELECT DISTINCT p.vendor as name, COALESCE(vm.active_products, 0) as active_products, @@ -26,16 +26,16 @@ router.get('/', async (req, res) => { // Get cost metrics for all vendors const vendorNames = vendors.map(v => v.name); - const [costMetrics] = await pool.query(` + const { rows: costMetrics } = await pool.query(` SELECT vendor, - CAST(ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) AS DECIMAL(15,3)) as avg_unit_cost, - CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend + ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost, + ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend FROM purchase_orders WHERE status = 'closed' AND cost_price IS NOT NULL AND ordered > 0 - AND vendor IN (?) 
+ AND vendor = ANY($1) GROUP BY vendor `, [vendorNames]); @@ -49,26 +49,26 @@ router.get('/', async (req, res) => { }, {}); // Get overall stats - const [stats] = await pool.query(` + const { rows: [stats] } = await pool.query(` SELECT COUNT(DISTINCT p.vendor) as totalVendors, COUNT(DISTINCT CASE WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75 THEN p.vendor END) as activeVendors, - COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0)), 1), 0) as avgLeadTime, - COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0)), 1), 0) as avgFillRate, - COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0)), 1), 0) as avgOnTimeDelivery + COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0))::numeric, 1), 0) as avgLeadTime, + COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0))::numeric, 1), 0) as avgFillRate, + COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0))::numeric, 1), 0) as avgOnTimeDelivery FROM products p LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor WHERE p.vendor IS NOT NULL AND p.vendor != '' `); // Get overall cost metrics - const [overallCostMetrics] = await pool.query(` + const { rows: [overallCostMetrics] } = await pool.query(` SELECT - CAST(ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) AS DECIMAL(15,3)) as avg_unit_cost, - CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend + ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost, + ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend FROM purchase_orders WHERE status = 'closed' AND cost_price IS NOT NULL @@ -90,13 +90,13 @@ router.get('/', async (req, res) => { total_spend: parseFloat(costMetricsMap[vendor.name]?.total_spend || 0) })), stats: { - totalVendors: parseInt(stats[0].totalVendors), - activeVendors: parseInt(stats[0].activeVendors), - avgLeadTime: parseFloat(stats[0].avgLeadTime), - avgFillRate: parseFloat(stats[0].avgFillRate), - avgOnTimeDelivery: parseFloat(stats[0].avgOnTimeDelivery), - avgUnitCost: parseFloat(overallCostMetrics[0].avg_unit_cost), - totalSpend: parseFloat(overallCostMetrics[0].total_spend) + totalVendors: parseInt(stats.totalvendors), + activeVendors: parseInt(stats.activevendors), + avgLeadTime: parseFloat(stats.avgleadtime), + avgFillRate: parseFloat(stats.avgfillrate), + avgOnTimeDelivery: parseFloat(stats.avgontimedelivery), + avgUnitCost: parseFloat(overallCostMetrics.avg_unit_cost), + totalSpend: parseFloat(overallCostMetrics.total_spend) } }); } catch (error) { diff --git a/inventory-server/src/server.js b/inventory-server/src/server.js index 075f2bf..bf0c9e9 100755 --- a/inventory-server/src/server.js +++ b/inventory-server/src/server.js @@ -3,7 +3,6 @@ const cors = require('cors'); const { spawn } = require('child_process'); const path = require('path'); const fs = require('fs'); -const mysql = require('mysql2/promise'); const { corsMiddleware, corsErrorHandler } = require('./middleware/cors'); const { initPool } = require('./utils/db'); const productsRouter = require('./routes/products'); @@ -16,14 +15,12 @@ const configRouter = require('./routes/config'); const metricsRouter = require('./routes/metrics'); const vendorsRouter = require('./routes/vendors'); const categoriesRouter = require('./routes/categories'); -const testConnectionRouter = require('./routes/test-connection'); const importRouter = require('./routes/import'); const aiValidationRouter = require('./routes/ai-validation'); const templatesRouter = require('./routes/templates'); // Get the absolute path to the .env file -const 
envPath = path.resolve(process.cwd(), '.env'); -console.log('Current working directory:', process.cwd()); +const envPath = path.join(__dirname, '..', '.env'); console.log('Looking for .env file at:', envPath); console.log('.env file exists:', fs.existsSync(envPath)); @@ -36,6 +33,10 @@ try { DB_HOST: process.env.DB_HOST || 'not set', DB_USER: process.env.DB_USER || 'not set', DB_NAME: process.env.DB_NAME || 'not set', + DB_PASSWORD: process.env.DB_PASSWORD ? '[password set]' : 'not set', + DB_PORT: process.env.DB_PORT || 'not set', + DB_SSL: process.env.DB_SSL || 'not set', + OPENAI_API_KEY: process.env.OPENAI_API_KEY ? '[key set]' : 'not set' }); } catch (error) { console.error('Error loading .env file:', error); @@ -71,13 +72,19 @@ app.use(express.urlencoded({ extended: true, limit: '10mb' })); // Initialize database pool and start server async function startServer() { try { - // Initialize database pool + // Initialize database pool with PostgreSQL configuration const pool = await initPool({ - waitForConnections: true, - connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10, - queueLimit: 0, - enableKeepAlive: true, - keepAliveInitialDelay: 0 + host: process.env.DB_HOST, + port: parseInt(process.env.DB_PORT || '5432', 10), + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + max: process.env.NODE_ENV === 'production' ? 20 : 10, + idleTimeoutMillis: 30000, + connectionTimeoutMillis: 2000, + ssl: process.env.DB_SSL === 'true' ? { + rejectUnauthorized: false + } : false }); // Make pool available to routes @@ -97,14 +104,14 @@ async function startServer() { app.use('/api/import', importRouter); app.use('/api/ai-validation', aiValidationRouter); app.use('/api/templates', templatesRouter); - app.use('/api', testConnectionRouter); // Basic health check route app.get('/health', (req, res) => { res.json({ status: 'ok', timestamp: new Date().toISOString(), - environment: process.env.NODE_ENV + environment: process.env.NODE_ENV, + database: 'connected' }); }); diff --git a/inventory-server/src/utils/db.js b/inventory-server/src/utils/db.js index 653ebc8..d2dd88f 100644 --- a/inventory-server/src/utils/db.js +++ b/inventory-server/src/utils/db.js @@ -1,65 +1,40 @@ -const mysql = require('mysql2/promise'); -const { Client } = require('ssh2'); +const { Pool } = require('pg'); +const { Client: SSHClient } = require('ssh2'); let pool; -async function setupSshTunnel() { - const sshConfig = { - host: process.env.PROD_SSH_HOST, - port: process.env.PROD_SSH_PORT || 22, - username: process.env.PROD_SSH_USER, - privateKey: process.env.PROD_SSH_KEY_PATH - ? require('fs').readFileSync(process.env.PROD_SSH_KEY_PATH) - : undefined, - compress: true - }; - - return new Promise((resolve, reject) => { - const ssh = new Client(); - - ssh.on('error', (err) => { - console.error('SSH connection error:', err); - reject(err); - }); - - ssh.on('ready', () => { - ssh.forwardOut( - '127.0.0.1', - 0, - process.env.PROD_DB_HOST || 'localhost', - process.env.PROD_DB_PORT || 3306, - (err, stream) => { - if (err) reject(err); - resolve({ ssh, stream }); - } - ); - }).connect(sshConfig); - }); -} - async function initPool(config) { + // Log config without sensitive data + const safeConfig = { + host: config.host, + user: config.user, + database: config.database, + port: config.port, + max: config.max, + idleTimeoutMillis: config.idleTimeoutMillis, + connectionTimeoutMillis: config.connectionTimeoutMillis, + ssl: config.ssl, + password: config.password ? 
'[password set]' : '[no password]' + }; + console.log('[Database] Initializing pool with config:', safeConfig); + try { - const tunnel = await setupSshTunnel(); - - pool = mysql.createPool({ - ...config, - stream: tunnel.stream, - host: process.env.PROD_DB_HOST || 'localhost', - user: process.env.PROD_DB_USER, - password: process.env.PROD_DB_PASSWORD, - database: process.env.PROD_DB_NAME, - port: process.env.PROD_DB_PORT || 3306 - }); + // Create the pool + pool = new Pool(config); // Test the connection - const connection = await pool.getConnection(); - console.log('[Database] Connected successfully through SSH tunnel'); - connection.release(); + const client = await pool.connect(); + try { + await client.query('SELECT NOW()'); + console.log('[Database] Pool connection test successful'); + } finally { + client.release(); + } return pool; - } catch (error) { - console.error('[Database] Error initializing pool:', error); - throw error; + } catch (err) { + console.error('[Database] Connection failed:', err); + throw err; } } @@ -67,11 +42,27 @@ async function getConnection() { if (!pool) { throw new Error('Database pool not initialized'); } - return pool.getConnection(); + return pool.connect(); +} + +// Helper function to execute a query with error handling +async function query(text, params = []) { + if (!pool) { + throw new Error('Database pool not initialized'); + } + + try { + const result = await pool.query(text, params); + return result; + } catch (err) { + console.error('[Database] Query error:', err); + throw err; + } } module.exports = { initPool, getConnection, - getPool: () => pool + getPool: () => pool, + query }; \ No newline at end of file diff --git a/inventory/src/components/settings/DataManagement.tsx b/inventory/src/components/settings/DataManagement.tsx index 1108948..2f5593f 100644 --- a/inventory/src/components/settings/DataManagement.tsx +++ b/inventory/src/components/settings/DataManagement.tsx @@ -85,9 +85,7 @@ export function DataManagement() { const [] = useState(null); const [eventSource, setEventSource] = useState(null); const [importHistory, setImportHistory] = useState([]); - const [calculateHistory, setCalculateHistory] = useState< - CalculateHistoryRecord[] - >([]); + const [calculateHistory, setCalculateHistory] = useState([]); const [moduleStatus, setModuleStatus] = useState([]); const [tableStatus, setTableStatus] = useState([]); const [scriptOutput, setScriptOutput] = useState([]); @@ -368,6 +366,10 @@ export function DataManagement() { fetch(`${config.apiUrl}/csv/status/tables`), ]); + if (!importRes.ok || !calcRes.ok || !moduleRes.ok || !tableRes.ok) { + throw new Error('One or more requests failed'); + } + const [importData, calcData, moduleData, tableData] = await Promise.all([ importRes.json(), calcRes.json(), @@ -375,52 +377,66 @@ export function DataManagement() { tableRes.json(), ]); - setImportHistory(importData); - setCalculateHistory(calcData); - setModuleStatus(moduleData); - setTableStatus(tableData); + // Ensure we're setting arrays even if the response is empty or invalid + setImportHistory(Array.isArray(importData) ? importData : []); + setCalculateHistory(Array.isArray(calcData) ? calcData : []); + setModuleStatus(Array.isArray(moduleData) ? moduleData : []); + setTableStatus(Array.isArray(tableData) ? 
tableData : []); } catch (error) { console.error("Error fetching history:", error); + // Set empty arrays as fallback + setImportHistory([]); + setCalculateHistory([]); + setModuleStatus([]); + setTableStatus([]); } }; const refreshTableStatus = async () => { try { const response = await fetch(`${config.apiUrl}/csv/status/tables`); + if (!response.ok) throw new Error('Failed to fetch table status'); const data = await response.json(); - setTableStatus(data); + setTableStatus(Array.isArray(data) ? data : []); } catch (error) { toast.error("Failed to refresh table status"); + setTableStatus([]); } }; const refreshModuleStatus = async () => { try { const response = await fetch(`${config.apiUrl}/csv/status/modules`); + if (!response.ok) throw new Error('Failed to fetch module status'); const data = await response.json(); - setModuleStatus(data); + setModuleStatus(Array.isArray(data) ? data : []); } catch (error) { toast.error("Failed to refresh module status"); + setModuleStatus([]); } }; const refreshImportHistory = async () => { try { const response = await fetch(`${config.apiUrl}/csv/history/import`); + if (!response.ok) throw new Error('Failed to fetch import history'); const data = await response.json(); - setImportHistory(data); + setImportHistory(Array.isArray(data) ? data : []); } catch (error) { toast.error("Failed to refresh import history"); + setImportHistory([]); } }; const refreshCalculateHistory = async () => { try { const response = await fetch(`${config.apiUrl}/csv/history/calculate`); + if (!response.ok) throw new Error('Failed to fetch calculate history'); const data = await response.json(); - setCalculateHistory(data); + setCalculateHistory(Array.isArray(data) ? data : []); } catch (error) { toast.error("Failed to refresh calculate history"); + setCalculateHistory([]); } }; diff --git a/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx b/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx index f2d8457..f6b0437 100644 --- a/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx +++ b/inventory/src/lib/react-spreadsheet-import/src/steps/ValidationStep/ValidationStep.tsx @@ -914,7 +914,7 @@ export const ValidationStep = ({ initialData, file, onBack }: const hasMultiValueRows = rowsWithValues.some(count => count > 1); // Filter out empty rows and rows with single values (if we have multi-value rows) - const nonEmptyRows = data.filter((row, index) => { + const nonEmptyRows = data.filter((_row, index) => { const nonEmptyCount = rowsWithValues[index]; // Keep the row if: diff --git a/inventory/src/pages/Login.tsx b/inventory/src/pages/Login.tsx index 024d33a..4d58cec 100644 --- a/inventory/src/pages/Login.tsx +++ b/inventory/src/pages/Login.tsx @@ -22,7 +22,7 @@ export function Login() { setIsLoading(true); try { - const url = isDev ? 
"/auth-inv/login" : `${config.authUrl}/login`; + const url = `${config.authUrl}/login`; console.log("Making login request:", { url, method: "POST", diff --git a/inventory/src/pages/PurchaseOrders.tsx b/inventory/src/pages/PurchaseOrders.tsx index 4dec90a..35b077d 100644 --- a/inventory/src/pages/PurchaseOrders.tsx +++ b/inventory/src/pages/PurchaseOrders.tsx @@ -22,7 +22,6 @@ import { import { motion } from 'motion/react'; import { PurchaseOrderStatus, - ReceivingStatus as ReceivingStatusCode, getPurchaseOrderStatusLabel, getReceivingStatusLabel, getPurchaseOrderStatusVariant, @@ -113,7 +112,7 @@ export default function PurchaseOrders() { statuses: string[]; }>({ vendors: [], - statuses: [], + statuses: [] }); const [pagination, setPagination] = useState({ total: 0, @@ -154,15 +153,57 @@ export default function PurchaseOrders() { fetch('/api/purchase-orders/cost-analysis') ]); - const [ - purchaseOrdersData, - vendorMetricsData, - costAnalysisData - ] = await Promise.all([ - purchaseOrdersRes.json() as Promise, - vendorMetricsRes.json(), - costAnalysisRes.json() - ]); + // Initialize default data + let purchaseOrdersData: PurchaseOrdersResponse = { + orders: [], + summary: { + order_count: 0, + total_ordered: 0, + total_received: 0, + fulfillment_rate: 0, + total_value: 0, + avg_cost: 0 + }, + pagination: { + total: 0, + pages: 0, + page: 1, + limit: 100 + }, + filters: { + vendors: [], + statuses: [] + } + }; + + let vendorMetricsData: VendorMetrics[] = []; + let costAnalysisData: CostAnalysis = { + unique_products: 0, + avg_cost: 0, + min_cost: 0, + max_cost: 0, + cost_variance: 0, + total_spend_by_category: [] + }; + + // Only try to parse responses if they were successful + if (purchaseOrdersRes.ok) { + purchaseOrdersData = await purchaseOrdersRes.json(); + } else { + console.error('Failed to fetch purchase orders:', await purchaseOrdersRes.text()); + } + + if (vendorMetricsRes.ok) { + vendorMetricsData = await vendorMetricsRes.json(); + } else { + console.error('Failed to fetch vendor metrics:', await vendorMetricsRes.text()); + } + + if (costAnalysisRes.ok) { + costAnalysisData = await costAnalysisRes.json(); + } else { + console.error('Failed to fetch cost analysis:', await costAnalysisRes.text()); + } setPurchaseOrders(purchaseOrdersData.orders); setPagination(purchaseOrdersData.pagination); @@ -172,6 +213,27 @@ export default function PurchaseOrders() { setCostAnalysis(costAnalysisData); } catch (error) { console.error('Error fetching data:', error); + // Set default values in case of error + setPurchaseOrders([]); + setPagination({ total: 0, pages: 0, page: 1, limit: 100 }); + setFilterOptions({ vendors: [], statuses: [] }); + setSummary({ + order_count: 0, + total_ordered: 0, + total_received: 0, + fulfillment_rate: 0, + total_value: 0, + avg_cost: 0 + }); + setVendorMetrics([]); + setCostAnalysis({ + unique_products: 0, + avg_cost: 0, + min_cost: 0, + max_cost: 0, + cost_variance: 0, + total_spend_by_category: [] + }); } finally { setLoading(false); } @@ -311,7 +373,7 @@ export default function PurchaseOrders() { All Vendors - {filterOptions.vendors.map(vendor => ( + {filterOptions?.vendors?.map(vendor => ( {vendor} diff --git a/inventory/src/types/products.ts b/inventory/src/types/products.ts index bdeba2c..634581f 100644 --- a/inventory/src/types/products.ts +++ b/inventory/src/types/products.ts @@ -3,10 +3,10 @@ export interface Product { title: string; SKU: string; stock_quantity: number; - price: string; // DECIMAL(15,3) - regular_price: string; // DECIMAL(15,3) - 
cost_price: string; // DECIMAL(15,3) - landing_cost_price: string | null; // DECIMAL(15,3) + price: string; // numeric(15,3) + regular_price: string; // numeric(15,3) + cost_price: string; // numeric(15,3) + landing_cost_price: string | null; // numeric(15,3) barcode: string; vendor: string; vendor_reference: string; @@ -24,32 +24,32 @@ export interface Product { updated_at: string; // Metrics - daily_sales_avg?: string; // DECIMAL(15,3) - weekly_sales_avg?: string; // DECIMAL(15,3) - monthly_sales_avg?: string; // DECIMAL(15,3) - avg_quantity_per_order?: string; // DECIMAL(15,3) + daily_sales_avg?: string; // numeric(15,3) + weekly_sales_avg?: string; // numeric(15,3) + monthly_sales_avg?: string; // numeric(15,3) + avg_quantity_per_order?: string; // numeric(15,3) number_of_orders?: number; first_sale_date?: string; last_sale_date?: string; last_purchase_date?: string; - days_of_inventory?: string; // DECIMAL(15,3) - weeks_of_inventory?: string; // DECIMAL(15,3) - reorder_point?: string; // DECIMAL(15,3) - safety_stock?: string; // DECIMAL(15,3) - avg_margin_percent?: string; // DECIMAL(15,3) - total_revenue?: string; // DECIMAL(15,3) - inventory_value?: string; // DECIMAL(15,3) - cost_of_goods_sold?: string; // DECIMAL(15,3) - gross_profit?: string; // DECIMAL(15,3) - gmroi?: string; // DECIMAL(15,3) - avg_lead_time_days?: string; // DECIMAL(15,3) + days_of_inventory?: string; // numeric(15,3) + weeks_of_inventory?: string; // numeric(15,3) + reorder_point?: string; // numeric(15,3) + safety_stock?: string; // numeric(15,3) + avg_margin_percent?: string; // numeric(15,3) + total_revenue?: string; // numeric(15,3) + inventory_value?: string; // numeric(15,3) + cost_of_goods_sold?: string; // numeric(15,3) + gross_profit?: string; // numeric(15,3) + gmroi?: string; // numeric(15,3) + avg_lead_time_days?: string; // numeric(15,3) last_received_date?: string; abc_class?: string; stock_status?: string; - turnover_rate?: string; // DECIMAL(15,3) - current_lead_time?: string; // DECIMAL(15,3) - target_lead_time?: string; // DECIMAL(15,3) + turnover_rate?: string; // numeric(15,3) + current_lead_time?: string; // numeric(15,3) + target_lead_time?: string; // numeric(15,3) lead_time_status?: string; reorder_qty?: number; - overstocked_amt?: string; // DECIMAL(15,3) + overstocked_amt?: string; // numeric(15,3) } diff --git a/inventory/tsconfig.tsbuildinfo b/inventory/tsconfig.tsbuildinfo index 9be69dd..2bbb564 100644 --- a/inventory/tsconfig.tsbuildinfo +++ b/inventory/tsconfig.tsbuildinfo @@ -1 +1 @@ 
-{"root":["./src/app.tsx","./src/config.ts","./src/main.tsx","./src/vite-env.d.ts","./src/components/analytics/categoryperformance.tsx","./src/components/analytics/priceanalysis.tsx","./src/components/analytics/profitanalysis.tsx","./src/components/analytics/stockanalysis.tsx","./src/components/analytics/vendorperformance.tsx","./src/components/auth/requireauth.tsx","./src/components/dashboard/bestsellers.tsx","./src/components/dashboard/forecastmetrics.tsx","./src/components/dashboard/inventoryhealthsummary.tsx","./src/components/dashboard/inventorystats.tsx","./src/components/dashboard/keymetricscharts.tsx","./src/components/dashboard/lowstockalerts.tsx","./src/components/dashboard/overstockmetrics.tsx","./src/components/dashboard/overview.tsx","./src/components/dashboard/purchasemetrics.tsx","./src/components/dashboard/recentsales.tsx","./src/components/dashboard/replenishmentmetrics.tsx","./src/components/dashboard/salesbycategory.tsx","./src/components/dashboard/salesmetrics.tsx","./src/components/dashboard/stockmetrics.tsx","./src/components/dashboard/topoverstockedproducts.tsx","./src/components/dashboard/topreplenishproducts.tsx","./src/components/dashboard/trendingproducts.tsx","./src/components/dashboard/vendorperformance.tsx","./src/components/forecasting/columns.tsx","./src/components/layout/appsidebar.tsx","./src/components/layout/mainlayout.tsx","./src/components/products/productdetail.tsx","./src/components/products/productfilters.tsx","./src/components/products/producttable.tsx","./src/components/products/producttableskeleton.tsx","./src/components/products/productviews.tsx","./src/components/settings/calculationsettings.tsx","./src/components/settings/configuration.tsx","./src/components/settings/datamanagement.tsx","./src/components/settings/performancemetrics.tsx","./src/components/settings/stockmanagement.tsx","./src/components/ui/accordion.tsx","./src/components/ui/alert-dialog.tsx","./src/components/ui/alert.tsx","./src/components/ui/avatar.tsx","./src/components/ui/badge.tsx","./src/components/ui/button.tsx","./src/components/ui/calendar.tsx","./src/components/ui/card.tsx","./src/components/ui/command.tsx","./src/components/ui/date-range-picker-narrow.tsx","./src/components/ui/date-range-picker.tsx","./src/components/ui/dialog.tsx","./src/components/ui/drawer.tsx","./src/components/ui/dropdown-menu.tsx","./src/components/ui/input.tsx","./src/components/ui/label.tsx","./src/components/ui/pagination.tsx","./src/components/ui/popover.tsx","./src/components/ui/progress.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/select.tsx","./src/components/ui/separator.tsx","./src/components/ui/sheet.tsx","./src/components/ui/sidebar.tsx","./src/components/ui/skeleton.tsx","./src/components/ui/sonner.tsx","./src/components/ui/switch.tsx","./src/components/ui/table.tsx","./src/components/ui/tabs.tsx","./src/components/ui/toggle-group.tsx","./src/components/ui/toggle.tsx","./src/components/ui/tooltip.tsx","./src/hooks/use-mobile.tsx","./src/lib/utils.ts","./src/pages/analytics.tsx","./src/pages/categories.tsx","./src/pages/dashboard.tsx","./src/pages/forecasting.tsx","./src/pages/login.tsx","./src/pages/orders.tsx","./src/pages/products.tsx","./src/pages/purchaseorders.tsx","./src/pages/settings.tsx","./src/pages/vendors.tsx","./src/routes/forecasting.tsx","./src/types/products.ts"],"version":"5.6.3"} \ No newline at end of file 
+{"root":["./src/app.tsx","./src/config.ts","./src/main.tsx","./src/vite-env.d.ts","./src/components/analytics/categoryperformance.tsx","./src/components/analytics/priceanalysis.tsx","./src/components/analytics/profitanalysis.tsx","./src/components/analytics/stockanalysis.tsx","./src/components/analytics/vendorperformance.tsx","./src/components/auth/requireauth.tsx","./src/components/dashboard/bestsellers.tsx","./src/components/dashboard/forecastmetrics.tsx","./src/components/dashboard/inventoryhealthsummary.tsx","./src/components/dashboard/inventorystats.tsx","./src/components/dashboard/keymetricscharts.tsx","./src/components/dashboard/lowstockalerts.tsx","./src/components/dashboard/overstockmetrics.tsx","./src/components/dashboard/overview.tsx","./src/components/dashboard/purchasemetrics.tsx","./src/components/dashboard/recentsales.tsx","./src/components/dashboard/replenishmentmetrics.tsx","./src/components/dashboard/salesbycategory.tsx","./src/components/dashboard/salesmetrics.tsx","./src/components/dashboard/stockmetrics.tsx","./src/components/dashboard/topoverstockedproducts.tsx","./src/components/dashboard/topreplenishproducts.tsx","./src/components/dashboard/trendingproducts.tsx","./src/components/dashboard/vendorperformance.tsx","./src/components/forecasting/columns.tsx","./src/components/layout/appsidebar.tsx","./src/components/layout/mainlayout.tsx","./src/components/products/productdetail.tsx","./src/components/products/productfilters.tsx","./src/components/products/producttable.tsx","./src/components/products/producttableskeleton.tsx","./src/components/products/productviews.tsx","./src/components/settings/calculationsettings.tsx","./src/components/settings/configuration.tsx","./src/components/settings/datamanagement.tsx","./src/components/settings/performancemetrics.tsx","./src/components/settings/stockmanagement.tsx","./src/components/ui/accordion.tsx","./src/components/ui/alert-dialog.tsx","./src/components/ui/alert.tsx","./src/components/ui/avatar.tsx","./src/components/ui/badge.tsx","./src/components/ui/button.tsx","./src/components/ui/calendar.tsx","./src/components/ui/card.tsx","./src/components/ui/command.tsx","./src/components/ui/date-range-picker-narrow.tsx","./src/components/ui/date-range-picker.tsx","./src/components/ui/dialog.tsx","./src/components/ui/drawer.tsx","./src/components/ui/dropdown-menu.tsx","./src/components/ui/input.tsx","./src/components/ui/label.tsx","./src/components/ui/pagination.tsx","./src/components/ui/popover.tsx","./src/components/ui/progress.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/select.tsx","./src/components/ui/separator.tsx","./src/components/ui/sheet.tsx","./src/components/ui/sidebar.tsx","./src/components/ui/skeleton.tsx","./src/components/ui/sonner.tsx","./src/components/ui/switch.tsx","./src/components/ui/table.tsx","./src/components/ui/tabs.tsx","./src/components/ui/toggle-group.tsx","./src/components/ui/toggle.tsx","./src/components/ui/tooltip.tsx","./src/hooks/use-mobile.tsx","./src/lib/utils.ts","./src/pages/analytics.tsx","./src/pages/categories.tsx","./src/pages/dashboard.tsx","./src/pages/forecasting.tsx","./src/pages/login.tsx","./src/pages/orders.tsx","./src/pages/products.tsx","./src/pages/purchaseorders.tsx","./src/pages/settings.tsx","./src/pages/vendors.tsx","./src/types/products.ts","./src/types/status-codes.ts"],"version":"5.6.3"} \ No newline at end of file diff --git a/mountremote.command b/mountremote.command index 3e3722f..431976e 100755 --- a/mountremote.command +++ b/mountremote.command @@ 
@@ -4,4 +4,5 @@
 umount '/Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server'
 
 #Mount
+sshfs matt@dashboard.kent.pw:/var/www/html/inventory -p 22122 '/Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server/'
 sshfs matt@dashboard.kent.pw:/var/www/html/inventory -p 22122 '/Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server/'
\ No newline at end of file
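
Reviewer note on the inventory/src/types/products.ts hunk above: the numeric(15,3) comments track the Postgres column type, and the fields stay typed as string rather than number because node-postgres returns numeric columns as strings to avoid IEEE-754 precision loss. A minimal TypeScript sketch of how a consumer might convert these fields; toNumeric, marginPercent, and the import path are illustrative assumptions, not part of this diff:

// Hypothetical helper module, not in the repo; the import path assumes
// the inventory app's src layout listed in tsconfig.tsbuildinfo above.
import type { Product } from './src/types/products';

// Parse a pg numeric(15,3) string into a JS number. Values with 15 total
// digits and 3 decimals fit within Number's 2^53 integer precision, so
// Number() is safe; returns null for NULL columns or unparseable input.
const toNumeric = (value: string | null | undefined): number | null => {
  if (value == null) return null;
  const n = Number(value);
  return Number.isFinite(n) ? n : null;
};

// Usage sketch: derive a margin percentage from the string-typed columns.
const marginPercent = (p: Product): number | null => {
  const price = toNumeric(p.price);
  const cost = toNumeric(p.cost_price);
  if (price === null || cost === null || price === 0) return null;
  return ((price - cost) / price) * 100;
};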