Compare commits
10 Commits
add-produc... (a8d3fd8033)

| SHA1 |
|---|
| a8d3fd8033 |
| 702b956ff1 |
| 9b8577f258 |
| 9623681a15 |
| cc22fd8c35 |
| 0ef1b6100e |
| a519746ccb |
| f29dd8ef8b |
| f2a5c06005 |
| fb9f959fe5 |
ecosystem.config.js

```diff
@@ -1,5 +1,209 @@
// ecosystem.config.js
const path = require('path');
const dotenv = require('dotenv');

// Load environment variables safely with error handling
const loadEnvFile = (envPath) => {
  try {
    console.log('Loading env from:', envPath);
    const result = dotenv.config({ path: envPath });
    if (result.error) {
      console.warn(`Warning: .env file not found or invalid at ${envPath}:`, result.error.message);
      return {};
    }
    console.log('Env variables loaded from', envPath, ':', Object.keys(result.parsed || {}));
    return result.parsed || {};
  } catch (error) {
    console.warn(`Warning: Error loading .env file at ${envPath}:`, error.message);
    return {};
  }
}

// Load environment variables for each server
const authEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/auth-server/.env'));
const aircallEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/aircall-server/.env'));
const klaviyoEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/klaviyo-server/.env'));
const metaEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/meta-server/.env'));
const googleAnalyticsEnv = require('dotenv').config({
  path: path.resolve(__dirname, 'dashboard/google-server/.env')
}).parsed || {};
const typeformEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/typeform-server/.env'));
const inventoryEnv = loadEnvFile(path.resolve(__dirname, 'inventory/.env'));

// Common log settings for all apps
const logSettings = {
  log_rotate: true,
  max_size: '10M',
  retain: '10',
  log_date_format: 'YYYY-MM-DD HH:mm:ss'
};

// Common app settings
const commonSettings = {
  instances: 1,
  exec_mode: 'fork',
  autorestart: true,
  watch: false,
  max_memory_restart: '1G',
  time: true,
  ...logSettings,
  ignore_watch: [
    'node_modules',
    'logs',
    '.git',
    '*.log'
  ],
  min_uptime: 5000,
  max_restarts: 5,
  restart_delay: 4000,
  listen_timeout: 50000,
  kill_timeout: 5000,
  node_args: '--max-old-space-size=1536'
};

module.exports = {
  apps: [
    {
      ...commonSettings,
      name: 'auth-server',
      script: './dashboard/auth-server/index.js',
      env: {
        NODE_ENV: 'production',
        PORT: 3003,
        ...authEnv
      },
      error_file: 'dashboard/auth-server/logs/pm2/err.log',
      out_file: 'dashboard/auth-server/logs/pm2/out.log',
      log_file: 'dashboard/auth-server/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        PORT: 3003
      },
      env_development: {
        NODE_ENV: 'development',
        PORT: 3003
      }
    },
    {
      ...commonSettings,
      name: 'aircall-server',
      script: './dashboard/aircall-server/server.js',
      env: {
        NODE_ENV: 'production',
        AIRCALL_PORT: 3002,
        ...aircallEnv
      },
      error_file: 'dashboard/aircall-server/logs/pm2/err.log',
      out_file: 'dashboard/aircall-server/logs/pm2/out.log',
      log_file: 'dashboard/aircall-server/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        AIRCALL_PORT: 3002
      }
    },
    {
      ...commonSettings,
      name: 'klaviyo-server',
      script: './dashboard/klaviyo-server/server.js',
      env: {
        NODE_ENV: 'production',
        KLAVIYO_PORT: 3004,
        ...klaviyoEnv
      },
      error_file: 'dashboard/klaviyo-server/logs/pm2/err.log',
      out_file: 'dashboard/klaviyo-server/logs/pm2/out.log',
      log_file: 'dashboard/klaviyo-server/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        KLAVIYO_PORT: 3004
      }
    },
    {
      ...commonSettings,
      name: 'meta-server',
      script: './dashboard/meta-server/server.js',
      env: {
        NODE_ENV: 'production',
        PORT: 3005,
        ...metaEnv
      },
      error_file: 'dashboard/meta-server/logs/pm2/err.log',
      out_file: 'dashboard/meta-server/logs/pm2/out.log',
      log_file: 'dashboard/meta-server/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        PORT: 3005
      }
    },
    {
      name: "gorgias-server",
      script: "./dashboard/gorgias-server/server.js",
      env: {
        NODE_ENV: "development",
        PORT: 3006
      },
      env_production: {
        NODE_ENV: "production",
        PORT: 3006
      },
      error_file: "dashboard/logs/gorgias-server-error.log",
      out_file: "dashboard/logs/gorgias-server-out.log",
      log_file: "dashboard/logs/gorgias-server-combined.log",
      time: true
    },
    {
      ...commonSettings,
      name: 'google-server',
      script: path.resolve(__dirname, 'dashboard/google-server/server.js'),
      watch: false,
      env: {
        NODE_ENV: 'production',
        GOOGLE_ANALYTICS_PORT: 3007,
        ...googleAnalyticsEnv
      },
      error_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/err.log'),
      out_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/out.log'),
      log_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/combined.log'),
      env_production: {
        NODE_ENV: 'production',
        GOOGLE_ANALYTICS_PORT: 3007
      }
    },
    {
      ...commonSettings,
      name: 'typeform-server',
      script: './dashboard/typeform-server/server.js',
      env: {
        NODE_ENV: 'production',
        TYPEFORM_PORT: 3008,
        ...typeformEnv
      },
      error_file: 'dashboard/typeform-server/logs/pm2/err.log',
      out_file: 'dashboard/typeform-server/logs/pm2/out.log',
      log_file: 'dashboard/typeform-server/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        TYPEFORM_PORT: 3008
      }
    },
    {
      ...commonSettings,
      name: 'inventory-server',
      script: './inventory/src/server.js',
      env: {
        NODE_ENV: 'production',
        PORT: 3010,
        ...inventoryEnv
      },
      error_file: 'inventory/logs/pm2/err.log',
      out_file: 'inventory/logs/pm2/out.log',
      log_file: 'inventory/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        PORT: 3010,
        ...inventoryEnv
      }
    },
    {
      ...commonSettings,
      name: 'new-auth-server',
@@ -7,16 +211,12 @@ module.exports = {
      env: {
        NODE_ENV: 'production',
        AUTH_PORT: 3011,
        ...inventoryEnv,
        JWT_SECRET: process.env.JWT_SECRET
      },
      error_file: 'inventory-server/auth/logs/pm2/err.log',
      out_file: 'inventory-server/auth/logs/pm2/out.log',
      log_file: 'inventory-server/auth/logs/pm2/combined.log',
      env_production: {
        NODE_ENV: 'production',
        AUTH_PORT: 3011,
        JWT_SECRET: process.env.JWT_SECRET
      }
-      log_file: 'inventory-server/auth/logs/pm2/combined.log'
    }
  ]
};
```
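The config above registers nine PM2 apps, each loading its own `.env`. As a quick sanity check before handing the file to PM2 — a minimal sketch, not part of this change, and the helper name is hypothetical — you can require the module directly and list what PM2 would see:

```js
// check-ecosystem.js — hypothetical helper, not in this PR.
// Loads ecosystem.config.js the same way PM2 does and prints each app's
// name, script, and the port variable its env block sets.
const config = require('./ecosystem.config.js');

for (const app of config.apps) {
  const env = app.env || {};
  const port = env.PORT || env.AUTH_PORT || env.AIRCALL_PORT ||
    env.KLAVIYO_PORT || env.GOOGLE_ANALYTICS_PORT || env.TYPEFORM_PORT;
  console.log(`${app.name}: ${app.script} (port ${port})`);
}
```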
inventory-server/auth/add-user.js (new file, 103 lines)

```js
// @@ -0,0 +1,103 @@
require('dotenv').config({ path: '../.env' });
const bcrypt = require('bcrypt');
const { Pool } = require('pg');
const inquirer = require('inquirer');

// Log connection details for debugging (remove in production)
console.log('Attempting to connect with:', {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  database: process.env.DB_NAME,
  port: process.env.DB_PORT
});

const pool = new Pool({
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: process.env.DB_PORT,
});

async function promptUser() {
  const questions = [
    {
      type: 'input',
      name: 'username',
      message: 'Enter username:',
      validate: (input) => {
        if (input.length < 3) {
          return 'Username must be at least 3 characters long';
        }
        return true;
      }
    },
    {
      type: 'password',
      name: 'password',
      message: 'Enter password:',
      mask: '*',
      validate: (input) => {
        if (input.length < 8) {
          return 'Password must be at least 8 characters long';
        }
        return true;
      }
    },
    {
      type: 'password',
      name: 'confirmPassword',
      message: 'Confirm password:',
      mask: '*',
      validate: (input, answers) => {
        if (input !== answers.password) {
          return 'Passwords do not match';
        }
        return true;
      }
    }
  ];

  return inquirer.prompt(questions);
}

async function addUser() {
  try {
    // Get user input
    const answers = await promptUser();
    const { username, password } = answers;

    // Hash password
    const saltRounds = 10;
    const hashedPassword = await bcrypt.hash(password, saltRounds);

    // Check if user already exists
    const checkResult = await pool.query(
      'SELECT id FROM users WHERE username = $1',
      [username]
    );

    if (checkResult.rows.length > 0) {
      console.error('Error: Username already exists');
      process.exit(1);
    }

    // Insert new user
    const result = await pool.query(
      'INSERT INTO users (username, password) VALUES ($1, $2) RETURNING id',
      [username, hashedPassword]
    );

    console.log(`User ${username} created successfully with id ${result.rows[0].id}`);
  } catch (error) {
    console.error('Error creating user:', error);
    console.error('Error details:', error.message);
    if (error.code) {
      console.error('Error code:', error.code);
    }
  } finally {
    await pool.end();
  }
}

addUser();
```
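`add-user.js` prompts for credentials via `inquirer` and stores a bcrypt hash in the `users` table. The complementary check — the same `bcrypt.compare` pattern `server.js` uses at login — would look roughly like this (a hypothetical companion script, assuming the same `.env` layout; it is not part of this PR):

```js
// verify-user.js — illustrative only, not part of this PR.
require('dotenv').config({ path: '../.env' });
const bcrypt = require('bcrypt');
const { Pool } = require('pg');

const pool = new Pool({
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: process.env.DB_PORT,
});

async function verify(username, candidate) {
  const { rows } = await pool.query(
    'SELECT password FROM users WHERE username = $1',
    [username]
  );
  // bcrypt.compare re-hashes the candidate with the salt embedded in the stored hash.
  const ok = rows.length > 0 && await bcrypt.compare(candidate, rows[0].password);
  console.log(ok ? 'Password matches' : 'No match');
  await pool.end();
}

verify(process.argv[2], process.argv[3]);
```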
inventory-server/auth/add_user.js (deleted, was 41 lines)

```js
// @@ -1,41 +0,0 @@
const bcrypt = require('bcrypt');
const mysql = require('mysql2/promise');
const readline = require('readline').createInterface({
  input: process.stdin,
  output: process.stdout,
});
require('dotenv').config({ path: '../.env' });

const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
};

async function addUser() {
  const username = await askQuestion('Enter username: ');
  const password = await askQuestion('Enter password: ');

  const hashedPassword = await bcrypt.hash(password, 10);

  const connection = await mysql.createConnection(dbConfig);

  try {
    await connection.query('INSERT INTO users (username, password) VALUES (?, ?)', [username, hashedPassword]);
    console.log(`User ${username} added successfully.`);
  } catch (error) {
    console.error('Error adding user:', error);
  } finally {
    connection.end();
    readline.close();
  }
}

function askQuestion(query) {
  return new Promise(resolve => readline.question(query, ans => {
    resolve(ans);
  }));
}

addUser();
```
inventory-server/auth/package-lock.json (generated, 878 lines)
File diff suppressed because it is too large
inventory-server/auth/package.json

```diff
@@ -1,21 +1,19 @@
 {
-  "name": "auth-server",
+  "name": "inventory-auth-server",
   "version": "1.0.0",
-  "description": "Authentication server for inventory management",
+  "description": "Authentication server for inventory management system",
   "main": "server.js",
   "scripts": {
-    "start": "node server.js",
-    "dev": "nodemon server.js",
-    "add_user": "node add_user.js"
+    "start": "node server.js"
   },
   "dependencies": {
     "bcrypt": "^5.1.1",
     "cors": "^2.8.5",
-    "dotenv": "^16.4.5",
+    "dotenv": "^16.4.7",
     "express": "^4.18.2",
-    "jsonwebtoken": "^9.0.2"
-  },
-  "devDependencies": {
-    "nodemon": "^3.1.0"
+    "inquirer": "^8.2.6",
+    "jsonwebtoken": "^9.0.2",
+    "morgan": "^1.10.0",
+    "pg": "^8.11.3"
   }
 }
```
users table DDL (MySQL → PostgreSQL):

```diff
@@ -1,6 +1,6 @@
-CREATE TABLE `users` (
-  `id` INT AUTO_INCREMENT PRIMARY KEY,
-  `username` VARCHAR(255) NOT NULL UNIQUE,
-  `password` VARCHAR(255) NOT NULL,
-  `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+CREATE TABLE users (
+  id SERIAL PRIMARY KEY,
+  username VARCHAR(255) NOT NULL UNIQUE,
+  password VARCHAR(255) NOT NULL,
+  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
 );
```
inventory-server/auth/server.js

```diff
@@ -1,135 +1,102 @@
+require('dotenv').config({ path: '../.env' });
 const express = require('express');
+const cors = require('cors');
 const bcrypt = require('bcrypt');
 const jwt = require('jsonwebtoken');
-const cors = require('cors');
-const mysql = require('mysql2/promise');
-require('dotenv').config({ path: '../.env' });
+const { Pool } = require('pg');
+const morgan = require('morgan');

+// Log startup configuration
+console.log('Starting auth server with config:', {
+  host: process.env.DB_HOST,
+  user: process.env.DB_USER,
+  database: process.env.DB_NAME,
+  port: process.env.DB_PORT,
+  auth_port: process.env.AUTH_PORT
+});

 const app = express();
-const PORT = process.env.AUTH_PORT || 3011;
+const port = process.env.AUTH_PORT || 3011;

-// Database configuration
-const dbConfig = {
+const pool = new Pool({
   host: process.env.DB_HOST,
   user: process.env.DB_USER,
   password: process.env.DB_PASSWORD,
   database: process.env.DB_NAME,
-};
+  port: process.env.DB_PORT,
+});

-// Create a connection pool
-const pool = mysql.createPool(dbConfig);

-app.use(cors({
-  origin: [
-    'https://inventory.kent.pw',
-    'http://localhost:5173',
-    'http://127.0.0.1:5173',
-    /^http:\/\/192\.168\.\d+\.\d+(:\d+)?$/,
-    /^http:\/\/10\.\d+\.\d+\.\d+(:\d+)?$/
-  ],
-  methods: ['GET', 'POST', 'OPTIONS'],
-  allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With'],
-  credentials: true,
-  exposedHeaders: ['set-cookie']
-}));
+// Middleware
 app.use(express.json());

-// Debug middleware to log request details
-app.use((req, res, next) => {
-  console.log('Request details:', {
-    method: req.method,
-    url: req.url,
-    origin: req.get('Origin'),
-    headers: req.headers,
-    body: req.body,
-  });
-  next();
-});

-// Registration endpoint
-app.post('/register', async (req, res) => {
-  try {
-    const { username, password } = req.body;
-    const hashedPassword = await bcrypt.hash(password, 10);
-
-    const connection = await pool.getConnection();
-    await connection.query('INSERT INTO users (username, password) VALUES (?, ?)', [username, hashedPassword]);
-    connection.release();
-
-    res.status(201).json({ message: 'User registered successfully' });
-  } catch (error) {
-    console.error('Registration error:', error);
-    res.status(500).json({ error: 'Registration failed' });
-  }
-});
+app.use(morgan('combined'));
+app.use(cors({
+  origin: ['http://localhost:5173', 'https://inventory.kent.pw'],
+  credentials: true
+}));

 // Login endpoint
 app.post('/login', async (req, res) => {
-  try {
   const { username, password } = req.body;
-  console.log(`Login attempt for user: ${username}`);
-
-  const connection = await pool.getConnection();
-  const [rows] = await connection.query(
-    'SELECT * FROM users WHERE username = ?',
-    [username],
+  try {
+    // Get user from database
+    const result = await pool.query(
+      'SELECT id, username, password FROM users WHERE username = $1',
+      [username]
     );
-  connection.release();
-
-  if (rows.length === 1) {
-    const user = rows[0];
-    const passwordMatch = await bcrypt.compare(password, user.password);
+    const user = result.rows[0];

-    if (passwordMatch) {
-      console.log(`User ${username} authenticated successfully`);
+    // Check if user exists and password is correct
+    if (!user || !(await bcrypt.compare(password, user.password))) {
+      return res.status(401).json({ error: 'Invalid username or password' });
+    }

     // Generate JWT token
     const token = jwt.sign(
-      { username: user.username },
+      { userId: user.id, username: user.username },
       process.env.JWT_SECRET,
-      { expiresIn: '1h' },
+      { expiresIn: '24h' }
     );

     res.json({ token });
-    } else {
-      console.error(`Invalid password for user: ${username}`);
-      res.status(401).json({ error: 'Invalid credentials' });
-    }
-  } else {
-    console.error(`User not found: ${username}`);
-    res.status(401).json({ error: 'Invalid credentials' });
-  }
   } catch (error) {
     console.error('Login error:', error);
-    res.status(500).json({ error: 'Login failed' });
+    res.status(500).json({ error: 'Internal server error' });
   }
 });

-// Protected endpoint example
+// Protected route to verify token
 app.get('/protected', async (req, res) => {
   const authHeader = req.headers.authorization;

   if (!authHeader) {
-    return res.status(401).json({ error: 'Unauthorized' });
+    return res.status(401).json({ error: 'No token provided' });
   }

-  const token = authHeader.split(' ')[1];
   try {
+    const token = authHeader.split(' ')[1];
     const decoded = jwt.verify(token, process.env.JWT_SECRET);

-    // Optionally, you can fetch the user from the database here
-    // to verify that the user still exists or to get more user information
-    const connection = await pool.getConnection();
-    const [rows] = await connection.query('SELECT * FROM users WHERE username = ?', [decoded.username]);
-    connection.release();
-
-    if (rows.length === 0) {
-      return res.status(401).json({ error: 'User not found' });
-    }
-
-    res.json({ message: 'Protected resource accessed', user: decoded });
+    res.json({ userId: decoded.userId, username: decoded.username });
   } catch (error) {
-    console.error('Protected endpoint error:', error);
-    res.status(403).json({ error: 'Invalid token' });
+    console.error('Token verification error:', error);
+    res.status(401).json({ error: 'Invalid token' });
   }
 });

-app.listen(PORT, "0.0.0.0", () => {
-  console.log(`Auth server running on port ${PORT}`);
+// Health check endpoint
+app.get('/health', (req, res) => {
+  res.json({ status: 'healthy' });
 });

+// Error handling middleware
+app.use((err, req, res, next) => {
+  console.error(err.stack);
+  res.status(500).json({ error: 'Something broke!' });
+});

+// Start server
+app.listen(port, () => {
+  console.log(`Auth server running on port ${port}`);
+});
```
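After the rewrite the server exposes `/login`, `/protected`, and `/health` on `AUTH_PORT` (3011 by default). A hedged sketch of the token round-trip from a client, using Node 18+'s built-in `fetch` (the script name is hypothetical; this is not part of the PR):

```js
// login-flow.js — illustrative client, not part of this PR (Node 18+).
const BASE = 'http://localhost:3011'; // AUTH_PORT default from server.js

async function main(username, password) {
  // 1. Exchange credentials for a JWT (signed with JWT_SECRET, 24h expiry).
  const login = await fetch(`${BASE}/login`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ username, password }),
  });
  if (!login.ok) throw new Error(`Login failed: ${login.status}`);
  const { token } = await login.json();

  // 2. Present the token as a Bearer header to the protected route.
  const me = await fetch(`${BASE}/protected`, {
    headers: { Authorization: `Bearer ${token}` },
  });
  console.log(await me.json()); // → { userId, username }
}

main(process.argv[2], process.argv[3]).catch(console.error);
```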
configuration tables schema (MySQL → PostgreSQL):

```diff
@@ -1,150 +1,154 @@
 -- Configuration tables schema

 -- Stock threshold configurations
-CREATE TABLE IF NOT EXISTS stock_thresholds (
-  id INT NOT NULL,
+CREATE TABLE stock_thresholds (
+  id INTEGER NOT NULL,
   category_id BIGINT, -- NULL means default/global threshold
   vendor VARCHAR(100), -- NULL means applies to all vendors
-  critical_days INT NOT NULL DEFAULT 7,
-  reorder_days INT NOT NULL DEFAULT 14,
-  overstock_days INT NOT NULL DEFAULT 90,
-  low_stock_threshold INT NOT NULL DEFAULT 5,
-  min_reorder_quantity INT NOT NULL DEFAULT 1,
+  critical_days INTEGER NOT NULL DEFAULT 7,
+  reorder_days INTEGER NOT NULL DEFAULT 14,
+  overstock_days INTEGER NOT NULL DEFAULT 90,
+  low_stock_threshold INTEGER NOT NULL DEFAULT 5,
+  min_reorder_quantity INTEGER NOT NULL DEFAULT 1,
   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (id),
   FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-  UNIQUE KEY unique_category_vendor (category_id, vendor),
-  INDEX idx_st_metrics (category_id, vendor)
+  UNIQUE (category_id, vendor)
 );
+
+CREATE INDEX idx_st_metrics ON stock_thresholds(category_id, vendor);

 -- Lead time threshold configurations
-CREATE TABLE IF NOT EXISTS lead_time_thresholds (
-  id INT NOT NULL,
+CREATE TABLE lead_time_thresholds (
+  id INTEGER NOT NULL,
   category_id BIGINT, -- NULL means default/global threshold
   vendor VARCHAR(100), -- NULL means applies to all vendors
-  target_days INT NOT NULL DEFAULT 14,
-  warning_days INT NOT NULL DEFAULT 21,
-  critical_days INT NOT NULL DEFAULT 30,
+  target_days INTEGER NOT NULL DEFAULT 14,
+  warning_days INTEGER NOT NULL DEFAULT 21,
+  critical_days INTEGER NOT NULL DEFAULT 30,
   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (id),
   FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-  UNIQUE KEY unique_category_vendor (category_id, vendor)
+  UNIQUE (category_id, vendor)
 );

 -- Sales velocity window configurations
-CREATE TABLE IF NOT EXISTS sales_velocity_config (
-  id INT NOT NULL,
+CREATE TABLE sales_velocity_config (
+  id INTEGER NOT NULL,
   category_id BIGINT, -- NULL means default/global threshold
   vendor VARCHAR(100), -- NULL means applies to all vendors
-  daily_window_days INT NOT NULL DEFAULT 30,
-  weekly_window_days INT NOT NULL DEFAULT 7,
-  monthly_window_days INT NOT NULL DEFAULT 90,
+  daily_window_days INTEGER NOT NULL DEFAULT 30,
+  weekly_window_days INTEGER NOT NULL DEFAULT 7,
+  monthly_window_days INTEGER NOT NULL DEFAULT 90,
   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (id),
   FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-  UNIQUE KEY unique_category_vendor (category_id, vendor),
-  INDEX idx_sv_metrics (category_id, vendor)
+  UNIQUE (category_id, vendor)
 );
+
+CREATE INDEX idx_sv_metrics ON sales_velocity_config(category_id, vendor);

 -- ABC Classification configurations
-CREATE TABLE IF NOT EXISTS abc_classification_config (
-  id INT NOT NULL PRIMARY KEY,
+CREATE TABLE abc_classification_config (
+  id INTEGER NOT NULL PRIMARY KEY,
   a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0,
   b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0,
-  classification_period_days INT NOT NULL DEFAULT 90,
+  classification_period_days INTEGER NOT NULL DEFAULT 90,
   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
 );

 -- Safety stock configurations
-CREATE TABLE IF NOT EXISTS safety_stock_config (
-  id INT NOT NULL,
+CREATE TABLE safety_stock_config (
+  id INTEGER NOT NULL,
   category_id BIGINT, -- NULL means default/global threshold
   vendor VARCHAR(100), -- NULL means applies to all vendors
-  coverage_days INT NOT NULL DEFAULT 14,
+  coverage_days INTEGER NOT NULL DEFAULT 14,
   service_level DECIMAL(5,2) NOT NULL DEFAULT 95.0,
   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (id),
   FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-  UNIQUE KEY unique_category_vendor (category_id, vendor),
-  INDEX idx_ss_metrics (category_id, vendor)
+  UNIQUE (category_id, vendor)
 );
+
+CREATE INDEX idx_ss_metrics ON safety_stock_config(category_id, vendor);

 -- Turnover rate configurations
-CREATE TABLE IF NOT EXISTS turnover_config (
-  id INT NOT NULL,
+CREATE TABLE turnover_config (
+  id INTEGER NOT NULL,
   category_id BIGINT, -- NULL means default/global threshold
   vendor VARCHAR(100), -- NULL means applies to all vendors
-  calculation_period_days INT NOT NULL DEFAULT 30,
+  calculation_period_days INTEGER NOT NULL DEFAULT 30,
   target_rate DECIMAL(10,2) NOT NULL DEFAULT 1.0,
   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (id),
   FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-  UNIQUE KEY unique_category_vendor (category_id, vendor)
+  UNIQUE (category_id, vendor)
 );

 -- Create table for sales seasonality factors
-CREATE TABLE IF NOT EXISTS sales_seasonality (
-  month INT NOT NULL,
+CREATE TABLE sales_seasonality (
+  month INTEGER NOT NULL,
   seasonality_factor DECIMAL(5,3) DEFAULT 0,
   last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (month),
-  CHECK (month BETWEEN 1 AND 12),
-  CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
+  CONSTRAINT month_range CHECK (month BETWEEN 1 AND 12),
+  CONSTRAINT seasonality_range CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
 );

-- Insert default global thresholds if not exists
+-- Insert default global thresholds
 INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
 VALUES (1, NULL, NULL, 7, 14, 90)
-ON DUPLICATE KEY UPDATE
-  critical_days = VALUES(critical_days),
-  reorder_days = VALUES(reorder_days),
-  overstock_days = VALUES(overstock_days);
+ON CONFLICT (id) DO UPDATE SET
+  critical_days = EXCLUDED.critical_days,
+  reorder_days = EXCLUDED.reorder_days,
+  overstock_days = EXCLUDED.overstock_days;

 INSERT INTO lead_time_thresholds (id, category_id, vendor, target_days, warning_days, critical_days)
 VALUES (1, NULL, NULL, 14, 21, 30)
-ON DUPLICATE KEY UPDATE
-  target_days = VALUES(target_days),
-  warning_days = VALUES(warning_days),
-  critical_days = VALUES(critical_days);
+ON CONFLICT (id) DO UPDATE SET
+  target_days = EXCLUDED.target_days,
+  warning_days = EXCLUDED.warning_days,
+  critical_days = EXCLUDED.critical_days;

 INSERT INTO sales_velocity_config (id, category_id, vendor, daily_window_days, weekly_window_days, monthly_window_days)
 VALUES (1, NULL, NULL, 30, 7, 90)
-ON DUPLICATE KEY UPDATE
-  daily_window_days = VALUES(daily_window_days),
-  weekly_window_days = VALUES(weekly_window_days),
-  monthly_window_days = VALUES(monthly_window_days);
+ON CONFLICT (id) DO UPDATE SET
+  daily_window_days = EXCLUDED.daily_window_days,
+  weekly_window_days = EXCLUDED.weekly_window_days,
+  monthly_window_days = EXCLUDED.monthly_window_days;

 INSERT INTO abc_classification_config (id, a_threshold, b_threshold, classification_period_days)
 VALUES (1, 20.0, 50.0, 90)
-ON DUPLICATE KEY UPDATE
-  a_threshold = VALUES(a_threshold),
-  b_threshold = VALUES(b_threshold),
-  classification_period_days = VALUES(classification_period_days);
+ON CONFLICT (id) DO UPDATE SET
+  a_threshold = EXCLUDED.a_threshold,
+  b_threshold = EXCLUDED.b_threshold,
+  classification_period_days = EXCLUDED.classification_period_days;

 INSERT INTO safety_stock_config (id, category_id, vendor, coverage_days, service_level)
 VALUES (1, NULL, NULL, 14, 95.0)
-ON DUPLICATE KEY UPDATE
-  coverage_days = VALUES(coverage_days),
-  service_level = VALUES(service_level);
+ON CONFLICT (id) DO UPDATE SET
+  coverage_days = EXCLUDED.coverage_days,
+  service_level = EXCLUDED.service_level;

 INSERT INTO turnover_config (id, category_id, vendor, calculation_period_days, target_rate)
 VALUES (1, NULL, NULL, 30, 1.0)
-ON DUPLICATE KEY UPDATE
-  calculation_period_days = VALUES(calculation_period_days),
-  target_rate = VALUES(target_rate);
+ON CONFLICT (id) DO UPDATE SET
+  calculation_period_days = EXCLUDED.calculation_period_days,
+  target_rate = EXCLUDED.target_rate;

 -- Insert default seasonality factors (neutral)
 INSERT INTO sales_seasonality (month, seasonality_factor)
 VALUES
   (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
   (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
-ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP;
+ON CONFLICT (month) DO UPDATE SET
+  last_updated = CURRENT_TIMESTAMP;

 -- View to show thresholds with category names
 CREATE OR REPLACE VIEW stock_thresholds_view AS
@@ -153,9 +157,9 @@ SELECT
   c.name as category_name,
   CASE
     WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 'Global Default'
-    WHEN st.category_id IS NULL THEN CONCAT('Vendor: ', st.vendor)
-    WHEN st.vendor IS NULL THEN CONCAT('Category: ', c.name)
-    ELSE CONCAT('Category: ', c.name, ' / Vendor: ', st.vendor)
+    WHEN st.category_id IS NULL THEN 'Vendor: ' || st.vendor
+    WHEN st.vendor IS NULL THEN 'Category: ' || c.name
+    ELSE 'Category: ' || c.name || ' / Vendor: ' || st.vendor
   END as threshold_scope
 FROM
   stock_thresholds st
@@ -171,59 +175,51 @@ ORDER BY
   c.name,
   st.vendor;

 -- History and status tables
 CREATE TABLE IF NOT EXISTS calculate_history (
-  id BIGINT AUTO_INCREMENT PRIMARY KEY,
+  id BIGSERIAL PRIMARY KEY,
   start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
   end_time TIMESTAMP NULL,
-  duration_seconds INT,
-  duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
-  total_products INT DEFAULT 0,
-  total_orders INT DEFAULT 0,
-  total_purchase_orders INT DEFAULT 0,
-  processed_products INT DEFAULT 0,
-  processed_orders INT DEFAULT 0,
-  processed_purchase_orders INT DEFAULT 0,
-  status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running',
+  duration_seconds INTEGER,
+  duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
+  total_products INTEGER DEFAULT 0,
+  total_orders INTEGER DEFAULT 0,
+  total_purchase_orders INTEGER DEFAULT 0,
+  processed_products INTEGER DEFAULT 0,
+  processed_orders INTEGER DEFAULT 0,
+  processed_purchase_orders INTEGER DEFAULT 0,
+  status calculation_status DEFAULT 'running',
   error_message TEXT,
-  additional_info JSON,
-  INDEX idx_status_time (status, start_time)
+  additional_info JSONB
 );

 CREATE TABLE IF NOT EXISTS calculate_status (
-  module_name ENUM(
-    'product_metrics',
-    'time_aggregates',
-    'financial_metrics',
-    'vendor_metrics',
-    'category_metrics',
-    'brand_metrics',
-    'sales_forecasts',
-    'abc_classification'
-  ) PRIMARY KEY,
-  last_calculation_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-  INDEX idx_last_calc (last_calculation_timestamp)
+  module_name module_name PRIMARY KEY,
+  last_calculation_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
 );

 CREATE TABLE IF NOT EXISTS sync_status (
   table_name VARCHAR(50) PRIMARY KEY,
   last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-  last_sync_id BIGINT,
-  INDEX idx_last_sync (last_sync_timestamp)
+  last_sync_id BIGINT
 );

 CREATE TABLE IF NOT EXISTS import_history (
-  id BIGINT AUTO_INCREMENT PRIMARY KEY,
+  id BIGSERIAL PRIMARY KEY,
   table_name VARCHAR(50) NOT NULL,
   start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
   end_time TIMESTAMP NULL,
-  duration_seconds INT,
-  duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
-  records_added INT DEFAULT 0,
-  records_updated INT DEFAULT 0,
+  duration_seconds INTEGER,
+  duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
+  records_added INTEGER DEFAULT 0,
+  records_updated INTEGER DEFAULT 0,
   is_incremental BOOLEAN DEFAULT FALSE,
-  status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running',
+  status calculation_status DEFAULT 'running',
   error_message TEXT,
-  additional_info JSON,
-  INDEX idx_table_time (table_name, start_time),
-  INDEX idx_status (status)
+  additional_info JSONB
 );
+
+-- Create all indexes after tables are fully created
+CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
+CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
+CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);
```
@@ -1,8 +1,8 @@
|
||||
-- Disable foreign key checks
|
||||
SET FOREIGN_KEY_CHECKS = 0;
|
||||
SET session_replication_role = 'replica';
|
||||
|
||||
-- Temporary tables for batch metrics processing
|
||||
CREATE TABLE IF NOT EXISTS temp_sales_metrics (
|
||||
CREATE TABLE temp_sales_metrics (
|
||||
pid BIGINT NOT NULL,
|
||||
daily_sales_avg DECIMAL(10,3),
|
||||
weekly_sales_avg DECIMAL(10,3),
|
||||
@@ -14,9 +14,9 @@ CREATE TABLE IF NOT EXISTS temp_sales_metrics (
|
||||
PRIMARY KEY (pid)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS temp_purchase_metrics (
|
||||
CREATE TABLE temp_purchase_metrics (
|
||||
pid BIGINT NOT NULL,
|
||||
avg_lead_time_days INT,
|
||||
avg_lead_time_days INTEGER,
|
||||
last_purchase_date DATE,
|
||||
first_received_date DATE,
|
||||
last_received_date DATE,
|
||||
@@ -24,7 +24,7 @@ CREATE TABLE IF NOT EXISTS temp_purchase_metrics (
|
||||
);
|
||||
|
||||
-- New table for product metrics
|
||||
CREATE TABLE IF NOT EXISTS product_metrics (
|
||||
CREATE TABLE product_metrics (
|
||||
pid BIGINT NOT NULL,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
-- Sales velocity metrics
|
||||
@@ -32,16 +32,16 @@ CREATE TABLE IF NOT EXISTS product_metrics (
|
||||
weekly_sales_avg DECIMAL(10,3),
|
||||
monthly_sales_avg DECIMAL(10,3),
|
||||
avg_quantity_per_order DECIMAL(10,3),
|
||||
number_of_orders INT,
|
||||
number_of_orders INTEGER,
|
||||
first_sale_date DATE,
|
||||
last_sale_date DATE,
|
||||
-- Stock metrics
|
||||
days_of_inventory INT,
|
||||
weeks_of_inventory INT,
|
||||
reorder_point INT,
|
||||
safety_stock INT,
|
||||
reorder_qty INT DEFAULT 0,
|
||||
overstocked_amt INT DEFAULT 0,
|
||||
days_of_inventory INTEGER,
|
||||
weeks_of_inventory INTEGER,
|
||||
reorder_point INTEGER,
|
||||
safety_stock INTEGER,
|
||||
reorder_qty INTEGER DEFAULT 0,
|
||||
overstocked_amt INTEGER DEFAULT 0,
|
||||
-- Financial metrics
|
||||
avg_margin_percent DECIMAL(10,3),
|
||||
total_revenue DECIMAL(10,3),
|
||||
@@ -50,7 +50,7 @@ CREATE TABLE IF NOT EXISTS product_metrics (
|
||||
gross_profit DECIMAL(10,3),
|
||||
gmroi DECIMAL(10,3),
|
||||
-- Purchase metrics
|
||||
avg_lead_time_days INT,
|
||||
avg_lead_time_days INTEGER,
|
||||
last_purchase_date DATE,
|
||||
first_received_date DATE,
|
||||
last_received_date DATE,
|
||||
@@ -60,48 +60,50 @@ CREATE TABLE IF NOT EXISTS product_metrics (
|
||||
-- Turnover metrics
|
||||
turnover_rate DECIMAL(12,3),
|
||||
-- Lead time metrics
|
||||
current_lead_time INT,
|
||||
target_lead_time INT,
|
||||
current_lead_time INTEGER,
|
||||
target_lead_time INTEGER,
|
||||
lead_time_status VARCHAR(20),
|
||||
-- Forecast metrics
|
||||
forecast_accuracy DECIMAL(5,2) DEFAULT NULL,
|
||||
forecast_bias DECIMAL(5,2) DEFAULT NULL,
|
||||
last_forecast_date DATE DEFAULT NULL,
|
||||
PRIMARY KEY (pid),
|
||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
|
||||
INDEX idx_metrics_revenue (total_revenue),
|
||||
INDEX idx_metrics_stock_status (stock_status),
|
||||
INDEX idx_metrics_lead_time (lead_time_status),
|
||||
INDEX idx_metrics_turnover (turnover_rate),
|
||||
INDEX idx_metrics_last_calculated (last_calculated_at),
|
||||
INDEX idx_metrics_abc (abc_class),
|
||||
INDEX idx_metrics_sales (daily_sales_avg, weekly_sales_avg, monthly_sales_avg),
|
||||
INDEX idx_metrics_forecast (forecast_accuracy, forecast_bias)
|
||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_metrics_revenue ON product_metrics(total_revenue);
|
||||
CREATE INDEX idx_metrics_stock_status ON product_metrics(stock_status);
|
||||
CREATE INDEX idx_metrics_lead_time ON product_metrics(lead_time_status);
|
||||
CREATE INDEX idx_metrics_turnover ON product_metrics(turnover_rate);
|
||||
CREATE INDEX idx_metrics_last_calculated ON product_metrics(last_calculated_at);
|
||||
CREATE INDEX idx_metrics_abc ON product_metrics(abc_class);
|
||||
CREATE INDEX idx_metrics_sales ON product_metrics(daily_sales_avg, weekly_sales_avg, monthly_sales_avg);
|
||||
CREATE INDEX idx_metrics_forecast ON product_metrics(forecast_accuracy, forecast_bias);
|
||||
|
||||
-- New table for time-based aggregates
|
||||
CREATE TABLE IF NOT EXISTS product_time_aggregates (
|
||||
CREATE TABLE product_time_aggregates (
|
||||
pid BIGINT NOT NULL,
|
||||
year INT NOT NULL,
|
||||
month INT NOT NULL,
|
||||
year INTEGER NOT NULL,
|
||||
month INTEGER NOT NULL,
|
||||
-- Sales metrics
|
||||
total_quantity_sold INT DEFAULT 0,
|
||||
total_quantity_sold INTEGER DEFAULT 0,
|
||||
total_revenue DECIMAL(10,3) DEFAULT 0,
|
||||
total_cost DECIMAL(10,3) DEFAULT 0,
|
||||
order_count INT DEFAULT 0,
|
||||
order_count INTEGER DEFAULT 0,
|
||||
-- Stock changes
|
||||
stock_received INT DEFAULT 0,
|
||||
stock_ordered INT DEFAULT 0,
|
||||
stock_received INTEGER DEFAULT 0,
|
||||
stock_ordered INTEGER DEFAULT 0,
|
||||
-- Calculated fields
|
||||
avg_price DECIMAL(10,3),
|
||||
profit_margin DECIMAL(10,3),
|
||||
inventory_value DECIMAL(10,3),
|
||||
gmroi DECIMAL(10,3),
|
||||
PRIMARY KEY (pid, year, month),
|
||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
|
||||
INDEX idx_date (year, month)
|
||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_date ON product_time_aggregates(year, month);
|
||||
|
||||
-- Create vendor_details table
|
||||
CREATE TABLE vendor_details (
|
||||
vendor VARCHAR(100) PRIMARY KEY,
|
||||
@@ -110,45 +112,47 @@ CREATE TABLE vendor_details (
|
||||
phone VARCHAR(50),
|
||||
status VARCHAR(20) DEFAULT 'active',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_status (status)
|
||||
) ENGINE=InnoDB;
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX idx_vendor_details_status ON vendor_details(status);
|
||||
|
||||
-- New table for vendor metrics
|
||||
CREATE TABLE IF NOT EXISTS vendor_metrics (
|
||||
CREATE TABLE vendor_metrics (
|
||||
vendor VARCHAR(100) NOT NULL,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
-- Performance metrics
|
||||
avg_lead_time_days DECIMAL(10,3),
|
||||
on_time_delivery_rate DECIMAL(5,2),
|
||||
order_fill_rate DECIMAL(5,2),
|
||||
total_orders INT DEFAULT 0,
|
||||
total_late_orders INT DEFAULT 0,
|
||||
total_orders INTEGER DEFAULT 0,
|
||||
total_late_orders INTEGER DEFAULT 0,
|
||||
total_purchase_value DECIMAL(10,3) DEFAULT 0,
|
||||
avg_order_value DECIMAL(10,3),
|
||||
-- Product metrics
|
||||
active_products INT DEFAULT 0,
|
||||
total_products INT DEFAULT 0,
|
||||
active_products INTEGER DEFAULT 0,
|
||||
total_products INTEGER DEFAULT 0,
|
||||
-- Financial metrics
|
||||
total_revenue DECIMAL(10,3) DEFAULT 0,
|
||||
avg_margin_percent DECIMAL(5,2),
|
||||
-- Status
|
||||
status VARCHAR(20) DEFAULT 'active',
|
||||
PRIMARY KEY (vendor),
|
||||
FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE,
|
||||
INDEX idx_vendor_performance (on_time_delivery_rate),
|
||||
INDEX idx_vendor_status (status),
|
||||
INDEX idx_metrics_last_calculated (last_calculated_at),
|
||||
INDEX idx_vendor_metrics_orders (total_orders, total_late_orders)
|
||||
FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_vendor_performance ON vendor_metrics(on_time_delivery_rate);
|
||||
CREATE INDEX idx_vendor_status ON vendor_metrics(status);
|
||||
CREATE INDEX idx_vendor_metrics_last_calculated ON vendor_metrics(last_calculated_at);
|
||||
CREATE INDEX idx_vendor_metrics_orders ON vendor_metrics(total_orders, total_late_orders);
|
||||
|
||||
-- New table for category metrics
|
||||
CREATE TABLE IF NOT EXISTS category_metrics (
|
||||
CREATE TABLE category_metrics (
|
||||
category_id BIGINT NOT NULL,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
-- Product metrics
|
||||
product_count INT DEFAULT 0,
|
||||
active_products INT DEFAULT 0,
|
||||
product_count INTEGER DEFAULT 0,
|
||||
active_products INTEGER DEFAULT 0,
|
||||
-- Financial metrics
|
||||
total_value DECIMAL(15,3) DEFAULT 0,
|
||||
avg_margin DECIMAL(5,2),
|
||||
@@ -157,255 +161,215 @@ CREATE TABLE IF NOT EXISTS category_metrics (
|
||||
-- Status
|
||||
status VARCHAR(20) DEFAULT 'active',
|
||||
PRIMARY KEY (category_id),
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
|
||||
INDEX idx_category_status (status),
|
||||
INDEX idx_category_growth (growth_rate),
|
||||
INDEX idx_metrics_last_calculated (last_calculated_at),
|
||||
INDEX idx_category_metrics_products (product_count, active_products)
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_category_status ON category_metrics(status);
|
||||
CREATE INDEX idx_category_growth ON category_metrics(growth_rate);
|
||||
CREATE INDEX idx_metrics_last_calculated_cat ON category_metrics(last_calculated_at);
|
||||
CREATE INDEX idx_category_metrics_products ON category_metrics(product_count, active_products);
|
||||
|
||||
-- New table for vendor time-based metrics
|
||||
CREATE TABLE IF NOT EXISTS vendor_time_metrics (
|
||||
CREATE TABLE vendor_time_metrics (
|
||||
vendor VARCHAR(100) NOT NULL,
|
||||
year INT NOT NULL,
|
||||
month INT NOT NULL,
|
||||
year INTEGER NOT NULL,
|
||||
month INTEGER NOT NULL,
|
||||
-- Order metrics
|
||||
total_orders INT DEFAULT 0,
|
||||
late_orders INT DEFAULT 0,
|
||||
total_orders INTEGER DEFAULT 0,
|
||||
late_orders INTEGER DEFAULT 0,
|
||||
avg_lead_time_days DECIMAL(10,3),
|
||||
-- Financial metrics
|
||||
total_purchase_value DECIMAL(10,3) DEFAULT 0,
|
||||
total_revenue DECIMAL(10,3) DEFAULT 0,
|
||||
avg_margin_percent DECIMAL(5,2),
|
||||
PRIMARY KEY (vendor, year, month),
|
||||
FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE,
|
||||
INDEX idx_vendor_date (year, month)
|
||||
FOREIGN KEY (vendor) REFERENCES vendor_details(vendor) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_vendor_date ON vendor_time_metrics(year, month);
|
||||
|
||||
-- New table for category time-based metrics
|
||||
CREATE TABLE IF NOT EXISTS category_time_metrics (
|
||||
CREATE TABLE category_time_metrics (
|
||||
category_id BIGINT NOT NULL,
|
||||
year INT NOT NULL,
|
||||
month INT NOT NULL,
|
||||
year INTEGER NOT NULL,
|
||||
month INTEGER NOT NULL,
|
||||
-- Product metrics
|
||||
product_count INT DEFAULT 0,
|
||||
active_products INT DEFAULT 0,
|
||||
product_count INTEGER DEFAULT 0,
|
||||
active_products INTEGER DEFAULT 0,
|
||||
-- Financial metrics
|
||||
total_value DECIMAL(15,3) DEFAULT 0,
|
||||
total_revenue DECIMAL(15,3) DEFAULT 0,
|
||||
avg_margin DECIMAL(5,2),
|
||||
turnover_rate DECIMAL(12,3),
|
||||
PRIMARY KEY (category_id, year, month),
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
|
||||
INDEX idx_category_date (year, month)
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_category_date ON category_time_metrics(year, month);
|
||||
|
||||
-- New table for category-based sales metrics
|
||||
CREATE TABLE IF NOT EXISTS category_sales_metrics (
|
||||
CREATE TABLE category_sales_metrics (
|
||||
category_id BIGINT NOT NULL,
|
||||
brand VARCHAR(100) NOT NULL,
|
||||
period_start DATE NOT NULL,
|
||||
period_end DATE NOT NULL,
|
||||
avg_daily_sales DECIMAL(10,3) DEFAULT 0,
|
||||
total_sold INT DEFAULT 0,
|
||||
num_products INT DEFAULT 0,
|
||||
total_sold INTEGER DEFAULT 0,
|
||||
num_products INTEGER DEFAULT 0,
|
||||
avg_price DECIMAL(10,3) DEFAULT 0,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (category_id, brand, period_start, period_end),
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
|
||||
INDEX idx_category_brand (category_id, brand),
|
||||
INDEX idx_period (period_start, period_end)
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_category_brand ON category_sales_metrics(category_id, brand);
|
||||
CREATE INDEX idx_period ON category_sales_metrics(period_start, period_end);
|
||||
|
||||
-- New table for brand metrics
|
||||
CREATE TABLE IF NOT EXISTS brand_metrics (
|
||||
CREATE TABLE brand_metrics (
|
||||
brand VARCHAR(100) NOT NULL,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
-- Product metrics
|
||||
product_count INT DEFAULT 0,
|
||||
active_products INT DEFAULT 0,
|
||||
product_count INTEGER DEFAULT 0,
|
||||
active_products INTEGER DEFAULT 0,
|
||||
-- Stock metrics
|
||||
total_stock_units INT DEFAULT 0,
|
||||
total_stock_units INTEGER DEFAULT 0,
|
||||
total_stock_cost DECIMAL(15,2) DEFAULT 0,
|
||||
total_stock_retail DECIMAL(15,2) DEFAULT 0,
|
||||
-- Sales metrics
|
||||
total_revenue DECIMAL(15,2) DEFAULT 0,
|
||||
avg_margin DECIMAL(5,2) DEFAULT 0,
|
||||
growth_rate DECIMAL(5,2) DEFAULT 0,
|
||||
PRIMARY KEY (brand),
|
||||
INDEX idx_brand_metrics_last_calculated (last_calculated_at),
|
||||
INDEX idx_brand_metrics_revenue (total_revenue),
|
||||
INDEX idx_brand_metrics_growth (growth_rate)
|
||||
PRIMARY KEY (brand)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_brand_metrics_last_calculated ON brand_metrics(last_calculated_at);
|
||||
CREATE INDEX idx_brand_metrics_revenue ON brand_metrics(total_revenue);
|
||||
CREATE INDEX idx_brand_metrics_growth ON brand_metrics(growth_rate);
|
||||
|
||||
-- New table for brand time-based metrics
|
||||
CREATE TABLE IF NOT EXISTS brand_time_metrics (
|
||||
CREATE TABLE brand_time_metrics (
|
||||
brand VARCHAR(100) NOT NULL,
|
||||
year INT NOT NULL,
|
||||
month INT NOT NULL,
|
||||
year INTEGER NOT NULL,
|
||||
month INTEGER NOT NULL,
|
||||
-- Product metrics
|
||||
product_count INT DEFAULT 0,
|
||||
active_products INT DEFAULT 0,
|
||||
product_count INTEGER DEFAULT 0,
|
||||
active_products INTEGER DEFAULT 0,
|
||||
-- Stock metrics
|
||||
total_stock_units INT DEFAULT 0,
|
||||
total_stock_units INTEGER DEFAULT 0,
|
||||
total_stock_cost DECIMAL(15,2) DEFAULT 0,
|
||||
total_stock_retail DECIMAL(15,2) DEFAULT 0,
|
||||
-- Sales metrics
|
||||
total_revenue DECIMAL(15,2) DEFAULT 0,
|
||||
avg_margin DECIMAL(5,2) DEFAULT 0,
|
||||
PRIMARY KEY (brand, year, month),
|
||||
INDEX idx_brand_date (year, month)
|
||||
growth_rate DECIMAL(5,2) DEFAULT 0,
|
||||
PRIMARY KEY (brand, year, month)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_brand_time_date ON brand_time_metrics(year, month);
|
||||
|
||||
-- New table for sales forecasts
|
||||
CREATE TABLE IF NOT EXISTS sales_forecasts (
|
||||
CREATE TABLE sales_forecasts (
|
||||
pid BIGINT NOT NULL,
|
||||
forecast_date DATE NOT NULL,
|
||||
forecast_units DECIMAL(10,2) DEFAULT 0,
|
||||
forecast_revenue DECIMAL(10,2) DEFAULT 0,
|
||||
confidence_level DECIMAL(5,2) DEFAULT 0,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
forecast_quantity INTEGER,
|
||||
confidence_level DECIMAL(5,2),
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (pid, forecast_date),
|
||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
|
||||
INDEX idx_forecast_date (forecast_date),
|
||||
INDEX idx_forecast_last_calculated (last_calculated_at)
|
||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_forecast_date ON sales_forecasts(forecast_date);
|
||||
|
||||
-- New table for category forecasts
|
||||
CREATE TABLE IF NOT EXISTS category_forecasts (
|
||||
CREATE TABLE category_forecasts (
|
||||
category_id BIGINT NOT NULL,
|
||||
forecast_date DATE NOT NULL,
|
||||
forecast_units DECIMAL(10,2) DEFAULT 0,
|
||||
forecast_revenue DECIMAL(10,2) DEFAULT 0,
|
||||
confidence_level DECIMAL(5,2) DEFAULT 0,
|
||||
last_calculated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
forecast_revenue DECIMAL(15,2),
|
||||
forecast_units INTEGER,
|
||||
confidence_level DECIMAL(5,2),
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (category_id, forecast_date),
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
|
||||
INDEX idx_category_forecast_date (forecast_date),
|
||||
INDEX idx_category_forecast_last_calculated (last_calculated_at)
|
||||
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Create view for inventory health
|
||||
CREATE INDEX idx_cat_forecast_date ON category_forecasts(forecast_date);
|
||||
|
||||
-- Create views for common calculations
|
||||
CREATE OR REPLACE VIEW inventory_health AS
|
||||
WITH product_thresholds AS (
|
||||
WITH stock_levels AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
COALESCE(
|
||||
-- Try category+vendor specific
|
||||
(SELECT critical_days FROM stock_thresholds st
|
||||
JOIN product_categories pc ON st.category_id = pc.cat_id
|
||||
WHERE pc.pid = p.pid
|
||||
AND st.vendor = p.vendor LIMIT 1),
|
||||
-- Try category specific
|
||||
(SELECT critical_days FROM stock_thresholds st
|
||||
JOIN product_categories pc ON st.category_id = pc.cat_id
|
||||
WHERE pc.pid = p.pid
|
||||
AND st.vendor IS NULL LIMIT 1),
|
||||
-- Try vendor specific
|
||||
(SELECT critical_days FROM stock_thresholds st
|
||||
WHERE st.category_id IS NULL
|
||||
AND st.vendor = p.vendor LIMIT 1),
|
||||
-- Fall back to default
|
||||
(SELECT critical_days FROM stock_thresholds st
|
||||
WHERE st.category_id IS NULL
|
||||
AND st.vendor IS NULL LIMIT 1),
|
||||
7
|
||||
) as critical_days,
|
||||
COALESCE(
|
||||
-- Try category+vendor specific
|
||||
(SELECT reorder_days FROM stock_thresholds st
|
||||
JOIN product_categories pc ON st.category_id = pc.cat_id
|
||||
WHERE pc.pid = p.pid
|
||||
AND st.vendor = p.vendor LIMIT 1),
|
||||
-- Try category specific
|
||||
(SELECT reorder_days FROM stock_thresholds st
|
||||
JOIN product_categories pc ON st.category_id = pc.cat_id
|
||||
WHERE pc.pid = p.pid
|
||||
AND st.vendor IS NULL LIMIT 1),
|
||||
-- Try vendor specific
|
||||
(SELECT reorder_days FROM stock_thresholds st
|
||||
WHERE st.category_id IS NULL
|
||||
AND st.vendor = p.vendor LIMIT 1),
|
||||
-- Fall back to default
|
||||
(SELECT reorder_days FROM stock_thresholds st
|
||||
WHERE st.category_id IS NULL
|
||||
AND st.vendor IS NULL LIMIT 1),
|
||||
14
|
||||
) as reorder_days,
|
||||
COALESCE(
|
||||
-- Try category+vendor specific
|
||||
(SELECT overstock_days FROM stock_thresholds st
|
||||
JOIN product_categories pc ON st.category_id = pc.cat_id
|
||||
WHERE pc.pid = p.pid
|
||||
AND st.vendor = p.vendor LIMIT 1),
|
||||
-- Try category specific
|
||||
(SELECT overstock_days FROM stock_thresholds st
|
||||
JOIN product_categories pc ON st.category_id = pc.cat_id
|
||||
WHERE pc.pid = p.pid
|
||||
AND st.vendor IS NULL LIMIT 1),
|
||||
-- Try vendor specific
|
||||
(SELECT overstock_days FROM stock_thresholds st
|
||||
WHERE st.category_id IS NULL
|
||||
AND st.vendor = p.vendor LIMIT 1),
|
||||
-- Fall back to default
|
||||
(SELECT overstock_days FROM stock_thresholds st
|
||||
WHERE st.category_id IS NULL
|
||||
AND st.vendor IS NULL LIMIT 1),
|
||||
90
|
||||
) as overstock_days
|
||||
p.title,
|
||||
p.SKU,
|
||||
p.stock_quantity,
|
||||
p.preorder_count,
|
||||
pm.daily_sales_avg,
|
||||
pm.weekly_sales_avg,
|
||||
pm.monthly_sales_avg,
|
||||
pm.reorder_point,
|
||||
pm.safety_stock,
|
||||
pm.days_of_inventory,
|
||||
pm.weeks_of_inventory,
|
||||
pm.stock_status,
|
||||
pm.abc_class,
|
||||
pm.turnover_rate,
|
||||
pm.avg_lead_time_days,
|
||||
pm.current_lead_time,
|
||||
pm.target_lead_time,
|
||||
pm.lead_time_status,
|
||||
p.cost_price,
|
||||
p.price,
|
||||
pm.inventory_value,
|
||||
pm.gmroi
|
||||
FROM products p
|
||||
LEFT JOIN product_metrics pm ON p.pid = pm.pid
|
||||
WHERE p.managing_stock = true AND p.visible = true
|
||||
)
|
||||
SELECT
|
||||
p.pid,
|
||||
p.SKU,
|
||||
p.title,
|
||||
p.stock_quantity,
|
||||
COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
|
||||
COALESCE(pm.days_of_inventory, 0) as days_of_inventory,
|
||||
COALESCE(pm.reorder_point, 0) as reorder_point,
|
||||
COALESCE(pm.safety_stock, 0) as safety_stock,
|
||||
*,
|
||||
CASE
|
||||
WHEN pm.daily_sales_avg = 0 THEN 'New'
|
||||
WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * pt.critical_days) THEN 'Critical'
|
||||
WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * pt.reorder_days) THEN 'Reorder'
|
||||
WHEN p.stock_quantity > (pm.daily_sales_avg * pt.overstock_days) THEN 'Overstocked'
|
||||
WHEN stock_quantity <= safety_stock THEN 'Critical'
|
||||
WHEN stock_quantity <= reorder_point THEN 'Low'
|
||||
WHEN stock_quantity > (reorder_point * 3) THEN 'Excess'
|
||||
ELSE 'Healthy'
|
||||
END as stock_status
|
||||
FROM
|
||||
products p
|
||||
LEFT JOIN
|
||||
product_metrics pm ON p.pid = pm.pid
|
||||
LEFT JOIN
|
||||
product_thresholds pt ON p.pid = pt.pid
|
||||
WHERE
|
||||
p.managing_stock = true;
|
||||
END as inventory_status,
|
||||
CASE
|
||||
WHEN lead_time_status = 'delayed' AND stock_status = 'low' THEN 'High'
|
||||
WHEN lead_time_status = 'delayed' OR stock_status = 'low' THEN 'Medium'
|
||||
ELSE 'Low'
|
||||
END as risk_level
|
||||
FROM stock_levels;

-- Create view for category performance trends
CREATE OR REPLACE VIEW category_performance_trends AS
WITH monthly_trends AS (
SELECT
c.cat_id as category_id,
c.name,
c.description,
p.name as parent_name,
c.status,
cm.product_count,
cm.active_products,
cm.total_value,
cm.avg_margin,
cm.turnover_rate,
cm.growth_rate,
c.cat_id,
c.name as category_name,
ctm.year,
ctm.month,
ctm.product_count,
ctm.active_products,
ctm.total_value,
ctm.total_revenue,
ctm.avg_margin,
ctm.turnover_rate,
LAG(ctm.total_revenue) OVER (PARTITION BY c.cat_id ORDER BY ctm.year, ctm.month) as prev_month_revenue,
LAG(ctm.turnover_rate) OVER (PARTITION BY c.cat_id ORDER BY ctm.year, ctm.month) as prev_month_turnover
FROM categories c
JOIN category_time_metrics ctm ON c.cat_id = ctm.category_id
)
SELECT
*,
CASE
WHEN cm.growth_rate >= 20 THEN 'High Growth'
WHEN cm.growth_rate >= 5 THEN 'Growing'
WHEN cm.growth_rate >= -5 THEN 'Stable'
ELSE 'Declining'
END as performance_rating
FROM
categories c
LEFT JOIN
categories p ON c.parent_id = p.cat_id
LEFT JOIN
category_metrics cm ON c.cat_id = cm.category_id;
WHEN prev_month_revenue IS NULL THEN 0
ELSE ((total_revenue - prev_month_revenue) / prev_month_revenue) * 100
END as revenue_growth_percent,
CASE
WHEN prev_month_turnover IS NULL THEN 0
ELSE ((turnover_rate - prev_month_turnover) / prev_month_turnover) * 100
END as turnover_growth_percent
FROM monthly_trends;
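
Note: LAG() is what makes the growth columns possible. It fetches the previous row's value within each category's month-ordered series, with no self-join needed. A minimal sketch against the same columns:

SELECT category_id, year, month, total_revenue,
       LAG(total_revenue) OVER (
         PARTITION BY category_id ORDER BY year, month
       ) AS prev_month_revenue
FROM category_time_metrics;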

-- Re-enable foreign key checks
SET FOREIGN_KEY_CHECKS = 1;
SET session_replication_role = 'origin';
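
Note: the pair of SET statements exists because this script ships in MySQL and PostgreSQL variants. MySQL gates foreign-key checks with FOREIGN_KEY_CHECKS; PostgreSQL has no direct switch, but setting session_replication_role to 'replica' skips the internal triggers that enforce foreign keys for the current session (it typically requires superuser-level privileges). The bulk-load bracket in each dialect:

-- MySQL
SET FOREIGN_KEY_CHECKS = 0;               -- before the load
SET FOREIGN_KEY_CHECKS = 1;               -- after the load
-- PostgreSQL (session-scoped)
SET session_replication_role = 'replica'; -- before the load
SET session_replication_role = 'origin';  -- after the load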

@@ -1,6 +1,5 @@
-- Enable strict error reporting
SET sql_mode = 'STRICT_ALL_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ZERO_DATE,NO_ZERO_IN_DATE,NO_ENGINE_SUBSTITUTION';
SET FOREIGN_KEY_CHECKS = 0;
SET session_replication_role = 'replica'; -- Disable foreign key checks temporarily

-- Create tables
CREATE TABLE products (
@@ -8,11 +7,11 @@ CREATE TABLE products (
title VARCHAR(255) NOT NULL,
description TEXT,
SKU VARCHAR(50) NOT NULL,
created_at TIMESTAMP NULL,
first_received TIMESTAMP NULL,
stock_quantity INT DEFAULT 0,
preorder_count INT DEFAULT 0,
notions_inv_count INT DEFAULT 0,
created_at TIMESTAMP,
first_received TIMESTAMP,
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
price DECIMAL(10, 3) NOT NULL,
regular_price DECIMAL(10, 3) NOT NULL,
cost_price DECIMAL(10, 3),
@@ -37,47 +36,52 @@ CREATE TABLE products (
artist VARCHAR(100),
options TEXT,
tags TEXT,
moq INT DEFAULT 1,
uom INT DEFAULT 1,
moq INTEGER DEFAULT 1,
uom INTEGER DEFAULT 1,
rating DECIMAL(10,2) DEFAULT 0.00,
reviews INT UNSIGNED DEFAULT 0,
reviews INTEGER DEFAULT 0,
weight DECIMAL(10,3),
length DECIMAL(10,3),
width DECIMAL(10,3),
height DECIMAL(10,3),
country_of_origin VARCHAR(5),
location VARCHAR(50),
total_sold INT UNSIGNED DEFAULT 0,
baskets INT UNSIGNED DEFAULT 0,
notifies INT UNSIGNED DEFAULT 0,
total_sold INTEGER DEFAULT 0,
baskets INTEGER DEFAULT 0,
notifies INTEGER DEFAULT 0,
date_last_sold DATE,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (pid),
INDEX idx_sku (SKU),
INDEX idx_vendor (vendor),
INDEX idx_brand (brand),
INDEX idx_location (location),
INDEX idx_total_sold (total_sold),
INDEX idx_date_last_sold (date_last_sold),
INDEX idx_updated (updated)
) ENGINE=InnoDB;
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (pid)
);
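
Note: the PostgreSQL version drops ON UPDATE CURRENT_TIMESTAMP because PostgreSQL has no column-level equivalent; if the auto-touch behavior on `updated` is still wanted, a trigger is the usual substitute. A sketch (function and trigger names are hypothetical; EXECUTE FUNCTION needs PostgreSQL 11+):

CREATE OR REPLACE FUNCTION touch_updated() RETURNS trigger AS $$
BEGIN
  NEW.updated := CURRENT_TIMESTAMP;  -- mirrors MySQL's ON UPDATE CURRENT_TIMESTAMP
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER products_touch_updated
BEFORE UPDATE ON products
FOR EACH ROW EXECUTE FUNCTION touch_updated();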

-- Create indexes for products table
CREATE INDEX idx_products_sku ON products(SKU);
CREATE INDEX idx_products_vendor ON products(vendor);
CREATE INDEX idx_products_brand ON products(brand);
CREATE INDEX idx_products_location ON products(location);
CREATE INDEX idx_products_total_sold ON products(total_sold);
CREATE INDEX idx_products_date_last_sold ON products(date_last_sold);
CREATE INDEX idx_products_updated ON products(updated);

-- Create categories table with hierarchy support
CREATE TABLE categories (
cat_id BIGINT PRIMARY KEY,
name VARCHAR(100) NOT NULL,
type SMALLINT NOT NULL COMMENT '10=section, 11=category, 12=subcategory, 13=subsubcategory, 1=company, 2=line, 3=subline, 40=artist',
type SMALLINT NOT NULL,
parent_id BIGINT,
description TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
status VARCHAR(20) DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id),
INDEX idx_parent (parent_id),
INDEX idx_type (type),
INDEX idx_status (status),
INDEX idx_name_type (name, type)
) ENGINE=InnoDB;
FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
);

COMMENT ON COLUMN categories.type IS '10=section, 11=category, 12=subcategory, 13=subsubcategory, 1=company, 2=line, 3=subline, 40=artist';

CREATE INDEX idx_categories_parent ON categories(parent_id);
CREATE INDEX idx_categories_type ON categories(type);
CREATE INDEX idx_categories_status ON categories(status);
CREATE INDEX idx_categories_name_type ON categories(name, type);

-- Create product_categories junction table
CREATE TABLE product_categories (
@@ -85,78 +89,86 @@ CREATE TABLE product_categories (
pid BIGINT NOT NULL,
PRIMARY KEY (pid, cat_id),
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
FOREIGN KEY (cat_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
INDEX idx_category (cat_id),
INDEX idx_product (pid)
) ENGINE=InnoDB;
FOREIGN KEY (cat_id) REFERENCES categories(cat_id) ON DELETE CASCADE
);

CREATE INDEX idx_product_categories_category ON product_categories(cat_id);
CREATE INDEX idx_product_categories_product ON product_categories(pid);

-- Create orders table with its indexes
CREATE TABLE IF NOT EXISTS orders (
id BIGINT NOT NULL AUTO_INCREMENT,
CREATE TABLE orders (
id BIGSERIAL PRIMARY KEY,
order_number VARCHAR(50) NOT NULL,
pid BIGINT NOT NULL,
SKU VARCHAR(50) NOT NULL,
date DATE NOT NULL,
price DECIMAL(10,3) NOT NULL,
quantity INT NOT NULL,
quantity INTEGER NOT NULL,
discount DECIMAL(10,3) DEFAULT 0.000,
tax DECIMAL(10,3) DEFAULT 0.000,
tax_included TINYINT(1) DEFAULT 0,
tax_included BOOLEAN DEFAULT false,
shipping DECIMAL(10,3) DEFAULT 0.000,
costeach DECIMAL(10,3) DEFAULT 0.000,
customer VARCHAR(50) NOT NULL,
customer_name VARCHAR(100),
status VARCHAR(20) DEFAULT 'pending',
canceled TINYINT(1) DEFAULT 0,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id),
UNIQUE KEY unique_order_line (order_number, pid),
KEY order_number (order_number),
KEY pid (pid),
KEY customer (customer),
KEY date (date),
KEY status (status),
INDEX idx_orders_metrics (pid, date, canceled),
INDEX idx_updated (updated)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
canceled BOOLEAN DEFAULT false,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE (order_number, pid)
);

CREATE INDEX idx_orders_number ON orders(order_number);
CREATE INDEX idx_orders_pid ON orders(pid);
CREATE INDEX idx_orders_customer ON orders(customer);
CREATE INDEX idx_orders_date ON orders(date);
CREATE INDEX idx_orders_status ON orders(status);
CREATE INDEX idx_orders_metrics ON orders(pid, date, canceled);
CREATE INDEX idx_orders_updated ON orders(updated);
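
Note: BIGSERIAL replaces AUTO_INCREMENT above; it is shorthand for a bigint column backed by an implicit sequence. The longhand equivalent, for reference (table and sequence names here are hypothetical):

CREATE SEQUENCE orders_sketch_id_seq;
CREATE TABLE orders_sketch (
  id BIGINT NOT NULL DEFAULT nextval('orders_sketch_id_seq') PRIMARY KEY
);
ALTER SEQUENCE orders_sketch_id_seq OWNED BY orders_sketch.id;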

-- Create purchase_orders table with its indexes
CREATE TABLE purchase_orders (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
id BIGSERIAL PRIMARY KEY,
po_id VARCHAR(50) NOT NULL,
vendor VARCHAR(100) NOT NULL,
date DATE NOT NULL,
expected_date DATE,
pid BIGINT NOT NULL,
sku VARCHAR(50) NOT NULL,
name VARCHAR(100) NOT NULL COMMENT 'Product name from products.description',
name VARCHAR(100) NOT NULL,
cost_price DECIMAL(10, 3) NOT NULL,
po_cost_price DECIMAL(10, 3) NOT NULL COMMENT 'Original cost from PO, before receiving adjustments',
status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done',
receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid',
po_cost_price DECIMAL(10, 3) NOT NULL,
status SMALLINT DEFAULT 1,
receiving_status SMALLINT DEFAULT 1,
notes TEXT,
long_note TEXT,
ordered INT NOT NULL,
received INT DEFAULT 0,
received_date DATE COMMENT 'Date of first receiving',
last_received_date DATE COMMENT 'Date of most recent receiving',
received_by VARCHAR(100) COMMENT 'Name of person who first received this PO line',
receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
ordered INTEGER NOT NULL,
received INTEGER DEFAULT 0,
received_date DATE,
last_received_date DATE,
received_by VARCHAR(100),
receiving_history JSONB,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (pid) REFERENCES products(pid),
INDEX idx_po_id (po_id),
INDEX idx_vendor (vendor),
INDEX idx_status (status),
INDEX idx_receiving_status (receiving_status),
INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received),
INDEX idx_po_metrics (pid, date, receiving_status, received_date),
INDEX idx_po_product_date (pid, date),
INDEX idx_po_product_status (pid, status),
INDEX idx_updated (updated),
UNIQUE KEY unique_po_product (po_id, pid)
) ENGINE=InnoDB;
UNIQUE (po_id, pid)
);

SET FOREIGN_KEY_CHECKS = 1;
COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
COMMENT ON COLUMN purchase_orders.status IS '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done';
COMMENT ON COLUMN purchase_orders.receiving_status IS '0=canceled,1=created,30=partial_received,40=full_received,50=paid';
COMMENT ON COLUMN purchase_orders.receiving_history IS 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag';

CREATE INDEX idx_po_id ON purchase_orders(po_id);
CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
CREATE INDEX idx_po_status ON purchase_orders(status);
CREATE INDEX idx_po_receiving_status ON purchase_orders(receiving_status);
CREATE INDEX idx_po_metrics ON purchase_orders(pid, date, status, ordered, received);
CREATE INDEX idx_po_metrics_receiving ON purchase_orders(pid, date, receiving_status, received_date);
CREATE INDEX idx_po_product_date ON purchase_orders(pid, date);
CREATE INDEX idx_po_product_status ON purchase_orders(pid, status);
CREATE INDEX idx_po_updated ON purchase_orders(updated);
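
Note: switching receiving_history from JSON to JSONB means the receiving records described in the column comment can be indexed and queried directly. A sketch, assuming the documented qty/date/cost/receiving_id/alt_po keys:

SELECT po_id,
       jsonb_array_length(receiving_history) AS receiving_events
FROM purchase_orders
WHERE receiving_history @> '[{"alt_po": true}]';  -- containment match on the array of records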

SET session_replication_role = 'origin'; -- Re-enable foreign key checks

-- Create views for common calculations
-- product_sales_trends view moved to metrics-schema.sql
649  inventory-server/package-lock.json  (generated)
@@ -9,12 +9,14 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"bcrypt": "^5.1.1",
"cors": "^2.8.5",
"csv-parse": "^5.6.0",
"dotenv": "^16.4.7",
"express": "^4.18.2",
"multer": "^1.4.5-lts.1",
"mysql2": "^3.12.0",
"pg": "^8.13.3",
"pm2": "^5.3.0",
"ssh2": "^1.16.0",
"uuid": "^9.0.1"
@@ -23,6 +25,74 @@
"nodemon": "^3.0.2"
}
},
"node_modules/@mapbox/node-pre-gyp": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
"integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
"license": "BSD-3-Clause",
"dependencies": {
"detect-libc": "^2.0.0",
"https-proxy-agent": "^5.0.0",
"make-dir": "^3.1.0",
"node-fetch": "^2.6.7",
"nopt": "^5.0.0",
"npmlog": "^5.0.1",
"rimraf": "^3.0.2",
"semver": "^7.3.5",
"tar": "^6.1.11"
},
"bin": {
"node-pre-gyp": "bin/node-pre-gyp"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/agent-base": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"license": "MIT",
"dependencies": {
"debug": "4"
},
"engines": {
"node": ">= 6.0.0"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/https-proxy-agent": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
"license": "MIT",
"dependencies": {
"agent-base": "6",
"debug": "4"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/@pm2/agent": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@pm2/agent/-/agent-2.0.4.tgz",
@@ -276,6 +346,12 @@
"integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==",
"license": "MIT"
},
"node_modules/abbrev": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
"license": "ISC"
},
"node_modules/accepts": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
@@ -322,6 +398,15 @@
"node": ">=6"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
@@ -356,6 +441,40 @@
"integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==",
"license": "MIT"
},
"node_modules/aproba": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz",
"integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==",
"license": "ISC"
},
"node_modules/are-we-there-yet": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": {
"delegates": "^1.0.0",
"readable-stream": "^3.6.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/are-we-there-yet/node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"license": "MIT",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
@@ -414,7 +533,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true,
"license": "MIT"
},
"node_modules/basic-ftp": {
@@ -426,6 +544,20 @@
"node": ">=10.0.0"
}
},
"node_modules/bcrypt": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz",
"integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==",
"hasInstallScript": true,
"license": "MIT",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",
"node-addon-api": "^5.0.0"
},
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/bcrypt-pbkdf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
@@ -493,7 +625,6 @@
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
@@ -619,6 +750,15 @@
"fsevents": "~2.3.2"
}
},
"node_modules/chownr": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
"integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/cli-tableau": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/cli-tableau/-/cli-tableau-2.0.1.tgz",
@@ -648,6 +788,15 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"license": "MIT"
},
"node_modules/color-support": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
"license": "ISC",
"bin": {
"color-support": "bin.js"
}
},
"node_modules/commander": {
"version": "2.15.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz",
@@ -658,7 +807,6 @@
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true,
"license": "MIT"
},
"node_modules/concat-stream": {
@@ -676,6 +824,12 @@
"typedarray": "^0.0.6"
}
},
"node_modules/console-control-strings": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
"integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==",
"license": "ISC"
},
"node_modules/content-disposition": {
"version": "0.5.4",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
@@ -801,6 +955,12 @@
"node": ">= 14"
}
},
"node_modules/delegates": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==",
"license": "MIT"
},
"node_modules/denque": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
@@ -829,6 +989,15 @@
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/detect-libc": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
"integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
"license": "Apache-2.0",
"engines": {
"node": ">=8"
}
},
"node_modules/dotenv": {
"version": "16.4.7",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
@@ -861,6 +1030,12 @@
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
"license": "MIT"
},
"node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"license": "MIT"
},
"node_modules/encodeurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
@@ -1132,6 +1307,36 @@
"node": ">= 0.6"
}
},
"node_modules/fs-minipass": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
"integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
"license": "ISC",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/fs-minipass/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
"license": "ISC"
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
@@ -1155,6 +1360,27 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/gauge": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
"integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": {
"aproba": "^1.0.3 || ^2.0.0",
"color-support": "^1.1.2",
"console-control-strings": "^1.0.0",
"has-unicode": "^2.0.1",
"object-assign": "^4.1.1",
"signal-exit": "^3.0.0",
"string-width": "^4.2.3",
"strip-ansi": "^6.0.1",
"wide-align": "^1.1.2"
},
"engines": {
"node": ">=10"
}
},
"node_modules/generate-function": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
@@ -1250,6 +1476,27 @@
"integrity": "sha512-2e/nZezdVlyCopOCYHeW0onkbZg7xP1Ad6pndPy1rCygeRykefUS6r7oA5cJRGEFvseiaz5a/qUHFVX1dd6Isg==",
"license": "MIT"
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
@@ -1295,6 +1542,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-unicode": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
"integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==",
"license": "ISC"
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
@@ -1414,6 +1667,17 @@
"dev": true,
"license": "ISC"
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
"license": "ISC",
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@@ -1490,6 +1754,15 @@
"node": ">=0.10.0"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
@@ -1605,6 +1878,30 @@
"url": "https://github.com/sponsors/wellwelwel"
}
},
"node_modules/make-dir": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
"integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
"license": "MIT",
"dependencies": {
"semver": "^6.0.0"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/make-dir/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
@@ -1678,7 +1975,6 @@
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
@@ -1696,6 +1992,40 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/minipass": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"license": "ISC",
"engines": {
"node": ">=8"
}
},
"node_modules/minizlib": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
"integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
"license": "MIT",
"dependencies": {
"minipass": "^3.0.0",
"yallist": "^4.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/minizlib/node_modules/minipass": {
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/mkdirp": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
@@ -1857,6 +2187,32 @@
"node": ">= 0.4.0"
}
},
"node_modules/node-addon-api": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==",
"license": "MIT"
},
"node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/nodemon": {
"version": "3.1.9",
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.9.tgz",
@@ -1934,6 +2290,21 @@
"node": ">=4"
}
},
"node_modules/nopt": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
"integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==",
"license": "ISC",
"dependencies": {
"abbrev": "1"
},
"bin": {
"nopt": "bin/nopt.js"
},
"engines": {
"node": ">=6"
}
},
"node_modules/normalize-path": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
@@ -1943,6 +2314,19 @@
"node": ">=0.10.0"
}
},
"node_modules/npmlog": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": {
"are-we-there-yet": "^2.0.0",
"console-control-strings": "^1.1.0",
"gauge": "^3.0.0",
"set-blocking": "^2.0.0"
}
},
"node_modules/nssocket": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/nssocket/-/nssocket-0.6.0.tgz",
@@ -1995,6 +2379,15 @@
"node": ">= 0.8"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"license": "ISC",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/pac-proxy-agent": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.1.0.tgz",
@@ -2065,6 +2458,15 @@
"node": ">= 0.8"
}
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/path-parse": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
@@ -2077,6 +2479,95 @@
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"license": "MIT"
},
"node_modules/pg": {
"version": "8.13.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.13.3.tgz",
"integrity": "sha512-P6tPt9jXbL9HVu/SSRERNYaYG++MjnscnegFh9pPHihfoBSujsrka0hyuymMzeJKFWrcG8wvCKy8rCe8e5nDUQ==",
"license": "MIT",
"dependencies": {
"pg-connection-string": "^2.7.0",
"pg-pool": "^3.7.1",
"pg-protocol": "^1.7.1",
"pg-types": "^2.1.0",
"pgpass": "1.x"
},
"engines": {
"node": ">= 8.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.1.1"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz",
"integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==",
"license": "MIT",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.7.0.tgz",
"integrity": "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==",
"license": "MIT"
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"license": "ISC",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.7.1.tgz",
"integrity": "sha512-xIOsFoh7Vdhojas6q3596mXFsR8nwBQBXX5JiV7p9buEVAGqYL4yFzclON5P9vFrpu1u7Zwl2oriyDa89n0wbw==",
"license": "MIT",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.7.1",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.7.1.tgz",
"integrity": "sha512-gjTHWGYWsEgy9MsY0Gp6ZJxV24IjDqdpTW7Eh0x+WfJLFsm/TJx1MzL6T0D88mBvkpxotCQ6TwW6N+Kko7lhgQ==",
"license": "MIT"
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"license": "MIT",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"license": "MIT",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
@@ -2320,6 +2811,45 @@
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"license": "MIT",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@@ -2544,6 +3074,22 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"deprecated": "Rimraf versions prior to v4 are no longer supported",
"license": "ISC",
"dependencies": {
"glob": "^7.1.3"
},
"bin": {
"rimraf": "bin.js"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/run-series": {
"version": "1.1.9",
"resolved": "https://registry.npmjs.org/run-series/-/run-series-1.1.9.tgz",
@@ -2667,6 +3213,12 @@
"node": ">= 0.8.0"
}
},
"node_modules/set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
"license": "ISC"
},
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -2850,6 +3402,15 @@
"source-map": "^0.6.0"
}
},
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
"license": "ISC",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/sprintf-js": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.2.tgz",
@@ -2914,6 +3475,32 @@
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
"license": "MIT"
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
@@ -2965,6 +3552,23 @@
"url": "https://www.buymeacoffee.com/systeminfo"
}
},
"node_modules/tar": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"license": "ISC",
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -2996,6 +3600,12 @@
"nodetouch": "bin/nodetouch.js"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT"
},
"node_modules/tslib": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz",
@@ -3132,6 +3742,37 @@
"lodash": "^4.17.14"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"license": "BSD-2-Clause"
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/wide-align": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
"integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==",
"license": "ISC",
"dependencies": {
"string-width": "^1.0.2 || 2 || 3 || 4"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC"
},
"node_modules/ws": {
"version": "7.5.10",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz",
@@ -18,12 +18,14 @@
"author": "",
"license": "ISC",
"dependencies": {
"bcrypt": "^5.1.1",
"cors": "^2.8.5",
"csv-parse": "^5.6.0",
"dotenv": "^16.4.7",
"express": "^4.18.2",
"multer": "^1.4.5-lts.1",
"mysql2": "^3.12.0",
"pg": "^8.13.3",
"pm2": "^5.3.0",
"ssh2": "^1.16.0",
"uuid": "^9.0.1"
@@ -14,7 +14,15 @@ function outputProgress(data) {
function runScript(scriptPath) {
return new Promise((resolve, reject) => {
const child = spawn('node', [scriptPath], {
stdio: ['inherit', 'pipe', 'pipe']
stdio: ['inherit', 'pipe', 'pipe'],
env: {
...process.env,
PGHOST: process.env.DB_HOST,
PGUSER: process.env.DB_USER,
PGPASSWORD: process.env.DB_PASSWORD,
PGDATABASE: process.env.DB_NAME,
PGPORT: process.env.DB_PORT || '5432'
}
});

let output = '';
@@ -19,7 +19,6 @@ const IMPORT_PURCHASE_ORDERS = true;
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false

// SSH configuration
// In import-from-prod.js
const sshConfig = {
ssh: {
host: process.env.PROD_SSH_HOST,
@@ -31,6 +30,7 @@ const sshConfig = {
compress: true, // Enable SSH compression
},
prodDbConfig: {
// MySQL config for production
host: process.env.PROD_DB_HOST || "localhost",
user: process.env.PROD_DB_USER,
password: process.env.PROD_DB_PASSWORD,
@@ -39,21 +39,16 @@ const sshConfig = {
timezone: 'Z',
},
localDbConfig: {
// PostgreSQL config for local
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
multipleStatements: true,
waitForConnections: true,
connectionLimit: 10,
queueLimit: 0,
namedPlaceholders: true,
connectTimeout: 60000,
enableKeepAlive: true,
keepAliveInitialDelay: 10000,
compress: true,
timezone: 'Z',
stringifyObjects: false,
port: process.env.DB_PORT || 5432,
ssl: process.env.DB_SSL === 'true',
connectionTimeoutMillis: 60000,
idleTimeoutMillis: 30000,
max: 10 // connection pool max size
}
};

@@ -108,7 +103,7 @@ async function main() {
SET
status = 'cancelled',
end_time = NOW(),
duration_seconds = TIMESTAMPDIFF(SECOND, start_time, NOW()),
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
error_message = 'Previous import was not completed properly'
WHERE status = 'running'
`);
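
Note: EXTRACT(EPOCH FROM ...) is the PostgreSQL rewrite of MySQL's TIMESTAMPDIFF(SECOND, start_time, NOW()); it yields fractional seconds, hence the ::INTEGER cast. A standalone sketch (the literal timestamp is hypothetical):

SELECT EXTRACT(EPOCH FROM (NOW() - TIMESTAMP '2024-01-01 00:00:00'))::INTEGER AS seconds_elapsed;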
@@ -118,9 +113,10 @@ async function main() {
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT,
INDEX idx_last_sync (last_sync_timestamp)
last_sync_id BIGINT
);

CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status (last_sync_timestamp);
`);

// Create import history record for the overall session
@@ -134,17 +130,17 @@ async function main() {
) VALUES (
'all_tables',
NOW(),
?,
$1::boolean,
'running',
JSON_OBJECT(
'categories_enabled', ?,
'products_enabled', ?,
'orders_enabled', ?,
'purchase_orders_enabled', ?
)
jsonb_build_object(
'categories_enabled', $2::boolean,
'products_enabled', $3::boolean,
'orders_enabled', $4::boolean,
'purchase_orders_enabled', $5::boolean
)
) RETURNING id
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]);
importHistoryId = historyResult.insertId;
importHistoryId = historyResult.rows[0].id;
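
Note: two conversions land together in this hunk: MySQL's positional ? placeholders become numbered $n parameters (bound in array order by the pg client), and JSON_OBJECT becomes jsonb_build_object, which takes the same alternating key/value arguments. A reduced sketch, with column names assumed from the surrounding statements:

INSERT INTO import_history (table_name, status, additional_info)
VALUES ($1, 'running', jsonb_build_object('categories_enabled', $2::boolean))
RETURNING id;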

const results = {
categories: null,
@@ -162,8 +158,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Categories import result:', results.categories);
totalRecordsAdded += results.categories?.recordsAdded || 0;
totalRecordsUpdated += results.categories?.recordsUpdated || 0;
totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0);
}
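
Note: the parseInt wrappers added here (and in the three hunks below) are presumably needed because these counts come from PostgreSQL, where COUNT(*) is bigint, and the pg driver returns bigint values as JavaScript strings by default since a 64-bit value does not fit safely in a JS number. A quick way to confirm the type on the SQL side:

SELECT pg_typeof(COUNT(*)) FROM products;  -- bigint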

if (IMPORT_PRODUCTS) {
@@ -171,8 +167,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Products import result:', results.products);
totalRecordsAdded += results.products?.recordsAdded || 0;
totalRecordsUpdated += results.products?.recordsUpdated || 0;
totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
}

if (IMPORT_ORDERS) {
@@ -180,8 +176,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Orders import result:', results.orders);
totalRecordsAdded += results.orders?.recordsAdded || 0;
totalRecordsUpdated += results.orders?.recordsUpdated || 0;
totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
}

if (IMPORT_PURCHASE_ORDERS) {
@@ -189,8 +185,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Purchase orders import result:', results.purchaseOrders);
totalRecordsAdded += results.purchaseOrders?.recordsAdded || 0;
totalRecordsUpdated += results.purchaseOrders?.recordsUpdated || 0;
totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
}

const endTime = Date.now();
@@ -201,21 +197,21 @@ async function main() {
UPDATE import_history
SET
end_time = NOW(),
duration_seconds = ?,
records_added = ?,
records_updated = ?,
duration_seconds = $1,
records_added = $2,
records_updated = $3,
status = 'completed',
additional_info = JSON_OBJECT(
'categories_enabled', ?,
'products_enabled', ?,
'orders_enabled', ?,
'purchase_orders_enabled', ?,
'categories_result', CAST(? AS JSON),
'products_result', CAST(? AS JSON),
'orders_result', CAST(? AS JSON),
'purchase_orders_result', CAST(? AS JSON)
additional_info = jsonb_build_object(
'categories_enabled', $4::boolean,
'products_enabled', $5::boolean,
'orders_enabled', $6::boolean,
'purchase_orders_enabled', $7::boolean,
'categories_result', COALESCE($8::jsonb, 'null'::jsonb),
'products_result', COALESCE($9::jsonb, 'null'::jsonb),
'orders_result', COALESCE($10::jsonb, 'null'::jsonb),
'purchase_orders_result', COALESCE($11::jsonb, 'null'::jsonb)
)
WHERE id = ?
WHERE id = $12
`, [
totalElapsedSeconds,
totalRecordsAdded,
@@ -259,10 +255,10 @@ async function main() {
UPDATE import_history
SET
end_time = NOW(),
duration_seconds = ?,
status = ?,
error_message = ?
WHERE id = ?
duration_seconds = $1,
status = $2,
error_message = $3
WHERE id = $4
`, [totalElapsedSeconds, error.message === "Import cancelled" ? 'cancelled' : 'failed', error.message, importHistoryId]);
}


@@ -9,11 +9,20 @@ async function importCategories(prodConnection, localConnection) {
const startTime = Date.now();
const typeOrder = [10, 20, 11, 21, 12, 13];
let totalInserted = 0;
let totalUpdated = 0;
let skippedCategories = [];

try {
// Process each type in order with its own query
// Start a single transaction for the entire import
await localConnection.query('BEGIN');

// Process each type in order with its own savepoint
for (const type of typeOrder) {
try {
// Create a savepoint for this type
await localConnection.query(`SAVEPOINT category_type_${type}`);

// Production query remains MySQL compatible
const [categories] = await prodConnection.query(
`
SELECT
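
Note: the BEGIN plus per-type SAVEPOINT structure added above lets one failing category type be rolled back without abandoning the whole import transaction. The underlying SQL pattern, sketched for type 10:

BEGIN;
SAVEPOINT category_type_10;
-- ... inserts for type 10 ...
RELEASE SAVEPOINT category_type_10;         -- on success
-- ROLLBACK TO SAVEPOINT category_type_10;  -- on failure, then continue with the next type
COMMIT;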
@@ -33,7 +42,10 @@ async function importCategories(prodConnection, localConnection) {
|
||||
[type]
|
||||
);
|
||||
|
||||
if (categories.length === 0) continue;
|
||||
if (categories.length === 0) {
|
||||
await localConnection.query(`RELEASE SAVEPOINT category_type_${type}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(`\nProcessing ${categories.length} type ${type} categories`);
|
||||
if (type === 10) {
|
||||
@@ -46,133 +58,157 @@ async function importCategories(prodConnection, localConnection) {
|
||||
// Get all parent IDs
|
||||
const parentIds = [
|
||||
...new Set(
|
||||
categories.map((c) => c.parent_id).filter((id) => id !== null)
|
||||
categories
|
||||
.filter(c => c && c.parent_id !== null)
|
||||
.map(c => c.parent_id)
|
||||
),
|
||||
];
|
||||
|
||||
// Check which parents exist
|
||||
const [existingParents] = await localConnection.query(
|
||||
"SELECT cat_id FROM categories WHERE cat_id IN (?)",
|
||||
[parentIds]
|
||||
);
|
||||
const existingParentIds = new Set(existingParents.map((p) => p.cat_id));
|
||||
console.log(`Processing ${categories.length} type ${type} categories with ${parentIds.length} unique parent IDs`);
|
||||
console.log('Parent IDs:', parentIds);
|
||||
|
||||
// Filter categories and track skipped ones
|
||||
categoriesToInsert = categories.filter(
|
||||
(cat) =>
|
||||
cat.parent_id === null || existingParentIds.has(cat.parent_id)
|
||||
);
|
||||
const invalidCategories = categories.filter(
|
||||
(cat) =>
|
||||
cat.parent_id !== null && !existingParentIds.has(cat.parent_id)
|
||||
);
|
||||
|
||||
if (invalidCategories.length > 0) {
|
||||
const skippedInfo = invalidCategories.map((c) => ({
|
||||
id: c.cat_id,
|
||||
name: c.name,
|
||||
type: c.type,
|
||||
missing_parent: c.parent_id,
|
||||
}));
|
||||
skippedCategories.push(...skippedInfo);
|
||||
|
||||
console.log(
|
||||
"\nSkipping categories with missing parents:",
|
||||
invalidCategories
|
||||
.map(
|
||||
(c) =>
|
||||
`${c.cat_id} - ${c.name} (missing parent: ${c.parent_id})`
|
||||
)
|
||||
.join("\n")
|
||||
);
|
||||
// No need to check for parent existence - we trust they exist since they were just inserted
|
||||
categoriesToInsert = categories;
|
||||
}
|
||||
|
||||
if (categoriesToInsert.length === 0) {
|
||||
console.log(
|
||||
`No valid categories of type ${type} to insert - all had missing parents`
|
||||
`No valid categories of type ${type} to insert`
|
||||
);
|
||||
await localConnection.query(`RELEASE SAVEPOINT category_type_${type}`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Inserting ${categoriesToInsert.length} type ${type} categories`
|
||||
);
|
||||
|
||||
const placeholders = categoriesToInsert
|
||||
.map(() => "(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)")
|
||||
.join(",");
|
||||
|
||||
// PostgreSQL upsert query with parameterized values
|
||||
const values = categoriesToInsert.flatMap((cat) => [
|
||||
cat.cat_id,
|
||||
cat.name,
|
||||
cat.type,
|
||||
cat.parent_id,
|
||||
cat.description,
|
||||
"active",
|
||||
'active',
|
||||
new Date(),
|
||||
new Date()
|
||||
]);
|
||||
|
||||
// Insert categories and create relationships in one query to avoid race conditions
|
||||
await localConnection.query(
|
||||
`
|
||||
INSERT INTO categories (cat_id, name, type, parent_id, description, status, created_at, updated_at)
|
||||
VALUES ${placeholders}
|
||||
ON DUPLICATE KEY UPDATE
|
||||
name = VALUES(name),
|
||||
type = VALUES(type),
|
||||
parent_id = VALUES(parent_id),
|
||||
description = VALUES(description),
|
||||
status = VALUES(status),
|
||||
updated_at = CURRENT_TIMESTAMP
|
||||
`,
|
||||
values
|
||||
);
|
||||
console.log('Attempting to insert/update with values:', JSON.stringify(values, null, 2));
|
||||
|
||||
const placeholders = categoriesToInsert
|
||||
.map((_, i) => `($${i * 8 + 1}, $${i * 8 + 2}, $${i * 8 + 3}, $${i * 8 + 4}, $${i * 8 + 5}, $${i * 8 + 6}, $${i * 8 + 7}, $${i * 8 + 8})`)
|
||||
.join(',');
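// Each row consumes eight numbered parameters ($1..$8, then $9..$16, ...),
// matching the eight values pushed per category above.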
console.log('Using placeholders:', placeholders);

// Insert categories with ON CONFLICT clause for PostgreSQL
const query = `
WITH inserted_categories AS (
INSERT INTO categories (
cat_id, name, type, parent_id, description, status, created_at, updated_at
)
VALUES ${placeholders}
ON CONFLICT (cat_id) DO UPDATE SET
name = EXCLUDED.name,
type = EXCLUDED.type,
parent_id = EXCLUDED.parent_id,
description = EXCLUDED.description,
status = EXCLUDED.status,
updated_at = EXCLUDED.updated_at
RETURNING
cat_id,
CASE
WHEN xmax = 0 THEN true
ELSE false
END as is_insert
)
SELECT
COUNT(*) as total,
COUNT(*) FILTER (WHERE is_insert) as inserted,
COUNT(*) FILTER (WHERE NOT is_insert) as updated
FROM inserted_categories`;
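// Note: xmax = 0 distinguishes freshly inserted rows from ones rewritten by
// ON CONFLICT; this leans on PostgreSQL's MVCC internals and is a common
// but undocumented heuristic.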
console.log('Executing query:', query);

const result = await localConnection.query(query, values);
console.log('Query result:', result);

// Get the first result since query returns an array
const queryResult = Array.isArray(result) ? result[0] : result;

if (!queryResult || !queryResult.rows || !queryResult.rows[0]) {
console.error('Query failed to return results. Result:', queryResult);
throw new Error('Query did not return expected results');
}

const total = parseInt(queryResult.rows[0].total) || 0;
const inserted = parseInt(queryResult.rows[0].inserted) || 0;
const updated = parseInt(queryResult.rows[0].updated) || 0;

console.log(`Total: ${total}, Inserted: ${inserted}, Updated: ${updated}`);

totalInserted += inserted;
totalUpdated += updated;

// Release the savepoint for this type
await localConnection.query(`RELEASE SAVEPOINT category_type_${type}`);

totalInserted += categoriesToInsert.length;
outputProgress({
status: "running",
operation: "Categories import",
current: totalInserted,
total: totalInserted,
message: `Imported ${inserted} (updated ${updated}) categories of type ${type}`,
current: totalInserted + totalUpdated,
total: categories.length,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
});
}

// After all imports, if we skipped any categories, throw an error
if (skippedCategories.length > 0) {
const error = new Error(
"Categories import completed with errors - some categories were skipped due to missing parents"
);
error.skippedCategories = skippedCategories;
} catch (error) {
// Rollback to the savepoint for this type
await localConnection.query(`ROLLBACK TO SAVEPOINT category_type_${type}`);
throw error;
}
}

// Commit the entire transaction - we'll do this even if we have skipped categories
await localConnection.query('COMMIT');

outputProgress({
status: "complete",
operation: "Categories import completed",
current: totalInserted,
total: totalInserted,
current: totalInserted + totalUpdated,
total: totalInserted + totalUpdated,
duration: formatElapsedTime((Date.now() - startTime) / 1000),
warnings: skippedCategories.length > 0 ? {
message: "Some categories were skipped due to missing parents",
skippedCategories
} : undefined
});

return {
status: "complete",
totalImported: totalInserted
recordsAdded: totalInserted,
recordsUpdated: totalUpdated,
totalRecords: totalInserted + totalUpdated,
warnings: skippedCategories.length > 0 ? {
message: "Some categories were skipped due to missing parents",
skippedCategories
} : undefined
};
} catch (error) {
console.error("Error importing categories:", error);
if (error.skippedCategories) {
console.error(
"Skipped categories:",
JSON.stringify(error.skippedCategories, null, 2)
);

// Only rollback if we haven't committed yet
try {
await localConnection.query('ROLLBACK');
} catch (rollbackError) {
console.error("Error during rollback:", rollbackError);
}

outputProgress({
status: "error",
operation: "Categories import failed",
error: error.message,
skippedCategories: error.skippedCategories
error: error.message
});

throw error;

@@ -2,7 +2,7 @@ const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } =
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');

/**
* Imports orders from a production MySQL database to a local MySQL database.
* Imports orders from a production MySQL database to a local PostgreSQL database.
* It can run in two modes:
* 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
* 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
@@ -23,93 +23,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
let importedCount = 0;
let totalOrderItems = 0;
let totalUniqueOrders = 0;

// Add a cumulative counter for processed orders before the loop
let cumulativeProcessedOrders = 0;

try {
// Clean up any existing temp tables first
await localConnection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_order_items;
DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
`);

// Create all temp tables with correct schema
await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_items (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
quantity INT NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_meta (
order_id INT UNSIGNED NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INT,
canceled TINYINT(1),
summary_discount DECIMAL(10,2) DEFAULT 0.00,
summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
PRIMARY KEY (order_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_discounts (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
discount DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_taxes (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
tax DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

await localConnection.query(`
CREATE TEMPORARY TABLE temp_order_costs (
order_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
PRIMARY KEY (order_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);

// Get column names from the local table
const [columns] = await localConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'orders'
AND COLUMN_NAME != 'updated' -- Exclude the updated column
ORDER BY ORDINAL_POSITION
`);
const columnNames = columns.map(col => col.COLUMN_NAME);

// Get last sync info
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
);
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
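// node-postgres returns a result object with a `rows` array, unlike the
// mysql2 driver's [rows, fields] tuple, hence the `.rows` access on the new line.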
console.log('Orders: Using last sync time:', lastSyncTime);

// First get count of order items
// First get count of order items - Keep MySQL compatible for production
const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total
FROM order_items oi
@@ -141,12 +66,13 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
totalOrderItems = total;
console.log('Orders: Found changes:', totalOrderItems);

// Get order items in batches
// Get order items - Keep MySQL compatible for production
console.log('Orders: Starting MySQL query...');
const [orderItems] = await prodConnection.query(`
SELECT
oi.order_id,
oi.prod_pid as pid,
oi.prod_itemnumber as SKU,
oi.prod_pid,
COALESCE(NULLIF(TRIM(oi.prod_itemnumber), ''), 'NO-SKU') as SKU,
oi.prod_price as price,
oi.qty_ordered as quantity,
COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount,
@@ -177,24 +103,78 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
` : ''}
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);

console.log('Orders: Processing', orderItems.length, 'order items');
console.log('Orders: Found', orderItems.length, 'order items to process');

// Create tables in PostgreSQL for debugging
await localConnection.query(`
DROP TABLE IF EXISTS debug_order_items;
DROP TABLE IF EXISTS debug_order_meta;
DROP TABLE IF EXISTS debug_order_discounts;
DROP TABLE IF EXISTS debug_order_taxes;
DROP TABLE IF EXISTS debug_order_costs;

CREATE TABLE debug_order_items (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
quantity INTEGER NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
PRIMARY KEY (order_id, pid)
);

CREATE TABLE debug_order_meta (
order_id INTEGER NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INTEGER,
canceled BOOLEAN,
summary_discount DECIMAL(10,2) DEFAULT 0.00,
summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
PRIMARY KEY (order_id)
);

CREATE TABLE debug_order_discounts (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
discount DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
);

CREATE TABLE debug_order_taxes (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
tax DECIMAL(10,2) NOT NULL,
PRIMARY KEY (order_id, pid)
);

CREATE TABLE debug_order_costs (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
PRIMARY KEY (order_id, pid)
);
`);
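// Unlike MySQL TEMPORARY tables, these debug_* tables are ordinary tables
// that persist across sessions and pooled connections - presumably the point
// while the migrated pipeline is being verified.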
// Insert order items in batches
for (let i = 0; i < orderItems.length; i += 5000) {
const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
const placeholders = batch.map((_, idx) =>
`($${idx * 6 + 1}, $${idx * 6 + 2}, $${idx * 6 + 3}, $${idx * 6 + 4}, $${idx * 6 + 5}, $${idx * 6 + 6})`
).join(",");
const values = batch.flatMap(item => [
item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
item.order_id, item.prod_pid, item.SKU, item.price, item.quantity, item.base_discount
]);

await localConnection.query(`
INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
INSERT INTO debug_order_items (order_id, pid, SKU, price, quantity, base_discount)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
price = VALUES(price),
quantity = VALUES(quantity),
base_discount = VALUES(base_discount)
ON CONFLICT (order_id, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
base_discount = EXCLUDED.base_discount
`, values);

processedCount = i + batch.length;
@@ -203,24 +183,26 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
operation: "Orders import",
message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
current: processedCount,
total: totalOrderItems
total: totalOrderItems,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
rate: calculateRate(startTime, processedCount)
});
}

// Get unique order IDs
const orderIds = [...new Set(orderItems.map(item => item.order_id))];
totalUniqueOrders = orderIds.length;
console.log('Total unique order IDs:', totalUniqueOrders);
console.log('Orders: Processing', totalUniqueOrders, 'unique orders');

// Reset processed count for order processing phase
processedCount = 0;

// Get order metadata in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
console.log('Sample of batch IDs:', batchIds.slice(0, 5));
// Process metadata, discounts, taxes, and costs in parallel
const METADATA_BATCH_SIZE = 2000;
const PG_BATCH_SIZE = 200;

const processMetadataBatch = async (batchIds) => {
const [orders] = await prodConnection.query(`
SELECT
o.order_id,
@@ -236,63 +218,45 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
WHERE o.order_id IN (?)
`, [batchIds]);

console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
const duplicates = orders.filter((order, index, self) =>
self.findIndex(o => o.order_id === order.order_id) !== index
);
if (duplicates.length > 0) {
console.log('Found duplicates:', duplicates);
}
// Process in sub-batches for PostgreSQL
for (let j = 0; j < orders.length; j += PG_BATCH_SIZE) {
const subBatch = orders.slice(j, j + PG_BATCH_SIZE);
if (subBatch.length === 0) continue;

const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?, ?, ?)").join(",");
const values = orders.flatMap(order => [
const placeholders = subBatch.map((_, idx) =>
`($${idx * 8 + 1}, $${idx * 8 + 2}, $${idx * 8 + 3}, $${idx * 8 + 4}, $${idx * 8 + 5}, $${idx * 8 + 6}, $${idx * 8 + 7}, $${idx * 8 + 8})`
).join(",");

const values = subBatch.flatMap(order => [
order.order_id,
order.date,
order.customer,
order.customer_name,
order.customer_name || '',
order.status,
order.canceled,
order.summary_discount,
order.summary_subtotal
order.summary_discount || 0,
order.summary_subtotal || 0
]);

await localConnection.query(`
INSERT INTO temp_order_meta (
order_id,
date,
customer,
customer_name,
status,
canceled,
summary_discount,
summary_subtotal
) VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
date = VALUES(date),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
summary_discount = VALUES(summary_discount),
summary_subtotal = VALUES(summary_subtotal)
INSERT INTO debug_order_meta (
order_id, date, customer, customer_name, status, canceled,
summary_discount, summary_subtotal
)
VALUES ${placeholders}
ON CONFLICT (order_id) DO UPDATE SET
date = EXCLUDED.date,
customer = EXCLUDED.customer,
customer_name = EXCLUDED.customer_name,
status = EXCLUDED.status,
canceled = EXCLUDED.canceled,
summary_discount = EXCLUDED.summary_discount,
summary_subtotal = EXCLUDED.summary_subtotal
`, values);

processedCount = i + orders.length;
outputProgress({
status: "running",
operation: "Orders import",
message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
current: processedCount,
total: totalUniqueOrders
});
}
};

// Reset processed count for final phase
processedCount = 0;

// Get promotional discounts in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const processDiscountsBatch = async (batchIds) => {
const [discounts] = await prodConnection.query(`
SELECT order_id, pid, SUM(amount) as discount
FROM order_discount_items
@@ -300,188 +264,282 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
GROUP BY order_id, pid
`, [batchIds]);

if (discounts.length > 0) {
const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);
if (discounts.length === 0) return;

for (let j = 0; j < discounts.length; j += PG_BATCH_SIZE) {
const subBatch = discounts.slice(j, j + PG_BATCH_SIZE);
if (subBatch.length === 0) continue;

const placeholders = subBatch.map((_, idx) =>
`($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
).join(",");

const values = subBatch.flatMap(d => [
d.order_id,
d.pid,
d.discount || 0
]);

await localConnection.query(`
INSERT INTO temp_order_discounts VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
discount = VALUES(discount)
INSERT INTO debug_order_discounts (order_id, pid, discount)
VALUES ${placeholders}
ON CONFLICT (order_id, pid) DO UPDATE SET
discount = EXCLUDED.discount
`, values);
}
}
};

// Get tax information in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const processTaxesBatch = async (batchIds) => {
// Optimized tax query to avoid subquery
const [taxes] = await prodConnection.query(`
SELECT DISTINCT
oti.order_id,
otip.pid,
otip.item_taxes_to_collect as tax
FROM order_tax_info oti
JOIN (
SELECT order_id, MAX(stamp) as max_stamp
SELECT oti.order_id, otip.pid, otip.item_taxes_to_collect as tax
FROM (
SELECT order_id, MAX(taxinfo_id) as latest_taxinfo_id
FROM order_tax_info
WHERE order_id IN (?)
GROUP BY order_id
) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp
) latest_info
JOIN order_tax_info oti ON oti.order_id = latest_info.order_id
AND oti.taxinfo_id = latest_info.latest_taxinfo_id
JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
`, [batchIds]);

if (taxes.length > 0) {
// Remove any duplicates before inserting
const uniqueTaxes = new Map();
taxes.forEach(t => {
const key = `${t.order_id}-${t.pid}`;
uniqueTaxes.set(key, t);
});
if (taxes.length === 0) return;

for (let j = 0; j < taxes.length; j += PG_BATCH_SIZE) {
const subBatch = taxes.slice(j, j + PG_BATCH_SIZE);
if (subBatch.length === 0) continue;

const placeholders = subBatch.map((_, idx) =>
`($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
).join(",");

const values = subBatch.flatMap(t => [
t.order_id,
t.pid,
t.tax || 0
]);

const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
if (values.length > 0) {
const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
await localConnection.query(`
INSERT INTO temp_order_taxes VALUES ${placeholders}
ON DUPLICATE KEY UPDATE tax = VALUES(tax)
INSERT INTO debug_order_taxes (order_id, pid, tax)
VALUES ${placeholders}
ON CONFLICT (order_id, pid) DO UPDATE SET
tax = EXCLUDED.tax
`, values);
}
}
}
};

// Get costeach values in batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const processCostsBatch = async (batchIds) => {
const [costs] = await prodConnection.query(`
SELECT
oc.orderid as order_id,
oc.pid,
COALESCE(
oc.costeach,
(SELECT pi.costeach
FROM product_inventory pi
WHERE pi.pid = oc.pid
AND pi.daterec <= o.date_placed
ORDER BY pi.daterec DESC LIMIT 1)
) as costeach
FROM order_costs oc
JOIN _order o ON oc.orderid = o.order_id
WHERE oc.orderid IN (?)
oi.order_id,
oi.prod_pid as pid,
oi.prod_price as costeach
FROM order_items oi
WHERE oi.order_id IN (?)
`, [batchIds]);

if (costs.length > 0) {
const placeholders = costs.map(() => '(?, ?, ?)').join(",");
const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach || 0]);
if (costs.length === 0) return;

for (let j = 0; j < costs.length; j += PG_BATCH_SIZE) {
const subBatch = costs.slice(j, j + PG_BATCH_SIZE);
if (subBatch.length === 0) continue;

const placeholders = subBatch.map((_, idx) =>
`($${idx * 3 + 1}, $${idx * 3 + 2}, $${idx * 3 + 3})`
).join(",");

const values = subBatch.flatMap(c => [
c.order_id,
c.pid,
c.costeach || 0
]);

await localConnection.query(`
INSERT INTO temp_order_costs (order_id, pid, costeach)
INSERT INTO debug_order_costs (order_id, pid, costeach)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE costeach = VALUES(costeach)
ON CONFLICT (order_id, pid) DO UPDATE SET
costeach = EXCLUDED.costeach
`, values);
}
};

// Process all data types in parallel for each batch
for (let i = 0; i < orderIds.length; i += METADATA_BATCH_SIZE) {
const batchIds = orderIds.slice(i, i + METADATA_BATCH_SIZE);

await Promise.all([
processMetadataBatch(batchIds),
processDiscountsBatch(batchIds),
processTaxesBatch(batchIds),
processCostsBatch(batchIds)
]);

processedCount = i + batchIds.length;
outputProgress({
status: "running",
operation: "Orders import",
message: `Loading order data: ${processedCount} of ${totalUniqueOrders}`,
current: processedCount,
total: totalUniqueOrders,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
remaining: estimateRemaining(startTime, processedCount, totalUniqueOrders),
rate: calculateRate(startTime, processedCount)
});
}

// Now combine all the data and insert into orders table
// Pre-check all products at once instead of per batch
const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
// Pre-check all products at once
const allOrderPids = [...new Set(orderItems.map(item => item.prod_pid))];
console.log('Orders: Checking', allOrderPids.length, 'unique products');

const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
"SELECT pid FROM products WHERE pid = ANY($1::bigint[])",
[allOrderPids]
) : [[]];
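// PostgreSQL receives the whole id list as one array parameter via
// = ANY($1); node-postgres does not expand arrays into IN (...) lists the
// way the mysql2 driver does.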
const existingPids = new Set(existingProducts.map(p => p.pid));

// Process in larger batches
for (let i = 0; i < orderIds.length; i += 5000) {
const batchIds = orderIds.slice(i, i + 5000);
const existingPids = new Set(existingProducts.rows.map(p => p.pid));

// Process in smaller batches
for (let i = 0; i < orderIds.length; i += 1000) {
const batchIds = orderIds.slice(i, i + 1000);

// Get combined data for this batch in sub-batches
const PG_BATCH_SIZE = 100; // Process 100 records at a time
for (let j = 0; j < batchIds.length; j += PG_BATCH_SIZE) {
const subBatchIds = batchIds.slice(j, j + PG_BATCH_SIZE);

// Get combined data for this batch
const [orders] = await localConnection.query(`
WITH order_totals AS (
SELECT
oi.order_id,
oi.pid,
SUM(COALESCE(od.discount, 0)) as promo_discount,
COALESCE(ot.tax, 0) as total_tax
FROM debug_order_items oi
LEFT JOIN debug_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN debug_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
GROUP BY oi.order_id, oi.pid, ot.tax
)
SELECT
oi.order_id as order_number,
oi.pid,
oi.SKU,
oi.pid::bigint as pid,
oi.SKU as sku,
om.date,
oi.price,
oi.quantity,
oi.base_discount + COALESCE(od.discount, 0) +
(oi.base_discount +
COALESCE(ot.promo_discount, 0) +
CASE
WHEN om.summary_discount > 0 THEN
ROUND((om.summary_discount * (oi.price * oi.quantity)) /
NULLIF(om.summary_subtotal, 0), 2)
WHEN om.summary_discount > 0 AND om.summary_subtotal > 0 THEN
ROUND((om.summary_discount * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 2)
ELSE 0
END as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
END)::DECIMAL(10,2) as discount,
COALESCE(ot.total_tax, 0)::DECIMAL(10,2) as tax,
false as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(tc.costeach, 0) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
`, [batchIds]);
COALESCE(oc.costeach, oi.price)::DECIMAL(10,3) as costeach
FROM (
SELECT DISTINCT ON (order_id, pid)
order_id, pid, SKU, price, quantity, base_discount
FROM debug_order_items
WHERE order_id = ANY($1)
ORDER BY order_id, pid
) oi
JOIN debug_order_meta om ON oi.order_id = om.order_id
LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN debug_order_costs oc ON oi.order_id = oc.order_id AND oi.pid = oc.pid
ORDER BY oi.order_id, oi.pid
`, [subBatchIds]);

// Filter orders and track missing products - do this in a single pass
// Filter orders and track missing products
const validOrders = [];
const values = [];
const processedOrderItems = new Set(); // Track unique order items
const processedOrders = new Set(); // Track unique orders
const processedOrderItems = new Set();
const processedOrders = new Set();

for (const order of orders) {
for (const order of orders.rows) {
if (!existingPids.has(order.pid)) {
missingProducts.add(order.pid);
skippedOrders.add(order.order_number);
continue;
}
validOrders.push(order);
values.push(...columnNames.map(col => order[col] ?? null));
processedOrderItems.add(`${order.order_number}-${order.pid}`);
processedOrders.add(order.order_number);
}

if (validOrders.length > 0) {
// Pre-compute the placeholders string once
const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");
// Process valid orders in smaller sub-batches
const FINAL_BATCH_SIZE = 50;
for (let k = 0; k < validOrders.length; k += FINAL_BATCH_SIZE) {
const subBatch = validOrders.slice(k, k + FINAL_BATCH_SIZE);

const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(",")})
const placeholders = subBatch.map((_, idx) => {
const base = idx * 15;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
}).join(',');

const batchValues = subBatch.flatMap(o => [
o.order_number,
o.pid,
o.sku || 'NO-SKU',
o.date,
o.price,
o.quantity,
o.discount,
o.tax,
o.tax_included,
o.shipping,
o.customer,
o.customer_name,
o.status,
o.canceled,
o.costeach
]);

const [result] = await localConnection.query(`
WITH inserted_orders AS (
INSERT INTO orders (
order_number, pid, SKU, date, price, quantity, discount,
tax, tax_included, shipping, customer, customer_name,
status, canceled, costeach
)
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());
ON CONFLICT (order_number, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
date = EXCLUDED.date,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
discount = EXCLUDED.discount,
tax = EXCLUDED.tax,
tax_included = EXCLUDED.tax_included,
shipping = EXCLUDED.shipping,
customer = EXCLUDED.customer,
customer_name = EXCLUDED.customer_name,
status = EXCLUDED.status,
canceled = EXCLUDED.canceled,
costeach = EXCLUDED.costeach
RETURNING xmax, xmin
)
SELECT
COUNT(*) FILTER (WHERE xmax = 0) as inserted,
COUNT(*) FILTER (WHERE xmax <> 0) as updated
FROM inserted_orders
`, batchValues);

const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

recordsAdded += inserts;
recordsUpdated += updates;
importedCount += processedOrderItems.size; // Count unique order items processed
const { inserted, updated } = result.rows[0];
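// Caution: node-postgres returns COUNT() results as strings (bigint), so
// these counters may need parseInt here to avoid string concatenation on +=.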
recordsAdded += inserted;
recordsUpdated += updated;
importedCount += subBatch.length;
}

// Update progress based on unique orders processed
cumulativeProcessedOrders += processedOrders.size;
outputProgress({
status: "running",
operation: "Orders import",
message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
message: `Importing orders: ${cumulativeProcessedOrders} of ${totalUniqueOrders}`,
current: cumulativeProcessedOrders,
total: totalUniqueOrders,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
@@ -489,124 +547,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
rate: calculateRate(startTime, cumulativeProcessedOrders)
});
}

// Now try to import any orders that were skipped due to missing products
if (skippedOrders.size > 0) {
try {
outputProgress({
status: "running",
operation: "Orders import",
message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
});

// Get the orders that were skipped
const [skippedProdOrders] = await localConnection.query(`
SELECT DISTINCT
oi.order_id as order_number,
oi.pid,
oi.SKU,
om.date,
oi.price,
oi.quantity,
oi.base_discount + COALESCE(od.discount, 0) +
CASE
WHEN o.summary_discount > 0 THEN
ROUND((o.summary_discount * (oi.price * oi.quantity)) /
NULLIF(o.summary_subtotal, 0), 2)
ELSE 0
END as discount,
COALESCE(ot.tax, 0) as tax,
0 as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(tc.costeach, 0) as costeach
FROM temp_order_items oi
JOIN temp_order_meta om ON oi.order_id = om.order_id
LEFT JOIN _order o ON oi.order_id = o.order_id
LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
WHERE oi.order_id IN (?)
`, [Array.from(skippedOrders)]);

// Check which products exist now
const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
"SELECT pid FROM products WHERE pid IN (?)",
[skippedPids]
) : [[]];
const existingPids = new Set(existingProducts.map(p => p.pid));

// Filter orders that can now be imported
const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
const retryOrderItems = new Set(); // Track unique order items in retry

if (validOrders.length > 0) {
const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();

const result = await localConnection.query(`
INSERT INTO orders (${columnNames.join(", ")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE
SKU = VALUES(SKU),
date = VALUES(date),
price = VALUES(price),
quantity = VALUES(quantity),
discount = VALUES(discount),
tax = VALUES(tax),
tax_included = VALUES(tax_included),
shipping = VALUES(shipping),
customer = VALUES(customer),
customer_name = VALUES(customer_name),
status = VALUES(status),
canceled = VALUES(canceled),
costeach = VALUES(costeach)
`, values);

const affectedRows = result[0].affectedRows;
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);

// Track unique order items
validOrders.forEach(order => {
retryOrderItems.add(`${order.order_number}-${order.pid}`);
});

outputProgress({
status: "running",
operation: "Orders import",
message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
});

// Update the main counters
recordsAdded += inserts;
recordsUpdated += updates;
importedCount += retryOrderItems.size;
}
} catch (error) {
console.warn('Warning: Failed to retry skipped orders:', error.message);
console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
}
}

// Clean up temporary tables after ALL processing is complete
await localConnection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_order_items;
DROP TEMPORARY TABLE IF EXISTS temp_order_meta;
DROP TEMPORARY TABLE IF EXISTS temp_order_discounts;
DROP TEMPORARY TABLE IF EXISTS temp_order_taxes;
DROP TEMPORARY TABLE IF EXISTS temp_order_costs;
`);

// Only update sync status if we get here (no errors thrown)
// Update sync status
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
VALUES ('orders', NOW())
ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
ON CONFLICT (table_name) DO UPDATE SET
last_sync_timestamp = NOW()
`);
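// Note: ON CONFLICT (table_name) assumes sync_status has a primary key or
// unique constraint on table_name; without one PostgreSQL rejects the statement.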
return {

File diff suppressed because it is too large
@@ -10,22 +10,38 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
);
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';

console.log('Purchase Orders: Using last sync time:', lastSyncTime);

// Insert temporary table creation query for purchase orders
// Create temporary tables with PostgreSQL syntax
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_purchase_orders (
po_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
DROP TABLE IF EXISTS temp_purchase_orders;
DROP TABLE IF EXISTS temp_po_receivings;

CREATE TEMP TABLE temp_purchase_orders (
po_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
vendor VARCHAR(255),
date DATE,
expected_date DATE,
status INT,
status INTEGER,
notes TEXT,
PRIMARY KEY (po_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
);

CREATE TEMP TABLE temp_po_receivings (
po_id INTEGER,
pid INTEGER NOT NULL,
receiving_id INTEGER NOT NULL,
qty_each INTEGER,
cost_each DECIMAL(10,3),
received_date TIMESTAMP,
received_by INTEGER,
received_by_name VARCHAR(255),
is_alt_po INTEGER,
PRIMARY KEY (receiving_id, pid)
);
`);

outputProgress({
@@ -33,8 +49,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
status: "running",
});

// Get column names first
const [columns] = await localConnection.query(`
// Get column names - Keep MySQL compatible for production
const [columns] = await prodConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'purchase_orders'
@@ -60,7 +76,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]
: [];

// First get all relevant PO IDs with basic info
// First get all relevant PO IDs with basic info - Keep MySQL compatible for production
const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total
FROM (
@@ -99,6 +115,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental

console.log('Purchase Orders: Found changes:', total);

// Get PO list - Keep MySQL compatible for production
const [poList] = await prodConnection.query(`
SELECT DISTINCT
COALESCE(p.po_id, r.receiving_id) as po_id,
@@ -185,7 +202,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const batch = poList.slice(i, Math.min(i + BATCH_SIZE, poList.length));
const poIds = batch.map(po => po.po_id);

// Get all products for these POs in one query
// Get all products for these POs in one query - Keep MySQL compatible for production
const [poProducts] = await prodConnection.query(`
SELECT
pop.po_id,
@@ -207,7 +224,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const productPids = [...new Set(productBatch.map(p => p.pid))];
const batchPoIds = [...new Set(productBatch.map(p => p.po_id))];

// Get receivings for this batch with employee names
// Get receivings for this batch with employee names - Keep MySQL compatible for production
const [receivings] = await prodConnection.query(`
SELECT
r.po_id,
@@ -232,315 +249,176 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
ORDER BY r.po_id, rp.pid, rp.received_date
`, [batchPoIds, productPids]);

// Create maps for this sub-batch
const poProductMap = new Map();
productBatch.forEach(product => {
const key = `${product.po_id}-${product.pid}`;
poProductMap.set(key, product);
});
// Insert receivings into temp table
if (receivings.length > 0) {
const placeholders = receivings.map((_, idx) => {
const base = idx * 9;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9})`;
}).join(',');

const receivingMap = new Map();
const altReceivingMap = new Map();
const noPOReceivingMap = new Map();
const values = receivings.flatMap(r => [
r.po_id,
r.pid,
r.receiving_id,
r.qty_each,
r.cost_each,
r.received_date,
r.received_by,
r.received_by_name,
r.is_alt_po
]);

receivings.forEach(receiving => {
const key = `${receiving.po_id}-${receiving.pid}`;
if (receiving.is_alt_po === 2) {
// No PO
if (!noPOReceivingMap.has(receiving.pid)) {
noPOReceivingMap.set(receiving.pid, []);
}
noPOReceivingMap.get(receiving.pid).push(receiving);
} else if (receiving.is_alt_po === 1) {
// Different PO
if (!altReceivingMap.has(receiving.pid)) {
altReceivingMap.set(receiving.pid, []);
}
altReceivingMap.get(receiving.pid).push(receiving);
} else {
// Original PO
if (!receivingMap.has(key)) {
receivingMap.set(key, []);
}
receivingMap.get(key).push(receiving);
}
});

// Verify PIDs exist
const [existingPids] = await localConnection.query(
'SELECT pid FROM products WHERE pid IN (?)',
[productPids]
);
const validPids = new Set(existingPids.map(p => p.pid));

// First check which PO lines already exist and get their current values
const poLines = Array.from(poProductMap.values())
.filter(p => validPids.has(p.pid))
.map(p => [p.po_id, p.pid]);

const [existingPOs] = await localConnection.query(
`SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`,
poLines.flat()
);
const existingPOMap = new Map(
existingPOs.map(po => [`${po.po_id}-${po.pid}`, po])
);

// Split into inserts and updates
const insertsAndUpdates = { inserts: [], updates: [] };
let batchProcessed = 0;

for (const po of batch) {
const poProducts = Array.from(poProductMap.values())
.filter(p => p.po_id === po.po_id && validPids.has(p.pid));

for (const product of poProducts) {
const key = `${po.po_id}-${product.pid}`;
const receivingHistory = receivingMap.get(key) || [];
const altReceivingHistory = altReceivingMap.get(product.pid) || [];
const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || [];

// Combine all receivings and sort by date
const allReceivings = [
...receivingHistory.map(r => ({ ...r, type: 'original' })),
...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })),
...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' }))
].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31'));
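// Receivings with no received_date are given the '9999-12-31' sentinel so
// they sort after every dated receiving in this FIFO ordering.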
// Split receivings into original PO and others
const originalPOReceivings = allReceivings.filter(r => r.type === 'original');
const otherReceivings = allReceivings.filter(r => r.type !== 'original');

// Track FIFO fulfillment
let remainingToFulfill = product.ordered;
const fulfillmentTracking = [];
let totalReceived = 0;
let actualCost = null; // Will store the cost of the first receiving that fulfills this PO
let firstFulfillmentReceiving = null;
let lastFulfillmentReceiving = null;

for (const receiving of allReceivings) {
// Convert quantities to base units using supplier data
const baseQtyReceived = receiving.qty_each * (
receiving.type === 'original' ? 1 :
Math.max(1, product.supplier_qty_per_unit || 1)
);
const qtyToApply = Math.min(remainingToFulfill, baseQtyReceived);

if (qtyToApply > 0) {
// If this is the first receiving being applied, use its cost
if (actualCost === null && receiving.cost_each > 0) {
actualCost = receiving.cost_each;
firstFulfillmentReceiving = receiving;
}
lastFulfillmentReceiving = receiving;
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: qtyToApply,
qty_total: baseQtyReceived,
cost: receiving.cost_each || actualCost || product.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
remaining_qty: baseQtyReceived - qtyToApply
});
remainingToFulfill -= qtyToApply;
} else {
// Track excess receivings
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: 0,
qty_total: baseQtyReceived,
cost: receiving.cost_each || actualCost || product.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
is_excess: true
});
}
totalReceived += baseQtyReceived;
await localConnection.query(`
INSERT INTO temp_po_receivings (
po_id, pid, receiving_id, qty_each, cost_each, received_date,
received_by, received_by_name, is_alt_po
)
VALUES ${placeholders}
ON CONFLICT (receiving_id, pid) DO UPDATE SET
po_id = EXCLUDED.po_id,
qty_each = EXCLUDED.qty_each,
cost_each = EXCLUDED.cost_each,
received_date = EXCLUDED.received_date,
received_by = EXCLUDED.received_by,
received_by_name = EXCLUDED.received_by_name,
is_alt_po = EXCLUDED.is_alt_po
`, values);
}

const receiving_status = !totalReceived ? 1 : // created
remainingToFulfill > 0 ? 30 : // partial
40; // full
// Process each PO product
for (const product of productBatch) {
const po = batch.find(p => p.po_id === product.po_id);
if (!po) continue;

function formatDate(dateStr) {
if (!dateStr) return null;
if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null;
if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null;
try {
const date = new Date(dateStr);
if (isNaN(date.getTime())) return null;
if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null;
return date.toISOString().split('T')[0];
} catch (e) {
return null;
}
}
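// MySQL zero-dates such as '0000-00-00' are invalid for PostgreSQL's DATE
// type, so formatDate normalizes them (and any other unparseable value) to NULL.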
|
||||
// Insert into temp_purchase_orders
|
||||
const placeholders = `($1, $2, $3, $4, $5, $6, $7)`;
|
||||
const values = [
|
||||
product.po_id,
|
||||
product.pid,
|
||||
po.vendor,
|
||||
po.date,
|
||||
po.expected_date,
|
||||
po.status,
|
||||
po.notes || po.long_note
|
||||
];
|
||||
|
||||
const rowValues = columnNames.map(col => {
|
||||
switch (col) {
|
||||
case 'po_id': return po.po_id;
|
||||
case 'vendor': return po.vendor;
|
||||
case 'date': return formatDate(po.date);
|
||||
case 'expected_date': return formatDate(po.expected_date);
|
||||
case 'pid': return product.pid;
|
||||
case 'sku': return product.sku;
|
||||
case 'name': return product.name;
|
||||
case 'cost_price': return actualCost || product.cost_each;
|
||||
case 'po_cost_price': return product.cost_each;
|
||||
case 'status': return po.status;
|
||||
case 'notes': return po.notes;
|
||||
case 'long_note': return po.long_note;
|
||||
case 'ordered': return product.ordered;
|
||||
case 'received': return totalReceived;
|
||||
case 'unfulfilled': return remainingToFulfill;
|
||||
case 'excess_received': return Math.max(0, totalReceived - product.ordered);
|
||||
case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date);
|
||||
case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date);
|
||||
case 'received_by': return firstFulfillmentReceiving?.received_by_name || null;
|
||||
case 'receiving_status': return receiving_status;
|
||||
case 'receiving_history': return JSON.stringify({
|
||||
fulfillment: fulfillmentTracking,
|
||||
ordered_qty: product.ordered,
|
||||
total_received: totalReceived,
|
||||
remaining_unfulfilled: remainingToFulfill,
|
||||
excess_received: Math.max(0, totalReceived - product.ordered),
|
||||
po_cost: product.cost_each,
|
||||
actual_cost: actualCost || product.cost_each
|
||||
});
|
||||
default: return null;
|
||||
}
|
||||
});
|
||||
await localConnection.query(`
|
||||
INSERT INTO temp_purchase_orders (
|
||||
po_id, pid, vendor, date, expected_date, status, notes
|
||||
)
|
||||
VALUES ${placeholders}
|
||||
ON CONFLICT (po_id, pid) DO UPDATE SET
|
||||
vendor = EXCLUDED.vendor,
|
||||
date = EXCLUDED.date,
|
||||
expected_date = EXCLUDED.expected_date,
|
||||
status = EXCLUDED.status,
|
||||
notes = EXCLUDED.notes
|
||||
`, values);
|
||||
|
||||
if (existingPOMap.has(key)) {
|
||||
const existing = existingPOMap.get(key);
|
||||
// Check if any values are different
|
||||
const hasChanges = columnNames.some(col => {
|
||||
const newVal = rowValues[columnNames.indexOf(col)];
|
||||
const oldVal = existing[col] ?? null;
|
||||
// Special handling for numbers to avoid type coercion issues
|
||||
if (typeof newVal === 'number' && typeof oldVal === 'number') {
|
||||
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
|
||||
}
|
||||
// Special handling for receiving_history - parse and compare
|
||||
if (col === 'receiving_history') {
|
||||
const newHistory = JSON.parse(newVal || '{}');
|
||||
const oldHistory = JSON.parse(oldVal || '{}');
|
||||
return JSON.stringify(newHistory) !== JSON.stringify(oldHistory);
|
||||
}
|
||||
return newVal !== oldVal;
|
||||
});
|
||||
processed++;
|
||||
|
||||
if (hasChanges) {
|
||||
insertsAndUpdates.updates.push({
|
||||
po_id: po.po_id,
|
||||
pid: product.pid,
|
||||
values: rowValues
|
||||
});
|
||||
}
|
||||
} else {
|
||||
insertsAndUpdates.inserts.push({
|
||||
po_id: po.po_id,
|
||||
pid: product.pid,
|
||||
values: rowValues
|
||||
});
|
||||
}
|
||||
batchProcessed++;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle inserts
|
||||
if (insertsAndUpdates.inserts.length > 0) {
|
||||
const insertPlaceholders = insertsAndUpdates.inserts
|
||||
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
|
||||
.join(",");
|
||||
|
||||
const insertResult = await localConnection.query(`
|
||||
INSERT INTO purchase_orders (${columnNames.join(",")})
|
||||
VALUES ${insertPlaceholders}
|
||||
`, insertsAndUpdates.inserts.map(i => i.values).flat());
|
||||
|
||||
const affectedRows = insertResult[0].affectedRows;
|
||||
// For an upsert, MySQL counts rows twice for updates
|
||||
// So if affectedRows is odd, we have (updates * 2 + inserts)
|
||||
const updates = Math.floor(affectedRows / 2);
|
||||
const inserts = affectedRows - (updates * 2);
|
||||
|
||||
recordsAdded += inserts;
|
||||
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
|
||||
processed += batchProcessed;
|
||||
}

// Handle updates - now we know these actually have changes
if (insertsAndUpdates.updates.length > 0) {
  const updatePlaceholders = insertsAndUpdates.updates
    .map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
    .join(",");

  const updateResult = await localConnection.query(`
    INSERT INTO purchase_orders (${columnNames.join(",")})
    VALUES ${updatePlaceholders}
    ON DUPLICATE KEY UPDATE ${columnNames
      .filter((col) => col !== "po_id" && col !== "pid")
      .map((col) => `${col} = VALUES(${col})`)
      .join(",")};
  `, insertsAndUpdates.updates.map(u => u.values).flat());

  const affectedRows = updateResult[0].affectedRows;
  // For INSERT ... ON DUPLICATE KEY UPDATE, MySQL reports 1 affected row per
  // inserted row and 2 per updated row, so affectedRows = inserts + 2 * updates
  const updates = Math.floor(affectedRows / 2);
  const inserts = affectedRows - (updates * 2);

  recordsUpdated += updates;
  processed += batchProcessed;
}

// Update progress based on time interval
const now = Date.now();
if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) {
  // Update progress periodically
  if (Date.now() - lastProgressUpdate > PROGRESS_INTERVAL) {
    outputProgress({
      status: "running",
      operation: "Purchase orders import",
      message: `Processing purchase orders: ${processed} of ${totalItems}`,
      current: processed,
      total: totalItems,
      elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
      remaining: estimateRemaining(startTime, processed, totalItems),
      rate: calculateRate(startTime, processed)
    });
    lastProgressUpdate = now;
    lastProgressUpdate = Date.now();
  }
}
}
}

// Only update sync status if we get here (no errors thrown)
// Insert final data into purchase_orders table
const [result] = await localConnection.query(`
  WITH inserted_pos AS (
    INSERT INTO purchase_orders (
      po_id, pid, vendor, date, expected_date, status, notes,
      received_qty, received_cost, last_received_date, last_received_by,
      alt_po_received_qty, alt_po_last_received_date,
      no_po_received_qty, no_po_last_received_date
    )
    SELECT
      po.po_id,
      po.pid,
      po.vendor,
      po.date,
      po.expected_date,
      po.status,
      po.notes,
      COALESCE(SUM(CASE WHEN r.is_alt_po = 0 THEN r.qty_each END), 0) as received_qty,
      COALESCE(AVG(CASE WHEN r.is_alt_po = 0 THEN r.cost_each END), 0) as received_cost,
      MAX(CASE WHEN r.is_alt_po = 0 THEN r.received_date END) as last_received_date,
      MAX(CASE WHEN r.is_alt_po = 0 THEN r.received_by_name END) as last_received_by,
      COALESCE(SUM(CASE WHEN r.is_alt_po = 1 THEN r.qty_each END), 0) as alt_po_received_qty,
      MAX(CASE WHEN r.is_alt_po = 1 THEN r.received_date END) as alt_po_last_received_date,
      COALESCE(SUM(CASE WHEN r.is_alt_po = 2 THEN r.qty_each END), 0) as no_po_received_qty,
      MAX(CASE WHEN r.is_alt_po = 2 THEN r.received_date END) as no_po_last_received_date
    FROM temp_purchase_orders po
    LEFT JOIN temp_po_receivings r ON po.pid = r.pid
    GROUP BY po.po_id, po.pid, po.vendor, po.date, po.expected_date, po.status, po.notes
    ON CONFLICT (po_id, pid) DO UPDATE SET
      vendor = EXCLUDED.vendor,
      date = EXCLUDED.date,
      expected_date = EXCLUDED.expected_date,
      status = EXCLUDED.status,
      notes = EXCLUDED.notes,
      received_qty = EXCLUDED.received_qty,
      received_cost = EXCLUDED.received_cost,
      last_received_date = EXCLUDED.last_received_date,
      last_received_by = EXCLUDED.last_received_by,
      alt_po_received_qty = EXCLUDED.alt_po_received_qty,
      alt_po_last_received_date = EXCLUDED.alt_po_last_received_date,
      no_po_received_qty = EXCLUDED.no_po_received_qty,
      no_po_last_received_date = EXCLUDED.no_po_last_received_date
    RETURNING xmax
  )
  SELECT
    COUNT(*) FILTER (WHERE xmax = 0) as inserted,
    COUNT(*) FILTER (WHERE xmax <> 0) as updated
  FROM inserted_pos
`);
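// Counting via xmax relies on a PostgreSQL implementation detail: rows freshly
// inserted by this statement carry xmax = 0, while rows rewritten by the
// ON CONFLICT update branch carry a nonzero xmax. It works, but it is not part
// of the documented SQL contract, so treat it as a pragmatic heuristic.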

recordsAdded = parseInt(result.rows[0].inserted, 10) || 0;
recordsUpdated = parseInt(result.rows[0].updated, 10) || 0;

// Update sync status
await localConnection.query(`
  INSERT INTO sync_status (table_name, last_sync_timestamp)
  VALUES ('purchase_orders', NOW())
  ON DUPLICATE KEY UPDATE
    last_sync_timestamp = NOW(),
    last_sync_id = LAST_INSERT_ID(last_sync_id)
  ON CONFLICT (table_name) DO UPDATE SET
    last_sync_timestamp = NOW()
`);

// Clean up temporary tables
await localConnection.query(`
  DROP TABLE IF EXISTS temp_purchase_orders;
  DROP TABLE IF EXISTS temp_po_receivings;
`);

return {
  status: "complete",
  totalImported: totalItems,
  recordsAdded: recordsAdded || 0,
  recordsUpdated: recordsUpdated || 0,
  incrementalUpdate,
  lastSyncTime
  recordsAdded,
  recordsUpdated,
  totalRecords: processed
};

} catch (error) {
  outputProgress({
    operation: `${incrementalUpdate ? 'Incremental' : 'Full'} purchase orders import failed`,
    status: "error",
    error: error.message,
  });
  console.error("Error during purchase orders import:", error);
  // Attempt cleanup on error
  try {
    await localConnection.query(`
      DROP TABLE IF EXISTS temp_purchase_orders;
      DROP TABLE IF EXISTS temp_po_receivings;
    `);
  } catch (cleanupError) {
    console.error('Error during cleanup:', cleanupError);
  }
  throw error;
}
}

@@ -1,5 +1,6 @@
const mysql = require("mysql2/promise");
const { Client } = require("ssh2");
const { Pool } = require('pg');
const dotenv = require("dotenv");
const path = require("path");

@@ -41,23 +42,90 @@ async function setupSshTunnel(sshConfig) {
async function setupConnections(sshConfig) {
  const tunnel = await setupSshTunnel(sshConfig);

  // Setup MySQL connection for production
  const prodConnection = await mysql.createConnection({
    ...sshConfig.prodDbConfig,
    stream: tunnel.stream,
  });

  const localConnection = await mysql.createPool({
    ...sshConfig.localDbConfig,
    waitForConnections: true,
    connectionLimit: 10,
    queueLimit: 0
  });
  // Setup PostgreSQL connection pool for local
  const localPool = new Pool(sshConfig.localDbConfig);

  return {
    ssh: tunnel.ssh,
    prodConnection,
    localConnection
  // Test the PostgreSQL connection
  try {
    const client = await localPool.connect();
    await client.query('SELECT NOW()');
    client.release();
    console.log('PostgreSQL connection successful');
  } catch (err) {
    console.error('PostgreSQL connection error:', err);
    throw err;
  }

  // Create a wrapper for the PostgreSQL pool to match MySQL interface
  const localConnection = {
    _client: null,
    _transactionActive: false,

    query: async (text, params) => {
      // If we're not in a transaction, use the pool directly
      if (!localConnection._transactionActive) {
        const client = await localPool.connect();
        try {
          const result = await client.query(text, params);
          return [result];
        } finally {
          client.release();
        }
      }

      // If we're in a transaction, use the dedicated client
      if (!localConnection._client) {
        throw new Error('No active transaction client');
      }
      const result = await localConnection._client.query(text, params);
      return [result];
    },

    beginTransaction: async () => {
      if (localConnection._transactionActive) {
        throw new Error('Transaction already active');
      }
      localConnection._client = await localPool.connect();
      await localConnection._client.query('BEGIN');
      localConnection._transactionActive = true;
    },

    commit: async () => {
      if (!localConnection._transactionActive) {
        throw new Error('No active transaction to commit');
      }
      await localConnection._client.query('COMMIT');
      localConnection._client.release();
      localConnection._client = null;
      localConnection._transactionActive = false;
    },

    rollback: async () => {
      if (!localConnection._transactionActive) {
        throw new Error('No active transaction to rollback');
      }
      await localConnection._client.query('ROLLBACK');
      localConnection._client.release();
      localConnection._client = null;
      localConnection._transactionActive = false;
    },

    end: async () => {
      if (localConnection._client) {
        localConnection._client.release();
        localConnection._client = null;
      }
      await localPool.end();
    }
  };
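  // Minimal usage sketch of the wrapper defined above; it mirrors the mysql2
  // call pattern so existing call sites can stay unchanged during the migration:
  //   await localConnection.beginTransaction();
  //   const [res] = await localConnection.query('SELECT NOW()');
  //   await localConnection.commit();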

  return { prodConnection, localConnection, tunnel };
}

// Helper function to close connections

@@ -1,4 +1,4 @@
const mysql = require('mysql2/promise');
const { Client } = require('pg');
const path = require('path');
const dotenv = require('dotenv');
const fs = require('fs');
@@ -10,7 +10,7 @@ const dbConfig = {
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  multipleStatements: true
  port: process.env.DB_PORT || 5432
};

// Helper function to output progress in JSON format
@@ -120,46 +120,42 @@ async function resetDatabase() {
  }
});

const connection = await mysql.createConnection(dbConfig);
const client = new Client(dbConfig);
await client.connect();

try {
  // Check MySQL privileges
  // Check PostgreSQL version and user
  outputProgress({
    operation: 'Checking privileges',
    message: 'Verifying MySQL user privileges...'
    operation: 'Checking database',
    message: 'Verifying PostgreSQL version and user privileges...'
  });

  const [grants] = await connection.query('SHOW GRANTS');
  const versionResult = await client.query('SELECT version()');
  const userResult = await client.query('SELECT current_user, current_database()');

  outputProgress({
    operation: 'User privileges',
    operation: 'Database info',
    message: {
      grants: grants.map(g => Object.values(g)[0])
      version: versionResult.rows[0].version,
      user: userResult.rows[0].current_user,
      database: userResult.rows[0].current_database
    }
  });

  // Enable warnings as errors
  await connection.query('SET SESSION sql_notes = 1');

  // Log database config (without sensitive info)
  outputProgress({
    operation: 'Database config',
    message: `Using database: ${dbConfig.database} on host: ${dbConfig.host}`
  });

  // Get list of all tables in the current database
  outputProgress({
    operation: 'Getting table list',
    message: 'Retrieving all table names...'
  });

  const [tables] = await connection.query(`
    SELECT GROUP_CONCAT(table_name) as tables
    FROM information_schema.tables
    WHERE table_schema = DATABASE()
    AND table_name NOT IN ('users', 'import_history', 'calculate_history')
  const tablesResult = await client.query(`
    SELECT string_agg(tablename, ', ') as tables
    FROM pg_tables
    WHERE schemaname = 'public'
    AND tablename NOT IN ('users', 'calculate_history', 'import_history');
  `);

  if (!tables[0].tables) {
  if (!tablesResult.rows[0].tables) {
    outputProgress({
      operation: 'No tables found',
      message: 'Database is already empty'
@@ -170,20 +166,73 @@ async function resetDatabase() {
      message: 'Dropping all existing tables...'
    });

    await connection.query('SET FOREIGN_KEY_CHECKS = 0');
    const dropQuery = `
      DROP TABLE IF EXISTS
      ${tables[0].tables
        .split(',')
        .filter(table => !['users', 'calculate_history'].includes(table))
        .map(table => '`' + table + '`')
        .join(', ')}
    `;
    await connection.query(dropQuery);
    await connection.query('SET FOREIGN_KEY_CHECKS = 1');
    // Disable triggers/foreign key checks
    await client.query('SET session_replication_role = \'replica\';');
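    // Note: session_replication_role = 'replica' makes PostgreSQL skip firing
    // ordinary triggers, including the internal ones that enforce foreign keys,
    // for the rest of the session. Setting it requires elevated privileges, so
    // this assumes the reset script runs as a suitably privileged role.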

    // Drop all tables except users
    const tables = tablesResult.rows[0].tables.split(', ');
    for (const table of tables) {
      if (!['users'].includes(table)) {
        await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
      }
    }

    // Read and execute main schema (core tables)
    // Only drop types if we're not preserving history tables
    const historyTablesExist = await client.query(`
      SELECT EXISTS (
        SELECT FROM pg_tables
        WHERE schemaname = 'public'
        AND tablename IN ('calculate_history', 'import_history')
      );
    `);

    if (!historyTablesExist.rows[0].exists) {
      await client.query('DROP TYPE IF EXISTS calculation_status CASCADE;');
      await client.query('DROP TYPE IF EXISTS module_name CASCADE;');
    }

    // Re-enable triggers/foreign key checks
    await client.query('SET session_replication_role = \'origin\';');
  }

  // Create enum types if they don't exist
  outputProgress({
    operation: 'Creating enum types',
    message: 'Setting up required enum types...'
  });

  // Check if types exist before creating
  const typesExist = await client.query(`
    SELECT EXISTS (
      SELECT 1 FROM pg_type
      WHERE typname = 'calculation_status'
    ) as calc_status_exists,
    EXISTS (
      SELECT 1 FROM pg_type
      WHERE typname = 'module_name'
    ) as module_name_exists;
  `);

  if (!typesExist.rows[0].calc_status_exists) {
    await client.query(`CREATE TYPE calculation_status AS ENUM ('running', 'completed', 'failed', 'cancelled')`);
  }

  if (!typesExist.rows[0].module_name_exists) {
    await client.query(`
      CREATE TYPE module_name AS ENUM (
        'product_metrics',
        'time_aggregates',
        'financial_metrics',
        'vendor_metrics',
        'category_metrics',
        'brand_metrics',
        'sales_forecasts',
        'abc_classification'
      )
    `);
  }

  // Read and execute main schema first (core tables)
  outputProgress({
    operation: 'Running database setup',
    message: 'Creating core tables...'
@@ -223,35 +272,24 @@ async function resetDatabase() {
  for (let i = 0; i < statements.length; i++) {
    const stmt = statements[i];
    try {
      const [result, fields] = await connection.query(stmt);

      // Check for warnings
      const [warnings] = await connection.query('SHOW WARNINGS');
      if (warnings && warnings.length > 0) {
        outputProgress({
          status: 'warning',
          operation: 'SQL Warning',
          statement: i + 1,
          warnings: warnings
        });
      }
      const result = await client.query(stmt);

      // Verify if table was created (if this was a CREATE TABLE statement)
      if (stmt.trim().toLowerCase().startsWith('create table')) {
        const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1];
        const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1];
        if (tableName) {
          const [tableExists] = await connection.query(`
          const tableExists = await client.query(`
            SELECT COUNT(*) as count
            FROM information_schema.tables
            WHERE table_schema = DATABASE()
            AND table_name = ?
            WHERE table_schema = 'public'
            AND table_name = $1
          `, [tableName]);

          outputProgress({
            operation: 'Table Creation Verification',
            message: {
              table: tableName,
              exists: tableExists[0].count > 0
              exists: tableExists.rows[0].count > 0
            }
          });
        }
@@ -263,7 +301,7 @@ async function resetDatabase() {
          statement: i + 1,
          total: statements.length,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
          affectedRows: result.affectedRows
          rowCount: result.rowCount
        }
      });
    } catch (sqlError) {
@@ -271,8 +309,6 @@ async function resetDatabase() {
        status: 'error',
        operation: 'SQL Error',
        error: sqlError.message,
        sqlState: sqlError.sqlState,
        errno: sqlError.errno,
        statement: stmt,
        statementNumber: i + 1
      });
@@ -280,66 +316,12 @@ async function resetDatabase() {
    }
  }

  // List all tables in the database after schema execution
  outputProgress({
    operation: 'Debug database',
    message: {
      currentDatabase: (await connection.query('SELECT DATABASE() as db'))[0][0].db
    }
  });

  const [allTables] = await connection.query(`
    SELECT
      table_schema,
      table_name,
      engine,
      create_time,
      table_rows
  // Verify core tables were created
  const existingTables = (await client.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = DATABASE()
  `);

  if (allTables.length === 0) {
    outputProgress({
      operation: 'Warning',
      message: 'No tables found in database after schema execution'
    });
  } else {
    outputProgress({
      operation: 'Tables after schema execution',
      message: {
        count: allTables.length,
        tables: allTables.map(t => ({
          schema: t.table_schema,
          name: t.table_name,
          engine: t.engine,
          created: t.create_time,
          rows: t.table_rows
        }))
      }
    });
  }

  // Also check table status
  const [tableStatus] = await connection.query('SHOW TABLE STATUS');
  outputProgress({
    operation: 'Table Status',
    message: {
      tables: tableStatus.map(t => ({
        name: t.Name,
        engine: t.Engine,
        version: t.Version,
        rowFormat: t.Row_format,
        rows: t.Rows,
        createTime: t.Create_time,
        updateTime: t.Update_time
      }))
    }
  });

  // Verify core tables were created using SHOW TABLES
  const [showTables] = await connection.query('SHOW TABLES');
  const existingTables = showTables.map(t => Object.values(t)[0]);
    WHERE table_schema = 'public'
  `)).rows.map(t => t.table_name);

  outputProgress({
    operation: 'Core tables verification',
@@ -359,22 +341,12 @@ async function resetDatabase() {
    );
  }

  // Verify all core tables use InnoDB
  const [engineStatus] = await connection.query('SHOW TABLE STATUS WHERE Name IN (?)', [CORE_TABLES]);
  const nonInnoDBTables = engineStatus.filter(t => t.Engine !== 'InnoDB');

  if (nonInnoDBTables.length > 0) {
    throw new Error(
      `Tables using non-InnoDB engine: ${nonInnoDBTables.map(t => t.Name).join(', ')}`
    );
  }

  outputProgress({
    operation: 'Core tables created',
    message: `Successfully created tables: ${CORE_TABLES.join(', ')}`
  });

  // Read and execute config schema
  // Now read and execute config schema (since core tables exist)
  outputProgress({
    operation: 'Running config setup',
    message: 'Creating configuration tables...'
@@ -400,18 +372,7 @@ async function resetDatabase() {
  for (let i = 0; i < configStatements.length; i++) {
    const stmt = configStatements[i];
    try {
      const [result, fields] = await connection.query(stmt);

      // Check for warnings
      const [warnings] = await connection.query('SHOW WARNINGS');
      if (warnings && warnings.length > 0) {
        outputProgress({
          status: 'warning',
          operation: 'Config SQL Warning',
          statement: i + 1,
          warnings: warnings
        });
      }
      const result = await client.query(stmt);

      outputProgress({
        operation: 'Config SQL Progress',
@@ -419,7 +380,7 @@ async function resetDatabase() {
          statement: i + 1,
          total: configStatements.length,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
          affectedRows: result.affectedRows
          rowCount: result.rowCount
        }
      });
    } catch (sqlError) {
@@ -427,8 +388,6 @@ async function resetDatabase() {
        status: 'error',
        operation: 'Config SQL Error',
        error: sqlError.message,
        sqlState: sqlError.sqlState,
        errno: sqlError.errno,
        statement: stmt,
        statementNumber: i + 1
      });
@@ -436,33 +395,6 @@ async function resetDatabase() {
    }
  }

  // Verify config tables were created
  const [showConfigTables] = await connection.query('SHOW TABLES');
  const existingConfigTables = showConfigTables.map(t => Object.values(t)[0]);

  outputProgress({
    operation: 'Config tables verification',
    message: {
      found: existingConfigTables,
      expected: CONFIG_TABLES
    }
  });

  const missingConfigTables = CONFIG_TABLES.filter(
    t => !existingConfigTables.includes(t)
  );

  if (missingConfigTables.length > 0) {
    throw new Error(
      `Failed to create config tables: ${missingConfigTables.join(', ')}`
    );
  }

  outputProgress({
    operation: 'Config tables created',
    message: `Successfully created tables: ${CONFIG_TABLES.join(', ')}`
  });

  // Read and execute metrics schema (metrics tables)
  outputProgress({
    operation: 'Running metrics setup',
@@ -489,18 +421,7 @@ async function resetDatabase() {
  for (let i = 0; i < metricsStatements.length; i++) {
    const stmt = metricsStatements[i];
    try {
      const [result, fields] = await connection.query(stmt);

      // Check for warnings
      const [warnings] = await connection.query('SHOW WARNINGS');
      if (warnings && warnings.length > 0) {
        outputProgress({
          status: 'warning',
          operation: 'Metrics SQL Warning',
          statement: i + 1,
          warnings: warnings
        });
      }
      const result = await client.query(stmt);

      outputProgress({
        operation: 'Metrics SQL Progress',
@@ -508,7 +429,7 @@ async function resetDatabase() {
          statement: i + 1,
          total: metricsStatements.length,
          preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
          affectedRows: result.affectedRows
          rowCount: result.rowCount
        }
      });
    } catch (sqlError) {
@@ -516,8 +437,6 @@ async function resetDatabase() {
        status: 'error',
        operation: 'Metrics SQL Error',
        error: sqlError.message,
        sqlState: sqlError.sqlState,
        errno: sqlError.errno,
        statement: stmt,
        statementNumber: i + 1
      });
@@ -539,7 +458,7 @@ async function resetDatabase() {
    });
    process.exit(1);
  } finally {
    await connection.end();
    await client.end();
  }
}

@@ -1,4 +1,4 @@
const mysql = require('mysql2/promise');
const { Client } = require('pg');
const path = require('path');
const fs = require('fs');
require('dotenv').config({ path: path.resolve(__dirname, '../.env') });
@@ -8,7 +8,7 @@ const dbConfig = {
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  multipleStatements: true
  port: process.env.DB_PORT || 5432
};

function outputProgress(data) {
@@ -34,8 +34,8 @@ const METRICS_TABLES = [
  'sales_forecasts',
  'temp_purchase_metrics',
  'temp_sales_metrics',
  'vendor_metrics', //before vendor_details for foreign key
  'vendor_time_metrics', //before vendor_details for foreign key
  'vendor_metrics',
  'vendor_time_metrics',
  'vendor_details'
];

@@ -90,31 +90,31 @@ function splitSQLStatements(sql) {
}

async function resetMetrics() {
  let connection;
  let client;
  try {
    outputProgress({
      operation: 'Starting metrics reset',
      message: 'Connecting to database...'
    });

    connection = await mysql.createConnection(dbConfig);
    await connection.beginTransaction();
    client = new Client(dbConfig);
    await client.connect();

    // First verify current state
    const [initialTables] = await connection.query(`
      SELECT TABLE_NAME as name
      FROM information_schema.tables
      WHERE TABLE_SCHEMA = DATABASE()
      AND TABLE_NAME IN (?)
    const initialTables = await client.query(`
      SELECT tablename as name
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
    `, [METRICS_TABLES]);
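    // node-postgres maps a JavaScript array parameter to a PostgreSQL array,
    // so `tablename = ANY($1)` with [METRICS_TABLES] replaces MySQL's
    // `TABLE_NAME IN (?)` without building a placeholder list by hand.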

    outputProgress({
      operation: 'Initial state',
      message: `Found ${initialTables.length} existing metrics tables: ${initialTables.map(t => t.name).join(', ')}`
      message: `Found ${initialTables.rows.length} existing metrics tables: ${initialTables.rows.map(t => t.name).join(', ')}`
    });

    // Disable foreign key checks at the start
    await connection.query('SET FOREIGN_KEY_CHECKS = 0');
    await client.query('SET session_replication_role = \'replica\'');

    // Drop all metrics tables in reverse order to handle dependencies
    outputProgress({
@@ -124,17 +124,17 @@ async function resetMetrics() {

    for (const table of [...METRICS_TABLES].reverse()) {
      try {
        await connection.query(`DROP TABLE IF EXISTS ${table}`);
        await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);

        // Verify the table was actually dropped
        const [checkDrop] = await connection.query(`
        const checkDrop = await client.query(`
          SELECT COUNT(*) as count
          FROM information_schema.tables
          WHERE TABLE_SCHEMA = DATABASE()
          AND TABLE_NAME = ?
          FROM pg_tables
          WHERE schemaname = 'public'
          AND tablename = $1
        `, [table]);

        if (checkDrop[0].count > 0) {
        if (parseInt(checkDrop.rows[0].count) > 0) {
          throw new Error(`Failed to drop table ${table} - table still exists`);
        }

@@ -153,15 +153,15 @@ async function resetMetrics() {
    }

    // Verify all tables were dropped
    const [afterDrop] = await connection.query(`
      SELECT TABLE_NAME as name
      FROM information_schema.tables
      WHERE TABLE_SCHEMA = DATABASE()
      AND TABLE_NAME IN (?)
    const afterDrop = await client.query(`
      SELECT tablename as name
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
    `, [METRICS_TABLES]);

    if (afterDrop.length > 0) {
      throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.map(t => t.name).join(', ')}`);
    if (afterDrop.rows.length > 0) {
      throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.rows.map(t => t.name).join(', ')}`);
    }

    // Read metrics schema
@@ -187,39 +187,26 @@ async function resetMetrics() {
    for (let i = 0; i < statements.length; i++) {
      const stmt = statements[i];
      try {
        await connection.query(stmt);

        // Check for warnings
        const [warnings] = await connection.query('SHOW WARNINGS');
        if (warnings && warnings.length > 0) {
          outputProgress({
            status: 'warning',
            operation: 'SQL Warning',
            message: {
              statement: i + 1,
              warnings: warnings
            }
          });
        }
        const result = await client.query(stmt);

        // If this is a CREATE TABLE statement, verify the table was created
        if (stmt.trim().toLowerCase().startsWith('create table')) {
          const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1];
          const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(\w+)["]?/i)?.[1];
          if (tableName) {
            const [checkCreate] = await connection.query(`
              SELECT TABLE_NAME as name, CREATE_TIME as created
              FROM information_schema.tables
              WHERE TABLE_SCHEMA = DATABASE()
              AND TABLE_NAME = ?
            const checkCreate = await client.query(`
              SELECT tablename as name
              FROM pg_tables
              WHERE schemaname = 'public'
              AND tablename = $1
            `, [tableName]);

            if (checkCreate.length === 0) {
            if (checkCreate.rows.length === 0) {
              throw new Error(`Failed to create table ${tableName} - table does not exist after CREATE statement`);
            }

            outputProgress({
              operation: 'Table created',
              message: `Successfully created table: ${tableName} at ${checkCreate[0].created}`
              message: `Successfully created table: ${tableName}`
            });
          }
        }
@@ -229,7 +216,8 @@ async function resetMetrics() {
          message: {
            statement: i + 1,
            total: statements.length,
            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
            rowCount: result.rowCount
          }
        });
      } catch (sqlError) {
@@ -238,8 +226,6 @@ async function resetMetrics() {
          operation: 'SQL Error',
          message: {
            error: sqlError.message,
            sqlState: sqlError.sqlState,
            errno: sqlError.errno,
            statement: stmt,
            statementNumber: i + 1
          }
@@ -249,7 +235,7 @@ async function resetMetrics() {
    }

    // Re-enable foreign key checks after all tables are created
    await connection.query('SET FOREIGN_KEY_CHECKS = 1');
    await client.query('SET session_replication_role = \'origin\'');

    // Verify metrics tables were created
    outputProgress({
@@ -257,37 +243,36 @@ async function resetMetrics() {
      message: 'Checking all metrics tables were created...'
    });

    const [metricsTablesResult] = await connection.query(`
      SELECT
        TABLE_NAME as name,
        TABLE_ROWS as \`rows\`,
        CREATE_TIME as created
      FROM information_schema.tables
      WHERE TABLE_SCHEMA = DATABASE()
      AND TABLE_NAME IN (?)
    const metricsTablesResult = await client.query(`
      SELECT tablename as name
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
    `, [METRICS_TABLES]);

    outputProgress({
      operation: 'Tables found',
      message: `Found ${metricsTablesResult.length} tables: ${metricsTablesResult.map(t =>
        `${t.name} (created: ${t.created})`
      ).join(', ')}`
      message: `Found ${metricsTablesResult.rows.length} tables: ${metricsTablesResult.rows.map(t => t.name).join(', ')}`
    });

    const existingMetricsTables = metricsTablesResult.map(t => t.name);
    const existingMetricsTables = metricsTablesResult.rows.map(t => t.name);
    const missingMetricsTables = METRICS_TABLES.filter(t => !existingMetricsTables.includes(t));

    if (missingMetricsTables.length > 0) {
      // Do one final check of the actual tables
      const [finalCheck] = await connection.query('SHOW TABLES');
      const finalCheck = await client.query(`
        SELECT tablename as name
        FROM pg_tables
        WHERE schemaname = 'public'
      `);
      outputProgress({
        operation: 'Final table check',
        message: `All database tables: ${finalCheck.map(t => Object.values(t)[0]).join(', ')}`
        message: `All database tables: ${finalCheck.rows.map(t => t.name).join(', ')}`
      });
      throw new Error(`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`);
    }

    await connection.commit();
    await client.query('COMMIT');

    outputProgress({
      status: 'complete',
@@ -302,17 +287,17 @@ async function resetMetrics() {
      stack: error.stack
    });

    if (connection) {
      await connection.rollback();
    if (client) {
      await client.query('ROLLBACK');
      // Make sure to re-enable foreign key checks even if there's an error
      await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {});
      await client.query('SET session_replication_role = \'origin\'').catch(() => {});
    }
    throw error;
  } finally {
    if (connection) {
    if (client) {
      // One final attempt to ensure foreign key checks are enabled
      await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {});
      await connection.end();
      await client.query('SET session_replication_role = \'origin\'').catch(() => {});
      await client.end();
    }
  }
}

@@ -6,24 +6,24 @@ router.get('/stats', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    const [results] = await pool.query(`
    const { rows: [results] } = await pool.query(`
      SELECT
        COALESCE(
          ROUND(
            (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
            NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
            NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1
          ),
          0
        ) as profitMargin,
        COALESCE(
          ROUND(
            (AVG(p.price / NULLIF(p.cost_price, 0) - 1) * 100), 1
            (AVG(p.price / NULLIF(p.cost_price, 0) - 1) * 100)::numeric, 1
          ),
          0
        ) as averageMarkup,
        COALESCE(
          ROUND(
            SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0), 2
            (SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0))::numeric, 2
          ),
          0
        ) as stockTurnoverRate,
@@ -31,23 +31,23 @@ router.get('/stats', async (req, res) => {
        COALESCE(COUNT(DISTINCT p.categories), 0) as categoryCount,
        COALESCE(
          ROUND(
            AVG(o.price * o.quantity), 2
            AVG(o.price * o.quantity)::numeric, 2
          ),
          0
        ) as averageOrderValue
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
    `);

    // Ensure all values are numbers
    const stats = {
      profitMargin: Number(results[0].profitMargin) || 0,
      averageMarkup: Number(results[0].averageMarkup) || 0,
      stockTurnoverRate: Number(results[0].stockTurnoverRate) || 0,
      vendorCount: Number(results[0].vendorCount) || 0,
      categoryCount: Number(results[0].categoryCount) || 0,
      averageOrderValue: Number(results[0].averageOrderValue) || 0
      profitMargin: Number(results.profitmargin) || 0,
      averageMarkup: Number(results.averagemarkup) || 0,
      stockTurnoverRate: Number(results.stockturnoverrate) || 0,
      vendorCount: Number(results.vendorcount) || 0,
      categoryCount: Number(results.categorycount) || 0,
      averageOrderValue: Number(results.averageordervalue) || 0
    };
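    // PostgreSQL folds unquoted identifiers to lowercase, so the aliases
    // selected above come back as results.profitmargin, results.averagemarkup,
    // and so on. Quoting the aliases in SQL ("profitMargin") would preserve
    // the camelCase instead.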

    res.json(stats);
@@ -63,13 +63,13 @@ router.get('/profit', async (req, res) => {
    const pool = req.app.locals.pool;

    // Get profit margins by category with full path
    const [byCategory] = await pool.query(`
    const { rows: byCategory } = await pool.query(`
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          CAST(c.name AS CHAR(1000)) as path
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL

@@ -79,7 +79,7 @@ router.get('/profit', async (req, res) => {
          c.cat_id,
          c.name,
          c.parent_id,
          CONCAT(cp.path, ' > ', c.name)
          cp.path || ' > ' || c.name
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      )
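      -- Illustrative result of the recursive CTE: each category row carries its
      -- full breadcrumb, e.g. path = 'Electronics > Audio > Headphones'
      -- (the category names here are hypothetical).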

@@ -88,53 +88,46 @@ router.get('/profit', async (req, res) => {
        cp.path as categoryPath,
        ROUND(
          (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
          NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
          NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1
        ) as profitMargin,
        CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
        CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost
        ROUND(SUM(o.price * o.quantity)::numeric, 3) as revenue,
        ROUND(SUM(p.cost_price * o.quantity)::numeric, 3) as cost
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY c.name, cp.path
      ORDER BY profitMargin DESC
      LIMIT 10
    `);

    // Get profit margin trend over time
    const [overTime] = await pool.query(`
    const { rows: overTime } = await pool.query(`
      SELECT
        formatted_date as date,
        to_char(o.date, 'YYYY-MM-DD') as date,
        ROUND(
          (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
          NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
          NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1
        ) as profitMargin,
        CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
        CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost
        ROUND(SUM(o.price * o.quantity)::numeric, 3) as revenue,
        ROUND(SUM(p.cost_price * o.quantity)::numeric, 3) as cost
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      CROSS JOIN (
        SELECT DATE_FORMAT(o.date, '%Y-%m-%d') as formatted_date
        FROM orders o
        WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
        GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d')
      ) dates
      WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      AND DATE_FORMAT(o.date, '%Y-%m-%d') = dates.formatted_date
      GROUP BY formatted_date
      ORDER BY formatted_date
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY to_char(o.date, 'YYYY-MM-DD')
      ORDER BY date
    `);

    // Get top performing products with category paths
    const [topProducts] = await pool.query(`
    const { rows: topProducts } = await pool.query(`
      WITH RECURSIVE category_path AS (
        SELECT
          c.cat_id,
          c.name,
          c.parent_id,
          CAST(c.name AS CHAR(1000)) as path
          c.name::text as path
        FROM categories c
        WHERE c.parent_id IS NULL

@@ -144,7 +137,7 @@ router.get('/profit', async (req, res) => {
          c.cat_id,
          c.name,
          c.parent_id,
          CONCAT(cp.path, ' > ', c.name)
          cp.path || ' > ' || c.name
        FROM categories c
        JOIN category_path cp ON c.parent_id = cp.cat_id
      )
@@ -154,18 +147,18 @@ router.get('/profit', async (req, res) => {
        cp.path as categoryPath,
        ROUND(
          (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
          NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
          NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1
        ) as profitMargin,
        CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
        CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost
        ROUND(SUM(o.price * o.quantity)::numeric, 3) as revenue,
        ROUND(SUM(p.cost_price * o.quantity)::numeric, 3) as cost
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      JOIN product_categories pc ON p.pid = pc.pid
      JOIN categories c ON pc.cat_id = c.cat_id
      JOIN category_path cp ON c.cat_id = cp.cat_id
      WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      WHERE o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY p.pid, p.title, c.name, cp.path
      HAVING revenue > 0
      HAVING SUM(o.price * o.quantity) > 0
      ORDER BY profitMargin DESC
      LIMIT 10
    `);
@@ -185,7 +178,7 @@ router.get('/vendors', async (req, res) => {
    console.log('Fetching vendor performance data...');

    // First check if we have any vendors with sales
    const [checkData] = await pool.query(`
    const { rows: [checkData] } = await pool.query(`
      SELECT COUNT(DISTINCT p.vendor) as vendor_count,
        COUNT(DISTINCT o.order_number) as order_count
      FROM products p
@@ -193,39 +186,39 @@ router.get('/vendors', async (req, res) => {
      WHERE p.vendor IS NOT NULL
    `);

    console.log('Vendor data check:', checkData[0]);
    console.log('Vendor data check:', checkData);

    // Get vendor performance metrics
    const [performance] = await pool.query(`
    const { rows: performance } = await pool.query(`
      WITH monthly_sales AS (
        SELECT
          p.vendor,
          CAST(SUM(CASE
            WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
          ROUND(SUM(CASE
            WHEN o.date >= CURRENT_DATE - INTERVAL '30 days'
            THEN o.price * o.quantity
            ELSE 0
          END) AS DECIMAL(15,3)) as current_month,
          CAST(SUM(CASE
            WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
            AND o.date < DATE_SUB(CURDATE(), INTERVAL 30 DAY)
          END)::numeric, 3) as current_month,
          ROUND(SUM(CASE
            WHEN o.date >= CURRENT_DATE - INTERVAL '60 days'
            AND o.date < CURRENT_DATE - INTERVAL '30 days'
            THEN o.price * o.quantity
            ELSE 0
          END) AS DECIMAL(15,3)) as previous_month
          END)::numeric, 3) as previous_month
        FROM products p
        LEFT JOIN orders o ON p.pid = o.pid
        WHERE p.vendor IS NOT NULL
        AND o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
        AND o.date >= CURRENT_DATE - INTERVAL '60 days'
        GROUP BY p.vendor
      )
      SELECT
        p.vendor,
        CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as salesVolume,
        ROUND(SUM(o.price * o.quantity)::numeric, 3) as salesVolume,
        COALESCE(ROUND(
          (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
          NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
          NULLIF(SUM(o.price * o.quantity), 0) * 100)::numeric, 1
        ), 0) as profitMargin,
        COALESCE(ROUND(
          SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0), 1
          (SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0))::numeric, 1
        ), 0) as stockTurnover,
        COUNT(DISTINCT p.pid) as productCount,
        ROUND(
@@ -236,7 +229,7 @@ router.get('/vendors', async (req, res) => {
      LEFT JOIN orders o ON p.pid = o.pid
      LEFT JOIN monthly_sales ms ON p.vendor = ms.vendor
      WHERE p.vendor IS NOT NULL
      AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      AND o.date >= CURRENT_DATE - INTERVAL '30 days'
      GROUP BY p.vendor, ms.current_month, ms.previous_month
      ORDER BY salesVolume DESC
      LIMIT 10
@@ -244,45 +237,7 @@ router.get('/vendors', async (req, res) => {

    console.log('Performance data:', performance);

    // Get vendor comparison data
    const [comparison] = await pool.query(`
      SELECT
        p.vendor,
        CAST(COALESCE(ROUND(SUM(o.price * o.quantity) / NULLIF(COUNT(DISTINCT p.pid), 0), 2), 0) AS DECIMAL(15,3)) as salesPerProduct,
        COALESCE(ROUND(AVG((o.price - p.cost_price) / NULLIF(o.price, 0) * 100), 1), 0) as averageMargin,
        COUNT(DISTINCT p.pid) as size
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      WHERE p.vendor IS NOT NULL
      GROUP BY p.vendor
      ORDER BY salesPerProduct DESC
      LIMIT 20
    `);

    console.log('Comparison data:', comparison);

    // Get vendor sales trends
    const [trends] = await pool.query(`
      SELECT
        p.vendor,
        DATE_FORMAT(o.date, '%b %Y') as month,
        CAST(COALESCE(SUM(o.price * o.quantity), 0) AS DECIMAL(15,3)) as sales
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      WHERE p.vendor IS NOT NULL
      AND o.date >= DATE_SUB(CURDATE(), INTERVAL 6 MONTH)
      GROUP BY
        p.vendor,
        DATE_FORMAT(o.date, '%b %Y'),
        DATE_FORMAT(o.date, '%Y-%m')
      ORDER BY
        p.vendor,
        DATE_FORMAT(o.date, '%Y-%m')
    `);

    console.log('Trends data:', trends);

    res.json({ performance, comparison, trends });
    res.json({ performance });
  } catch (error) {
    console.error('Error fetching vendor performance:', error);
    res.status(500).json({ error: 'Failed to fetch vendor performance' });

@@ -6,7 +6,7 @@ router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Get all categories with metrics and hierarchy info
    const [categories] = await pool.query(`
    const { rows: categories } = await pool.query(`
      SELECT
        c.cat_id,
        c.name,
@@ -18,7 +18,7 @@ router.get('/', async (req, res) => {
        p.type as parent_type,
        COALESCE(cm.product_count, 0) as product_count,
        COALESCE(cm.active_products, 0) as active_products,
        CAST(COALESCE(cm.total_value, 0) AS DECIMAL(15,3)) as total_value,
        ROUND(COALESCE(cm.total_value, 0)::numeric, 3) as total_value,
        COALESCE(cm.avg_margin, 0) as avg_margin,
        COALESCE(cm.turnover_rate, 0) as turnover_rate,
        COALESCE(cm.growth_rate, 0) as growth_rate
@@ -39,22 +39,22 @@ router.get('/', async (req, res) => {
    `);

    // Get overall stats
    const [stats] = await pool.query(`
    const { rows: [stats] } = await pool.query(`
      SELECT
        COUNT(DISTINCT c.cat_id) as totalCategories,
        COUNT(DISTINCT CASE WHEN c.status = 'active' THEN c.cat_id END) as activeCategories,
        CAST(COALESCE(SUM(cm.total_value), 0) AS DECIMAL(15,3)) as totalValue,
        COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0)), 1), 0) as avgMargin,
        COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0)), 1), 0) as avgGrowth
        ROUND(COALESCE(SUM(cm.total_value), 0)::numeric, 3) as totalValue,
        COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0))::numeric, 1), 0) as avgMargin,
        COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0))::numeric, 1), 0) as avgGrowth
      FROM categories c
      LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
    `);

    // Get type counts for filtering
    const [typeCounts] = await pool.query(`
    const { rows: typeCounts } = await pool.query(`
      SELECT
        type,
        COUNT(*) as count
        COUNT(*)::integer as count
      FROM categories
      GROUP BY type
      ORDER BY type
@@ -81,14 +81,14 @@ router.get('/', async (req, res) => {
      })),
      typeCounts: typeCounts.map(tc => ({
        type: tc.type,
        count: parseInt(tc.count)
        count: tc.count // Already cast to integer in the query
      })),
      stats: {
        totalCategories: parseInt(stats[0].totalCategories),
        activeCategories: parseInt(stats[0].activeCategories),
        totalValue: parseFloat(stats[0].totalValue),
        avgMargin: parseFloat(stats[0].avgMargin),
        avgGrowth: parseFloat(stats[0].avgGrowth)
        totalCategories: parseInt(stats.totalcategories),
        activeCategories: parseInt(stats.activecategories),
        totalValue: parseFloat(stats.totalvalue),
        avgMargin: parseFloat(stats.avgmargin),
        avgGrowth: parseFloat(stats.avggrowth)
      }
    });
  } catch (error) {

@@ -13,22 +13,22 @@ router.get('/', async (req, res) => {
  try {
    console.log('[Config Route] Fetching configuration values...');

    const [stockThresholds] = await pool.query('SELECT * FROM stock_thresholds WHERE id = 1');
    const { rows: stockThresholds } = await pool.query('SELECT * FROM stock_thresholds WHERE id = 1');
    console.log('[Config Route] Stock thresholds:', stockThresholds);

    const [leadTimeThresholds] = await pool.query('SELECT * FROM lead_time_thresholds WHERE id = 1');
    const { rows: leadTimeThresholds } = await pool.query('SELECT * FROM lead_time_thresholds WHERE id = 1');
    console.log('[Config Route] Lead time thresholds:', leadTimeThresholds);

    const [salesVelocityConfig] = await pool.query('SELECT * FROM sales_velocity_config WHERE id = 1');
    const { rows: salesVelocityConfig } = await pool.query('SELECT * FROM sales_velocity_config WHERE id = 1');
    console.log('[Config Route] Sales velocity config:', salesVelocityConfig);

    const [abcConfig] = await pool.query('SELECT * FROM abc_classification_config WHERE id = 1');
    const { rows: abcConfig } = await pool.query('SELECT * FROM abc_classification_config WHERE id = 1');
    console.log('[Config Route] ABC config:', abcConfig);

    const [safetyStockConfig] = await pool.query('SELECT * FROM safety_stock_config WHERE id = 1');
    const { rows: safetyStockConfig } = await pool.query('SELECT * FROM safety_stock_config WHERE id = 1');
    console.log('[Config Route] Safety stock config:', safetyStockConfig);

    const [turnoverConfig] = await pool.query('SELECT * FROM turnover_config WHERE id = 1');
    const { rows: turnoverConfig } = await pool.query('SELECT * FROM turnover_config WHERE id = 1');
    console.log('[Config Route] Turnover config:', turnoverConfig);

    const response = {
@@ -53,14 +53,14 @@ router.put('/stock-thresholds/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { critical_days, reorder_days, overstock_days, low_stock_threshold, min_reorder_quantity } = req.body;
    const [result] = await pool.query(
    const { rows } = await pool.query(
      `UPDATE stock_thresholds
       SET critical_days = ?,
           reorder_days = ?,
           overstock_days = ?,
           low_stock_threshold = ?,
           min_reorder_quantity = ?
       WHERE id = ?`,
       SET critical_days = $1,
           reorder_days = $2,
           overstock_days = $3,
           low_stock_threshold = $4,
           min_reorder_quantity = $5
       WHERE id = $6`,
      [critical_days, reorder_days, overstock_days, low_stock_threshold, min_reorder_quantity, req.params.id]
    );
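    // The same conversion recurs across these config routes: mysql2's positional
    // `?` placeholders become pg's numbered `$1..$n` placeholders, and the
    // parameter array order must match the numbering exactly.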
    res.json({ success: true });
@@ -75,12 +75,12 @@ router.put('/lead-time-thresholds/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { target_days, warning_days, critical_days } = req.body;
    const [result] = await pool.query(
    const { rows } = await pool.query(
      `UPDATE lead_time_thresholds
       SET target_days = ?,
           warning_days = ?,
           critical_days = ?
       WHERE id = ?`,
       SET target_days = $1,
           warning_days = $2,
           critical_days = $3
       WHERE id = $4`,
      [target_days, warning_days, critical_days, req.params.id]
    );
    res.json({ success: true });
@@ -95,12 +95,12 @@ router.put('/sales-velocity/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { daily_window_days, weekly_window_days, monthly_window_days } = req.body;
    const [result] = await pool.query(
    const { rows } = await pool.query(
      `UPDATE sales_velocity_config
       SET daily_window_days = ?,
           weekly_window_days = ?,
           monthly_window_days = ?
       WHERE id = ?`,
       SET daily_window_days = $1,
           weekly_window_days = $2,
           monthly_window_days = $3
       WHERE id = $4`,
      [daily_window_days, weekly_window_days, monthly_window_days, req.params.id]
    );
    res.json({ success: true });
@@ -115,12 +115,12 @@ router.put('/abc-classification/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { a_threshold, b_threshold, classification_period_days } = req.body;
    const [result] = await pool.query(
    const { rows } = await pool.query(
      `UPDATE abc_classification_config
       SET a_threshold = ?,
           b_threshold = ?,
           classification_period_days = ?
       WHERE id = ?`,
       SET a_threshold = $1,
           b_threshold = $2,
           classification_period_days = $3
       WHERE id = $4`,
      [a_threshold, b_threshold, classification_period_days, req.params.id]
    );
    res.json({ success: true });
@@ -135,11 +135,11 @@ router.put('/safety-stock/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { coverage_days, service_level } = req.body;
    const [result] = await pool.query(
    const { rows } = await pool.query(
      `UPDATE safety_stock_config
       SET coverage_days = ?,
           service_level = ?
       WHERE id = ?`,
       SET coverage_days = $1,
           service_level = $2
       WHERE id = $3`,
      [coverage_days, service_level, req.params.id]
    );
    res.json({ success: true });
@@ -154,11 +154,11 @@ router.put('/turnover/:id', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { calculation_period_days, target_rate } = req.body;
    const [result] = await pool.query(
    const { rows } = await pool.query(
      `UPDATE turnover_config
       SET calculation_period_days = ?,
           target_rate = ?
       WHERE id = ?`,
       SET calculation_period_days = $1,
           target_rate = $2
       WHERE id = $3`,
      [calculation_period_days, target_rate, req.params.id]
    );
    res.json({ success: true });

@@ -750,8 +750,16 @@ router.post('/full-reset', async (req, res) => {
router.get('/history/import', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const [rows] = await pool.query(`
      SELECT * FROM import_history
    const { rows } = await pool.query(`
      SELECT
        id,
        start_time,
        end_time,
        status,
        error_message,
        rows_processed::integer,
        files_processed::integer
      FROM import_history
      ORDER BY start_time DESC
      LIMIT 20
    `);
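    // The ::integer casts matter with node-postgres: count-like columns would
    // come back as strings if stored as bigint/numeric, so casting in SQL keeps
    // the JSON response numeric without per-row parseInt calls (this assumes
    // those columns are in fact bigint/numeric in the schema).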
|
||||
@@ -766,8 +774,16 @@ router.get('/history/import', async (req, res) => {
|
||||
router.get('/history/calculate', async (req, res) => {
|
||||
try {
|
||||
const pool = req.app.locals.pool;
|
||||
const [rows] = await pool.query(`
|
||||
SELECT * FROM calculate_history
|
||||
const { rows } = await pool.query(`
|
||||
SELECT
|
||||
id,
|
||||
start_time,
|
||||
end_time,
|
||||
status,
|
||||
error_message,
|
||||
modules_processed::integer,
|
||||
total_modules::integer
|
||||
FROM calculate_history
|
||||
ORDER BY start_time DESC
|
||||
LIMIT 20
|
||||
`);
|
||||
@@ -782,8 +798,10 @@ router.get('/history/calculate', async (req, res) => {
|
||||
router.get('/status/modules', async (req, res) => {
|
||||
try {
|
||||
const pool = req.app.locals.pool;
|
||||
const [rows] = await pool.query(`
|
||||
SELECT module_name, last_calculation_timestamp
|
||||
const { rows } = await pool.query(`
|
||||
SELECT
|
||||
module_name,
|
||||
last_calculation_timestamp::timestamp
|
||||
FROM calculate_status
|
||||
ORDER BY module_name
|
||||
`);
|
||||
@@ -798,8 +816,10 @@ router.get('/status/modules', async (req, res) => {
|
||||
router.get('/status/tables', async (req, res) => {
|
||||
try {
|
||||
const pool = req.app.locals.pool;
|
||||
const [rows] = await pool.query(`
|
||||
SELECT table_name, last_sync_timestamp
|
||||
const { rows } = await pool.query(`
|
||||
SELECT
|
||||
table_name,
|
||||
last_sync_timestamp::timestamp
|
||||
FROM sync_status
|
||||
ORDER BY table_name
|
||||
`);
|
||||
|
||||
@@ -19,16 +19,15 @@ async function executeQuery(sql, params = []) {
 router.get('/stock/metrics', async (req, res) => {
   try {
     // Get stock metrics
-    const [rows] = await executeQuery(`
+    const { rows: [stockMetrics] } = await executeQuery(`
       SELECT
-        COALESCE(COUNT(*), 0) as total_products,
-        COALESCE(COUNT(CASE WHEN stock_quantity > 0 THEN 1 END), 0) as products_in_stock,
-        COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity END), 0) as total_units,
-        COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * cost_price END), 0) as total_cost,
-        COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * price END), 0) as total_retail
+        COALESCE(COUNT(*), 0)::integer as total_products,
+        COALESCE(COUNT(CASE WHEN stock_quantity > 0 THEN 1 END), 0)::integer as products_in_stock,
+        COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity END), 0)::integer as total_units,
+        ROUND(COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * cost_price END), 0)::numeric, 3) as total_cost,
+        ROUND(COALESCE(SUM(CASE WHEN stock_quantity > 0 THEN stock_quantity * price END), 0)::numeric, 3) as total_retail
       FROM products
     `);
-    const stockMetrics = rows[0];
 
     console.log('Raw stockMetrics from database:', stockMetrics);
     console.log('stockMetrics.total_products:', stockMetrics.total_products);
@@ -38,26 +37,26 @@ router.get('/stock/metrics', async (req, res) => {
     console.log('stockMetrics.total_retail:', stockMetrics.total_retail);
 
     // Get brand stock values with Other category
-    const [brandValues] = await executeQuery(`
+    const { rows: brandValues } = await executeQuery(`
       WITH brand_totals AS (
         SELECT
           COALESCE(brand, 'Unbranded') as brand,
-          COUNT(DISTINCT pid) as variant_count,
-          COALESCE(SUM(stock_quantity), 0) as stock_units,
-          CAST(COALESCE(SUM(stock_quantity * cost_price), 0) AS DECIMAL(15,3)) as stock_cost,
-          CAST(COALESCE(SUM(stock_quantity * price), 0) AS DECIMAL(15,3)) as stock_retail
+          COUNT(DISTINCT pid)::integer as variant_count,
+          COALESCE(SUM(stock_quantity), 0)::integer as stock_units,
+          ROUND(COALESCE(SUM(stock_quantity * cost_price), 0)::numeric, 3) as stock_cost,
+          ROUND(COALESCE(SUM(stock_quantity * price), 0)::numeric, 3) as stock_retail
         FROM products
         WHERE stock_quantity > 0
         GROUP BY COALESCE(brand, 'Unbranded')
-        HAVING stock_cost > 0
+        HAVING ROUND(COALESCE(SUM(stock_quantity * cost_price), 0)::numeric, 3) > 0
       ),
       other_brands AS (
         SELECT
           'Other' as brand,
-          SUM(variant_count) as variant_count,
-          SUM(stock_units) as stock_units,
-          CAST(SUM(stock_cost) AS DECIMAL(15,3)) as stock_cost,
-          CAST(SUM(stock_retail) AS DECIMAL(15,3)) as stock_retail
+          SUM(variant_count)::integer as variant_count,
+          SUM(stock_units)::integer as stock_units,
+          ROUND(SUM(stock_cost)::numeric, 3) as stock_cost,
+          ROUND(SUM(stock_retail)::numeric, 3) as stock_retail
         FROM brand_totals
         WHERE stock_cost <= 5000
       ),
@@ -101,51 +100,50 @@ router.get('/stock/metrics', async (req, res) => {
 // Returns purchase order metrics by vendor
 router.get('/purchase/metrics', async (req, res) => {
   try {
-    const [rows] = await executeQuery(`
+    const { rows: [poMetrics] } = await executeQuery(`
       SELECT
         COALESCE(COUNT(DISTINCT CASE
-          WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
+          WHEN po.receiving_status < $1
          THEN po.po_id
-        END), 0) as active_pos,
+        END), 0)::integer as active_pos,
         COALESCE(COUNT(DISTINCT CASE
-          WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
-          AND po.expected_date < CURDATE()
+          WHEN po.receiving_status < $1
+          AND po.expected_date < CURRENT_DATE
          THEN po.po_id
-        END), 0) as overdue_pos,
+        END), 0)::integer as overdue_pos,
         COALESCE(SUM(CASE
-          WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
+          WHEN po.receiving_status < $1
          THEN po.ordered
          ELSE 0
-        END), 0) as total_units,
-        CAST(COALESCE(SUM(CASE
-          WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
+        END), 0)::integer as total_units,
+        ROUND(COALESCE(SUM(CASE
+          WHEN po.receiving_status < $1
          THEN po.ordered * po.cost_price
          ELSE 0
-        END), 0) AS DECIMAL(15,3)) as total_cost,
-        CAST(COALESCE(SUM(CASE
-          WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
+        END), 0)::numeric, 3) as total_cost,
+        ROUND(COALESCE(SUM(CASE
+          WHEN po.receiving_status < $1
          THEN po.ordered * p.price
          ELSE 0
-        END), 0) AS DECIMAL(15,3)) as total_retail
+        END), 0)::numeric, 3) as total_retail
       FROM purchase_orders po
       JOIN products p ON po.pid = p.pid
-    `);
-    const poMetrics = rows[0];
+    `, [ReceivingStatus.PartialReceived]);
 
-    const [vendorOrders] = await executeQuery(`
+    const { rows: vendorOrders } = await executeQuery(`
       SELECT
         po.vendor,
-        COUNT(DISTINCT po.po_id) as orders,
-        COALESCE(SUM(po.ordered), 0) as units,
-        CAST(COALESCE(SUM(po.ordered * po.cost_price), 0) AS DECIMAL(15,3)) as cost,
-        CAST(COALESCE(SUM(po.ordered * p.price), 0) AS DECIMAL(15,3)) as retail
+        COUNT(DISTINCT po.po_id)::integer as orders,
+        COALESCE(SUM(po.ordered), 0)::integer as units,
+        ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) as cost,
+        ROUND(COALESCE(SUM(po.ordered * p.price), 0)::numeric, 3) as retail
       FROM purchase_orders po
       JOIN products p ON po.pid = p.pid
-      WHERE po.receiving_status < ${ReceivingStatus.PartialReceived}
+      WHERE po.receiving_status < $1
       GROUP BY po.vendor
-      HAVING cost > 0
+      HAVING ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) > 0
       ORDER BY cost DESC
-    `);
+    `, [ReceivingStatus.PartialReceived]);
 
     // Format response to match PurchaseMetricsData interface
     const response = {
@@ -175,21 +173,21 @@ router.get('/purchase/metrics', async (req, res) => {
 router.get('/replenishment/metrics', async (req, res) => {
   try {
     // Get summary metrics
-    const [metrics] = await executeQuery(`
+    const { rows: [metrics] } = await executeQuery(`
       SELECT
-        COUNT(DISTINCT p.pid) as products_to_replenish,
+        COUNT(DISTINCT p.pid)::integer as products_to_replenish,
         COALESCE(SUM(CASE
           WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty
           ELSE pm.reorder_qty
-        END), 0) as total_units_needed,
-        CAST(COALESCE(SUM(CASE
+        END), 0)::integer as total_units_needed,
+        ROUND(COALESCE(SUM(CASE
          WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price
          ELSE pm.reorder_qty * p.cost_price
-        END), 0) AS DECIMAL(15,3)) as total_cost,
-        CAST(COALESCE(SUM(CASE
+        END), 0)::numeric, 3) as total_cost,
+        ROUND(COALESCE(SUM(CASE
          WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price
          ELSE pm.reorder_qty * p.price
-        END), 0) AS DECIMAL(15,3)) as total_retail
+        END), 0)::numeric, 3) as total_retail
       FROM products p
       JOIN product_metrics pm ON p.pid = pm.pid
       WHERE p.replenishable = true
@@ -199,23 +197,23 @@ router.get('/replenishment/metrics', async (req, res) => {
     `);
 
     // Get top variants to replenish
-    const [variants] = await executeQuery(`
+    const { rows: variants } = await executeQuery(`
       SELECT
         p.pid,
         p.title,
-        p.stock_quantity as current_stock,
+        p.stock_quantity::integer as current_stock,
         CASE
           WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty
           ELSE pm.reorder_qty
-        END as replenish_qty,
-        CAST(CASE
+        END::integer as replenish_qty,
+        ROUND(CASE
          WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price
          ELSE pm.reorder_qty * p.cost_price
-        END AS DECIMAL(15,3)) as replenish_cost,
-        CAST(CASE
+        END::numeric, 3) as replenish_cost,
+        ROUND(CASE
          WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price
          ELSE pm.reorder_qty * p.price
-        END AS DECIMAL(15,3)) as replenish_retail,
+        END::numeric, 3) as replenish_retail,
         pm.stock_status
       FROM products p
       JOIN product_metrics pm ON p.pid = pm.pid
@@ -234,10 +232,10 @@ router.get('/replenishment/metrics', async (req, res) => {
 
     // Format response
     const response = {
-      productsToReplenish: parseInt(metrics[0].products_to_replenish) || 0,
-      unitsToReplenish: parseInt(metrics[0].total_units_needed) || 0,
-      replenishmentCost: parseFloat(metrics[0].total_cost) || 0,
-      replenishmentRetail: parseFloat(metrics[0].total_retail) || 0,
+      productsToReplenish: parseInt(metrics.products_to_replenish) || 0,
+      unitsToReplenish: parseInt(metrics.total_units_needed) || 0,
+      replenishmentCost: parseFloat(metrics.total_cost) || 0,
+      replenishmentRetail: parseFloat(metrics.total_retail) || 0,
       topVariants: variants.map(v => ({
         id: v.pid,
         title: v.title,

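Migration note: two Postgres behaviors explain the mechanical edits in this metrics file. First, `round(value, scale)` with a scale argument is only defined for `numeric`, so each `CAST(... AS DECIMAL(15,3))` becomes `ROUND(expr::numeric, 3)` — the double-precision aggregate has to be cast before a scale can be applied. Second, unlike MySQL, Postgres does not let a HAVING clause reference a SELECT alias, which is why `HAVING stock_cost > 0` is expanded to repeat the full aggregate. A compact illustration (table name illustrative):

// round(double precision, int) does not exist in Postgres; cast first,
// and repeat the aggregate in HAVING instead of using its alias:
const { rows } = await pool.query(`
  SELECT brand, ROUND(SUM(stock_quantity * cost_price)::numeric, 3) AS stock_cost
  FROM products
  GROUP BY brand
  HAVING SUM(stock_quantity * cost_price) > 0
`);
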
@@ -5,26 +5,28 @@ const router = express.Router();
 router.get('/trends', async (req, res) => {
   const pool = req.app.locals.pool;
   try {
-    const [rows] = await pool.query(`
+    const { rows } = await pool.query(`
       WITH MonthlyMetrics AS (
         SELECT
-          DATE(CONCAT(pta.year, '-', LPAD(pta.month, 2, '0'), '-01')) as date,
-          CAST(COALESCE(SUM(pta.total_revenue), 0) AS DECIMAL(15,3)) as revenue,
-          CAST(COALESCE(SUM(pta.total_cost), 0) AS DECIMAL(15,3)) as cost,
-          CAST(COALESCE(SUM(pm.inventory_value), 0) AS DECIMAL(15,3)) as inventory_value,
+          make_date(pta.year, pta.month, 1) as date,
+          ROUND(COALESCE(SUM(pta.total_revenue), 0)::numeric, 3) as revenue,
+          ROUND(COALESCE(SUM(pta.total_cost), 0)::numeric, 3) as cost,
+          ROUND(COALESCE(SUM(pm.inventory_value), 0)::numeric, 3) as inventory_value,
           CASE
             WHEN SUM(pm.inventory_value) > 0
-            THEN CAST((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value)) * 100 AS DECIMAL(15,3))
+            THEN ROUND((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value) * 100)::numeric, 3)
             ELSE 0
           END as gmroi
         FROM product_time_aggregates pta
         JOIN product_metrics pm ON pta.pid = pm.pid
-        WHERE (pta.year * 100 + pta.month) >= DATE_FORMAT(DATE_SUB(CURDATE(), INTERVAL 12 MONTH), '%Y%m')
+        WHERE (pta.year * 100 + pta.month) >=
+          EXTRACT(YEAR FROM CURRENT_DATE - INTERVAL '12 months')::integer * 100 +
+          EXTRACT(MONTH FROM CURRENT_DATE - INTERVAL '12 months')::integer
         GROUP BY pta.year, pta.month
         ORDER BY date ASC
       )
       SELECT
-        DATE_FORMAT(date, '%b %y') as date,
+        to_char(date, 'Mon YY') as date,
         revenue,
         inventory_value,
         gmroi

@@ -20,39 +20,46 @@ router.get('/', async (req, res) => {
     // Build the WHERE clause
     const conditions = ['o1.canceled = false'];
     const params = [];
+    let paramCounter = 1;
 
     if (search) {
-      conditions.push('(o1.order_number LIKE ? OR o1.customer LIKE ?)');
-      params.push(`%${search}%`, `%${search}%`);
+      conditions.push(`(o1.order_number ILIKE $${paramCounter} OR o1.customer ILIKE $${paramCounter})`);
+      params.push(`%${search}%`);
+      paramCounter++;
     }
 
     if (status !== 'all') {
-      conditions.push('o1.status = ?');
+      conditions.push(`o1.status = $${paramCounter}`);
       params.push(status);
+      paramCounter++;
     }
 
     if (fromDate) {
-      conditions.push('DATE(o1.date) >= DATE(?)');
+      conditions.push(`DATE(o1.date) >= DATE($${paramCounter})`);
       params.push(fromDate.toISOString());
+      paramCounter++;
     }
 
     if (toDate) {
-      conditions.push('DATE(o1.date) <= DATE(?)');
+      conditions.push(`DATE(o1.date) <= DATE($${paramCounter})`);
       params.push(toDate.toISOString());
+      paramCounter++;
     }
 
     if (minAmount > 0) {
-      conditions.push('total_amount >= ?');
+      conditions.push(`total_amount >= $${paramCounter}`);
       params.push(minAmount);
+      paramCounter++;
    }
 
     if (maxAmount) {
-      conditions.push('total_amount <= ?');
+      conditions.push(`total_amount <= $${paramCounter}`);
       params.push(maxAmount);
+      paramCounter++;
     }
 
     // Get total count for pagination
-    const [countResult] = await pool.query(`
+    const { rows: [countResult] } = await pool.query(`
       SELECT COUNT(DISTINCT o1.order_number) as total
       FROM orders o1
       LEFT JOIN (
@@ -63,7 +70,7 @@ router.get('/', async (req, res) => {
       WHERE ${conditions.join(' AND ')}
     `, params);
 
-    const total = countResult[0].total;
+    const total = countResult.total;
 
     // Get paginated results
     const query = `
@@ -75,7 +82,7 @@ router.get('/', async (req, res) => {
         o1.payment_method,
         o1.shipping_method,
         COUNT(o2.pid) as items_count,
-        CAST(SUM(o2.price * o2.quantity) AS DECIMAL(15,3)) as total_amount
+        ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount
       FROM orders o1
       JOIN orders o2 ON o1.order_number = o2.order_number
       WHERE ${conditions.join(' AND ')}
@@ -91,36 +98,37 @@ router.get('/', async (req, res) => {
           ? `${sortColumn} ${sortDirection}`
           : `o1.${sortColumn} ${sortDirection}`
       }
-      LIMIT ? OFFSET ?
+      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
     `;
 
-    const [rows] = await pool.query(query, [...params, limit, offset]);
+    params.push(limit, offset);
+    const { rows } = await pool.query(query, params);
 
     // Get order statistics
-    const [stats] = await pool.query(`
+    const { rows: [orderStats] } = await pool.query(`
       WITH CurrentStats AS (
         SELECT
           COUNT(DISTINCT order_number) as total_orders,
-          CAST(SUM(price * quantity) AS DECIMAL(15,3)) as total_revenue
+          ROUND(SUM(price * quantity)::numeric, 3) as total_revenue
         FROM orders
         WHERE canceled = false
-        AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
+        AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days'
       ),
       PreviousStats AS (
         SELECT
           COUNT(DISTINCT order_number) as prev_orders,
-          CAST(SUM(price * quantity) AS DECIMAL(15,3)) as prev_revenue
+          ROUND(SUM(price * quantity)::numeric, 3) as prev_revenue
         FROM orders
         WHERE canceled = false
-        AND DATE(date) BETWEEN DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND DATE_SUB(CURDATE(), INTERVAL 30 DAY)
+        AND DATE(date) BETWEEN CURRENT_DATE - INTERVAL '60 days' AND CURRENT_DATE - INTERVAL '30 days'
       ),
       OrderValues AS (
         SELECT
           order_number,
-          CAST(SUM(price * quantity) AS DECIMAL(15,3)) as order_value
+          ROUND(SUM(price * quantity)::numeric, 3) as order_value
         FROM orders
         WHERE canceled = false
-        AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
+        AND DATE(date) >= CURRENT_DATE - INTERVAL '30 days'
         GROUP BY order_number
       )
       SELECT
@@ -128,30 +136,28 @@ router.get('/', async (req, res) => {
         cs.total_revenue,
         CASE
           WHEN ps.prev_orders > 0
-          THEN ((cs.total_orders - ps.prev_orders) / ps.prev_orders * 100)
+          THEN ROUND(((cs.total_orders - ps.prev_orders)::numeric / ps.prev_orders * 100), 1)
           ELSE 0
         END as order_growth,
         CASE
           WHEN ps.prev_revenue > 0
-          THEN ((cs.total_revenue - ps.prev_revenue) / ps.prev_revenue * 100)
+          THEN ROUND(((cs.total_revenue - ps.prev_revenue)::numeric / ps.prev_revenue * 100), 1)
           ELSE 0
         END as revenue_growth,
         CASE
           WHEN cs.total_orders > 0
-          THEN CAST((cs.total_revenue / cs.total_orders) AS DECIMAL(15,3))
+          THEN ROUND((cs.total_revenue::numeric / cs.total_orders), 3)
          ELSE 0
         END as average_order_value,
         CASE
           WHEN ps.prev_orders > 0
-          THEN CAST((ps.prev_revenue / ps.prev_orders) AS DECIMAL(15,3))
+          THEN ROUND((ps.prev_revenue::numeric / ps.prev_orders), 3)
           ELSE 0
         END as prev_average_order_value
       FROM CurrentStats cs
       CROSS JOIN PreviousStats ps
     `);
 
-    const orderStats = stats[0];
-
     res.json({
       orders: rows.map(row => ({
         ...row,
@@ -189,7 +195,7 @@ router.get('/:orderNumber', async (req, res) => {
   const pool = req.app.locals.pool;
   try {
     // Get order details
-    const [orderRows] = await pool.query(`
+    const { rows: orderRows } = await pool.query(`
       SELECT DISTINCT
         o1.order_number,
         o1.customer,
@@ -200,10 +206,10 @@ router.get('/:orderNumber', async (req, res) => {
         o1.shipping_address,
         o1.billing_address,
         COUNT(o2.pid) as items_count,
-        CAST(SUM(o2.price * o2.quantity) AS DECIMAL(15,3)) as total_amount
+        ROUND(SUM(o2.price * o2.quantity)::numeric, 3) as total_amount
       FROM orders o1
       JOIN orders o2 ON o1.order_number = o2.order_number
-      WHERE o1.order_number = ? AND o1.canceled = false
+      WHERE o1.order_number = $1 AND o1.canceled = false
       GROUP BY
         o1.order_number,
         o1.customer,
@@ -220,17 +226,17 @@ router.get('/:orderNumber', async (req, res) => {
     }
 
     // Get order items
-    const [itemRows] = await pool.query(`
+    const { rows: itemRows } = await pool.query(`
       SELECT
         o.pid,
         p.title,
         p.SKU,
         o.quantity,
         o.price,
-        CAST((o.price * o.quantity) AS DECIMAL(15,3)) as total
+        ROUND((o.price * o.quantity)::numeric, 3) as total
       FROM orders o
       JOIN products p ON o.pid = p.pid
-      WHERE o.order_number = ? AND o.canceled = false
+      WHERE o.order_number = $1 AND o.canceled = false
     `, [req.params.orderNumber]);
 
     const order = {

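Reviewer suggestion (hypothetical helper, not in this diff): the `paramCounter` bookkeeping added throughout this route works, but the counter and the params array can drift if one is updated without the other. A tiny builder keeps them in lockstep:

// add(value) records the value and returns its numbered placeholder
function makeParams() {
  const params = [];
  return {
    params,
    add(value) {
      params.push(value);
      return `$${params.length}`;
    },
  };
}

// usage inside a route like this one would look like:
// const q = makeParams();
// if (search) {
//   const p = q.add(`%${search}%`);
//   conditions.push(`(o1.order_number ILIKE ${p} OR o1.customer ILIKE ${p})`);
// }
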
@@ -20,7 +20,7 @@ router.get('/brands', async (req, res) => {
     const pool = req.app.locals.pool;
     console.log('Fetching brands from database...');
 
-    const [results] = await pool.query(`
+    const { rows } = await pool.query(`
       SELECT DISTINCT COALESCE(p.brand, 'Unbranded') as brand
       FROM products p
       JOIN purchase_orders po ON p.pid = po.pid
@@ -30,8 +30,8 @@ router.get('/brands', async (req, res) => {
       ORDER BY COALESCE(p.brand, 'Unbranded')
     `);
 
-    console.log(`Found ${results.length} brands:`, results.slice(0, 3));
-    res.json(results.map(r => r.brand));
+    console.log(`Found ${rows.length} brands:`, rows.slice(0, 3));
+    res.json(rows.map(r => r.brand));
   } catch (error) {
     console.error('Error fetching brands:', error);
     res.status(500).json({ error: 'Failed to fetch brands' });
@@ -50,6 +50,7 @@ router.get('/', async (req, res) => {
 
     const conditions = ['p.visible = true'];
     const params = [];
+    let paramCounter = 1;
 
     // Add default replenishable filter unless explicitly showing non-replenishable
     if (req.query.showNonReplenishable !== 'true') {
@@ -58,9 +59,10 @@ router.get('/', async (req, res) => {
 
     // Handle search filter
     if (req.query.search) {
-      conditions.push('(p.title LIKE ? OR p.SKU LIKE ? OR p.barcode LIKE ?)');
+      conditions.push(`(p.title ILIKE $${paramCounter} OR p.SKU ILIKE $${paramCounter} OR p.barcode ILIKE $${paramCounter})`);
       const searchTerm = `%${req.query.search}%`;
-      params.push(searchTerm, searchTerm, searchTerm);
+      params.push(searchTerm);
+      paramCounter++;
     }
 
     // Handle numeric filters with operators
@@ -84,61 +86,69 @@ router.get('/', async (req, res) => {
       if (field) {
         const operator = req.query[`${key}_operator`] || '=';
         if (operator === 'between') {
           // Handle between operator
           try {
             const [min, max] = JSON.parse(value);
-            conditions.push(`${field} BETWEEN ? AND ?`);
+            conditions.push(`${field} BETWEEN $${paramCounter} AND $${paramCounter + 1}`);
             params.push(min, max);
+            paramCounter += 2;
           } catch (e) {
             console.error(`Invalid between value for ${key}:`, value);
           }
         } else {
           // Handle other operators
-          conditions.push(`${field} ${operator} ?`);
+          conditions.push(`${field} ${operator} $${paramCounter}`);
           params.push(parseFloat(value));
+          paramCounter++;
         }
       }
     });
 
     // Handle select filters
     if (req.query.vendor) {
-      conditions.push('p.vendor = ?');
+      conditions.push(`p.vendor = $${paramCounter}`);
       params.push(req.query.vendor);
+      paramCounter++;
     }
 
     if (req.query.brand) {
-      conditions.push('p.brand = ?');
+      conditions.push(`p.brand = $${paramCounter}`);
       params.push(req.query.brand);
+      paramCounter++;
     }
 
     if (req.query.category) {
-      conditions.push('p.categories LIKE ?');
+      conditions.push(`p.categories ILIKE $${paramCounter}`);
       params.push(`%${req.query.category}%`);
+      paramCounter++;
     }
 
     if (req.query.stockStatus && req.query.stockStatus !== 'all') {
-      conditions.push('pm.stock_status = ?');
+      conditions.push(`pm.stock_status = $${paramCounter}`);
       params.push(req.query.stockStatus);
+      paramCounter++;
     }
 
     if (req.query.abcClass) {
-      conditions.push('pm.abc_class = ?');
+      conditions.push(`pm.abc_class = $${paramCounter}`);
       params.push(req.query.abcClass);
+      paramCounter++;
     }
 
     if (req.query.leadTimeStatus) {
-      conditions.push('pm.lead_time_status = ?');
+      conditions.push(`pm.lead_time_status = $${paramCounter}`);
       params.push(req.query.leadTimeStatus);
+      paramCounter++;
     }
 
     if (req.query.replenishable !== undefined) {
-      conditions.push('p.replenishable = ?');
-      params.push(req.query.replenishable === 'true' ? 1 : 0);
+      conditions.push(`p.replenishable = $${paramCounter}`);
+      params.push(req.query.replenishable === 'true');
+      paramCounter++;
     }
 
     if (req.query.managingStock !== undefined) {
-      conditions.push('p.managing_stock = ?');
-      params.push(req.query.managingStock === 'true' ? 1 : 0);
+      conditions.push(`p.managing_stock = $${paramCounter}`);
+      params.push(req.query.managingStock === 'true');
+      paramCounter++;
     }
 
     // Combine all conditions with AND
@@ -151,17 +161,17 @@ router.get('/', async (req, res) => {
       LEFT JOIN product_metrics pm ON p.pid = pm.pid
       ${whereClause}
     `;
-    const [countResult] = await pool.query(countQuery, params);
-    const total = countResult[0].total;
+    const { rows: [countResult] } = await pool.query(countQuery, params);
+    const total = countResult.total;
 
     // Get available filters
-    const [categories] = await pool.query(
+    const { rows: categories } = await pool.query(
       'SELECT name FROM categories ORDER BY name'
     );
-    const [vendors] = await pool.query(
-      'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != "" ORDER BY vendor'
+    const { rows: vendors } = await pool.query(
      'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != \'\' ORDER BY vendor'
     );
-    const [brands] = await pool.query(
+    const { rows: brands } = await pool.query(
       'SELECT DISTINCT COALESCE(brand, \'Unbranded\') as brand FROM products WHERE visible = true ORDER BY brand'
     );
 
@@ -173,7 +183,7 @@ router.get('/', async (req, res) => {
           c.cat_id,
           c.name,
           c.parent_id,
-          CAST(c.name AS CHAR(1000)) as path
+          CAST(c.name AS text) as path
         FROM categories c
         WHERE c.parent_id IS NULL
 
@@ -183,7 +193,7 @@ router.get('/', async (req, res) => {
           c.cat_id,
           c.name,
           c.parent_id,
-          CONCAT(cp.path, ' > ', c.name)
+          cp.path || ' > ' || c.name
         FROM categories c
         JOIN category_path cp ON c.parent_id = cp.cat_id
       ),
@@ -210,7 +220,6 @@ router.get('/', async (req, res) => {
         FROM products p
       ),
       product_leaf_categories AS (
-        -- Find categories that aren't parents to other categories for this product
         SELECT DISTINCT pc.cat_id
         FROM product_categories pc
         WHERE NOT EXISTS (
@@ -224,7 +233,7 @@ router.get('/', async (req, res) => {
       SELECT
         p.*,
         COALESCE(p.brand, 'Unbranded') as brand,
-        GROUP_CONCAT(DISTINCT CONCAT(c.cat_id, ':', c.name)) as categories,
+        string_agg(DISTINCT (c.cat_id || ':' || c.name), ',') as categories,
         pm.daily_sales_avg,
         pm.weekly_sales_avg,
         pm.monthly_sales_avg,
@@ -247,83 +256,32 @@ router.get('/', async (req, res) => {
         pm.last_received_date,
         pm.abc_class,
         pm.stock_status,
-        pm.turnover_rate,
-        pm.current_lead_time,
-        pm.target_lead_time,
-        pm.lead_time_status,
-        pm.reorder_qty,
-        pm.overstocked_amt,
-        COALESCE(pm.days_of_inventory / NULLIF(pt.target_days, 0), 0) as stock_coverage_ratio
+        pm.turnover_rate
       FROM products p
       LEFT JOIN product_metrics pm ON p.pid = pm.pid
       LEFT JOIN product_categories pc ON p.pid = pc.pid
       LEFT JOIN categories c ON pc.cat_id = c.cat_id
-      LEFT JOIN product_thresholds pt ON p.pid = pt.pid
       JOIN product_leaf_categories plc ON c.cat_id = plc.cat_id
-      ${whereClause ? 'WHERE ' + whereClause.substring(6) : ''}
-      GROUP BY p.pid
+      ${whereClause}
+      GROUP BY p.pid, pm.pid
       ORDER BY ${sortColumn} ${sortDirection}
-      LIMIT ? OFFSET ?
+      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
     `;
 
-    // Add pagination params to the main query params
-    const queryParams = [...params, limit, offset];
-    console.log('Query:', query.replace(/\s+/g, ' '));
-    console.log('Params:', queryParams);
-
-    const [rows] = await pool.query(query, queryParams);
-
-    // Transform the results
-    const products = rows.map(row => ({
-      ...row,
-      categories: row.categories ? row.categories.split(',') : [],
-      price: parseFloat(row.price),
-      cost_price: parseFloat(row.cost_price),
-      landing_cost_price: row.landing_cost_price ? parseFloat(row.landing_cost_price) : null,
-      stock_quantity: parseInt(row.stock_quantity),
-      daily_sales_avg: parseFloat(row.daily_sales_avg) || 0,
-      weekly_sales_avg: parseFloat(row.weekly_sales_avg) || 0,
-      monthly_sales_avg: parseFloat(row.monthly_sales_avg) || 0,
-      avg_quantity_per_order: parseFloat(row.avg_quantity_per_order) || 0,
-      number_of_orders: parseInt(row.number_of_orders) || 0,
-      first_sale_date: row.first_sale_date || null,
-      last_sale_date: row.last_sale_date || null,
-      days_of_inventory: parseFloat(row.days_of_inventory) || 0,
-      weeks_of_inventory: parseFloat(row.weeks_of_inventory) || 0,
-      reorder_point: parseFloat(row.reorder_point) || 0,
-      safety_stock: parseFloat(row.safety_stock) || 0,
-      avg_margin_percent: parseFloat(row.avg_margin_percent) || 0,
-      total_revenue: parseFloat(row.total_revenue) || 0,
-      inventory_value: parseFloat(row.inventory_value) || 0,
-      cost_of_goods_sold: parseFloat(row.cost_of_goods_sold) || 0,
-      gross_profit: parseFloat(row.gross_profit) || 0,
-      gmroi: parseFloat(row.gmroi) || 0,
-      avg_lead_time_days: parseFloat(row.avg_lead_time_days) || 0,
-      last_purchase_date: row.last_purchase_date || null,
-      last_received_date: row.last_received_date || null,
-      abc_class: row.abc_class || null,
-      stock_status: row.stock_status || null,
-      turnover_rate: parseFloat(row.turnover_rate) || 0,
-      current_lead_time: parseFloat(row.current_lead_time) || 0,
-      target_lead_time: parseFloat(row.target_lead_time) || 0,
-      lead_time_status: row.lead_time_status || null,
-      stock_coverage_ratio: parseFloat(row.stock_coverage_ratio) || 0,
-      reorder_qty: parseInt(row.reorder_qty) || 0,
-      overstocked_amt: parseInt(row.overstocked_amt) || 0
-    }));
+    params.push(limit, offset);
+    const { rows: products } = await pool.query(query, params);
 
     res.json({
       products,
       pagination: {
         total,
-        currentPage: page,
         pages: Math.ceil(total / limit),
+        currentPage: page,
         limit
       },
       filters: {
-        categories: categories.map(category => category.name),
-        vendors: vendors.map(vendor => vendor.vendor),
-        brands: brands.map(brand => brand.brand)
+        categories: categories.map(c => c.name),
+        vendors: vendors.map(v => v.vendor),
+        brands: brands.map(b => b.brand)
       }
     });
   } catch (error) {

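Migration note: two details in this file are easy to miss. The search filter now pushes the search term once even though it backs three `ILIKE` placeholders — valid in pg because one bound value may be referenced by the same `$n` any number of times. And the boolean filters now bind true/false directly instead of MySQL's 1/0, since the columns are genuine Postgres booleans:

// one value, three references — and a real boolean parameter
conditions.push(`(p.title ILIKE $${paramCounter} OR p.SKU ILIKE $${paramCounter} OR p.barcode ILIKE $${paramCounter})`);
params.push(searchTerm);                          // pushed once, not three times
paramCounter++;
params.push(req.query.replenishable === 'true');  // boolean, not 1/0
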
@@ -29,40 +29,46 @@ router.get('/', async (req, res) => {
 
     let whereClause = '1=1';
     const params = [];
+    let paramCounter = 1;
 
     if (search) {
-      whereClause += ' AND (po.po_id LIKE ? OR po.vendor LIKE ?)';
-      params.push(`%${search}%`, `%${search}%`);
+      whereClause += ` AND (po.po_id ILIKE $${paramCounter} OR po.vendor ILIKE $${paramCounter})`;
+      params.push(`%${search}%`);
+      paramCounter++;
     }
 
     if (status && status !== 'all') {
-      whereClause += ' AND po.status = ?';
+      whereClause += ` AND po.status = $${paramCounter}`;
       params.push(Number(status));
+      paramCounter++;
     }
 
     if (vendor && vendor !== 'all') {
-      whereClause += ' AND po.vendor = ?';
+      whereClause += ` AND po.vendor = $${paramCounter}`;
       params.push(vendor);
+      paramCounter++;
     }
 
     if (startDate) {
-      whereClause += ' AND po.date >= ?';
+      whereClause += ` AND po.date >= $${paramCounter}`;
       params.push(startDate);
+      paramCounter++;
     }
 
     if (endDate) {
-      whereClause += ' AND po.date <= ?';
+      whereClause += ` AND po.date <= $${paramCounter}`;
       params.push(endDate);
+      paramCounter++;
     }
 
     // Get filtered summary metrics
-    const [summary] = await pool.query(`
+    const { rows: [summary] } = await pool.query(`
       WITH po_totals AS (
         SELECT
           po_id,
           SUM(ordered) as total_ordered,
           SUM(received) as total_received,
-          CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost
+          ROUND(SUM(ordered * cost_price)::numeric, 3) as total_cost
         FROM purchase_orders po
         WHERE ${whereClause}
         GROUP BY po_id
@@ -72,26 +78,26 @@ router.get('/', async (req, res) => {
         SUM(total_ordered) as total_ordered,
         SUM(total_received) as total_received,
         ROUND(
-          SUM(total_received) / NULLIF(SUM(total_ordered), 0), 3
+          (SUM(total_received)::numeric / NULLIF(SUM(total_ordered), 0)), 3
         ) as fulfillment_rate,
-        CAST(SUM(total_cost) AS DECIMAL(15,3)) as total_value,
-        CAST(AVG(total_cost) AS DECIMAL(15,3)) as avg_cost
+        ROUND(SUM(total_cost)::numeric, 3) as total_value,
+        ROUND(AVG(total_cost)::numeric, 3) as avg_cost
       FROM po_totals
     `, params);
 
     // Get total count for pagination
-    const [countResult] = await pool.query(`
+    const { rows: [countResult] } = await pool.query(`
       SELECT COUNT(DISTINCT po_id) as total
       FROM purchase_orders po
       WHERE ${whereClause}
     `, params);
 
-    const total = countResult[0].total;
+    const total = countResult.total;
     const offset = (page - 1) * limit;
     const pages = Math.ceil(total / limit);
 
     // Get recent purchase orders
-    const [orders] = await pool.query(`
+    const { rows: orders } = await pool.query(`
       WITH po_totals AS (
         SELECT
           po_id,
@@ -101,10 +107,10 @@ router.get('/', async (req, res) => {
           receiving_status,
           COUNT(DISTINCT pid) as total_items,
           SUM(ordered) as total_quantity,
-          CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost,
+          ROUND(SUM(ordered * cost_price)::numeric, 3) as total_cost,
           SUM(received) as total_received,
           ROUND(
-            SUM(received) / NULLIF(SUM(ordered), 0), 3
+            (SUM(received)::numeric / NULLIF(SUM(ordered), 0)), 3
           ) as fulfillment_rate
         FROM purchase_orders po
         WHERE ${whereClause}
@@ -113,7 +119,7 @@ router.get('/', async (req, res) => {
       SELECT
         po_id as id,
         vendor as vendor_name,
-        DATE_FORMAT(date, '%Y-%m-%d') as order_date,
+        to_char(date, 'YYYY-MM-DD') as order_date,
         status,
         receiving_status,
         total_items,
@@ -124,21 +130,21 @@ router.get('/', async (req, res) => {
       FROM po_totals
       ORDER BY
         CASE
-          WHEN ? = 'order_date' THEN date
-          WHEN ? = 'vendor_name' THEN vendor
-          WHEN ? = 'total_cost' THEN CAST(total_cost AS DECIMAL(15,3))
-          WHEN ? = 'total_received' THEN CAST(total_received AS DECIMAL(15,3))
-          WHEN ? = 'total_items' THEN CAST(total_items AS SIGNED)
-          WHEN ? = 'total_quantity' THEN CAST(total_quantity AS SIGNED)
-          WHEN ? = 'fulfillment_rate' THEN CAST(fulfillment_rate AS DECIMAL(5,3))
-          WHEN ? = 'status' THEN status
+          WHEN $${paramCounter} = 'order_date' THEN date
+          WHEN $${paramCounter} = 'vendor_name' THEN vendor
+          WHEN $${paramCounter} = 'total_cost' THEN total_cost
+          WHEN $${paramCounter} = 'total_received' THEN total_received
+          WHEN $${paramCounter} = 'total_items' THEN total_items
+          WHEN $${paramCounter} = 'total_quantity' THEN total_quantity
+          WHEN $${paramCounter} = 'fulfillment_rate' THEN fulfillment_rate
+          WHEN $${paramCounter} = 'status' THEN status
           ELSE date
         END ${sortDirection === 'desc' ? 'DESC' : 'ASC'}
-      LIMIT ? OFFSET ?
-    `, [...params, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, sortColumn, Number(limit), offset]);
+      LIMIT $${paramCounter + 1} OFFSET $${paramCounter + 2}
+    `, [...params, sortColumn, Number(limit), offset]);
 
     // Get unique vendors for filter options
-    const [vendors] = await pool.query(`
+    const { rows: vendors } = await pool.query(`
       SELECT DISTINCT vendor
       FROM purchase_orders
       WHERE vendor IS NOT NULL AND vendor != ''
@@ -146,7 +152,7 @@ router.get('/', async (req, res) => {
     `);
 
     // Get unique statuses for filter options
-    const [statuses] = await pool.query(`
+    const { rows: statuses } = await pool.query(`
       SELECT DISTINCT status
       FROM purchase_orders
       WHERE status IS NOT NULL
@@ -169,12 +175,12 @@ router.get('/', async (req, res) => {
 
     // Parse summary metrics
     const parsedSummary = {
-      order_count: Number(summary[0].order_count) || 0,
-      total_ordered: Number(summary[0].total_ordered) || 0,
-      total_received: Number(summary[0].total_received) || 0,
-      fulfillment_rate: Number(summary[0].fulfillment_rate) || 0,
-      total_value: Number(summary[0].total_value) || 0,
-      avg_cost: Number(summary[0].avg_cost) || 0
+      order_count: Number(summary.order_count) || 0,
+      total_ordered: Number(summary.total_ordered) || 0,
+      total_received: Number(summary.total_received) || 0,
+      fulfillment_rate: Number(summary.fulfillment_rate) || 0,
+      total_value: Number(summary.total_value) || 0,
+      avg_cost: Number(summary.avg_cost) || 0
     };
 
     res.json({
@@ -202,7 +208,7 @@ router.get('/vendor-metrics', async (req, res) => {
   try {
     const pool = req.app.locals.pool;
 
-    const [metrics] = await pool.query(`
+    const { rows: metrics } = await pool.query(`
       WITH delivery_metrics AS (
         SELECT
           vendor,
@@ -213,7 +219,7 @@ router.get('/vendor-metrics', async (req, res) => {
           CASE
             WHEN status >= ${STATUS.RECEIVING_STARTED} AND receiving_status >= ${RECEIVING_STATUS.PARTIAL_RECEIVED}
             AND received_date IS NOT NULL AND date IS NOT NULL
-            THEN DATEDIFF(received_date, date)
+            THEN (received_date - date)::integer
             ELSE NULL
           END as delivery_days
         FROM purchase_orders
@@ -226,18 +232,18 @@ router.get('/vendor-metrics', async (req, res) => {
         SUM(ordered) as total_ordered,
         SUM(received) as total_received,
         ROUND(
-          SUM(received) / NULLIF(SUM(ordered), 0), 3
+          (SUM(received)::numeric / NULLIF(SUM(ordered), 0)), 3
         ) as fulfillment_rate,
-        CAST(ROUND(
-          SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2
-        ) AS DECIMAL(15,3)) as avg_unit_cost,
-        CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend,
         ROUND(
-          AVG(NULLIF(delivery_days, 0)), 1
+          (SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2
+        ) as avg_unit_cost,
+        ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend,
+        ROUND(
+          AVG(NULLIF(delivery_days, 0))::numeric, 1
        ) as avg_delivery_days
       FROM delivery_metrics
       GROUP BY vendor
-      HAVING total_orders > 0
+      HAVING COUNT(DISTINCT po_id) > 0
       ORDER BY total_spend DESC
     `);
 
@@ -251,7 +257,7 @@ router.get('/vendor-metrics', async (req, res) => {
       fulfillment_rate: Number(vendor.fulfillment_rate) || 0,
       avg_unit_cost: Number(vendor.avg_unit_cost) || 0,
       total_spend: Number(vendor.total_spend) || 0,
-      avg_delivery_days: vendor.avg_delivery_days === null ? null : Number(vendor.avg_delivery_days)
+      avg_delivery_days: Number(vendor.avg_delivery_days) || 0
     }));
 
     res.json(parsedMetrics);

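Reviewer note: collapsing the eight repeated `?` sort parameters into one reused `$n` is a nice simplification. One thing that may be worth double-checking against a live database (a review question, not a claim about this code): Postgres resolves a CASE expression to a single result type, and these WHEN branches return date, text, and numeric columns — MySQL is permissive here, but Postgres may refuse the mixed branches unless they are normalized, e.g.:

// a defensive variant casts every sort branch to one type:
// WHEN $7 = 'order_date'  THEN date::text
// WHEN $7 = 'vendor_name' THEN vendor::text
// WHEN $7 = 'total_cost'  THEN total_cost::text ...
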
@@ -6,7 +6,7 @@ router.get('/', async (req, res) => {
   const pool = req.app.locals.pool;
   try {
     // Get all vendors with metrics
-    const [vendors] = await pool.query(`
+    const { rows: vendors } = await pool.query(`
       SELECT DISTINCT
         p.vendor as name,
         COALESCE(vm.active_products, 0) as active_products,
@@ -26,16 +26,16 @@ router.get('/', async (req, res) => {
 
     // Get cost metrics for all vendors
     const vendorNames = vendors.map(v => v.name);
-    const [costMetrics] = await pool.query(`
+    const { rows: costMetrics } = await pool.query(`
       SELECT
         vendor,
-        CAST(ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) AS DECIMAL(15,3)) as avg_unit_cost,
-        CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend
+        ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
+        ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
       FROM purchase_orders
       WHERE status = 'closed'
       AND cost_price IS NOT NULL
       AND ordered > 0
-      AND vendor IN (?)
+      AND vendor = ANY($1)
       GROUP BY vendor
     `, [vendorNames]);
 
@@ -49,26 +49,26 @@ router.get('/', async (req, res) => {
     }, {});
 
     // Get overall stats
-    const [stats] = await pool.query(`
+    const { rows: [stats] } = await pool.query(`
       SELECT
         COUNT(DISTINCT p.vendor) as totalVendors,
         COUNT(DISTINCT CASE
           WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75
           THEN p.vendor
         END) as activeVendors,
-        COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0)), 1), 0) as avgLeadTime,
-        COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0)), 1), 0) as avgFillRate,
-        COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0)), 1), 0) as avgOnTimeDelivery
+        COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0))::numeric, 1), 0) as avgLeadTime,
+        COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0))::numeric, 1), 0) as avgFillRate,
+        COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0))::numeric, 1), 0) as avgOnTimeDelivery
       FROM products p
       LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor
       WHERE p.vendor IS NOT NULL AND p.vendor != ''
     `);
 
     // Get overall cost metrics
-    const [overallCostMetrics] = await pool.query(`
+    const { rows: [overallCostMetrics] } = await pool.query(`
       SELECT
-        CAST(ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) AS DECIMAL(15,3)) as avg_unit_cost,
-        CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend
+        ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
+        ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
       FROM purchase_orders
       WHERE status = 'closed'
       AND cost_price IS NOT NULL
@@ -90,13 +90,13 @@ router.get('/', async (req, res) => {
       total_spend: parseFloat(costMetricsMap[vendor.name]?.total_spend || 0)
     })),
     stats: {
-      totalVendors: parseInt(stats[0].totalVendors),
-      activeVendors: parseInt(stats[0].activeVendors),
-      avgLeadTime: parseFloat(stats[0].avgLeadTime),
-      avgFillRate: parseFloat(stats[0].avgFillRate),
-      avgOnTimeDelivery: parseFloat(stats[0].avgOnTimeDelivery),
-      avgUnitCost: parseFloat(overallCostMetrics[0].avg_unit_cost),
-      totalSpend: parseFloat(overallCostMetrics[0].total_spend)
+      totalVendors: parseInt(stats.totalvendors),
+      activeVendors: parseInt(stats.activevendors),
+      avgLeadTime: parseFloat(stats.avgleadtime),
+      avgFillRate: parseFloat(stats.avgfillrate),
+      avgOnTimeDelivery: parseFloat(stats.avgontimedelivery),
+      avgUnitCost: parseFloat(overallCostMetrics.avg_unit_cost),
+      totalSpend: parseFloat(overallCostMetrics.total_spend)
     }
   });
   } catch (error) {

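Migration note: the change from `stats[0].totalVendors` to `stats.totalvendors` is not a typo — Postgres folds unquoted identifiers to lower case, so the SQL alias `totalVendors` arrives in JS as the field `totalvendors`. Quoting the alias in SQL preserves the camelCase, which would have let the JS side stay unchanged:

// unquoted: SELECT COUNT(DISTINCT p.vendor) as totalVendors    ->  row.totalvendors
// quoted:   SELECT COUNT(DISTINCT p.vendor) as "totalVendors"  ->  row.totalVendors
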
@@ -3,7 +3,6 @@ const cors = require('cors');
 const { spawn } = require('child_process');
 const path = require('path');
 const fs = require('fs');
-const mysql = require('mysql2/promise');
 const { corsMiddleware, corsErrorHandler } = require('./middleware/cors');
 const { initPool } = require('./utils/db');
 const productsRouter = require('./routes/products');
@@ -16,11 +15,9 @@ const configRouter = require('./routes/config');
 const metricsRouter = require('./routes/metrics');
 const vendorsRouter = require('./routes/vendors');
 const categoriesRouter = require('./routes/categories');
 const testConnectionRouter = require('./routes/test-connection');
 
-// Get the absolute path to the .env file
-const envPath = path.resolve(process.cwd(), '.env');
-console.log('Current working directory:', process.cwd());
+const envPath = '/var/www/html/inventory/.env';
 console.log('Looking for .env file at:', envPath);
 console.log('.env file exists:', fs.existsSync(envPath));
 
@@ -33,6 +30,9 @@ try {
     DB_HOST: process.env.DB_HOST || 'not set',
     DB_USER: process.env.DB_USER || 'not set',
     DB_NAME: process.env.DB_NAME || 'not set',
+    DB_PASSWORD: process.env.DB_PASSWORD ? '[password set]' : 'not set',
+    DB_PORT: process.env.DB_PORT || 'not set',
+    DB_SSL: process.env.DB_SSL || 'not set'
   });
 } catch (error) {
   console.error('Error loading .env file:', error);
@@ -66,20 +66,27 @@ app.use(express.json());
 app.use(express.urlencoded({ extended: true }));
 
 // Initialize database pool
-const pool = initPool({
+const poolPromise = initPool({
   host: process.env.DB_HOST,
   user: process.env.DB_USER,
   password: process.env.DB_PASSWORD,
   database: process.env.DB_NAME,
-  waitForConnections: true,
-  connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10,
-  queueLimit: 0,
-  enableKeepAlive: true,
-  keepAliveInitialDelay: 0
+  port: process.env.DB_PORT || 5432,
+  max: process.env.NODE_ENV === 'production' ? 20 : 10,
+  idleTimeoutMillis: 30000,
+  connectionTimeoutMillis: 2000,
+  ssl: process.env.DB_SSL === 'true' ? {
+    rejectUnauthorized: false
+  } : false
 });
 
-// Make pool available to routes
+// Make pool available to routes once initialized
+poolPromise.then(pool => {
   app.locals.pool = pool;
+}).catch(err => {
+  console.error('[Database] Failed to initialize pool:', err);
+  process.exit(1);
+});
 
 // Routes
 app.use('/api/products', productsRouter);
@@ -92,7 +99,6 @@ app.use('/api/config', configRouter);
 app.use('/api/metrics', metricsRouter);
 app.use('/api/vendors', vendorsRouter);
 app.use('/api/categories', categoriesRouter);
-app.use('/api', testConnectionRouter);
 
 // Basic health check route
 app.get('/health', (req, res) => {
@@ -128,17 +134,6 @@ process.on('unhandledRejection', (reason, promise) => {
   console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
 });
 
-// Test database connection
-pool.getConnection()
-  .then(connection => {
-    console.log('[Database] Connected successfully');
-    connection.release();
-  })
-  .catch(err => {
-    console.error('[Database] Error connecting:', err);
-    process.exit(1);
-  });
-
 // Initialize client sets for SSE
 const importClients = new Set();
 const updateClients = new Set();

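Reviewer note: `initPool` now resolves asynchronously and `app.locals.pool` is only assigned in the `.then()` callback, so a request that arrives before the pool finishes connecting would find `req.app.locals.pool` undefined. A common guard (a sketch, not part of this diff) is to start listening only after the pool resolves:

poolPromise.then((pool) => {
  app.locals.pool = pool;
  app.listen(process.env.PORT || 3000, () => {
    console.log('[Server] Listening');
  });
}).catch((err) => {
  console.error('[Database] Failed to initialize pool:', err);
  process.exit(1);
});
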
@@ -1,17 +1,70 @@
-const mysql = require('mysql2/promise');
+const { Pool, Client } = require('pg');
 
 let pool;
 
 function initPool(config) {
-  pool = mysql.createPool(config);
+  // Log config without sensitive data
+  const safeConfig = {
+    host: config.host,
+    user: config.user,
+    database: config.database,
+    port: config.port,
+    max: config.max,
+    idleTimeoutMillis: config.idleTimeoutMillis,
+    connectionTimeoutMillis: config.connectionTimeoutMillis,
+    ssl: config.ssl,
+    password: config.password ? '[password set]' : '[no password]'
+  };
+  console.log('[Database] Initializing pool with config:', safeConfig);
+
+  // Try creating a client first to test the connection
+  const testClient = new Client({
+    host: config.host,
+    user: config.user,
+    password: config.password,
+    database: config.database,
+    port: config.port,
+    ssl: config.ssl
+  });
+
+  console.log('[Database] Testing connection with Client...');
+  return testClient.connect()
+    .then(() => {
+      console.log('[Database] Test connection with Client successful');
+      return testClient.end();
+    })
+    .then(() => {
+      // If client connection worked, create the pool
+      console.log('[Database] Creating pool...');
+      pool = new Pool({
+        host: config.host,
+        user: config.user,
+        password: config.password,
+        database: config.database,
+        port: config.port,
+        max: config.max,
+        idleTimeoutMillis: config.idleTimeoutMillis,
+        connectionTimeoutMillis: config.connectionTimeoutMillis,
+        ssl: config.ssl
+      });
+      return pool.connect();
+    })
+    .then(poolClient => {
+      console.log('[Database] Pool connection successful');
+      poolClient.release();
+      return pool;
+    })
+    .catch(err => {
+      console.error('[Database] Connection failed:', err);
+      throw err;
+    });
 }
 
 async function getConnection() {
   if (!pool) {
     throw new Error('Database pool not initialized');
   }
-  return pool.getConnection();
+  return pool.connect();
 }
 
 module.exports = {

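Usage note (a sketch, under the assumption that callers previously paired mysql2's getConnection()/release()): pg pool clients are checked out with connect() and must also be handed back with release(), so existing call sites keep the same shape:

const { initPool, getConnection } = require('./utils/db');

async function example() {
  const client = await getConnection(); // pg PoolClient
  try {
    const { rows } = await client.query('SELECT 1 AS ok');
    return rows[0].ok;
  } finally {
    client.release(); // return the client to the pool
  }
}
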
@@ -27,7 +27,7 @@ import {
 import { Loader2, X, RefreshCw, AlertTriangle, RefreshCcw, Hourglass } from "lucide-react";
 import config from "../../config";
 import { toast } from "sonner";
-import { Table, TableBody, TableCell, TableRow, TableHeader, TableHead } from "@/components/ui/table";
+import { Table, TableBody, TableCell, TableRow } from "@/components/ui/table";
 
 interface ImportProgress {
   status: "running" | "error" | "complete" | "cancelled";
@@ -85,9 +85,7 @@ export function DataManagement() {
   const [] = useState<ImportProgress | null>(null);
   const [eventSource, setEventSource] = useState<EventSource | null>(null);
   const [importHistory, setImportHistory] = useState<ImportHistoryRecord[]>([]);
-  const [calculateHistory, setCalculateHistory] = useState<
-    CalculateHistoryRecord[]
-  >([]);
+  const [calculateHistory, setCalculateHistory] = useState<CalculateHistoryRecord[]>([]);
   const [moduleStatus, setModuleStatus] = useState<ModuleStatus[]>([]);
   const [tableStatus, setTableStatus] = useState<TableStatus[]>([]);
   const [scriptOutput, setScriptOutput] = useState<string[]>([]);
@@ -368,6 +366,10 @@ export function DataManagement() {
         fetch(`${config.apiUrl}/csv/status/tables`),
       ]);
 
+      if (!importRes.ok || !calcRes.ok || !moduleRes.ok || !tableRes.ok) {
+        throw new Error('One or more requests failed');
+      }
+
       const [importData, calcData, moduleData, tableData] = await Promise.all([
         importRes.json(),
         calcRes.json(),
@@ -375,52 +377,66 @@ export function DataManagement() {
         tableRes.json(),
       ]);
 
-      setImportHistory(importData);
-      setCalculateHistory(calcData);
-      setModuleStatus(moduleData);
-      setTableStatus(tableData);
+      // Ensure we're setting arrays even if the response is empty or invalid
+      setImportHistory(Array.isArray(importData) ? importData : []);
+      setCalculateHistory(Array.isArray(calcData) ? calcData : []);
+      setModuleStatus(Array.isArray(moduleData) ? moduleData : []);
+      setTableStatus(Array.isArray(tableData) ? tableData : []);
     } catch (error) {
       console.error("Error fetching history:", error);
+      // Set empty arrays as fallback
+      setImportHistory([]);
+      setCalculateHistory([]);
+      setModuleStatus([]);
+      setTableStatus([]);
     }
   };
 
   const refreshTableStatus = async () => {
     try {
       const response = await fetch(`${config.apiUrl}/csv/status/tables`);
+      if (!response.ok) throw new Error('Failed to fetch table status');
       const data = await response.json();
-      setTableStatus(data);
+      setTableStatus(Array.isArray(data) ? data : []);
     } catch (error) {
       toast.error("Failed to refresh table status");
+      setTableStatus([]);
     }
   };
 
   const refreshModuleStatus = async () => {
     try {
       const response = await fetch(`${config.apiUrl}/csv/status/modules`);
+      if (!response.ok) throw new Error('Failed to fetch module status');
       const data = await response.json();
-      setModuleStatus(data);
+      setModuleStatus(Array.isArray(data) ? data : []);
     } catch (error) {
       toast.error("Failed to refresh module status");
+      setModuleStatus([]);
     }
   };
 
   const refreshImportHistory = async () => {
     try {
       const response = await fetch(`${config.apiUrl}/csv/history/import`);
+      if (!response.ok) throw new Error('Failed to fetch import history');
       const data = await response.json();
-      setImportHistory(data);
+      setImportHistory(Array.isArray(data) ? data : []);
     } catch (error) {
       toast.error("Failed to refresh import history");
+      setImportHistory([]);
     }
   };
 
   const refreshCalculateHistory = async () => {
     try {
       const response = await fetch(`${config.apiUrl}/csv/history/calculate`);
+      if (!response.ok) throw new Error('Failed to fetch calculate history');
       const data = await response.json();
-      setCalculateHistory(data);
+      setCalculateHistory(Array.isArray(data) ? data : []);
     } catch (error) {
       toast.error("Failed to refresh calculate history");
      setCalculateHistory([]);
     }
   };

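Reviewer suggestion (hypothetical refactor, not in this diff): the four refresh handlers now repeat the same fetch → ok-check → Array.isArray → set sequence; a shared helper would keep the defensive behavior in one place:

async function fetchArray<T>(url: string): Promise<T[]> {
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  const data = await res.json();
  return Array.isArray(data) ? data : []; // same guard the handlers apply
}

// e.g. setTableStatus(await fetchArray<TableStatus>(`${config.apiUrl}/csv/status/tables`));
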
@@ -22,7 +22,7 @@ export function Login() {
     setIsLoading(true);
 
     try {
-      const url = isDev ? "/auth-inv/login" : `${config.authUrl}/login`;
+      const url = `${config.authUrl}/login`;
       console.log("Making login request:", {
         url,
         method: "POST",

@@ -22,7 +22,6 @@ import {
|
||||
import { motion } from 'motion/react';
|
||||
import {
|
||||
PurchaseOrderStatus,
|
||||
ReceivingStatus as ReceivingStatusCode,
|
||||
getPurchaseOrderStatusLabel,
|
||||
getReceivingStatusLabel,
|
||||
getPurchaseOrderStatusVariant,
|
||||
@@ -113,7 +112,7 @@ export default function PurchaseOrders() {
|
||||
statuses: string[];
|
||||
}>({
|
||||
vendors: [],
|
||||
statuses: [],
|
||||
statuses: []
|
||||
});
|
||||
const [pagination, setPagination] = useState({
|
||||
total: 0,
|
||||
@@ -154,15 +153,57 @@ export default function PurchaseOrders() {
|
||||
fetch('/api/purchase-orders/cost-analysis')
|
||||
]);
|
||||
|
||||
const [
|
||||
purchaseOrdersData,
|
||||
vendorMetricsData,
|
||||
costAnalysisData
|
||||
] = await Promise.all([
|
||||
purchaseOrdersRes.json() as Promise<PurchaseOrdersResponse>,
|
||||
vendorMetricsRes.json(),
|
||||
costAnalysisRes.json()
|
||||
]);
|
||||
// Initialize default data
|
||||
let purchaseOrdersData: PurchaseOrdersResponse = {
|
||||
orders: [],
|
||||
summary: {
|
||||
order_count: 0,
|
||||
total_ordered: 0,
|
||||
total_received: 0,
|
||||
fulfillment_rate: 0,
|
||||
total_value: 0,
|
||||
avg_cost: 0
|
||||
},
|
||||
pagination: {
|
||||
total: 0,
|
||||
pages: 0,
|
||||
page: 1,
|
||||
limit: 100
|
||||
},
|
||||
filters: {
|
||||
vendors: [],
|
||||
statuses: []
|
||||
}
|
||||
};
|
||||
|
||||
let vendorMetricsData: VendorMetrics[] = [];
|
||||
let costAnalysisData: CostAnalysis = {
|
||||
unique_products: 0,
|
||||
avg_cost: 0,
|
||||
min_cost: 0,
|
||||
max_cost: 0,
|
||||
cost_variance: 0,
|
||||
total_spend_by_category: []
|
||||
};
|
||||
|
||||
// Only try to parse responses if they were successful
|
||||
if (purchaseOrdersRes.ok) {
|
||||
purchaseOrdersData = await purchaseOrdersRes.json();
|
||||
} else {
|
||||
console.error('Failed to fetch purchase orders:', await purchaseOrdersRes.text());
|
||||
}
|
||||
|
||||
if (vendorMetricsRes.ok) {
|
||||
vendorMetricsData = await vendorMetricsRes.json();
|
||||
} else {
|
||||
console.error('Failed to fetch vendor metrics:', await vendorMetricsRes.text());
|
||||
}
|
||||
|
||||
if (costAnalysisRes.ok) {
|
||||
costAnalysisData = await costAnalysisRes.json();
|
||||
} else {
|
||||
console.error('Failed to fetch cost analysis:', await costAnalysisRes.text());
|
||||
}
|
||||
|
||||
setPurchaseOrders(purchaseOrdersData.orders);
|
||||
setPagination(purchaseOrdersData.pagination);
|
||||
@@ -172,6 +213,27 @@ export default function PurchaseOrders() {
|
||||
setCostAnalysis(costAnalysisData);
|
||||
} catch (error) {
|
||||
console.error('Error fetching data:', error);
|
||||
// Set default values in case of error
|
||||
setPurchaseOrders([]);
|
||||
setPagination({ total: 0, pages: 0, page: 1, limit: 100 });
|
||||
setFilterOptions({ vendors: [], statuses: [] });
|
||||
setSummary({
|
||||
order_count: 0,
|
||||
total_ordered: 0,
|
||||
total_received: 0,
|
||||
fulfillment_rate: 0,
|
||||
total_value: 0,
|
||||
avg_cost: 0
|
||||
});
|
||||
setVendorMetrics([]);
|
||||
setCostAnalysis({
|
||||
unique_products: 0,
|
||||
avg_cost: 0,
|
||||
min_cost: 0,
|
||||
max_cost: 0,
|
||||
cost_variance: 0,
|
||||
total_spend_by_category: []
|
||||
});
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
@@ -311,7 +373,7 @@ export default function PurchaseOrders() {
               </SelectTrigger>
               <SelectContent>
                 <SelectItem value="all">All Vendors</SelectItem>
-                {filterOptions.vendors.map(vendor => (
+                {filterOptions?.vendors?.map(vendor => (
                   <SelectItem key={vendor} value={vendor}>
                     {vendor}
                   </SelectItem>
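This one-line change guards the vendor Select against filterOptions being undefined or missing its vendors array, e.g. on first render or after the failed-fetch fallback above. A small sketch of the behavior, with illustrative values only:

```ts
// Optional chaining short-circuits to undefined instead of throwing
// "Cannot read properties of undefined (reading 'map')".
type FilterOptions = { vendors?: string[] } | undefined;

const vendorLabels = (filterOptions: FilterOptions): string[] =>
  filterOptions?.vendors?.map(vendor => vendor.toUpperCase()) ?? [];

console.log(vendorLabels(undefined));                    // []
console.log(vendorLabels({ vendors: ['Acme', 'Zen'] })); // ['ACME', 'ZEN']
```

In JSX the same expression simply evaluates to undefined when filterOptions is missing, which React renders as nothing, so the dropdown degrades to just the "All Vendors" item.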
@@ -3,10 +3,10 @@ export interface Product {
   title: string;
   SKU: string;
   stock_quantity: number;
-  price: string; // DECIMAL(15,3)
-  regular_price: string; // DECIMAL(15,3)
-  cost_price: string; // DECIMAL(15,3)
-  landing_cost_price: string | null; // DECIMAL(15,3)
+  price: string; // numeric(15,3)
+  regular_price: string; // numeric(15,3)
+  cost_price: string; // numeric(15,3)
+  landing_cost_price: string | null; // numeric(15,3)
   barcode: string;
   vendor: string;
   vendor_reference: string;
@@ -24,32 +24,32 @@ export interface Product {
   updated_at: string;
 
   // Metrics
-  daily_sales_avg?: string; // DECIMAL(15,3)
-  weekly_sales_avg?: string; // DECIMAL(15,3)
-  monthly_sales_avg?: string; // DECIMAL(15,3)
-  avg_quantity_per_order?: string; // DECIMAL(15,3)
+  daily_sales_avg?: string; // numeric(15,3)
+  weekly_sales_avg?: string; // numeric(15,3)
+  monthly_sales_avg?: string; // numeric(15,3)
+  avg_quantity_per_order?: string; // numeric(15,3)
   number_of_orders?: number;
   first_sale_date?: string;
   last_sale_date?: string;
   last_purchase_date?: string;
-  days_of_inventory?: string; // DECIMAL(15,3)
-  weeks_of_inventory?: string; // DECIMAL(15,3)
-  reorder_point?: string; // DECIMAL(15,3)
-  safety_stock?: string; // DECIMAL(15,3)
-  avg_margin_percent?: string; // DECIMAL(15,3)
-  total_revenue?: string; // DECIMAL(15,3)
-  inventory_value?: string; // DECIMAL(15,3)
-  cost_of_goods_sold?: string; // DECIMAL(15,3)
-  gross_profit?: string; // DECIMAL(15,3)
-  gmroi?: string; // DECIMAL(15,3)
-  avg_lead_time_days?: string; // DECIMAL(15,3)
+  days_of_inventory?: string; // numeric(15,3)
+  weeks_of_inventory?: string; // numeric(15,3)
+  reorder_point?: string; // numeric(15,3)
+  safety_stock?: string; // numeric(15,3)
+  avg_margin_percent?: string; // numeric(15,3)
+  total_revenue?: string; // numeric(15,3)
+  inventory_value?: string; // numeric(15,3)
+  cost_of_goods_sold?: string; // numeric(15,3)
+  gross_profit?: string; // numeric(15,3)
+  gmroi?: string; // numeric(15,3)
+  avg_lead_time_days?: string; // numeric(15,3)
   last_received_date?: string;
   abc_class?: string;
   stock_status?: string;
-  turnover_rate?: string; // DECIMAL(15,3)
-  current_lead_time?: string; // DECIMAL(15,3)
-  target_lead_time?: string; // DECIMAL(15,3)
+  turnover_rate?: string; // numeric(15,3)
+  current_lead_time?: string; // numeric(15,3)
+  target_lead_time?: string; // numeric(15,3)
   lead_time_status?: string;
   reorder_qty?: number;
-  overstocked_amt?: string; // DECIMAL(15,3)
+  overstocked_amt?: string; // numeric(15,3)
 }
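The comment change from DECIMAL(15,3) to numeric(15,3) tracks Postgres type naming; the fields themselves stay typed as string because, assuming the API reads these columns through node-postgres, the driver returns numeric values as strings by default to avoid floating-point precision loss. A sketch of converting only at the display edge; the formatPrice helper is illustrative, not from the codebase:

```ts
// Product shape abbreviated to the fields used here; values arrive as
// strings because pg parses numeric(15,3) columns to strings by default.
interface ProductPrice {
  price: string;                     // numeric(15,3)
  landing_cost_price: string | null; // numeric(15,3)
}

// Hypothetical display-edge helper: convert to a number only when rendering.
const formatPrice = (value: string | null): string =>
  value === null ? 'n/a' : Number(value).toFixed(2);

const sample: ProductPrice = { price: '1234.500', landing_cost_price: null };
console.log(formatPrice(sample.price));              // "1234.50"
console.log(formatPrice(sample.landing_cost_price)); // "n/a"
```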
@@ -1 +1 @@
{"root":["./src/app.tsx","./src/config.ts","./src/main.tsx","./src/vite-env.d.ts","./src/components/analytics/categoryperformance.tsx","./src/components/analytics/priceanalysis.tsx","./src/components/analytics/profitanalysis.tsx","./src/components/analytics/stockanalysis.tsx","./src/components/analytics/vendorperformance.tsx","./src/components/auth/requireauth.tsx","./src/components/dashboard/bestsellers.tsx","./src/components/dashboard/forecastmetrics.tsx","./src/components/dashboard/inventoryhealthsummary.tsx","./src/components/dashboard/inventorystats.tsx","./src/components/dashboard/keymetricscharts.tsx","./src/components/dashboard/lowstockalerts.tsx","./src/components/dashboard/overstockmetrics.tsx","./src/components/dashboard/overview.tsx","./src/components/dashboard/purchasemetrics.tsx","./src/components/dashboard/recentsales.tsx","./src/components/dashboard/replenishmentmetrics.tsx","./src/components/dashboard/salesbycategory.tsx","./src/components/dashboard/salesmetrics.tsx","./src/components/dashboard/stockmetrics.tsx","./src/components/dashboard/topoverstockedproducts.tsx","./src/components/dashboard/topreplenishproducts.tsx","./src/components/dashboard/trendingproducts.tsx","./src/components/dashboard/vendorperformance.tsx","./src/components/forecasting/columns.tsx","./src/components/layout/appsidebar.tsx","./src/components/layout/mainlayout.tsx","./src/components/products/productdetail.tsx","./src/components/products/productfilters.tsx","./src/components/products/producttable.tsx","./src/components/products/producttableskeleton.tsx","./src/components/products/productviews.tsx","./src/components/settings/calculationsettings.tsx","./src/components/settings/configuration.tsx","./src/components/settings/datamanagement.tsx","./src/components/settings/performancemetrics.tsx","./src/components/settings/stockmanagement.tsx","./src/components/ui/accordion.tsx","./src/components/ui/alert-dialog.tsx","./src/components/ui/alert.tsx","./src/components/ui/avatar.tsx","./src/components/ui/badge.tsx","./src/components/ui/button.tsx","./src/components/ui/calendar.tsx","./src/components/ui/card.tsx","./src/components/ui/command.tsx","./src/components/ui/date-range-picker-narrow.tsx","./src/components/ui/date-range-picker.tsx","./src/components/ui/dialog.tsx","./src/components/ui/drawer.tsx","./src/components/ui/dropdown-menu.tsx","./src/components/ui/input.tsx","./src/components/ui/label.tsx","./src/components/ui/pagination.tsx","./src/components/ui/popover.tsx","./src/components/ui/progress.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/select.tsx","./src/components/ui/separator.tsx","./src/components/ui/sheet.tsx","./src/components/ui/sidebar.tsx","./src/components/ui/skeleton.tsx","./src/components/ui/sonner.tsx","./src/components/ui/switch.tsx","./src/components/ui/table.tsx","./src/components/ui/tabs.tsx","./src/components/ui/toggle-group.tsx","./src/components/ui/toggle.tsx","./src/components/ui/tooltip.tsx","./src/hooks/use-mobile.tsx","./src/lib/utils.ts","./src/pages/analytics.tsx","./src/pages/categories.tsx","./src/pages/dashboard.tsx","./src/pages/forecasting.tsx","./src/pages/login.tsx","./src/pages/orders.tsx","./src/pages/products.tsx","./src/pages/purchaseorders.tsx","./src/pages/settings.tsx","./src/pages/vendors.tsx","./src/routes/forecasting.tsx","./src/types/products.ts"],"version":"5.6.3"}
{"root":["./src/app.tsx","./src/config.ts","./src/main.tsx","./src/vite-env.d.ts","./src/components/analytics/categoryperformance.tsx","./src/components/analytics/priceanalysis.tsx","./src/components/analytics/profitanalysis.tsx","./src/components/analytics/stockanalysis.tsx","./src/components/analytics/vendorperformance.tsx","./src/components/auth/requireauth.tsx","./src/components/dashboard/bestsellers.tsx","./src/components/dashboard/forecastmetrics.tsx","./src/components/dashboard/inventoryhealthsummary.tsx","./src/components/dashboard/inventorystats.tsx","./src/components/dashboard/keymetricscharts.tsx","./src/components/dashboard/lowstockalerts.tsx","./src/components/dashboard/overstockmetrics.tsx","./src/components/dashboard/overview.tsx","./src/components/dashboard/purchasemetrics.tsx","./src/components/dashboard/recentsales.tsx","./src/components/dashboard/replenishmentmetrics.tsx","./src/components/dashboard/salesbycategory.tsx","./src/components/dashboard/salesmetrics.tsx","./src/components/dashboard/stockmetrics.tsx","./src/components/dashboard/topoverstockedproducts.tsx","./src/components/dashboard/topreplenishproducts.tsx","./src/components/dashboard/trendingproducts.tsx","./src/components/dashboard/vendorperformance.tsx","./src/components/forecasting/columns.tsx","./src/components/layout/appsidebar.tsx","./src/components/layout/mainlayout.tsx","./src/components/products/productdetail.tsx","./src/components/products/productfilters.tsx","./src/components/products/producttable.tsx","./src/components/products/producttableskeleton.tsx","./src/components/products/productviews.tsx","./src/components/settings/calculationsettings.tsx","./src/components/settings/configuration.tsx","./src/components/settings/datamanagement.tsx","./src/components/settings/performancemetrics.tsx","./src/components/settings/stockmanagement.tsx","./src/components/ui/accordion.tsx","./src/components/ui/alert-dialog.tsx","./src/components/ui/alert.tsx","./src/components/ui/avatar.tsx","./src/components/ui/badge.tsx","./src/components/ui/button.tsx","./src/components/ui/calendar.tsx","./src/components/ui/card.tsx","./src/components/ui/command.tsx","./src/components/ui/date-range-picker-narrow.tsx","./src/components/ui/date-range-picker.tsx","./src/components/ui/dialog.tsx","./src/components/ui/drawer.tsx","./src/components/ui/dropdown-menu.tsx","./src/components/ui/input.tsx","./src/components/ui/label.tsx","./src/components/ui/pagination.tsx","./src/components/ui/popover.tsx","./src/components/ui/progress.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/select.tsx","./src/components/ui/separator.tsx","./src/components/ui/sheet.tsx","./src/components/ui/sidebar.tsx","./src/components/ui/skeleton.tsx","./src/components/ui/sonner.tsx","./src/components/ui/switch.tsx","./src/components/ui/table.tsx","./src/components/ui/tabs.tsx","./src/components/ui/toggle-group.tsx","./src/components/ui/toggle.tsx","./src/components/ui/tooltip.tsx","./src/hooks/use-mobile.tsx","./src/lib/utils.ts","./src/pages/analytics.tsx","./src/pages/categories.tsx","./src/pages/dashboard.tsx","./src/pages/forecasting.tsx","./src/pages/login.tsx","./src/pages/orders.tsx","./src/pages/products.tsx","./src/pages/purchaseorders.tsx","./src/pages/settings.tsx","./src/pages/vendors.tsx","./src/types/products.ts","./src/types/status-codes.ts"],"version":"5.6.3"}
@@ -1,7 +1,5 @@
 #!/bin/zsh
-
-#Clear previous mount in case it's still there
-umount /Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server
-
+#Clear previous mount in case it's still there
+umount '/Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server'
 #Mount
-sshfs matt@dashboard.kent.pw:/var/www/html/inventory -p 22122 /Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server/
+sshfs matt@dashboard.kent.pw:/var/www/html/inventory -p 22122 '/Users/matt/Library/Mobile Documents/com~apple~CloudDocs/Dev/inventory/inventory-server/'