Compare commits
21 Commits
merge-dash
...
5833779c10
| Author | SHA1 | Date | |
|---|---|---|---|
| 5833779c10 | |||
| c61115f665 | |||
| 7da2b304b4 | |||
| 4ccda8ad49 | |||
| 88f703ec70 | |||
| ab998fb7c4 | |||
| faaa8cc47a | |||
| 459c5092d2 | |||
| 6c9fd062e9 | |||
| 5d7d7a8671 | |||
| 54f55b06a1 | |||
| 4935cfe3bb | |||
| 5e2ee73e2d | |||
| 4dfe85231a | |||
| 9e7aac836e | |||
| d35c7dd6cf | |||
| ad1ebeefe1 | |||
| a0c442d1af | |||
| 7938c50762 | |||
| 5dcd19e7f3 | |||
| 075e7253a0 |
@@ -7,12 +7,13 @@ This document outlines the permission system implemented in the Inventory Manage
|
||||
Permissions follow this naming convention:
|
||||
|
||||
- Page access: `access:{page_name}`
|
||||
- Actions: `{action}:{resource}`
|
||||
- Settings sections: `settings:{section_name}`
|
||||
- Admin features: `admin:{feature}`
|
||||
|
||||
Examples:
|
||||
- `access:products` - Can access the Products page
|
||||
- `create:products` - Can create new products
|
||||
- `edit:users` - Can edit user accounts
|
||||
- `settings:user_management` - Can access User Management settings
|
||||
- `admin:debug` - Can see debug information
|
||||
|
||||
## Permission Components
|
||||
|
||||
@@ -22,10 +23,10 @@ The core component that conditionally renders content based on permissions.
|
||||
|
||||
```tsx
|
||||
<PermissionGuard
|
||||
permission="create:products"
|
||||
permission="settings:user_management"
|
||||
fallback={<p>No permission</p>}
|
||||
>
|
||||
<button>Create Product</button>
|
||||
<button>Manage Users</button>
|
||||
</PermissionGuard>
|
||||
```
|
||||
|
||||
@@ -81,7 +82,7 @@ Specific component for settings with built-in permission checks.
|
||||
<SettingsSection
|
||||
title="System Settings"
|
||||
description="Configure global settings"
|
||||
permission="edit:system_settings"
|
||||
permission="settings:global"
|
||||
>
|
||||
{/* Settings content */}
|
||||
</SettingsSection>
|
||||
@@ -95,8 +96,8 @@ Core hook for checking any permission.
|
||||
|
||||
```tsx
|
||||
const { hasPermission, hasPageAccess, isAdmin } = usePermissions();
|
||||
if (hasPermission('delete:products')) {
|
||||
// Can delete products
|
||||
if (hasPermission('settings:user_management')) {
|
||||
// Can access user management
|
||||
}
|
||||
```
|
||||
|
||||
@@ -106,8 +107,8 @@ Specialized hook for page-level permissions.
|
||||
|
||||
```tsx
|
||||
const { canView, canCreate, canEdit, canDelete } = usePagePermission('products');
|
||||
if (canEdit()) {
|
||||
// Can edit products
|
||||
if (canView()) {
|
||||
// Can view products
|
||||
}
|
||||
```
|
||||
|
||||
@@ -119,18 +120,43 @@ Permissions are stored in the database:
|
||||
|
||||
Admin users automatically have all permissions.
|
||||
|
||||
## Common Permission Codes
|
||||
## Implemented Permission Codes
|
||||
|
||||
### Page Access Permissions
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `access:dashboard` | Access to Dashboard page |
|
||||
| `access:overview` | Access to Overview page |
|
||||
| `access:products` | Access to Products page |
|
||||
| `create:products` | Create new products |
|
||||
| `edit:products` | Edit existing products |
|
||||
| `delete:products` | Delete products |
|
||||
| `view:users` | View user accounts |
|
||||
| `edit:users` | Edit user accounts |
|
||||
| `manage:permissions` | Assign permissions to users |
|
||||
| `access:categories` | Access to Categories page |
|
||||
| `access:brands` | Access to Brands page |
|
||||
| `access:vendors` | Access to Vendors page |
|
||||
| `access:purchase_orders` | Access to Purchase Orders page |
|
||||
| `access:analytics` | Access to Analytics page |
|
||||
| `access:forecasting` | Access to Forecasting page |
|
||||
| `access:import` | Access to Import page |
|
||||
| `access:settings` | Access to Settings page |
|
||||
| `access:chat` | Access to Chat Archive page |
|
||||
|
||||
### Settings Permissions
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `settings:global` | Access to Global Settings section |
|
||||
| `settings:products` | Access to Product Settings section |
|
||||
| `settings:vendors` | Access to Vendor Settings section |
|
||||
| `settings:data_management` | Access to Data Management settings |
|
||||
| `settings:calculation_settings` | Access to Calculation Settings |
|
||||
| `settings:library_management` | Access to Image Library Management |
|
||||
| `settings:performance_metrics` | Access to Performance Metrics |
|
||||
| `settings:prompt_management` | Access to AI Prompt Management |
|
||||
| `settings:stock_management` | Access to Stock Management |
|
||||
| `settings:templates` | Access to Template Management |
|
||||
| `settings:user_management` | Access to User Management |
|
||||
|
||||
### Admin Permissions
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `admin:debug` | Can see debug information and features |
|
||||
|
||||
## Implementation Examples
|
||||
|
||||
@@ -148,25 +174,31 @@ In `App.tsx`:
|
||||
### Component Level Protection
|
||||
|
||||
```tsx
|
||||
const { canEdit } = usePagePermission('products');
|
||||
const { hasPermission } = usePermissions();
|
||||
|
||||
function handleEdit() {
|
||||
if (!canEdit()) {
|
||||
function handleAction() {
|
||||
if (!hasPermission('settings:user_management')) {
|
||||
toast.error("You don't have permission");
|
||||
return;
|
||||
}
|
||||
// Edit logic
|
||||
// Action logic
|
||||
}
|
||||
```
|
||||
|
||||
### UI Element Protection
|
||||
|
||||
```tsx
|
||||
<PermissionButton
|
||||
page="products"
|
||||
action="delete"
|
||||
onClick={handleDelete}
|
||||
>
|
||||
Delete
|
||||
</PermissionButton>
|
||||
```
|
||||
<PermissionGuard permission="settings:user_management">
|
||||
<button onClick={handleManageUsers}>
|
||||
Manage Users
|
||||
</button>
|
||||
</PermissionGuard>
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- **Page Access**: These permissions control which pages a user can navigate to
|
||||
- **Settings Access**: These permissions control access to different sections within the Settings page
|
||||
- **Admin Features**: Special permissions for administrative functions
|
||||
- **CRUD Operations**: The application currently focuses on viewing and managing data rather than creating/editing/deleting individual records
|
||||
- **User Management**: User CRUD operations are handled through the settings interface rather than dedicated user management pages
|
||||
@@ -1,222 +0,0 @@
|
||||
// ecosystem.config.js
|
||||
const path = require('path');
|
||||
const dotenv = require('dotenv');
|
||||
|
||||
// Load environment variables safely with error handling
|
||||
const loadEnvFile = (envPath) => {
|
||||
try {
|
||||
console.log('Loading env from:', envPath);
|
||||
const result = dotenv.config({ path: envPath });
|
||||
if (result.error) {
|
||||
console.warn(`Warning: .env file not found or invalid at ${envPath}:`, result.error.message);
|
||||
return {};
|
||||
}
|
||||
console.log('Env variables loaded from', envPath, ':', Object.keys(result.parsed || {}));
|
||||
return result.parsed || {};
|
||||
} catch (error) {
|
||||
console.warn(`Warning: Error loading .env file at ${envPath}:`, error.message);
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
// Load environment variables for each server
|
||||
const authEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/auth-server/.env'));
|
||||
const aircallEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/aircall-server/.env'));
|
||||
const klaviyoEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/klaviyo-server/.env'));
|
||||
const metaEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/meta-server/.env'));
|
||||
const googleAnalyticsEnv = require('dotenv').config({
|
||||
path: path.resolve(__dirname, 'dashboard/google-server/.env')
|
||||
}).parsed || {};
|
||||
const typeformEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/typeform-server/.env'));
|
||||
const inventoryEnv = loadEnvFile(path.resolve(__dirname, 'inventory/.env'));
|
||||
|
||||
// Common log settings for all apps
|
||||
const logSettings = {
|
||||
log_rotate: true,
|
||||
max_size: '10M',
|
||||
retain: '10',
|
||||
log_date_format: 'YYYY-MM-DD HH:mm:ss'
|
||||
};
|
||||
|
||||
// Common app settings
|
||||
const commonSettings = {
|
||||
instances: 1,
|
||||
exec_mode: 'fork',
|
||||
autorestart: true,
|
||||
watch: false,
|
||||
max_memory_restart: '1G',
|
||||
time: true,
|
||||
...logSettings,
|
||||
ignore_watch: [
|
||||
'node_modules',
|
||||
'logs',
|
||||
'.git',
|
||||
'*.log'
|
||||
],
|
||||
min_uptime: 5000,
|
||||
max_restarts: 5,
|
||||
restart_delay: 4000,
|
||||
listen_timeout: 50000,
|
||||
kill_timeout: 5000,
|
||||
node_args: '--max-old-space-size=1536'
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
apps: [
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'auth-server',
|
||||
script: './dashboard/auth-server/index.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3003,
|
||||
...authEnv
|
||||
},
|
||||
error_file: 'dashboard/auth-server/logs/pm2/err.log',
|
||||
out_file: 'dashboard/auth-server/logs/pm2/out.log',
|
||||
log_file: 'dashboard/auth-server/logs/pm2/combined.log',
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3003
|
||||
},
|
||||
env_development: {
|
||||
NODE_ENV: 'development',
|
||||
PORT: 3003
|
||||
}
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'aircall-server',
|
||||
script: './dashboard/aircall-server/server.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
AIRCALL_PORT: 3002,
|
||||
...aircallEnv
|
||||
},
|
||||
error_file: 'dashboard/aircall-server/logs/pm2/err.log',
|
||||
out_file: 'dashboard/aircall-server/logs/pm2/out.log',
|
||||
log_file: 'dashboard/aircall-server/logs/pm2/combined.log',
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
AIRCALL_PORT: 3002
|
||||
}
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'klaviyo-server',
|
||||
script: './dashboard/klaviyo-server/server.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
KLAVIYO_PORT: 3004,
|
||||
...klaviyoEnv
|
||||
},
|
||||
error_file: 'dashboard/klaviyo-server/logs/pm2/err.log',
|
||||
out_file: 'dashboard/klaviyo-server/logs/pm2/out.log',
|
||||
log_file: 'dashboard/klaviyo-server/logs/pm2/combined.log',
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
KLAVIYO_PORT: 3004
|
||||
}
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'meta-server',
|
||||
script: './dashboard/meta-server/server.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3005,
|
||||
...metaEnv
|
||||
},
|
||||
error_file: 'dashboard/meta-server/logs/pm2/err.log',
|
||||
out_file: 'dashboard/meta-server/logs/pm2/out.log',
|
||||
log_file: 'dashboard/meta-server/logs/pm2/combined.log',
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3005
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "gorgias-server",
|
||||
script: "./dashboard/gorgias-server/server.js",
|
||||
env: {
|
||||
NODE_ENV: "development",
|
||||
PORT: 3006
|
||||
},
|
||||
env_production: {
|
||||
NODE_ENV: "production",
|
||||
PORT: 3006
|
||||
},
|
||||
error_file: "dashboard/logs/gorgias-server-error.log",
|
||||
out_file: "dashboard/logs/gorgias-server-out.log",
|
||||
log_file: "dashboard/logs/gorgias-server-combined.log",
|
||||
time: true
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'google-server',
|
||||
script: path.resolve(__dirname, 'dashboard/google-server/server.js'),
|
||||
watch: false,
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
GOOGLE_ANALYTICS_PORT: 3007,
|
||||
...googleAnalyticsEnv
|
||||
},
|
||||
error_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/err.log'),
|
||||
out_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/out.log'),
|
||||
log_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/combined.log'),
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
GOOGLE_ANALYTICS_PORT: 3007
|
||||
}
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'typeform-server',
|
||||
script: './dashboard/typeform-server/server.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
TYPEFORM_PORT: 3008,
|
||||
...typeformEnv
|
||||
},
|
||||
error_file: 'dashboard/typeform-server/logs/pm2/err.log',
|
||||
out_file: 'dashboard/typeform-server/logs/pm2/out.log',
|
||||
log_file: 'dashboard/typeform-server/logs/pm2/combined.log',
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
TYPEFORM_PORT: 3008
|
||||
}
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'inventory-server',
|
||||
script: './inventory/src/server.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3010,
|
||||
...inventoryEnv
|
||||
},
|
||||
error_file: 'inventory/logs/pm2/err.log',
|
||||
out_file: 'inventory/logs/pm2/out.log',
|
||||
log_file: 'inventory/logs/pm2/combined.log',
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3010,
|
||||
...inventoryEnv
|
||||
}
|
||||
},
|
||||
{
|
||||
...commonSettings,
|
||||
name: 'new-auth-server',
|
||||
script: './inventory-server/auth/server.js',
|
||||
env: {
|
||||
NODE_ENV: 'production',
|
||||
AUTH_PORT: 3011,
|
||||
...inventoryEnv,
|
||||
JWT_SECRET: process.env.JWT_SECRET
|
||||
},
|
||||
error_file: 'inventory-server/auth/logs/pm2/err.log',
|
||||
out_file: 'inventory-server/auth/logs/pm2/out.log',
|
||||
log_file: 'inventory-server/auth/logs/pm2/combined.log'
|
||||
}
|
||||
]
|
||||
};
|
||||
@@ -34,10 +34,12 @@ const authenticate = async (req, res, next) => {
|
||||
|
||||
// Get user from database
|
||||
const result = await pool.query(
|
||||
'SELECT id, username, is_admin FROM users WHERE id = $1',
|
||||
'SELECT id, username, email, is_admin, rocket_chat_user_id FROM users WHERE id = $1',
|
||||
[decoded.userId]
|
||||
);
|
||||
|
||||
console.log('Database query result for user', decoded.userId, ':', result.rows[0]);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(401).json({ error: 'User not found' });
|
||||
}
|
||||
@@ -58,7 +60,7 @@ router.post('/login', async (req, res) => {
|
||||
|
||||
// Get user from database
|
||||
const result = await pool.query(
|
||||
'SELECT id, username, password, is_admin, is_active FROM users WHERE username = $1',
|
||||
'SELECT id, username, password, is_admin, is_active, rocket_chat_user_id FROM users WHERE username = $1',
|
||||
[username]
|
||||
);
|
||||
|
||||
@@ -101,6 +103,7 @@ router.post('/login', async (req, res) => {
|
||||
id: user.id,
|
||||
username: user.username,
|
||||
is_admin: user.is_admin,
|
||||
rocket_chat_user_id: user.rocket_chat_user_id,
|
||||
permissions
|
||||
}
|
||||
});
|
||||
@@ -119,8 +122,13 @@ router.get('/me', authenticate, async (req, res) => {
|
||||
res.json({
|
||||
id: req.user.id,
|
||||
username: req.user.username,
|
||||
email: req.user.email,
|
||||
is_admin: req.user.is_admin,
|
||||
permissions
|
||||
rocket_chat_user_id: req.user.rocket_chat_user_id,
|
||||
permissions,
|
||||
// Debug info
|
||||
_debug_raw_user: req.user,
|
||||
_server_identifier: "INVENTORY_AUTH_SERVER_MODIFIED"
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error getting current user:', error);
|
||||
@@ -132,7 +140,7 @@ router.get('/me', authenticate, async (req, res) => {
|
||||
router.get('/users', authenticate, requirePermission('view:users'), async (req, res) => {
|
||||
try {
|
||||
const result = await pool.query(`
|
||||
SELECT id, username, email, is_admin, is_active, created_at, last_login
|
||||
SELECT id, username, email, is_admin, is_active, rocket_chat_user_id, created_at, last_login
|
||||
FROM users
|
||||
ORDER BY username
|
||||
`);
|
||||
@@ -151,7 +159,7 @@ router.get('/users/:id', authenticate, requirePermission('view:users'), async (r
|
||||
|
||||
// Get user details
|
||||
const userResult = await pool.query(`
|
||||
SELECT id, username, email, is_admin, is_active, created_at, last_login
|
||||
SELECT id, username, email, is_admin, is_active, rocket_chat_user_id, created_at, last_login
|
||||
FROM users
|
||||
WHERE id = $1
|
||||
`, [userId]);
|
||||
@@ -187,13 +195,14 @@ router.post('/users', authenticate, requirePermission('create:users'), async (re
|
||||
const client = await pool.connect();
|
||||
|
||||
try {
|
||||
const { username, email, password, is_admin, is_active, permissions } = req.body;
|
||||
const { username, email, password, is_admin, is_active, rocket_chat_user_id, permissions } = req.body;
|
||||
|
||||
console.log("Create user request:", {
|
||||
username,
|
||||
email,
|
||||
is_admin,
|
||||
is_active,
|
||||
rocket_chat_user_id,
|
||||
permissions: permissions || []
|
||||
});
|
||||
|
||||
@@ -221,10 +230,10 @@ router.post('/users', authenticate, requirePermission('create:users'), async (re
|
||||
|
||||
// Insert new user
|
||||
const userResult = await client.query(`
|
||||
INSERT INTO users (username, email, password, is_admin, is_active, created_at)
|
||||
VALUES ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP)
|
||||
INSERT INTO users (username, email, password, is_admin, is_active, rocket_chat_user_id, created_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, CURRENT_TIMESTAMP)
|
||||
RETURNING id
|
||||
`, [username, email || null, hashedPassword, !!is_admin, is_active !== false]);
|
||||
`, [username, email || null, hashedPassword, !!is_admin, is_active !== false, rocket_chat_user_id || null]);
|
||||
|
||||
const userId = userResult.rows[0].id;
|
||||
|
||||
@@ -299,7 +308,7 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
||||
|
||||
try {
|
||||
const userId = req.params.id;
|
||||
const { username, email, password, is_admin, is_active, permissions } = req.body;
|
||||
const { username, email, password, is_admin, is_active, rocket_chat_user_id, permissions } = req.body;
|
||||
|
||||
console.log("Update user request:", {
|
||||
userId,
|
||||
@@ -307,6 +316,7 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
||||
email,
|
||||
is_admin,
|
||||
is_active,
|
||||
rocket_chat_user_id,
|
||||
permissions: permissions || []
|
||||
});
|
||||
|
||||
@@ -348,6 +358,11 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
||||
updateValues.push(!!is_active);
|
||||
}
|
||||
|
||||
if (rocket_chat_user_id !== undefined) {
|
||||
updateFields.push(`rocket_chat_user_id = $${paramIndex++}`);
|
||||
updateValues.push(rocket_chat_user_id || null);
|
||||
}
|
||||
|
||||
// Update password if provided
|
||||
if (password) {
|
||||
const saltRounds = 10;
|
||||
|
||||
@@ -108,7 +108,7 @@ app.get('/me', async (req, res) => {
|
||||
|
||||
// Get user details from database
|
||||
const userResult = await pool.query(
|
||||
'SELECT id, username, email, is_admin, is_active FROM users WHERE id = $1',
|
||||
'SELECT id, username, email, is_admin, rocket_chat_user_id, is_active FROM users WHERE id = $1',
|
||||
[decoded.userId]
|
||||
);
|
||||
|
||||
@@ -135,6 +135,7 @@ app.get('/me', async (req, res) => {
|
||||
id: user.id,
|
||||
username: user.username,
|
||||
email: user.email,
|
||||
rocket_chat_user_id: user.rocket_chat_user_id,
|
||||
is_admin: user.is_admin,
|
||||
permissions: permissions
|
||||
});
|
||||
|
||||
20
inventory-server/dashboard/.env-future
Normal file
20
inventory-server/dashboard/.env-future
Normal file
@@ -0,0 +1,20 @@
|
||||
# Caching Server Configuration
|
||||
PORT=3010
|
||||
NODE_ENV=production
|
||||
|
||||
# Database Configuration
|
||||
MONGODB_URI=mongodb://dashboard_user:WDRFWiGXEeaC6aAyUKuT@localhost:27017/dashboard?authSource=dashboard
|
||||
REDIS_URL=redis://:Wgj32YXxxVLtPZoVzUnP@localhost:6379
|
||||
|
||||
# Gorgias
|
||||
GORGIAS_API_USERNAME=matt@acherryontop.com
|
||||
GORGIAS_API_PASSWORD=d2ed0d23d2a7bf11a633a12fb260769f4e4a970d440693e7d64b8d2223fa6503
|
||||
|
||||
# GA4 credentials
|
||||
GA_PROPERTY_ID=281045851
|
||||
GOOGLE_APPLICATION_CREDENTIALS_JSON={"type": "service_account","project_id": "acot-stats","private_key_id": "259d1fd9864efbfa38b8ba02fdd74dc008ace3c5","private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC5Y6foai8WF98k\nIA0yLn94Y3lmDYlyvI9xL2YqSZSyvgK35wdWRTIaEvHKdiUWuYi3ZPdkYmz1OYiV\njVfR2g+mFpA7MI/JMwyGWwjnV4WW2q6INfgi/PvHlbP3LyyQo0B8CvAY0CHqrpDs\nlJQhAkqmteU24dqcdZoV3vM8JMsDiXm44DqwXsEfWibKv4i0mWNkwiEQr0yImHwb\nbjgclwVLLi5kdM2+49PXr47LCODdL+xmX0uSdgSG6XYqEIVsEOXIUJKzqUe036b/\nEFQ0BxWdJBWs/MYOapn/NNv+Mts+am2ipUuIcgPbOut4xa2Fkky93WnJf0tB+VJP\njFnyZJhdAgMBAAECggEAC980Cp/4zvSNZMNWr6l8ST8u2thavnRmcoGYtx7ffQjK\nT3Dl2TefgJLzqpr2lLt3OVint7p5LsUAmE8lBLpu+RxbH9HkIKbPvQTfD5gyZQQx\nBruqCGzkn2st9fzZNj6gwQYe9P/TGYkUnR8wqI0nLwDZTQful3QNKixiWC4lAAoK\nqdd6H++pqjVUiTqgFwFD3zBAhO0Lp8m/c5vTRT5kxi0wCTK66FaaGLr2OwZHcohp\nE8rEcTZ5kaJzBwqEz522R6ufQqN1Swoq4K6Ul3aAc59539VdrLNs++/eRH38MMVq\n5UTwBrH+zIkXIYv4mtGpR1NWGO2bZ652GzGXNEXcQQKBgQD9WsMmioIeWR9P9I0r\nIY+yyxz1EyscutUtnOtROT36OxokrzQaAKDz/OC3jVnhZSkzG6RcmmK/AJrcU+2m\n1L4mZGfF3DdeTqtK/KkNzGs9yRPDkbb/MF0wgtcvfE8tJH/suiDJKQNsjeaQIQW3\n4NvDxs0w60m9r9tk1CQau94ovQKBgQC7UzeA0mDSxIB5agGbvnzaJJTvAFvnCvhz\nu3ZakTlNecAHu4eOMc0+OCHFPLJlLL4b0oraOxZIszX9BTlgcstBmTUk03TibNsS\nsDiImHFC4hE5x6EPdifnkVFUXPMZ/eF0mHUPBEn41ipw1hoLfl6W+aYW9QUxBMWA\nzdMH4rg4IQKBgQCFcMaUiCNchKhfXnj0HKspCp3n3v64FReu/JVcpH+mSnbMl5Mj\nlu0vVSOuyb5rXvLCPm7lb1NPMqxeG75yPl8grYWSyxhGjbzetBD+eYqKclv8h8UQ\nx5JtuJxKIHk7V5whPS+DhByPknW7uAjg/ogBp7XvbB3c0MEHbEzP3991KQKBgC+a\n610Kmd6WX4v7e6Mn2rTZXRwL/E8QA6nttxs3Etf0m++bIczqLR2lyDdGwJNjtoB9\nlhn1sCkTmiHOBRHUuoDWPaI5NtggD+CE9ikIjKgRqY0EhZLXVTbNQFzvLjypv3UR\nFZaWYXIigzCfyIipOcKmeSYWaJZXfxXHuNylKmnhAoGAFa84AuOOGUr+pEvtUzIr\nvBKu1mnQbbsLEhgf3Tw88K3sO5OlguAwBEvD4eitj/aU5u2vJJhFa67cuERLsZru\n0sjtQwP6CJbWF4uaH0Hso4KQvnwl4BfdKwUncqoKtHrQiuGMvr5P5G941+Ax8brE\nJlC2e/RPUQKxScpK3nNK9mc=\n-----END PRIVATE KEY-----\n","client_email": "matt-dashboard@acot-stats.iam.gserviceaccount.com","client_id": 
"106112731322970982546","auth_uri": "https://accounts.google.com/o/oauth2/auth","token_uri": "https://oauth2.googleapis.com/token","auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs","client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/matt-dashboard%40acot-stats.iam.gserviceaccount.com","universe_domain": "googleapis.com"}
|
||||
|
||||
# Logging
|
||||
LOG_LEVEL=info
|
||||
LOG_MAX_SIZE=10m
|
||||
LOG_MAX_FILES=5
|
||||
205
inventory-server/dashboard/acot-server/README.md
Normal file
205
inventory-server/dashboard/acot-server/README.md
Normal file
@@ -0,0 +1,205 @@
|
||||
# ACOT Server
|
||||
|
||||
This server replaces the Klaviyo integration with direct database queries to the production MySQL database via SSH tunnel. It provides seamless API compatibility for all frontend components without requiring any frontend changes.
|
||||
|
||||
## Setup
|
||||
|
||||
1. **Environment Variables**: Copy `.env.example` to `.env` and configure:
|
||||
```
|
||||
DB_HOST=localhost
|
||||
DB_PORT=3306
|
||||
DB_USER=your_db_user
|
||||
DB_PASSWORD=your_db_password
|
||||
DB_NAME=your_db_name
|
||||
PORT=3007
|
||||
NODE_ENV=development
|
||||
```
|
||||
|
||||
2. **SSH Tunnel**: Ensure your SSH tunnel to the production database is running on localhost:3306.
|
||||
|
||||
3. **Install Dependencies**:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
4. **Start Server**:
|
||||
```bash
|
||||
npm start
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
All endpoints provide exact API compatibility with the previous Klaviyo implementation:
|
||||
|
||||
### Main Statistics
|
||||
- `GET /api/acot/events/stats` - Complete statistics dashboard data
|
||||
- Query params: `timeRange` (today, yesterday, thisWeek, lastWeek, thisMonth, lastMonth, last7days, last30days, last90days) or `startDate`/`endDate` for custom ranges
|
||||
- Returns: Revenue, orders, AOV, shipping data, order types, brands/categories, refunds, cancellations, best day, peak hour, order ranges, period progress, projections
|
||||
|
||||
### Daily Details
|
||||
- `GET /api/acot/events/stats/details` - Daily breakdown with previous period comparisons
|
||||
- Query params: `timeRange`, `metric` (revenue, orders, average_order, etc.), `daily=true`
|
||||
- Returns: Array of daily data points with trend comparisons
|
||||
|
||||
### Products
|
||||
- `GET /api/acot/events/products` - Top products with sales data
|
||||
- Query params: `timeRange`
|
||||
- Returns: Product list with images, sales quantities, revenue, and order counts
|
||||
|
||||
### Projections
|
||||
- `GET /api/acot/events/projection` - Smart revenue projections for incomplete periods
|
||||
- Query params: `timeRange`
|
||||
- Returns: Projected revenue with confidence levels based on historical patterns
|
||||
|
||||
### Health Check
|
||||
- `GET /api/acot/test` - Server health and database connectivity test
|
||||
|
||||
## Database Schema
|
||||
|
||||
The server queries the following main tables:
|
||||
|
||||
### Orders (`_order`)
|
||||
- **Key fields**: `order_id`, `date_placed`, `summary_total`, `order_status`, `ship_method_selected`, `stats_waiting_preorder`
|
||||
- **Valid orders**: `order_status > 15`
|
||||
- **Cancelled orders**: `order_status = 15`
|
||||
- **Shipped orders**: `order_status IN (100, 92)`
|
||||
- **Pre-orders**: `stats_waiting_preorder > 0`
|
||||
- **Local pickup**: `ship_method_selected = 'localpickup'`
|
||||
- **On-hold orders**: `ship_method_selected = 'holdit'`
|
||||
|
||||
### Order Items (`order_items`)
|
||||
- **Fields**: `order_id`, `prod_pid`, `qty_ordered`, `prod_price`
|
||||
- **Purpose**: Links orders to products for detailed analysis
|
||||
|
||||
### Products (`products`)
|
||||
- **Fields**: `pid`, `description` (product name), `company`
|
||||
- **Purpose**: Product information and brand data
|
||||
|
||||
### Product Images (`product_images`)
|
||||
- **Fields**: `pid`, `iid`, `order` (priority)
|
||||
- **Primary image**: `order = 255` (highest priority)
|
||||
- **Image URL generation**: `https://sbing.com/i/products/0000/{prefix}/{pid}-{type}-{iid}.jpg`
|
||||
|
||||
### Payments (`order_payment`)
|
||||
- **Refunds**: `payment_amount < 0`
|
||||
- **Purpose**: Track refund amounts and counts
|
||||
|
||||
## Business Logic
|
||||
|
||||
### Time Handling
|
||||
- **Timezone**: All calculations in UTC-5 (Eastern Time)
|
||||
- **Business Day**: 1 AM - 12:59 AM Eastern (25-hour business day)
|
||||
- **Format**: MySQL DATETIME format (YYYY-MM-DD HH:MM:SS)
|
||||
- **Period Boundaries**: Calculated using `timeUtils.js` for consistent time range handling
|
||||
|
||||
### Order Processing
|
||||
- **Revenue Calculation**: Only includes orders with `order_status > 15`
|
||||
- **Order Types**:
|
||||
- Pre-orders: `stats_waiting_preorder > 0`
|
||||
- Local pickup: `ship_method_selected = 'localpickup'`
|
||||
- On-hold: `ship_method_selected = 'holdit'`
|
||||
- **Shipping Methods**: Mapped to friendly names (e.g., `usps_ground_advantage` → "USPS Ground Advantage")
|
||||
|
||||
### Projections
|
||||
- **Period Progress**: Calculated based on current time within the selected period
|
||||
- **Simple Projection**: Linear extrapolation based on current progress
|
||||
- **Smart Projection**: Uses historical data patterns for more accurate forecasting
|
||||
- **Confidence Levels**: Based on data consistency and historical accuracy
|
||||
|
||||
### Image URL Generation
|
||||
- **Pattern**: `https://sbing.com/i/products/0000/{prefix}/{pid}-{type}-{iid}.jpg`
|
||||
- **Prefix**: First 2 digits of product ID
|
||||
- **Type**: "main" for primary images
|
||||
- **Fallback**: Uses primary image (order=255) when available
|
||||
|
||||
## Frontend Integration
|
||||
|
||||
### Service Layer (`services/acotService.js`)
|
||||
- **Purpose**: Replaces direct Klaviyo API calls with acot-server calls
|
||||
- **Methods**: `getStats()`, `getStatsDetails()`, `getProducts()`, `getProjection()`
|
||||
- **Logging**: Axios interceptors for request/response logging
|
||||
- **Environment**: Automatic URL handling (proxy in dev, direct in production)
|
||||
|
||||
### Component Updates
|
||||
All 5 main components updated to use `acotService`:
|
||||
- **StatCards.jsx**: Main dashboard statistics
|
||||
- **MiniStatCards.jsx**: Compact statistics view
|
||||
- **SalesChart.jsx**: Revenue and order trends
|
||||
- **MiniSalesChart.jsx**: Compact chart view
|
||||
- **ProductGrid.jsx**: Top products table
|
||||
|
||||
### Proxy Configuration (`vite.config.js`)
|
||||
```javascript
|
||||
'/api/acot': {
|
||||
target: 'http://localhost:3007',
|
||||
changeOrigin: true,
|
||||
secure: false
|
||||
}
|
||||
```
|
||||
|
||||
## Key Features
|
||||
|
||||
### Complete Business Intelligence
|
||||
- **Revenue Analytics**: Total revenue, trends, projections
|
||||
- **Order Analysis**: Counts, types, status tracking
|
||||
- **Product Performance**: Top sellers, revenue contribution
|
||||
- **Shipping Intelligence**: Methods, locations, distribution
|
||||
- **Customer Insights**: Order value ranges, patterns
|
||||
- **Operational Metrics**: Refunds, cancellations, peak hours
|
||||
|
||||
### Performance Optimizations
|
||||
- **Connection Pooling**: Efficient database connection management
|
||||
- **Query Optimization**: Indexed queries with proper WHERE clauses
|
||||
- **Caching Strategy**: Frontend caching for detail views
|
||||
- **Batch Processing**: Efficient data aggregation
|
||||
|
||||
### Error Handling
|
||||
- **Database Connectivity**: Graceful handling of connection issues
|
||||
- **Query Failures**: Detailed error logging and user-friendly messages
|
||||
- **Data Validation**: Input sanitization and validation
|
||||
- **Fallback Mechanisms**: Default values for missing data
|
||||
|
||||
## Simplified Elements
|
||||
|
||||
Due to database complexity, some features are simplified:
|
||||
- **Brands**: Shows "Various Brands" (companies table structure complex)
|
||||
- **Categories**: Shows "General" (category relationships complex)
|
||||
|
||||
These can be enhanced in future iterations with proper category mapping.
|
||||
|
||||
## Testing
|
||||
|
||||
Test the server functionality:
|
||||
|
||||
```bash
|
||||
# Health check
|
||||
curl http://localhost:3007/api/acot/test
|
||||
|
||||
# Today's stats
|
||||
curl http://localhost:3007/api/acot/events/stats?timeRange=today
|
||||
|
||||
# Last 30 days with details
|
||||
curl http://localhost:3007/api/acot/events/stats/details?timeRange=last30days&daily=true
|
||||
|
||||
# Top products
|
||||
curl http://localhost:3007/api/acot/events/products?timeRange=thisWeek
|
||||
|
||||
# Revenue projection
|
||||
curl http://localhost:3007/api/acot/events/projection?timeRange=today
|
||||
```
|
||||
|
||||
## Development Notes
|
||||
|
||||
- **No Frontend Changes**: Complete drop-in replacement for Klaviyo
|
||||
- **API Compatibility**: Maintains exact response structure
|
||||
- **Business Logic**: Implements all complex e-commerce calculations
|
||||
- **Scalability**: Designed for production workloads
|
||||
- **Maintainability**: Well-documented code with clear separation of concerns
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- Enhanced category and brand mapping
|
||||
- Real-time notifications for significant events
|
||||
- Advanced analytics and forecasting
|
||||
- Customer segmentation analysis
|
||||
- Inventory integration
|
||||
297
inventory-server/dashboard/acot-server/db/connection.js
Normal file
297
inventory-server/dashboard/acot-server/db/connection.js
Normal file
@@ -0,0 +1,297 @@
|
||||
const { Client } = require('ssh2');
|
||||
const mysql = require('mysql2/promise');
|
||||
const fs = require('fs');
|
||||
|
||||
// Connection pool configuration.
// Module-level singleton holding all pool state: idle connections, a result
// cache, the pending-request queue, and circuit-breaker bookkeeping.
const connectionPool = {
  // Idle, reusable connections ({ssh, connection, inUse, created} objects)
  connections: [],
  // Hard cap on simultaneously open SSH-tunnelled DB connections
  maxConnections: 20,
  // Count of connections currently open (idle + in use)
  currentConnections: 0,
  // Callers waiting for a connection when the pool is saturated
  pendingRequests: [],
  // Cache for query results (key: query string, value: {data, timestamp})
  queryCache: new Map(),
  // Cache duration for different query types in milliseconds
  cacheDuration: {
    'stats': 60 * 1000, // 1 minute for stats
    'products': 5 * 60 * 1000, // 5 minutes for products
    'orders': 60 * 1000, // 1 minute for orders
    'default': 60 * 1000 // 1 minute default
  },
  // Circuit breaker state: after `threshold` consecutive connect failures the
  // breaker opens and connection attempts fail fast for `timeout` ms.
  circuitBreaker: {
    failures: 0,
    lastFailure: 0,
    isOpen: false,
    threshold: 5,
    timeout: 30000 // 30 seconds
  }
};
|
||||
|
||||
/**
 * Get a database connection from the pool.
 *
 * Resolution order:
 *   1. Fail fast if the circuit breaker is open (too many recent failures),
 *      resetting it once the cool-down has elapsed.
 *   2. Reuse an idle pooled connection.
 *   3. Open a new SSH-tunnelled MySQL connection if below maxConnections.
 *   4. Otherwise queue the request; it is served later by releaseConnection()
 *      or rejected after a 15-second queue timeout.
 *
 * Fixes vs. previous revision:
 *   - no longer uses an async Promise executor (errors thrown inside an async
 *     executor are silently lost instead of rejecting the promise);
 *   - a connection taken from the idle pool is now marked inUse = true,
 *     matching the bookkeeping done on every other path.
 *
 * @returns {Promise<{connection: object, release: function}>} The database connection and release function
 */
async function getDbConnection() {
  const now = Date.now();

  // Circuit breaker: short-circuit while open, reset after the cool-down.
  if (connectionPool.circuitBreaker.isOpen) {
    if (now - connectionPool.circuitBreaker.lastFailure > connectionPool.circuitBreaker.timeout) {
      connectionPool.circuitBreaker.isOpen = false;
      connectionPool.circuitBreaker.failures = 0;
      console.log('Circuit breaker reset');
    } else {
      throw new Error('Circuit breaker is open - too many connection failures');
    }
  }

  // Fast path: reuse an idle pooled connection.
  if (connectionPool.connections.length > 0) {
    const conn = connectionPool.connections.pop();
    conn.inUse = true; // previously left false on the reuse path
    console.log(`Using pooled connection. Pool size: ${connectionPool.connections.length}`);
    return {
      connection: conn.connection,
      release: () => releaseConnection(conn)
    };
  }

  // Below the cap: open a fresh SSH tunnel + MySQL connection.
  if (connectionPool.currentConnections < connectionPool.maxConnections) {
    try {
      console.log(`Creating new connection. Current: ${connectionPool.currentConnections}/${connectionPool.maxConnections}`);
      // Reserve the slot before the (slow) connect so concurrent callers
      // cannot all race past the cap check.
      connectionPool.currentConnections++;

      const { ssh, stream, dbConfig } = await setupSshTunnel();

      const connection = await mysql.createConnection({
        ...dbConfig,
        stream
      });

      const conn = { ssh, connection, inUse: true, created: Date.now() };

      console.log('Database connection established');

      // Any successful connect clears accumulated breaker state.
      if (connectionPool.circuitBreaker.failures > 0) {
        connectionPool.circuitBreaker.failures = 0;
        connectionPool.circuitBreaker.isOpen = false;
      }

      return {
        connection: conn.connection,
        release: () => releaseConnection(conn)
      };
    } catch (error) {
      // Give the slot back and record the failure for the breaker.
      connectionPool.currentConnections--;

      connectionPool.circuitBreaker.failures++;
      connectionPool.circuitBreaker.lastFailure = Date.now();

      if (connectionPool.circuitBreaker.failures >= connectionPool.circuitBreaker.threshold) {
        connectionPool.circuitBreaker.isOpen = true;
        console.log(`Circuit breaker opened after ${connectionPool.circuitBreaker.failures} failures`);
      }

      throw error;
    }
  }

  // Pool is saturated: park the request until releaseConnection() serves it.
  console.log('Connection pool full, queuing request...');
  return new Promise((resolve, reject) => {
    const timeoutId = setTimeout(() => {
      // Remove from the queue if the request has not been served yet.
      const index = connectionPool.pendingRequests.findIndex(req => req.resolve === resolve);
      if (index !== -1) {
        connectionPool.pendingRequests.splice(index, 1);
        reject(new Error('Connection pool queue timeout after 15 seconds'));
      }
    }, 15000);

    connectionPool.pendingRequests.push({
      resolve,
      reject,
      timeoutId,
      timestamp: Date.now()
    });
  });
}
|
||||
|
||||
/**
 * Release a connection back to the pool.
 *
 * If a caller is waiting in the queue, the connection is handed straight to
 * it (and its queue-timeout cancelled); otherwise it goes back to the idle
 * pool.
 */
function releaseConnection(conn) {
  conn.inUse = false;

  const waiter = connectionPool.pendingRequests.shift();
  if (waiter) {
    // Cancel the queue timeout since this request is now being served.
    if (waiter.timeoutId) {
      clearTimeout(waiter.timeoutId);
    }

    conn.inUse = true;
    console.log(`Serving queued request. Queue length: ${connectionPool.pendingRequests.length}`);
    waiter.resolve({
      connection: conn.connection,
      release: () => releaseConnection(conn)
    });
    return;
  }

  // Nobody waiting: park the connection in the idle pool.
  connectionPool.connections.push(conn);
  console.log(`Connection returned to pool. Pool size: ${connectionPool.connections.length}, Active: ${connectionPool.currentConnections}`);
}
|
||||
|
||||
/**
 * Get cached query results or execute query if not cached.
 *
 * Fix vs. previous revision: an expired entry is now deleted when detected,
 * so the Map does not retain dead data if queryFn subsequently throws
 * (previously stale entries were only ever overwritten, never evicted).
 *
 * @param {string} cacheKey - Unique key to identify the query
 * @param {string} queryType - Type of query (stats, products, orders, etc.)
 * @param {Function} queryFn - Function to execute if cache miss
 * @returns {Promise<any>} The query result
 */
async function getCachedQuery(cacheKey, queryType, queryFn) {
  // Get cache duration based on query type (falls back to the default TTL).
  const cacheDuration = connectionPool.cacheDuration[queryType] || connectionPool.cacheDuration.default;

  const cachedResult = connectionPool.queryCache.get(cacheKey);
  const now = Date.now();

  // Serve from cache while the entry is still fresh.
  if (cachedResult && (now - cachedResult.timestamp < cacheDuration)) {
    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
    return cachedResult.data;
  }

  // Evict the stale entry so it cannot outlive a failed refresh below.
  if (cachedResult) {
    connectionPool.queryCache.delete(cacheKey);
  }

  // No valid cache found, execute the query.
  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
  const result = await queryFn();

  // Cache the fresh result.
  connectionPool.queryCache.set(cacheKey, {
    data: result,
    timestamp: now
  });

  return result;
}
|
||||
|
||||
/**
 * Setup SSH tunnel to production database.
 *
 * Fix vs. previous revision: a port-forward failure previously called
 * reject(err) but fell through and also invoked resolve() with an undefined
 * stream, and left the SSH session open. The error path now ends the SSH
 * session and returns.
 *
 * NOTE(review): all config comes from environment variables; numeric ports
 * arrive as strings when set (e.g. PROD_SSH_PORT) — ssh2/mysql2 appear to
 * tolerate this, but confirm.
 *
 * @private - Should only be used by getDbConnection
 * @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
 */
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };

  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z'
  };

  return new Promise((resolve, reject) => {
    const ssh = new Client();

    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });

    ssh.on('ready', () => {
      // Forward a local ephemeral port through the tunnel to the DB host.
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          if (err) {
            // Tear down the session; do not fall through to resolve().
            ssh.end();
            reject(err);
            return;
          }
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
|
||||
|
||||
/**
 * Clear cached query results.
 * @param {string} [cacheKey] - Specific cache key to clear (clears all if not provided)
 */
function clearQueryCache(cacheKey) {
  if (!cacheKey) {
    // No key given: wipe the entire cache.
    connectionPool.queryCache.clear();
    console.log('Cleared all query cache');
    return;
  }

  connectionPool.queryCache.delete(cacheKey);
  console.log(`Cleared cache for key: ${cacheKey}`);
}
|
||||
|
||||
/**
 * Force close all active connections.
 * Useful for server shutdown or manual connection reset.
 */
async function closeAllConnections() {
  // Tear down every pooled connection: DB connection first, then its tunnel.
  const pooled = connectionPool.connections;
  for (const conn of pooled) {
    try {
      await conn.connection.end();
      conn.ssh.end();
      console.log('Closed pooled connection');
    } catch (error) {
      console.error('Error closing pooled connection:', error);
    }
  }

  // Reset every piece of pool state back to its initial value.
  connectionPool.connections = [];
  connectionPool.pendingRequests = [];
  connectionPool.currentConnections = 0;
  connectionPool.queryCache.clear();

  console.log('All connections closed and pool reset');
}
|
||||
|
||||
/**
 * Get connection pool status for debugging.
 * Returns a plain snapshot object safe to serialize as JSON.
 */
function getPoolStatus() {
  const { connections, currentConnections, maxConnections, pendingRequests, queryCache } = connectionPool;

  return {
    poolSize: connections.length,
    activeConnections: currentConnections,
    maxConnections: maxConnections,
    pendingRequests: pendingRequests.length,
    cacheSize: queryCache.size,
    // Per-waiter view of the queue: how long each request has been parked.
    queuedRequests: pendingRequests.map((req) => ({
      waitTime: Date.now() - req.timestamp,
      hasTimeout: Boolean(req.timeoutId)
    }))
  };
}
|
||||
|
||||
// Public API of the connection module.
module.exports = {
  getDbConnection,     // acquire a pooled connection ({connection, release})
  getCachedQuery,      // TTL-cached query execution keyed by query type
  clearQueryCache,     // drop one cached entry, or everything
  closeAllConnections, // shutdown hook: close all connections, reset pool
  getPoolStatus        // diagnostics snapshot of pool/cache/queue state
};
|
||||
1543
inventory-server/dashboard/acot-server/package-lock.json
generated
Normal file
1543
inventory-server/dashboard/acot-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
22
inventory-server/dashboard/acot-server/package.json
Normal file
22
inventory-server/dashboard/acot-server/package.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "acot-server",
|
||||
"version": "1.0.0",
|
||||
"description": "A Cherry On Top production database server",
|
||||
"main": "server.js",
|
||||
"scripts": {
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"express": "^4.18.2",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.3.1",
|
||||
"morgan": "^1.10.0",
|
||||
"ssh2": "^1.14.0",
|
||||
"mysql2": "^3.6.5",
|
||||
"compression": "^1.7.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.1"
|
||||
}
|
||||
}
|
||||
955
inventory-server/dashboard/acot-server/routes/events.js
Normal file
955
inventory-server/dashboard/acot-server/routes/events.js
Normal file
@@ -0,0 +1,955 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { getDbConnection, getPoolStatus } = require('../db/connection');
|
||||
const { getTimeRangeConditions, formatBusinessDate, getBusinessDayBounds } = require('../utils/timeUtils');
|
||||
|
||||
// Image URL generation utility.
// Product images live in folders keyed by the first three digits of the
// zero-padded (6-wide) pid; each size variant shares the same stem.
const getImageUrls = (pid, iid = 1) => {
  const root = 'https://sbing.com/i/products/0000/';
  const folder = pid.toString().padStart(6, '0').slice(0, 3);
  const stem = `${root}${folder}/${pid}`;
  return {
    image: `${stem}-t-${iid}.jpg`,
    image_175: `${stem}-175x175-${iid}.jpg`,
    image_full: `${stem}-o-${iid}.jpg`,
    ImgThumb: `${stem}-175x175-${iid}.jpg` // For ProductGrid component
  };
};
|
||||
|
||||
// Main stats endpoint - replaces /api/klaviyo/events/stats.
// Runs a battery of aggregate queries over _order (+joins) for the requested
// time range and assembles one large stats payload. The whole operation races
// against a 15s timeout.
//
// NOTE(review): cancelledCount/cancelledTotal can never be non-zero — the
// WHERE clause filters `order_status > 15` but the CASE counts
// `order_status = 15`. Confirm intended status semantics before fixing.
// NOTE(review): if the timeout wins the race, mainOperation's DB connection
// is never released — a pool slot leaks per timed-out request.
router.get('/stats', async (req, res) => {
  const startTime = Date.now();
  console.log(`[STATS] Starting request for timeRange: ${req.query.timeRange}`);

  // Set a timeout for the entire operation
  const timeoutPromise = new Promise((_, reject) => {
    setTimeout(() => reject(new Error('Request timeout after 15 seconds')), 15000);
  });

  try {
    // All DB work happens inside this closure so it can race the timeout.
    const mainOperation = async () => {
      const { timeRange, startDate, endDate } = req.query;
      console.log(`[STATS] Getting DB connection...`);
      const { connection, release } = await getDbConnection();
      console.log(`[STATS] DB connection obtained in ${Date.now() - startTime}ms`);

      // whereClause references `date_placed`; helpers below rewrite the
      // column name when the query aliases _order as `o`.
      const { whereClause, params, dateRange } = getTimeRangeConditions(timeRange, startDate, endDate);

      // Main order stats query: one row of aggregates over the range.
      const mainStatsQuery = `
        SELECT
          COUNT(*) as orderCount,
          SUM(summary_total) as revenue,
          SUM(stats_prod_pieces) as itemCount,
          AVG(summary_total) as averageOrderValue,
          AVG(stats_prod_pieces) as averageItemsPerOrder,
          SUM(CASE WHEN stats_waiting_preorder > 0 THEN 1 ELSE 0 END) as preOrderCount,
          SUM(CASE WHEN ship_method_selected = 'localpickup' THEN 1 ELSE 0 END) as localPickupCount,
          SUM(CASE WHEN ship_method_selected = 'holdit' THEN 1 ELSE 0 END) as onHoldCount,
          SUM(CASE WHEN order_status IN (100, 92) THEN 1 ELSE 0 END) as shippedCount,
          SUM(CASE WHEN order_status = 15 THEN 1 ELSE 0 END) as cancelledCount,
          SUM(CASE WHEN order_status = 15 THEN summary_total ELSE 0 END) as cancelledTotal
        FROM _order
        WHERE order_status > 15 AND ${whereClause}
      `;

      const [mainStats] = await connection.execute(mainStatsQuery, params);
      const stats = mainStats[0];

      // Refunds query: negative payment rows count as refunds.
      const refundsQuery = `
        SELECT
          COUNT(*) as refundCount,
          ABS(SUM(payment_amount)) as refundTotal
        FROM order_payment op
        JOIN _order o ON op.order_id = o.order_id
        WHERE payment_amount < 0 AND o.order_status > 15 AND ${whereClause.replace('date_placed', 'o.date_placed')}
      `;

      const [refundStats] = await connection.execute(refundsQuery, params);

      // Best revenue day query: top single calendar day by revenue.
      const bestDayQuery = `
        SELECT
          DATE(date_placed) as date,
          SUM(summary_total) as revenue,
          COUNT(*) as orders
        FROM _order
        WHERE order_status > 15 AND ${whereClause}
        GROUP BY DATE(date_placed)
        ORDER BY revenue DESC
        LIMIT 1
      `;

      const [bestDayResult] = await connection.execute(bestDayQuery, params);

      // Peak hour query (for single day periods only).
      // NOTE(review): HOUR(date_placed) is in the DB server's timezone while
      // displayHour is formatted in this process's locale time — confirm both
      // agree for the business timezone.
      let peakHour = null;
      if (['today', 'yesterday'].includes(timeRange)) {
        const peakHourQuery = `
          SELECT
            HOUR(date_placed) as hour,
            COUNT(*) as count
          FROM _order
          WHERE order_status > 15 AND ${whereClause}
          GROUP BY HOUR(date_placed)
          ORDER BY count DESC
          LIMIT 1
        `;

        const [peakHourResult] = await connection.execute(peakHourQuery, params);
        if (peakHourResult.length > 0) {
          const hour = peakHourResult[0].hour;
          const date = new Date();
          date.setHours(hour, 0, 0);
          peakHour = {
            hour,
            count: peakHourResult[0].count,
            displayHour: date.toLocaleString("en-US", { hour: "numeric", hour12: true })
          };
        }
      }

      // Brands and categories query - simplified for now since we don't have
      // the category tables; both return a single placeholder-labelled row.
      const brandsQuery = `
        SELECT
          'Various Brands' as brandName,
          COUNT(DISTINCT oi.order_id) as orderCount,
          SUM(oi.qty_ordered) as itemCount,
          SUM(oi.qty_ordered * oi.prod_price) as revenue
        FROM order_items oi
        JOIN _order o ON oi.order_id = o.order_id
        JOIN products p ON oi.prod_pid = p.pid
        WHERE o.order_status > 15 AND ${whereClause.replace('date_placed', 'o.date_placed')}
        HAVING revenue > 0
      `;

      const [brandsResult] = await connection.execute(brandsQuery, params);

      // For categories, we'll use a simplified approach (same shape as brands).
      const categoriesQuery = `
        SELECT
          'General' as categoryName,
          COUNT(DISTINCT oi.order_id) as orderCount,
          SUM(oi.qty_ordered) as itemCount,
          SUM(oi.qty_ordered * oi.prod_price) as revenue
        FROM order_items oi
        JOIN _order o ON oi.order_id = o.order_id
        JOIN products p ON oi.prod_pid = p.pid
        WHERE o.order_status > 15 AND ${whereClause.replace('date_placed', 'o.date_placed')}
        HAVING revenue > 0
      `;

      const [categoriesResult] = await connection.execute(categoriesQuery, params);

      // Shipping locations query: only shipped statuses (100, 92) counted.
      const shippingQuery = `
        SELECT
          ship_country,
          ship_state,
          ship_method_selected,
          COUNT(*) as count
        FROM _order
        WHERE order_status IN (100, 92) AND ${whereClause}
        GROUP BY ship_country, ship_state, ship_method_selected
      `;

      const [shippingResult] = await connection.execute(shippingQuery, params);

      // Aggregate the raw grouped rows into country/state/method breakdowns.
      const shippingStats = processShippingData(shippingResult, stats.shippedCount);

      // Order value range query: cheapest and most expensive order.
      const orderRangeQuery = `
        SELECT
          MIN(summary_total) as smallest,
          MAX(summary_total) as largest
        FROM _order
        WHERE order_status > 15 AND ${whereClause}
      `;

      const [orderRangeResult] = await connection.execute(orderRangeQuery, params);

      // Calculate period progress for incomplete periods (used to project
      // revenue to the end of the period).
      let periodProgress = 100;
      if (['today', 'thisWeek', 'thisMonth'].includes(timeRange)) {
        periodProgress = calculatePeriodProgress(timeRange);
      }

      // Previous period comparison data (helper defined elsewhere in file).
      const prevPeriodData = await getPreviousPeriodData(connection, timeRange, startDate, endDate);

      // Assemble the Klaviyo-compatible response payload.
      const response = {
        timeRange: dateRange,
        stats: {
          revenue: parseFloat(stats.revenue || 0),
          orderCount: parseInt(stats.orderCount || 0),
          itemCount: parseInt(stats.itemCount || 0),
          averageOrderValue: parseFloat(stats.averageOrderValue || 0),
          averageItemsPerOrder: parseFloat(stats.averageItemsPerOrder || 0),

          // Order types as counts + share of all orders
          orderTypes: {
            preOrders: {
              count: parseInt(stats.preOrderCount || 0),
              percentage: stats.orderCount > 0 ? (stats.preOrderCount / stats.orderCount) * 100 : 0
            },
            localPickup: {
              count: parseInt(stats.localPickupCount || 0),
              percentage: stats.orderCount > 0 ? (stats.localPickupCount / stats.orderCount) * 100 : 0
            },
            heldItems: {
              count: parseInt(stats.onHoldCount || 0),
              percentage: stats.orderCount > 0 ? (stats.onHoldCount / stats.orderCount) * 100 : 0
            }
          },

          // Shipping
          shipping: {
            shippedCount: parseInt(stats.shippedCount || 0),
            locations: shippingStats.locations,
            methodStats: shippingStats.methods
          },

          // Brands and categories (placeholder single-row results, capped at 50)
          brands: {
            total: brandsResult.length,
            list: brandsResult.slice(0, 50).map(brand => ({
              name: brand.brandName,
              count: parseInt(brand.itemCount),
              revenue: parseFloat(brand.revenue)
            }))
          },

          categories: {
            total: categoriesResult.length,
            list: categoriesResult.slice(0, 50).map(category => ({
              name: category.categoryName,
              count: parseInt(category.itemCount),
              revenue: parseFloat(category.revenue)
            }))
          },

          // Refunds and cancellations
          refunds: {
            total: parseFloat(refundStats[0]?.refundTotal || 0),
            count: parseInt(refundStats[0]?.refundCount || 0)
          },

          // NOTE(review): always zero — see endpoint-level note.
          canceledOrders: {
            total: parseFloat(stats.cancelledTotal || 0),
            count: parseInt(stats.cancelledCount || 0)
          },

          // Best day
          bestRevenueDay: bestDayResult.length > 0 ? {
            amount: parseFloat(bestDayResult[0].revenue),
            displayDate: bestDayResult[0].date,
            orders: parseInt(bestDayResult[0].orders)
          } : null,

          // Peak hour (for single days)
          peakOrderHour: peakHour,

          // Order value range
          orderValueRange: orderRangeResult.length > 0 ? {
            smallest: parseFloat(orderRangeResult[0].smallest || 0),
            largest: parseFloat(orderRangeResult[0].largest || 0)
          } : { smallest: 0, largest: 0 },

          // Period progress and linear revenue projection
          periodProgress,
          projectedRevenue: periodProgress < 100 ? (stats.revenue / (periodProgress / 100)) : stats.revenue,

          // Previous period comparison
          prevPeriodRevenue: prevPeriodData.revenue,
          prevPeriodOrders: prevPeriodData.orderCount,
          prevPeriodAOV: prevPeriodData.averageOrderValue
        }
      };

      // release is returned (not called here) so the winner of the race below
      // decides when to free the connection.
      return { response, release };
    };

    // Race between the main operation and timeout
    let result;
    try {
      result = await Promise.race([mainOperation(), timeoutPromise]);
    } catch (error) {
      // If it's a timeout, we don't have a release function to call
      if (error.message.includes('timeout')) {
        console.log(`[STATS] Request timed out in ${Date.now() - startTime}ms`);
        throw error;
      }
      // For other errors, re-throw
      throw error;
    }

    const { response, release } = result;

    // Release connection back to pool
    if (release) release();

    console.log(`[STATS] Request completed in ${Date.now() - startTime}ms`);
    res.json(response);

  } catch (error) {
    console.error('Error in /stats:', error);
    console.log(`[STATS] Request failed in ${Date.now() - startTime}ms`);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// Daily details endpoint - replaces /api/klaviyo/events/stats/details.
// Returns a per-day breakdown for the range plus previous-period figures
// matched by relative day position (day N of this period vs day N of last).
router.get('/stats/details', async (req, res) => {
  let release;
  try {
    // `metric` and `daily` are accepted but not used by this implementation.
    const { timeRange, startDate, endDate, metric, daily } = req.query;
    const { connection, release: releaseConn } = await getDbConnection();
    release = releaseConn;

    const { whereClause, params } = getTimeRangeConditions(timeRange, startDate, endDate);

    // Daily breakdown query
    const dailyQuery = `
      SELECT
        DATE(date_placed) as date,
        COUNT(*) as orders,
        SUM(summary_total) as revenue,
        AVG(summary_total) as averageOrderValue,
        SUM(stats_prod_pieces) as itemCount
      FROM _order
      WHERE order_status > 15 AND ${whereClause}
      GROUP BY DATE(date_placed)
      ORDER BY DATE(date_placed)
    `;

    const [dailyResults] = await connection.execute(dailyQuery, params);

    // Get previous period data using the same logic as main stats endpoint
    let prevWhereClause, prevParams;

    if (timeRange && timeRange !== 'custom') {
      // Named ranges: shift via helper (defined elsewhere in this file).
      const prevTimeRange = getPreviousTimeRange(timeRange);
      const result = getTimeRangeConditions(prevTimeRange);
      prevWhereClause = result.whereClause;
      prevParams = result.params;
    } else {
      // Custom date range - go back by the same duration
      const start = new Date(startDate);
      const end = new Date(endDate);
      const duration = end.getTime() - start.getTime();

      const prevEnd = new Date(start.getTime() - 1);
      const prevStart = new Date(prevEnd.getTime() - duration);

      prevWhereClause = 'date_placed >= ? AND date_placed <= ?';
      prevParams = [prevStart.toISOString(), prevEnd.toISOString()];
    }

    // Get previous period daily data
    const prevQuery = `
      SELECT
        DATE(date_placed) as date,
        COUNT(*) as prevOrders,
        SUM(summary_total) as prevRevenue,
        AVG(summary_total) as prevAvgOrderValue
      FROM _order
      WHERE order_status > 15 AND ${prevWhereClause}
      GROUP BY DATE(date_placed)
    `;

    const [prevResults] = await connection.execute(prevQuery, prevParams);

    // Create a map for quick lookup of previous period data.
    // NOTE(review): prevMap is built but never read below — dead code;
    // the comparison uses array index matching instead.
    const prevMap = new Map();
    prevResults.forEach(prev => {
      const key = new Date(prev.date).toISOString().split('T')[0];
      prevMap.set(key, prev);
    });

    // For period-to-period comparison, we need to map days by relative position
    // since dates won't match exactly (e.g. current week vs previous week)
    const dailyArray = dailyResults.map(day => ({
      timestamp: day.date,
      date: day.date,
      orders: parseInt(day.orders),
      revenue: parseFloat(day.revenue),
      averageOrderValue: parseFloat(day.averageOrderValue || 0),
      itemCount: parseInt(day.itemCount)
    }));

    const prevArray = prevResults.map(day => ({
      orders: parseInt(day.prevOrders),
      revenue: parseFloat(day.prevRevenue),
      averageOrderValue: parseFloat(day.prevAvgOrderValue || 0)
    }));

    // Combine current and previous period data by matching relative positions;
    // days with no previous counterpart get zeros.
    const statsWithComparison = dailyArray.map((day, index) => {
      const prev = prevArray[index] || { orders: 0, revenue: 0, averageOrderValue: 0 };

      return {
        ...day,
        prevOrders: prev.orders,
        prevRevenue: prev.revenue,
        prevAvgOrderValue: prev.averageOrderValue
      };
    });

    res.json({ stats: statsWithComparison });

  } catch (error) {
    console.error('Error in /stats/details:', error);
    res.status(500).json({ error: error.message });
  } finally {
    // Release connection back to pool
    if (release) release();
  }
});
|
||||
|
||||
// Financial performance endpoint.
// Totals, previous-period comparison, and a trend series built from helpers
// (buildFinancialTotalsQuery, normalizeFinancialTotals, etc.) defined
// elsewhere in this file. Financial queries filter on `date_change` rather
// than `date_placed`, hence the regex rewrite of the where clause.
router.get('/financials', async (req, res) => {
  let release;
  try {
    const { timeRange, startDate, endDate } = req.query;
    const { connection, release: releaseConn } = await getDbConnection();
    release = releaseConn;

    const { whereClause, params, dateRange } = getTimeRangeConditions(timeRange, startDate, endDate);
    // Financial figures key off the status-change date, not the placed date.
    const financialWhere = whereClause.replace(/date_placed/g, 'date_change');

    const [totalsRows] = await connection.execute(
      buildFinancialTotalsQuery(financialWhere),
      params
    );

    const totals = normalizeFinancialTotals(totalsRows[0]);

    const [trendRows] = await connection.execute(
      buildFinancialTrendQuery(financialWhere),
      params
    );

    const trend = trendRows.map(normalizeFinancialTrendRow);

    let previousTotals = null;
    let comparison = null;

    // Previous-period comparison is optional: only when the helper can derive
    // a preceding range for the requested window.
    const previousRange = getPreviousPeriodRange(timeRange, startDate, endDate);
    if (previousRange) {
      const prevWhere = previousRange.whereClause.replace(/date_placed/g, 'date_change');
      const [previousRows] = await connection.execute(
        buildFinancialTotalsQuery(prevWhere),
        previousRange.params
      );
      previousTotals = normalizeFinancialTotals(previousRows[0]);
      // Metric-by-metric deltas vs the previous period.
      comparison = {
        grossSales: calculateComparison(totals.grossSales, previousTotals.grossSales),
        refunds: calculateComparison(totals.refunds, previousTotals.refunds),
        taxCollected: calculateComparison(totals.taxCollected, previousTotals.taxCollected),
        cogs: calculateComparison(totals.cogs, previousTotals.cogs),
        netRevenue: calculateComparison(totals.netRevenue, previousTotals.netRevenue),
        profit: calculateComparison(totals.profit, previousTotals.profit),
        margin: calculateComparison(totals.margin, previousTotals.margin),
      };
    }

    // previousTotals/comparison are null when no previous range applies.
    res.json({
      dateRange,
      totals,
      previousTotals,
      comparison,
      trend,
    });
  } catch (error) {
    console.error('Error in /financials:', error);
    res.status(500).json({ error: error.message });
  } finally {
    if (release) release();
  }
});
|
||||
|
||||
// Products endpoint - replaces /api/klaviyo/events/products.
// Top 500 products by revenue in the range, each decorated with CDN image
// URLs derived from pid + primary image iid.
router.get('/products', async (req, res) => {
  let release;
  try {
    const { timeRange, startDate, endDate } = req.query;
    const { connection, release: releaseConn } = await getDbConnection();
    release = releaseConn;

    const { whereClause, params } = getTimeRangeConditions(timeRange, startDate, endDate);

    // NOTE(review): the subquery filters `pi.order = 255` with `order`
    // unquoted — a reserved word in MySQL; confirm this parses against the
    // production server (it may need backticks).
    const productsQuery = `
      SELECT
        p.pid,
        p.description as name,
        SUM(oi.qty_ordered) as totalQuantity,
        SUM(oi.qty_ordered * oi.prod_price) as totalRevenue,
        COUNT(DISTINCT oi.order_id) as orderCount,
        (SELECT pi.iid FROM product_images pi WHERE pi.pid = p.pid AND pi.order = 255 LIMIT 1) as primary_iid
      FROM order_items oi
      JOIN _order o ON oi.order_id = o.order_id
      JOIN products p ON oi.prod_pid = p.pid
      WHERE o.order_status > 15 AND ${whereClause.replace('date_placed', 'o.date_placed')}
      GROUP BY p.pid, p.description
      ORDER BY totalRevenue DESC
      LIMIT 500
    `;

    const [productsResult] = await connection.execute(productsQuery, params);

    // Add image URLs to each product (falls back to image index 1 when the
    // product has no primary image row).
    const productsWithImages = productsResult.map(product => {
      const imageUrls = getImageUrls(product.pid, product.primary_iid || 1);
      return {
        id: product.pid,
        name: product.name,
        totalQuantity: parseInt(product.totalQuantity),
        totalRevenue: parseFloat(product.totalRevenue),
        orderCount: parseInt(product.orderCount),
        ...imageUrls
      };
    });

    // Response shape mirrors the old Klaviyo endpoint.
    res.json({
      stats: {
        products: {
          total: productsWithImages.length,
          list: productsWithImages
        }
      }
    });

  } catch (error) {
    console.error('Error in /products:', error);
    res.status(500).json({ error: error.message });
  } finally {
    // Release connection back to pool
    if (release) release();
  }
});
|
||||
|
||||
// Projection endpoint - replaces /api/klaviyo/events/projection.
// Estimates end-of-period revenue for in-progress periods by combining
// current progress with historical patterns (helpers defined elsewhere).
router.get('/projection', async (req, res) => {
  let release;
  try {
    const { timeRange, startDate, endDate } = req.query;

    // Only provide projections for incomplete periods; completed ranges get
    // a zero-confidence placeholder. (Connection acquired after this guard,
    // so nothing to release on early return.)
    if (!['today', 'thisWeek', 'thisMonth'].includes(timeRange)) {
      return res.json({ projectedRevenue: 0, confidence: 0 });
    }

    const { connection, release: releaseConn } = await getDbConnection();
    release = releaseConn;

    // Get current period data
    const { whereClause, params } = getTimeRangeConditions(timeRange, startDate, endDate);

    const currentQuery = `
      SELECT
        SUM(summary_total) as currentRevenue,
        COUNT(*) as currentOrders
      FROM _order
      WHERE order_status > 15 AND ${whereClause}
    `;

    const [currentResult] = await connection.execute(currentQuery, params);
    const current = currentResult[0];

    // Get historical data for the same period type (helper elsewhere in file)
    const historicalQuery = await getHistoricalProjectionData(connection, timeRange);

    // Calculate projection based on current progress and historical patterns
    const periodProgress = calculatePeriodProgress(timeRange);
    const projection = calculateSmartProjection(
      parseFloat(current.currentRevenue || 0),
      parseInt(current.currentOrders || 0),
      periodProgress,
      historicalQuery
    );

    res.json(projection);

  } catch (error) {
    console.error('Error in /projection:', error);
    res.status(500).json({ error: error.message });
  } finally {
    // Release connection back to pool
    if (release) release();
  }
});
|
||||
|
||||
// Debug endpoint to check connection pool status
router.get('/debug/pool', (req, res) => {
  // Pool stats come straight from the connection module; no DB round trip.
  const status = getPoolStatus();
  res.json(status);
});
|
||||
|
||||
// Health check endpoint
// Checks out a pooled DB connection, runs a trivial query, and reports pool
// statistics either way (200 healthy / 500 unhealthy).
router.get('/health', async (req, res) => {
  let release;
  try {
    const checkout = await getDbConnection();
    release = checkout.release;

    // Simple query to test connection
    const [result] = await checkout.connection.execute('SELECT 1 as test');

    res.json({
      status: 'healthy',
      timestamp: new Date().toISOString(),
      pool: getPoolStatus(),
      dbTest: result[0]
    });
  } catch (error) {
    res.status(500).json({
      status: 'unhealthy',
      error: error.message,
      timestamp: new Date().toISOString(),
      pool: getPoolStatus()
    });
  } finally {
    // Bug fix: previously release() was only called on the success path, so
    // a failing test query leaked the checked-out connection.
    if (release) release();
  }
});
|
||||
|
||||
// Helper functions
|
||||
// Aggregate raw shipping rows into country/state/method breakdowns.
//
// @param shippingResult rows carrying ship_country, ship_state,
//        ship_method_selected and a pre-aggregated numeric `count`.
// @param totalShipped   total shipped orders, used as the percentage base.
// @returns { locations: { total, byCountry, byState }, methods } with each
//          list sorted by descending count.
function processShippingData(shippingResult, totalShipped) {
  const countries = {};
  const states = {};
  const methods = {};

  // Tally counts per dimension; rows with a null/empty key are skipped.
  shippingResult.forEach(row => {
    if (row.ship_country) {
      countries[row.ship_country] = (countries[row.ship_country] || 0) + row.count;
    }
    if (row.ship_state) {
      states[row.ship_state] = (states[row.ship_state] || 0) + row.count;
    }
    if (row.ship_method_selected) {
      methods[row.ship_method_selected] = (methods[row.ship_method_selected] || 0) + row.count;
    }
  });

  // Bug fix: guard against totalShipped === 0, which previously produced
  // NaN percentages for every entry.
  const toPercentage = (count) => (totalShipped ? (count / totalShipped) * 100 : 0);

  return {
    locations: {
      total: totalShipped,
      byCountry: Object.entries(countries)
        .map(([country, count]) => ({ country, count, percentage: toPercentage(count) }))
        .sort((a, b) => b.count - a.count),
      byState: Object.entries(states)
        .map(([state, count]) => ({ state, count, percentage: toPercentage(count) }))
        .sort((a, b) => b.count - a.count)
    },
    methods: Object.entries(methods)
      .map(([name, value]) => ({ name, value }))
      .sort((a, b) => b.value - a.value)
  };
}
|
||||
|
||||
// Percentage (0-100) of the selected business period that has elapsed,
// measured against Eastern time approximated as a fixed UTC-5 offset.
// NOTE(review): the fixed offset ignores DST — confirm this is intended.
function calculatePeriodProgress(timeRange) {
  const easternNow = new Date(Date.now() - 5 * 60 * 60 * 1000); // UTC-5

  // Clamp elapsed/total to the 0-100 range.
  const progressBetween = (startMs, endMs) => {
    const ratio = (easternNow.getTime() - startMs) / (endMs - startMs);
    return Math.min(100, Math.max(0, ratio * 100));
  };

  switch (timeRange) {
    case 'today': {
      const { start } = getBusinessDayBounds('today');
      const dayStart = new Date(start);
      const dayEnd = new Date(dayStart);
      dayEnd.setDate(dayEnd.getDate() + 1);
      dayEnd.setHours(0, 59, 59, 999); // 12:59 AM next day
      return progressBetween(dayStart.getTime(), dayEnd.getTime());
    }
    case 'thisWeek': {
      const weekStart = new Date(easternNow);
      weekStart.setDate(easternNow.getDate() - easternNow.getDay()); // Sunday
      weekStart.setHours(1, 0, 0, 0); // 1 AM business day start
      const weekEnd = new Date(weekStart);
      weekEnd.setDate(weekEnd.getDate() + 7);
      return progressBetween(weekStart.getTime(), weekEnd.getTime());
    }
    case 'thisMonth': {
      const monthStart = new Date(easternNow.getFullYear(), easternNow.getMonth(), 1, 1, 0, 0, 0);
      const monthEnd = new Date(easternNow.getFullYear(), easternNow.getMonth() + 1, 1, 0, 59, 59, 999);
      return progressBetween(monthStart.getTime(), monthEnd.getTime());
    }
    default:
      // Completed/named historical periods are fully elapsed by definition.
      return 100;
  }
}
|
||||
|
||||
// SQL for period-level financial totals. `whereClause` is interpolated, so
// it must come from the trusted range builders (getTimeRangeConditions),
// never from raw user input.
function buildFinancialTotalsQuery(whereClause) {
  const sql = `
    SELECT
      COALESCE(SUM(sale_amount), 0) as grossSales,
      COALESCE(SUM(refund_amount), 0) as refunds,
      COALESCE(SUM(tax_collected_amount), 0) as taxCollected,
      COALESCE(SUM(cogs_amount), 0) as cogs
    FROM report_sales_data
    WHERE ${whereClause}
  `;
  return sql;
}
|
||||
|
||||
// SQL for the per-day financial trend, bucketed on DATE(date_change).
// `whereClause` is interpolated and must come from trusted range builders.
function buildFinancialTrendQuery(whereClause) {
  const sql = `
    SELECT
      DATE(date_change) as date,
      SUM(sale_amount) as grossSales,
      SUM(refund_amount) as refunds,
      SUM(tax_collected_amount) as taxCollected,
      SUM(cogs_amount) as cogs
    FROM report_sales_data
    WHERE ${whereClause}
    GROUP BY DATE(date_change)
    ORDER BY date ASC
  `;
  return sql;
}
|
||||
|
||||
// Convert raw SQL aggregate values (strings or numbers) into numeric
// financial totals, deriving net sales, net revenue, profit and margin.
function normalizeFinancialTotals(row = {}) {
  const num = (value) => parseFloat(value || 0);

  const grossSales = num(row.grossSales);
  const refunds = num(row.refunds);
  const taxCollected = num(row.taxCollected);
  const cogs = num(row.cogs);

  const netSales = grossSales - refunds;
  const netRevenue = netSales - taxCollected;
  const profit = netRevenue - cogs;
  // Avoid a divide-by-zero when the period had no net revenue.
  const margin = netRevenue !== 0 ? (profit / netRevenue) * 100 : 0;

  return {
    grossSales,
    refunds,
    taxCollected,
    cogs,
    netSales,
    netRevenue,
    profit,
    margin,
  };
}
|
||||
|
||||
// Normalize one trend-bucket row: numeric financials plus a UTC-midnight
// ISO timestamp derived from the bucket date.
function normalizeFinancialTrendRow(row = {}) {
  const num = (value) => parseFloat(value || 0);

  const grossSales = num(row.grossSales);
  const refunds = num(row.refunds);
  const taxCollected = num(row.taxCollected);
  const cogs = num(row.cogs);

  const netSales = grossSales - refunds;
  const netRevenue = netSales - taxCollected;
  const profit = netRevenue - cogs;
  const margin = netRevenue !== 0 ? (profit / netRevenue) * 100 : 0;

  // The driver may hand back either a Date instance or a 'YYYY-MM-DD'
  // string; anything else leaves timestamp null.
  let timestamp = null;
  if (row.date instanceof Date) {
    timestamp = new Date(row.date.getTime()).toISOString();
  } else if (typeof row.date === 'string') {
    timestamp = new Date(`${row.date}T00:00:00Z`).toISOString();
  }

  return {
    date: row.date,
    grossSales,
    refunds,
    taxCollected,
    cogs,
    netSales,
    netRevenue,
    profit,
    margin,
    timestamp,
  };
}
|
||||
|
||||
// Absolute and percentage delta between two metric values. Percentage uses
// |previous| as the base; both fields are null when previous is not a
// number, and percentage is null when previous is 0.
function calculateComparison(currentValue, previousValue) {
  if (typeof previousValue !== 'number') {
    return { absolute: null, percentage: null };
  }

  let absolute = null;
  if (typeof currentValue === 'number') {
    absolute = currentValue - previousValue;
  }

  let percentage = null;
  if (absolute !== null && previousValue !== 0) {
    percentage = (absolute / Math.abs(previousValue)) * 100;
  }

  return { absolute, percentage };
}
|
||||
|
||||
// Resolve the WHERE clause/params for the period immediately preceding the
// requested one. Returns null whenever no sensible previous period exists.
function getPreviousPeriodRange(timeRange, startDate, endDate) {
  // Named ranges map directly to a named previous range.
  if (timeRange && timeRange !== 'custom') {
    const prevTimeRange = getPreviousTimeRange(timeRange);
    // Identity mapping means "no previous period defined" — bail out.
    if (!prevTimeRange || prevTimeRange === timeRange) {
      return null;
    }
    return getTimeRangeConditions(prevTimeRange);
  }

  // Custom (or absent) range requires an explicit, valid start/end pair.
  const hasCustomDates = (timeRange === 'custom' || !timeRange) && startDate && endDate;
  if (!hasCustomDates) {
    return null;
  }

  const start = new Date(startDate);
  const end = new Date(endDate);
  if (Number.isNaN(start.getTime()) || Number.isNaN(end.getTime())) {
    return null;
  }

  const duration = end.getTime() - start.getTime();
  if (!Number.isFinite(duration) || duration <= 0) {
    return null;
  }

  // Previous window: same duration, ending 1 ms before the current start.
  const prevEnd = new Date(start.getTime() - 1);
  const prevStart = new Date(prevEnd.getTime() - duration);

  return getTimeRangeConditions('custom', prevStart.toISOString(), prevEnd.toISOString());
}
|
||||
|
||||
// Fetch order count / revenue / AOV for the period preceding the requested
// one (named range, or a custom start/end pair shifted back by its own
// duration).
async function getPreviousPeriodData(connection, timeRange, startDate, endDate) {
  let prevWhereClause;
  let prevParams;

  if (timeRange && timeRange !== 'custom') {
    // Named range: delegate to the shared range helpers.
    const prevTimeRange = getPreviousTimeRange(timeRange);
    const conditions = getTimeRangeConditions(prevTimeRange);
    prevWhereClause = conditions.whereClause;
    prevParams = conditions.params;
  } else {
    // Custom date range - go back by the same duration
    const start = new Date(startDate);
    const end = new Date(endDate);
    const duration = end.getTime() - start.getTime();

    const prevEnd = new Date(start.getTime() - 1);
    const prevStart = new Date(prevEnd.getTime() - duration);

    prevWhereClause = 'date_placed >= ? AND date_placed <= ?';
    prevParams = [prevStart.toISOString(), prevEnd.toISOString()];
  }

  const prevQuery = `
    SELECT
      COUNT(*) as orderCount,
      SUM(summary_total) as revenue,
      AVG(summary_total) as averageOrderValue
    FROM _order
    WHERE order_status > 15 AND ${prevWhereClause}
  `;

  const [prevResult] = await connection.execute(prevQuery, prevParams);
  const prev = prevResult[0] || { orderCount: 0, revenue: 0, averageOrderValue: 0 };

  return {
    orderCount: parseInt(prev.orderCount || 0),
    revenue: parseFloat(prev.revenue || 0),
    averageOrderValue: parseFloat(prev.averageOrderValue || 0)
  };
}
|
||||
|
||||
// Map a named time range to the range immediately preceding it. Unknown
// ranges fall through unchanged; callers treat the identity case as
// "no previous period".
function getPreviousTimeRange(timeRange) {
  const previousOf = {
    today: 'yesterday',
    thisWeek: 'lastWeek',
    thisMonth: 'lastMonth',
    last7days: 'previous7days',
    last30days: 'previous30days',
    last90days: 'previous90days',
    yesterday: 'twoDaysAgo'
  };
  return previousOf[timeRange] || timeRange;
}
|
||||
|
||||
// Pull a revenue/order baseline from the trailing 30 days (excluding the
// most recent day) for projection sanity checks.
// NOTE(review): calculateSmartProjection currently ignores this data.
async function getHistoricalProjectionData(connection, timeRange) {
  // This is a simplified version - you could make this more sophisticated
  const sql = `
    SELECT
      SUM(summary_total) as revenue,
      COUNT(*) as orders
    FROM _order
    WHERE order_status > 15
    AND date_placed >= DATE_SUB(NOW(), INTERVAL 30 DAY)
    AND date_placed < DATE_SUB(NOW(), INTERVAL 1 DAY)
  `;

  const [rows] = await connection.execute(sql);
  return rows;
}
|
||||
|
||||
// Project end-of-period revenue/orders by linearly extrapolating current
// totals over the fraction of the period already elapsed.
//
// @param currentRevenue  revenue accumulated so far
// @param currentOrders   orders accumulated so far
// @param periodProgress  percent (0-100) of the period elapsed
// @param historicalData  reserved for future blending; currently unused
// @returns { projectedRevenue, projectedOrders, confidence }
function calculateSmartProjection(currentRevenue, currentOrders, periodProgress, historicalData) {
  if (periodProgress >= 100) {
    return { projectedRevenue: currentRevenue, projectedOrders: currentOrders, confidence: 1.0 };
  }

  // Bug fix: progress of 0 (e.g. right at the start of a business day)
  // previously divided by zero and projected Infinity/NaN. Fall back to the
  // current totals at minimum confidence.
  if (periodProgress <= 0) {
    return { projectedRevenue: currentRevenue, projectedOrders: currentOrders, confidence: 0.1 };
  }

  // Simple linear projection with confidence based on how much of the period has elapsed
  const fractionElapsed = periodProgress / 100;
  const projectedRevenue = currentRevenue / fractionElapsed;
  const projectedOrders = Math.round(currentOrders / fractionElapsed);

  // Confidence increases with more data (higher period progress)
  const confidence = Math.min(0.95, Math.max(0.1, fractionElapsed));

  return {
    projectedRevenue,
    projectedOrders,
    confidence
  };
}
|
||||
|
||||
// Health check endpoint
// NOTE(review): unreachable duplicate — GET /health is already registered
// earlier in this router; Express dispatches to the first matching handler,
// so this one never runs. Consider removing one of the two definitions.
router.get('/health', async (req, res) => {
  try {
    const poolStatus = getPoolStatus();

    // Test database connectivity
    // NOTE(review): if the test query throws, `release` is never called and
    // the connection leaks — wrap in try/finally if this handler is kept.
    const { connection, release } = await getDbConnection();
    await connection.execute('SELECT 1 as test');
    release();

    res.json({
      status: 'healthy',
      timestamp: new Date().toISOString(),
      pool: poolStatus,
      database: 'connected'
    });
  } catch (error) {
    console.error('Health check failed:', error);
    res.status(500).json({
      status: 'unhealthy',
      timestamp: new Date().toISOString(),
      error: error.message,
      pool: getPoolStatus()
    });
  }
});
|
||||
|
||||
// Debug endpoint for pool status
|
||||
router.get('/debug/pool', (req, res) => {
|
||||
res.json({
|
||||
timestamp: new Date().toISOString(),
|
||||
pool: getPoolStatus()
|
||||
});
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
57
inventory-server/dashboard/acot-server/routes/test.js
Normal file
57
inventory-server/dashboard/acot-server/routes/test.js
Normal file
@@ -0,0 +1,57 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { getDbConnection, getCachedQuery } = require('../db/connection');
|
||||
|
||||
// Test endpoint to count orders
// Counts rows in _order, memoized through the shared query cache.
router.get('/order-count', async (req, res) => {
  let release;
  try {
    const checkout = await getDbConnection();
    release = checkout.release;
    const { connection } = checkout;

    // Simple query to count orders from _order table
    const queryFn = async () => {
      const [rows] = await connection.execute('SELECT COUNT(*) as count FROM _order');
      return rows[0].count;
    };

    const cacheKey = 'order-count';
    const count = await getCachedQuery(cacheKey, 'default', queryFn);

    res.json({
      success: true,
      data: {
        orderCount: count,
        timestamp: new Date().toISOString()
      }
    });
  } catch (error) {
    console.error('Error fetching order count:', error);
    res.status(500).json({
      success: false,
      error: error.message
    });
  } finally {
    // Bug fix: the checked-out connection was never released back to the
    // pool, leaking one connection per request.
    if (release) release();
  }
});
|
||||
|
||||
// Test connection endpoint
// Checks out a pooled connection and runs a trivial query to verify the
// database is reachable.
router.get('/test-connection', async (req, res) => {
  let release;
  try {
    const checkout = await getDbConnection();
    release = checkout.release;

    // Test the connection with a simple query
    const [rows] = await checkout.connection.execute('SELECT 1 as test');

    res.json({
      success: true,
      message: 'Database connection successful',
      data: rows[0]
    });
  } catch (error) {
    console.error('Error testing connection:', error);
    res.status(500).json({
      success: false,
      error: error.message
    });
  } finally {
    // Bug fix: the checked-out connection was never released back to the
    // pool, leaking one connection per request.
    if (release) release();
  }
});

module.exports = router;
|
||||
98
inventory-server/dashboard/acot-server/server.js
Normal file
98
inventory-server/dashboard/acot-server/server.js
Normal file
@@ -0,0 +1,98 @@
|
||||
require('dotenv').config();
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const morgan = require('morgan');
|
||||
const compression = require('compression');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { closeAllConnections } = require('./db/connection');
|
||||
|
||||
// Express application for the ACOT dashboard API.
const app = express();
// Port is configurable via ACOT_PORT; defaults to 3012.
const PORT = process.env.ACOT_PORT || 3012;

// Create logs directory if it doesn't exist
const logDir = path.join(__dirname, 'logs/app');
if (!fs.existsSync(logDir)) {
  fs.mkdirSync(logDir, { recursive: true });
}

// Create a write stream for access logs
// Opened in append mode so restarts do not truncate existing logs.
const accessLogStream = fs.createWriteStream(
  path.join(logDir, 'access.log'),
  { flags: 'a' }
);
|
||||
|
||||
// Middleware
// Order matters: compression/cors/body-parsers must run before the routes
// registered below.
app.use(compression());
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// Logging middleware
// Production: combined-format access log written to file; otherwise the
// concise colorized dev format on stdout.
if (process.env.NODE_ENV === 'production') {
  app.use(morgan('combined', { stream: accessLogStream }));
} else {
  app.use(morgan('dev'));
}
|
||||
|
||||
// Health check endpoint
// Lightweight liveness probe — no database access involved.
app.get('/health', (req, res) => {
  res.json({
    status: 'healthy',
    service: 'acot-server',
    timestamp: new Date().toISOString(),
    uptime: process.uptime()
  });
});

// Routes
app.use('/api/acot/test', require('./routes/test'));
app.use('/api/acot/events', require('./routes/events'));
|
||||
|
||||
// Error handling middleware
// Registered after the routes so route errors funnel here; error detail is
// hidden from clients in production.
app.use((err, req, res, next) => {
  console.error('Unhandled error:', err);
  res.status(500).json({
    success: false,
    error: process.env.NODE_ENV === 'production'
      ? 'Internal server error'
      : err.message
  });
});

// 404 handler
// Catch-all — must stay last so it only fires when no route matched.
app.use((req, res) => {
  res.status(404).json({
    success: false,
    error: 'Route not found'
  });
});
|
||||
|
||||
// Start server
// Keep a handle to the HTTP server so the shutdown hooks below can close it.
const server = app.listen(PORT, () => {
  console.log(`ACOT Server running on port ${PORT}`);
  console.log(`Environment: ${process.env.NODE_ENV}`);
});
|
||||
|
||||
// Graceful shutdown
// Stop accepting new requests, drain database connections, then exit.
// Node passes the signal name as the first argument to signal listeners, so
// the log line now reports the actual signal (bug fix: it previously said
// "SIGTERM" even when SIGINT triggered the shutdown).
const gracefulShutdown = async (signal = 'SIGTERM') => {
  console.log(`${signal} signal received: closing HTTP server`);
  server.close(async () => {
    console.log('HTTP server closed');

    // Close database connections
    try {
      await closeAllConnections();
      console.log('Database connections closed');
    } catch (error) {
      console.error('Error closing database connections:', error);
    }

    process.exit(0);
  });
};

process.on('SIGTERM', gracefulShutdown);
process.on('SIGINT', gracefulShutdown);

module.exports = app;
|
||||
259
inventory-server/dashboard/acot-server/utils/timeUtils.js
Normal file
259
inventory-server/dashboard/acot-server/utils/timeUtils.js
Normal file
@@ -0,0 +1,259 @@
|
||||
// Time utilities for handling business day logic and time ranges
// Business day is 1am-12:59am Eastern time (UTC-5)

// Resolve a named time range to concrete { start, end } Date bounds.
// All arithmetic runs on "Eastern" time approximated as a fixed UTC-5
// offset (NOTE(review): this ignores DST — confirm that is acceptable).
// Throws for unrecognized range names.
const getBusinessDayBounds = (timeRange) => {
  const now = new Date();
  const easternTime = new Date(now.getTime() - (5 * 60 * 60 * 1000)); // UTC-5

  switch (timeRange) {
    // Current business day: 1:00 AM today through 12:59 AM tomorrow.
    case 'today': {
      const start = new Date(easternTime);
      start.setHours(1, 0, 0, 0); // 1 AM start of business day

      const end = new Date(start);
      end.setDate(end.getDate() + 1);
      end.setHours(0, 59, 59, 999); // 12:59 AM next day

      return { start, end };
    }

    case 'yesterday': {
      const start = new Date(easternTime);
      start.setDate(start.getDate() - 1);
      start.setHours(1, 0, 0, 0);

      const end = new Date(start);
      end.setDate(end.getDate() + 1);
      end.setHours(0, 59, 59, 999);

      return { start, end };
    }

    // Week-to-date: most recent Sunday 1 AM through the end of today.
    case 'thisWeek': {
      const start = new Date(easternTime);
      start.setDate(easternTime.getDate() - easternTime.getDay()); // Sunday
      start.setHours(1, 0, 0, 0);

      const end = new Date(easternTime);
      end.setDate(end.getDate() + 1);
      end.setHours(0, 59, 59, 999);

      return { start, end };
    }

    case 'lastWeek': {
      const start = new Date(easternTime);
      start.setDate(easternTime.getDate() - easternTime.getDay() - 7); // Previous Sunday
      start.setHours(1, 0, 0, 0);

      const end = new Date(start);
      end.setDate(end.getDate() + 7);
      end.setHours(0, 59, 59, 999);

      return { start, end };
    }

    // Month-to-date: 1st of the month 1 AM through the end of today.
    case 'thisMonth': {
      const start = new Date(easternTime.getFullYear(), easternTime.getMonth(), 1, 1, 0, 0, 0);
      const end = new Date(easternTime);
      end.setDate(end.getDate() + 1);
      end.setHours(0, 59, 59, 999);

      return { start, end };
    }

    case 'lastMonth': {
      const start = new Date(easternTime.getFullYear(), easternTime.getMonth() - 1, 1, 1, 0, 0, 0);
      const end = new Date(easternTime.getFullYear(), easternTime.getMonth(), 1, 0, 59, 59, 999);

      return { start, end };
    }

    // Rolling windows ending with the current business day.
    case 'last7days': {
      const end = new Date(easternTime);
      end.setHours(0, 59, 59, 999);

      const start = new Date(end);
      start.setDate(start.getDate() - 7);
      start.setHours(1, 0, 0, 0);

      return { start, end };
    }

    case 'last30days': {
      const end = new Date(easternTime);
      end.setHours(0, 59, 59, 999);

      const start = new Date(end);
      start.setDate(start.getDate() - 30);
      start.setHours(1, 0, 0, 0);

      return { start, end };
    }

    case 'last90days': {
      const end = new Date(easternTime);
      end.setHours(0, 59, 59, 999);

      const start = new Date(end);
      start.setDate(start.getDate() - 90);
      start.setHours(1, 0, 0, 0);

      return { start, end };
    }

    // Baseline windows ending one day earlier, used for lastNdays
    // comparisons by getPreviousTimeRange.
    case 'previous7days': {
      const end = new Date(easternTime);
      end.setDate(end.getDate() - 1);
      end.setHours(0, 59, 59, 999);

      const start = new Date(end);
      start.setDate(start.getDate() - 6);
      start.setHours(1, 0, 0, 0);

      return { start, end };
    }

    case 'previous30days': {
      const end = new Date(easternTime);
      end.setDate(end.getDate() - 1);
      end.setHours(0, 59, 59, 999);

      const start = new Date(end);
      start.setDate(start.getDate() - 29);
      start.setHours(1, 0, 0, 0);

      return { start, end };
    }

    case 'previous90days': {
      const end = new Date(easternTime);
      end.setDate(end.getDate() - 1);
      end.setHours(0, 59, 59, 999);

      const start = new Date(end);
      start.setDate(start.getDate() - 89);
      start.setHours(1, 0, 0, 0);

      return { start, end };
    }

    case 'twoDaysAgo': {
      const start = new Date(easternTime);
      start.setDate(start.getDate() - 2);
      start.setHours(1, 0, 0, 0);

      const end = new Date(start);
      end.setDate(end.getDate() + 1);
      end.setHours(0, 59, 59, 999);

      return { start, end };
    }

    default:
      throw new Error(`Unknown time range: ${timeRange}`);
  }
};
|
||||
|
||||
// Translate a time range (or explicit custom start/end) into a SQL WHERE
// fragment plus bound params against `date_placed`, and a display label.
const getTimeRangeConditions = (timeRange, startDate, endDate) => {
  // Shared MySQL datetime formatting: 'YYYY-MM-DD HH:MM:SS'.
  const toMySQL = (d) => d.toISOString().slice(0, 19).replace('T', ' ');

  if (timeRange === 'custom' && startDate && endDate) {
    // Custom date range
    const start = new Date(startDate);
    const end = new Date(endDate);

    // Convert to UTC-5 (Eastern time)
    const startUTC5 = new Date(start.getTime() - (5 * 60 * 60 * 1000));
    const endUTC5 = new Date(end.getTime() - (5 * 60 * 60 * 1000));

    return {
      whereClause: 'date_placed >= ? AND date_placed <= ?',
      params: [toMySQL(startUTC5), toMySQL(endUTC5)],
      dateRange: {
        start: startDate,
        end: endDate,
        label: `${formatBusinessDate(start)} - ${formatBusinessDate(end)}`
      }
    };
  }

  // Default to today's business day when no range is supplied.
  const effectiveRange = timeRange || 'today';
  const { start, end } = getBusinessDayBounds(effectiveRange);

  return {
    whereClause: 'date_placed >= ? AND date_placed <= ?',
    params: [toMySQL(start), toMySQL(end)],
    dateRange: {
      start: start.toISOString(),
      end: end.toISOString(),
      label: getTimeRangeLabel(effectiveRange)
    }
  };
};
|
||||
|
||||
// Human-friendly short date, e.g. "Jan 15, 2024" (en-US locale).
const formatBusinessDate = (date) =>
  date.toLocaleDateString('en-US', {
    month: 'short',
    day: 'numeric',
    year: 'numeric'
  });
|
||||
|
||||
// Human-readable label for a named time range; unknown keys pass through
// unchanged.
const getTimeRangeLabel = (timeRange) => {
  const labels = {
    today: 'Today',
    yesterday: 'Yesterday',
    thisWeek: 'This Week',
    lastWeek: 'Last Week',
    thisMonth: 'This Month',
    lastMonth: 'Last Month',
    last7days: 'Last 7 Days',
    last30days: 'Last 30 Days',
    last90days: 'Last 90 Days',
    previous7days: 'Previous 7 Days',
    previous30days: 'Previous 30 Days',
    previous90days: 'Previous 90 Days',
    twoDaysAgo: 'Two Days Ago'
  };
  return labels[timeRange] || timeRange;
};
|
||||
|
||||
// Helper to convert MySQL datetime to JavaScript Date.
// Stored values are wall-clock UTC-5, so shifting +5 hours yields true UTC.
const parseBusinessDate = (mysqlDatetime) => {
  // MySQL's zero-date sentinel (and empty values) mean "no date".
  if (!mysqlDatetime || mysqlDatetime === '0000-00-00 00:00:00') {
    return null;
  }

  // Parse the string as if it were UTC, then compensate for the UTC-5
  // storage offset.
  const date = new Date(mysqlDatetime + ' UTC');
  date.setHours(date.getHours() + 5);
  return date;
};
|
||||
|
||||
// Helper to format date for MySQL queries ('YYYY-MM-DD HH:MM:SS' in the
// UTC-5 wall clock used for storage). Returns null for falsy input.
const formatMySQLDate = (date) => {
  if (!date) return null;

  // Convert to UTC-5 for storage
  const shifted = new Date(date.getTime() - (5 * 60 * 60 * 1000));
  return shifted.toISOString().slice(0, 19).replace('T', ' ');
};
|
||||
|
||||
module.exports = {
|
||||
getBusinessDayBounds,
|
||||
getTimeRangeConditions,
|
||||
formatBusinessDate,
|
||||
getTimeRangeLabel,
|
||||
parseBusinessDate,
|
||||
formatMySQLDate
|
||||
};
|
||||
21
inventory-server/dashboard/aircall-server/.env.example
Normal file
21
inventory-server/dashboard/aircall-server/.env.example
Normal file
@@ -0,0 +1,21 @@
|
||||
# Server Configuration
|
||||
NODE_ENV=development
|
||||
AIRCALL_PORT=3002
|
||||
LOG_LEVEL=info
|
||||
|
||||
# Aircall API Credentials
|
||||
AIRCALL_API_ID=your_aircall_api_id
|
||||
AIRCALL_API_TOKEN=your_aircall_api_token
|
||||
|
||||
# Database Configuration
|
||||
MONGODB_URI=mongodb://localhost:27017/dashboard
|
||||
MONGODB_DB=dashboard
|
||||
REDIS_URL=redis://localhost:6379
|
||||
|
||||
# Service Configuration
|
||||
TIMEZONE=America/New_York
|
||||
DAY_STARTS_AT=1 # Business day starts at 1 AM ET
|
||||
|
||||
# Optional Settings
|
||||
REDIS_TTL=300 # Cache TTL in seconds (5 minutes)
|
||||
COLLECTION_NAME=aircall_daily_data
|
||||
55
inventory-server/dashboard/aircall-server/README.md
Normal file
55
inventory-server/dashboard/aircall-server/README.md
Normal file
@@ -0,0 +1,55 @@
|
||||
# Aircall Server
|
||||
|
||||
A standalone server for handling Aircall metrics and data processing.
|
||||
|
||||
## Setup
|
||||
|
||||
1. Install dependencies:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
2. Set up environment variables:
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
Then edit `.env` with your configuration.
|
||||
|
||||
Required environment variables:
|
||||
- `AIRCALL_API_ID`: Your Aircall API ID
|
||||
- `AIRCALL_API_TOKEN`: Your Aircall API Token
|
||||
- `MONGODB_URI`: MongoDB connection string
|
||||
- `REDIS_URL`: Redis connection string
|
||||
- `AIRCALL_PORT`: Server port (default: 3002)
|
||||
|
||||
## Running the Server
|
||||
|
||||
### Development
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### Production
|
||||
Using PM2:
|
||||
```bash
|
||||
pm2 start ecosystem.config.js --env production
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### GET /api/aircall/metrics/:timeRange
|
||||
Get Aircall metrics for a specific time range.
|
||||
|
||||
Parameters:
|
||||
- `timeRange`: One of ['today', 'yesterday', 'last7days', 'last30days', 'last90days']
|
||||
|
||||
### GET /api/aircall/health
|
||||
Get server health status.
|
||||
|
||||
## Architecture
|
||||
|
||||
The server uses:
|
||||
- Express.js for the API
|
||||
- MongoDB for data storage
|
||||
- Redis for caching
|
||||
- Winston for logging
|
||||
1882
inventory-server/dashboard/aircall-server/package-lock.json
generated
Normal file
1882
inventory-server/dashboard/aircall-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
23
inventory-server/dashboard/aircall-server/package.json
Normal file
23
inventory-server/dashboard/aircall-server/package.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "aircall-server",
|
||||
"version": "1.0.0",
|
||||
"description": "Aircall metrics server",
|
||||
"type": "module",
|
||||
"main": "server.js",
|
||||
"scripts": {
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^1.6.2",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.3.1",
|
||||
"express": "^4.18.2",
|
||||
"mongodb": "^6.3.0",
|
||||
"redis": "^4.6.11",
|
||||
"winston": "^3.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.2"
|
||||
}
|
||||
}
|
||||
83
inventory-server/dashboard/aircall-server/server.js
Normal file
83
inventory-server/dashboard/aircall-server/server.js
Normal file
@@ -0,0 +1,83 @@
|
||||
import express from 'express';
|
||||
import cors from 'cors';
|
||||
import dotenv from 'dotenv';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { createRoutes } from './src/routes/index.js';
|
||||
import { aircallConfig } from './src/config/aircall.config.js';
|
||||
import { connectMongoDB } from './src/utils/db.js';
|
||||
import { createRedisClient } from './src/utils/redis.js';
|
||||
import { createLogger } from './src/utils/logger.js';
|
||||
|
||||
// Get directory name in ES modules
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Load environment variables from the correct path
|
||||
dotenv.config({ path: path.resolve(__dirname, '.env') });
|
||||
|
||||
// Validate required environment variables
|
||||
const requiredEnvVars = ['AIRCALL_API_ID', 'AIRCALL_API_TOKEN', 'MONGODB_URI', 'REDIS_URL'];
|
||||
const missingEnvVars = requiredEnvVars.filter(envVar => !process.env[envVar]);
|
||||
|
||||
if (missingEnvVars.length > 0) {
|
||||
console.error('Missing required environment variables:', missingEnvVars);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const app = express();
|
||||
const port = process.env.AIRCALL_PORT || 3002;
|
||||
const logger = createLogger('aircall-server');
|
||||
|
||||
// Middleware
|
||||
app.use(cors());
|
||||
app.use(express.json());
|
||||
|
||||
// Connect to databases
|
||||
let mongodb;
|
||||
let redis;
|
||||
|
||||
// Boot sequence: connect MongoDB and Redis, wire routes with those
// connections, then start listening. Exits the process on any failure so a
// supervisor can restart with a clean slate.
async function initializeServer() {
  try {
    // Connect to MongoDB
    mongodb = await connectMongoDB();
    logger.info('Connected to MongoDB');

    // Connect to Redis
    redis = await createRedisClient();
    logger.info('Connected to Redis');

    // Initialize configs with database connections
    // Each service config carries its own handles so route factories stay
    // decoupled from module-level state.
    const configs = {
      aircall: {
        ...aircallConfig,
        mongodb,
        redis,
        logger
      }
    };

    // Initialize routes
    const routes = createRoutes(configs, logger);
    app.use('/api', routes);

    // Error handling middleware
    // Registered after the routes so route errors funnel here.
    app.use((err, req, res, next) => {
      logger.error('Server error:', err);
      res.status(500).json({
        error: 'Internal server error',
        message: err.message
      });
    });

    // Start server
    app.listen(port, () => {
      logger.info(`Aircall server listening on port ${port}`);
    });
  } catch (error) {
    logger.error('Failed to initialize server:', error);
    process.exit(1);
  }
}

initializeServer();
|
||||
@@ -0,0 +1,15 @@
|
||||
// Static configuration for the Aircall service. Database/Redis handles and
// the logger are merged in at startup (see server.js initializeServer).
export const aircallConfig = {
  serviceName: 'aircall',
  // API credentials come from the environment; presence is validated at
  // server startup.
  apiId: process.env.AIRCALL_API_ID,
  apiToken: process.env.AIRCALL_API_TOKEN,
  timezone: 'America/New_York',
  dayStartsAt: 1, // business day starts at 1 AM ET
  storeHistory: true,
  collection: 'aircall_daily_data',
  redisTTL: 300, // 5 minutes cache for current day
  endpoints: {
    metrics: {
      ttl: 300 // per-endpoint cache TTL in seconds
    }
  }
};
|
||||
@@ -0,0 +1,57 @@
|
||||
import express from 'express';
|
||||
import { AircallService } from '../services/aircall/AircallService.js';
|
||||
|
||||
/**
 * Builds the Express router for the Aircall dashboard service.
 * Exposes aggregated call metrics plus a per-service health probe.
 *
 * @param {object} config - aircall service config (credentials, db handles, logger)
 * @param {object} logger - shared winston logger
 * @returns {object} an Express Router
 */
export const createAircallRoutes = (config, logger) => {
  const router = express.Router();
  const aircallService = new AircallService(config);

  // GET /metrics/:timeRange? — aggregated call metrics (defaults to 'today').
  router.get('/metrics/:timeRange?', async (req, res) => {
    try {
      const { timeRange = 'today' } = req.params;
      // Keep in sync with TimeManager.ALLOWED_RANGES (timeUtils.js).
      // Previously this list omitted 'last2days' and the 'previous*'
      // ranges even though the TimeManager declares support for them.
      const allowedRanges = [
        'today', 'yesterday', 'last2days',
        'last7days', 'last30days', 'last90days',
        'previous7days', 'previous30days', 'previous90days'
      ];

      if (!allowedRanges.includes(timeRange)) {
        return res.status(400).json({
          error: 'Invalid time range',
          allowedRanges
        });
      }

      const metrics = await aircallService.getMetrics(timeRange);

      res.json({
        ...metrics,
        _meta: {
          timeRange,
          generatedAt: new Date().toISOString(),
          dataPoints: metrics.daily_data?.length || 0
        }
      });
    } catch (error) {
      logger.error('Error fetching Aircall metrics:', error);
      res.status(500).json({
        error: 'Failed to fetch Aircall metrics',
        message: error.message
      });
    }
  });

  // Health check endpoint: reports connectivity of the backing stores.
  router.get('/health', (req, res) => {
    const mongoConnected = !!aircallService.mongodb?.db;
    const redisConnected = !!aircallService.redis?.isOpen;

    const health = {
      status: mongoConnected && redisConnected ? 'ok' : 'degraded',
      service: 'aircall',
      timestamp: new Date().toISOString(),
      connections: {
        mongodb: mongoConnected,
        redis: redisConnected
      }
    };
    res.json(health);
  });

  return router;
};
|
||||
@@ -0,0 +1,32 @@
|
||||
import express from 'express';
|
||||
import { createAircallRoutes } from './aircall.routes.js';
|
||||
|
||||
/**
 * Top-level API router: mounts per-service sub-routers, exposes a
 * liveness probe, and answers 404 for anything unmatched.
 *
 * @param {object} configs - per-service config objects (configs.aircall, ...)
 * @param {object} logger  - shared winston logger
 * @returns {object} an Express Router
 */
export const createRoutes = (configs, logger) => {
  const router = express.Router();

  // Mount the Aircall service under /aircall.
  router.use('/aircall', createAircallRoutes(configs.aircall, logger));

  // Liveness probe with backing-store status taken from req.services.
  router.get('/health', (req, res) => {
    const { redis, mongo } = req.services || {};
    res.status(200).json({
      status: 'ok',
      timestamp: new Date(),
      services: {
        redis: redis?.isReady || false,
        mongodb: mongo?.readyState === 1 || false
      }
    });
  });

  // Fallback for unknown routes — must stay registered last.
  router.use('*', (req, res) => {
    res.status(404).json({
      error: 'Not Found',
      message: `Route ${req.originalUrl} not found`
    });
  });

  return router;
};
|
||||
@@ -0,0 +1,298 @@
|
||||
import { DataManager } from "../base/DataManager.js";
|
||||
|
||||
export class AircallDataManager extends DataManager {
  /**
   * Data manager for Aircall call metrics.
   * Persists per-business-day aggregates in the "aircall_daily_data"
   * Mongo collection and relies on the base DataManager for caching.
   *
   * @param {object} mongodb - connected Mongo DB handle (may be falsy)
   * @param {object} redis - connected Redis client (may be falsy)
   * @param {object} timeManager - TimeManager providing business-day bounds
   */
  constructor(mongodb, redis, timeManager) {
    const options = {
      collection: "aircall_daily_data",
      redisTTL: 300 // 5 minutes cache
    };
    super(mongodb, redis, timeManager, options);
    // Re-assigned here even though super() already stores it; harmless.
    this.options = options;
  }

  // Coerces Date / ISO string / epoch-millis into a Date. Anything else
  // is logged and falls back to "now" rather than throwing.
  ensureDate(d) {
    if (d instanceof Date) return d;
    if (typeof d === 'string') return new Date(d);
    if (typeof d === 'number') return new Date(d);
    console.error('Invalid date value:', d);
    return new Date(); // fallback to current date
  }

  /**
   * Aggregates raw Aircall calls into per-day metrics and upserts one
   * document per business day covering [start, end).
   * No-op when Mongo is unavailable; bails out if `calls` is not an array.
   *
   * @param {Date|string|number} start - period start (inclusive)
   * @param {Date|string|number} end - period end (exclusive)
   * @param {Array<object>} calls - raw Aircall calls (started_at in epoch seconds)
   */
  async storeHistoricalPeriod(start, end, calls) {
    if (!this.mongodb) return;

    try {
      if (!Array.isArray(calls)) {
        console.error("Invalid calls data:", calls);
        return;
      }

      // Group calls by true day boundaries using TimeManager
      const dailyCallsMap = new Map();

      calls.forEach((call) => {
        try {
          const timestamp = call.started_at * 1000; // Convert to milliseconds
          const callDate = this.ensureDate(timestamp);
          const dayBounds = this.timeManager.getDayBounds(callDate);
          const dayKey = dayBounds.start.toISOString();

          if (!dailyCallsMap.has(dayKey)) {
            dailyCallsMap.set(dayKey, {
              date: dayBounds.start,
              calls: [],
            });
          }
          dailyCallsMap.get(dayKey).calls.push(call);
        } catch (err) {
          // One malformed call must not abort the whole period.
          console.error('Error processing call:', err, call);
        }
      });

      // Iterate over each day in the period using day boundaries
      const dates = [];
      let currentDate = this.ensureDate(start);
      const endDate = this.ensureDate(end);

      // NOTE(review): the cursor advances by calendar day while map keys
      // are business-day starts; assumes getDayBounds maps each calendar
      // day to a unique business-day start — confirm.
      while (currentDate < endDate) {
        const dayBounds = this.timeManager.getDayBounds(currentDate);
        dates.push(dayBounds.start);
        currentDate.setUTCDate(currentDate.getUTCDate() + 1);
      }

      for (const date of dates) {
        try {
          const dateKey = date.toISOString();
          const dayData = dailyCallsMap.get(dateKey);
          const dayCalls = dayData ? dayData.calls : [];

          // Process calls for this day using the same processing logic
          const metrics = this.processCallData(dayCalls);

          // Insert a daily_data record for this day
          metrics.daily_data = [
            {
              date: date.toISOString().split("T")[0],
              inbound: metrics.by_direction.inbound,
              outbound: metrics.by_direction.outbound,
            },
          ];

          // Store this day's processed data as historical
          await this.storeHistoricalDay(date, metrics);
        } catch (err) {
          // Keep going: a failed day should not lose the other days.
          console.error('Error processing date:', err, date);
        }
      }

    } catch (error) {
      console.error("Error storing historical period:", error, error.stack);
      throw error;
    }
  }

  /**
   * Turns a list of raw Aircall calls into the aggregate metrics shape
   * used by the dashboard: totals, per-direction/status/hour/user
   * breakdowns, a duration histogram, and a per-day series.
   * If the input is already a processed metrics object (has `total`),
   * a clean copy of it is returned instead.
   * Durations are computed from epoch-second timestamps (i.e. seconds).
   */
  processCallData(calls) {
    // If calls is already processed (has total, by_direction, etc.), return it
    if (calls && calls.total !== undefined) {
      console.log('Data already processed:', {
        total: calls.total,
        by_direction: calls.by_direction
      });
      // Return a clean copy of the processed data
      return {
        total: calls.total,
        by_direction: calls.by_direction,
        by_status: calls.by_status,
        by_missed_reason: calls.by_missed_reason,
        by_hour: calls.by_hour,
        by_users: calls.by_users,
        daily_data: calls.daily_data,
        duration_distribution: calls.duration_distribution,
        average_duration: calls.average_duration
      };
    }

    console.log('Processing raw calls:', {
      count: calls.length,
      sample: calls.length > 0 ? {
        id: calls[0].id,
        direction: calls[0].direction,
        status: calls[0].status
      } : null
    });

    // Process raw calls
    const metrics = {
      total: calls.length,
      by_direction: { inbound: 0, outbound: 0 },
      by_status: { answered: 0, missed: 0 },
      by_missed_reason: {},
      by_hour: Array(24).fill(0),
      by_users: {},
      daily_data: [],
      duration_distribution: [
        { range: "0-1m", count: 0 },
        { range: "1-5m", count: 0 },
        { range: "5-15m", count: 0 },
        { range: "15-30m", count: 0 },
        { range: "30m+", count: 0 },
      ],
      average_duration: 0,
      total_duration: 0, // internal accumulator, deleted before return
    };

    // Group calls by date for daily data
    const dailyCallsMap = new Map();

    calls.forEach((call) => {
      try {
        // Direction metrics
        // NOTE(review): assumes call.direction is 'inbound' or 'outbound';
        // any other value makes this counter NaN — confirm upstream.
        metrics.by_direction[call.direction]++;

        // Get call date and hour using TimeManager
        const timestamp = call.started_at * 1000; // Convert to milliseconds
        const callDate = this.ensureDate(timestamp);
        const dayBounds = this.timeManager.getDayBounds(callDate);
        const dayKey = dayBounds.start.toISOString().split("T")[0];
        // NOTE(review): getHours() uses the server's local timezone while
        // day grouping uses TimeManager bounds — confirm this is intended.
        const hour = callDate.getHours();
        metrics.by_hour[hour]++;

        // Status and duration metrics
        if (call.answered_at) {
          metrics.by_status.answered++;
          const duration = call.ended_at - call.answered_at;
          metrics.total_duration += duration;

          // Duration distribution
          if (duration <= 60) {
            metrics.duration_distribution[0].count++;
          } else if (duration <= 300) {
            metrics.duration_distribution[1].count++;
          } else if (duration <= 900) {
            metrics.duration_distribution[2].count++;
          } else if (duration <= 1800) {
            metrics.duration_distribution[3].count++;
          } else {
            metrics.duration_distribution[4].count++;
          }

          // Track user performance
          if (call.user) {
            const userId = call.user.id;
            if (!metrics.by_users[userId]) {
              metrics.by_users[userId] = {
                id: userId,
                name: call.user.name,
                total: 0,
                answered: 0,
                missed: 0,
                total_duration: 0,
                average_duration: 0,
              };
            }
            metrics.by_users[userId].total++;
            metrics.by_users[userId].answered++;
            metrics.by_users[userId].total_duration += duration;
          }
        } else {
          metrics.by_status.missed++;
          if (call.missed_call_reason) {
            metrics.by_missed_reason[call.missed_call_reason] =
              (metrics.by_missed_reason[call.missed_call_reason] || 0) + 1;
          }

          // Track missed calls by user
          if (call.user) {
            const userId = call.user.id;
            if (!metrics.by_users[userId]) {
              metrics.by_users[userId] = {
                id: userId,
                name: call.user.name,
                total: 0,
                answered: 0,
                missed: 0,
                total_duration: 0,
                average_duration: 0,
              };
            }
            metrics.by_users[userId].total++;
            metrics.by_users[userId].missed++;
          }
        }

        // Group by date for daily data
        if (!dailyCallsMap.has(dayKey)) {
          dailyCallsMap.set(dayKey, { date: dayKey, inbound: 0, outbound: 0 });
        }
        dailyCallsMap.get(dayKey)[call.direction]++;
      } catch (err) {
        console.error('Error processing call:', err, call);
      }
    });

    // Calculate average durations for users
    Object.values(metrics.by_users).forEach((user) => {
      if (user.answered > 0) {
        user.average_duration = Math.round(user.total_duration / user.answered);
      }
    });

    // Calculate global average duration
    if (metrics.by_status.answered > 0) {
      metrics.average_duration = Math.round(
        metrics.total_duration / metrics.by_status.answered
      );
    }

    // Convert daily data map to sorted array
    metrics.daily_data = Array.from(dailyCallsMap.values()).sort((a, b) =>
      a.date.localeCompare(b.date)
    );

    // Drop the internal accumulator from the returned shape.
    delete metrics.total_duration;

    console.log('Processed metrics:', {
      total: metrics.total,
      by_direction: metrics.by_direction,
      by_status: metrics.by_status,
      daily_data_count: metrics.daily_data.length
    });

    return metrics;
  }

  /**
   * Upserts the processed metrics for one business day, keyed by the
   * day-bounds start. Metrics are nested under `data` so every stored
   * document has the same shape.
   *
   * @param {Date|string|number} date - any moment within the target day
   * @param {object} data - processed metrics (see processCallData)
   */
  async storeHistoricalDay(date, data) {
    if (!this.mongodb) return;

    try {
      const collection = this.mongodb.collection(this.options.collection);
      const dayBounds = this.timeManager.getDayBounds(this.ensureDate(date));

      // Ensure consistent data structure with metrics nested in data field
      const document = {
        date: dayBounds.start,
        data: {
          total: data.total,
          by_direction: data.by_direction,
          by_status: data.by_status,
          by_missed_reason: data.by_missed_reason,
          by_hour: data.by_hour,
          by_users: data.by_users,
          daily_data: data.daily_data,
          duration_distribution: data.duration_distribution,
          average_duration: data.average_duration
        },
        updatedAt: new Date()
      };

      await collection.updateOne(
        { date: dayBounds.start },
        { $set: document },
        { upsert: true }
      );
    } catch (error) {
      console.error("Error storing historical day:", error);
      throw error;
    }
  }
}
|
||||
@@ -0,0 +1,138 @@
|
||||
import axios from "axios";
|
||||
import { Buffer } from "buffer";
|
||||
import { BaseService } from "../base/BaseService.js";
|
||||
import { AircallDataManager } from "./AircallDataManager.js";
|
||||
|
||||
export class AircallService extends BaseService {
  /**
   * Service wrapper around the Aircall REST API (v1).
   * Builds an HTTP Basic auth header from config.apiId / config.apiToken
   * and delegates caching/persistence to AircallDataManager.
   *
   * @param {object} config - service config; must include apiId and apiToken
   * @throws {Error} when either API credential is missing
   */
  constructor(config) {
    super(config);

    // Validate credentials BEFORE using them. Previously this check ran
    // last, after Buffer.from() had already built an auth header out of
    // undefined values.
    if (!config.apiId || !config.apiToken) {
      throw new Error("Aircall API credentials are required");
    }

    this.baseUrl = "https://api.aircall.io/v1";
    console.log('Initializing Aircall service with credentials:', {
      apiId: config.apiId ? 'present' : 'missing',
      apiToken: config.apiToken ? 'present' : 'missing'
    });
    this.auth = Buffer.from(`${config.apiId}:${config.apiToken}`).toString(
      "base64"
    );
    this.dataManager = new AircallDataManager(
      this.mongodb,
      this.redis,
      this.timeManager
    );
  }

  /**
   * Returns aggregated call metrics for a named time range
   * (e.g. 'today', 'last7days'); range names are resolved by the
   * TimeManager. Cached/historical data is served by the data manager,
   * which calls back into fetchAllCalls only for missing ranges.
   */
  async getMetrics(timeRange) {
    const dateRange = await this.timeManager.getDateRange(timeRange);
    console.log('Fetching metrics for date range:', {
      start: dateRange.start.toISOString(),
      end: dateRange.end.toISOString()
    });

    return this.dataManager.getData(dateRange, async (range) => {
      const calls = await this.fetchAllCalls(range.start, range.end);
      console.log('Fetched calls:', {
        count: calls.length,
        sample: calls.length > 0 ? calls[0] : null
      });
      return calls;
    });
  }

  /**
   * Fetches every call in [start, end] from /calls, following
   * pagination until meta.next_page_link is null.
   *
   * @param {Date} start - range start
   * @param {Date} end - range end
   * @returns {Promise<Array<object>>} all raw call records
   */
  async fetchAllCalls(start, end) {
    try {
      // Epoch-second bounds are loop-invariant; compute them once.
      const from = Math.floor(start.getTime() / 1000);
      const to = Math.floor(end.getTime() / 1000);

      let allCalls = [];
      let currentPage = 1;
      let hasMore = true;

      while (hasMore) {
        const response = await this.makeRequest("/calls", {
          from,
          to,
          order: "asc",
          page: currentPage,
          per_page: 50,
        });

        console.log('API Response:', {
          page: currentPage,
          totalPages: response.meta.total_pages,
          callsCount: response.calls?.length,
          params: { from, to }
        });

        if (!response.calls) {
          throw new Error("Invalid API response format");
        }

        allCalls = [...allCalls, ...response.calls];
        hasMore = response.meta.next_page_link !== null;
        currentPage++;

        if (hasMore) {
          // Rate limiting pause
          await new Promise((resolve) => setTimeout(resolve, 1));
        }
      }

      return allCalls;
    } catch (error) {
      console.error("Error fetching all calls:", error);
      throw error;
    }
  }

  /**
   * Performs a GET against the Aircall API with Basic auth.
   * Retries on HTTP 429 after a 5s pause, at most MAX_RATE_LIMIT_RETRIES
   * times; previously the retry recursed without any bound, so a
   * persistent 429 would recurse forever.
   *
   * @param {string} endpoint - path under the API base URL
   * @param {object} [params] - query parameters
   * @param {number} [retryCount] - internal retry counter (callers omit it)
   */
  async makeRequest(endpoint, params = {}, retryCount = 0) {
    const MAX_RATE_LIMIT_RETRIES = 3;
    try {
      console.log('Making API request:', {
        endpoint,
        params
      });
      const response = await axios.get(`${this.baseUrl}${endpoint}`, {
        headers: {
          Authorization: `Basic ${this.auth}`,
          "Content-Type": "application/json",
        },
        params,
      });
      return response.data;
    } catch (error) {
      if (error.response?.status === 429 && retryCount < MAX_RATE_LIMIT_RETRIES) {
        console.log("Rate limit reached, waiting before retry...");
        await new Promise((resolve) => setTimeout(resolve, 5000));
        return this.makeRequest(endpoint, params, retryCount + 1);
      }

      // Logs and rethrows as a normalized Error (never returns).
      this.handleApiError(error, `Error making request to ${endpoint}`);
    }
  }

  /** Throws unless the response is an object without an `error` field. */
  validateApiResponse(response, context = "") {
    if (!response || typeof response !== "object") {
      throw new Error(`${context}: Invalid API response format`);
    }

    if (response.error) {
      throw new Error(`${context}: ${response.error}`);
    }

    return true;
  }

  /** Normalizes Aircall pagination metadata into a small summary object. */
  getPaginationInfo(meta) {
    return {
      currentPage: meta.current_page,
      totalPages: meta.total_pages,
      hasNextPage: meta.next_page_link !== null,
      totalRecords: meta.total,
    };
  }
}
|
||||
@@ -0,0 +1,32 @@
|
||||
import { createTimeManager } from '../../utils/timeUtils.js';
|
||||
|
||||
export class BaseService {
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.mongodb = config.mongodb;
|
||||
this.redis = config.redis;
|
||||
this.logger = config.logger;
|
||||
this.timeManager = createTimeManager(config.timezone, config.dayStartsAt);
|
||||
}
|
||||
|
||||
handleApiError(error, context = '') {
|
||||
this.logger.error(`API Error ${context}:`, {
|
||||
message: error.message,
|
||||
status: error.response?.status,
|
||||
data: error.response?.data,
|
||||
});
|
||||
|
||||
if (error.response) {
|
||||
const status = error.response.status;
|
||||
const message = error.response.data?.message || error.response.statusText;
|
||||
|
||||
if (status === 429) {
|
||||
throw new Error('API rate limit exceeded. Please try again later.');
|
||||
}
|
||||
|
||||
throw new Error(`API error (${status}): ${message}`);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,320 @@
|
||||
export class DataManager {
  /**
   * Generic historical/cached metrics manager.
   *
   * Subclasses are expected to provide:
   *   - processCallData(raw)                   — raw records -> metrics object
   *   - storeHistoricalPeriod(start, end, raw) — persist per-day aggregates
   *   - storeHistoricalDay(date, data)         — persist one day's aggregate
   * (see AircallDataManager for the concrete implementation).
   *
   * @param {object} mongodb - Mongo DB handle (may be falsy)
   * @param {object} redis - Redis client (may be falsy)
   * @param {object} timeManager - business-day boundary helper
   * @param {object} options - { collection, redisTTL }
   */
  constructor(mongodb, redis, timeManager, options) {
    this.mongodb = mongodb;
    this.redis = redis;
    this.timeManager = timeManager;
    this.options = options || {};
  }

  // Coerces Date / string / epoch-millis / {date: ...} Mongo records into
  // a Date; logs and falls back to "now" for anything else.
  ensureDate(d) {
    if (d instanceof Date) return d;
    if (typeof d === 'string') return new Date(d);
    if (typeof d === 'number') return new Date(d);
    if (d && d.date) return new Date(d.date); // Handle MongoDB records
    console.error('Invalid date value:', d);
    return new Date(); // fallback to current date
  }

  /**
   * Returns combined metrics for `dateRange`, backfilling any missing
   * days by calling `fetchFn({start, end})` and persisting the result
   * before re-reading from Mongo.
   *
   * @param {{start: Date, end: Date}} dateRange
   * @param {Function} fetchFn - async ({start, end}) => raw records
   */
  async getData(dateRange, fetchFn) {
    try {
      // Get historical data from MongoDB
      const historicalData = await this.getHistoricalDays(dateRange.start, dateRange.end);

      // Find any missing date ranges
      const missingRanges = this.findMissingDateRanges(dateRange.start, dateRange.end, historicalData);

      // Fetch missing data
      for (const range of missingRanges) {
        const data = await fetchFn(range);
        await this.storeHistoricalPeriod(range.start, range.end, data);
      }

      // Get updated historical data
      const updatedData = await this.getHistoricalDays(dateRange.start, dateRange.end);

      // Handle both nested and flat data structures
      if (updatedData && updatedData.length > 0) {
        // Process each record and combine them
        const processedData = updatedData.map(record => {
          if (record.data) {
            // New-style document: metrics nested under `data`.
            return record.data;
          }
          if (record.total !== undefined) {
            // Legacy flat document: metrics at the top level.
            return {
              total: record.total,
              by_direction: record.by_direction,
              by_status: record.by_status,
              by_missed_reason: record.by_missed_reason,
              by_hour: record.by_hour,
              by_users: record.by_users,
              daily_data: record.daily_data,
              duration_distribution: record.duration_distribution,
              average_duration: record.average_duration
            };
          }
          return null; // unknown shape — dropped below
        }).filter(Boolean);

        // Combine the data
        if (processedData.length > 0) {
          return this.combineMetrics(processedData);
        }
      }

      // Otherwise process as raw call data
      // NOTE(review): updatedData here is a list of Mongo documents (or
      // empty), not raw calls; this relies on processCallData's
      // "already processed" / empty-input handling — confirm intended.
      return this.processCallData(updatedData);
    } catch (error) {
      console.error('Error in getData:', error);
      throw error;
    }
  }

  /**
   * Walks [start, end) one business day at a time and returns the
   * {start, end} bounds of each day with no record in `existingDates`.
   */
  findMissingDateRanges(start, end, existingDates) {
    const missingRanges = [];
    const existingDatesSet = new Set(
      existingDates.map((d) => {
        // Handle both nested and flat data structures
        const date = d.date ? d.date : d;
        return this.ensureDate(date).toISOString().split("T")[0];
      })
    );

    let current = new Date(start);
    const endDate = new Date(end);

    while (current < endDate) {
      const dayBounds = this.timeManager.getDayBounds(current);
      const dayKey = dayBounds.start.toISOString().split("T")[0];

      if (!existingDatesSet.has(dayKey)) {
        // Found a missing day
        const missingStart = new Date(dayBounds.start);
        const missingEnd = new Date(dayBounds.end);

        missingRanges.push({
          start: missingStart,
          end: missingEnd,
        });
      }

      // Move to the next day using timeManager to ensure proper business day boundaries
      current = new Date(dayBounds.end.getTime() + 1);
    }

    return missingRanges;
  }

  /**
   * Returns today's metrics, preferring a short-lived Redis cache.
   * On a cache miss, calls `fetchFn({start, end})` with a safe end time
   * and caches the result for at most 5 minutes.
   */
  async getCurrentDay(fetchFn) {
    const now = new Date();
    const todayBounds = this.timeManager.getDayBounds(now);
    const todayKey = this.timeManager.formatDate(todayBounds.start);
    const cacheKey = `${this.options.collection}:current_day:${todayKey}`;

    try {
      // Check cache first
      if (this.redis?.isOpen) {
        const cached = await this.redis.get(cacheKey);
        if (cached) {
          const parsedCache = JSON.parse(cached);
          if (parsedCache.total !== undefined) {
            // Use timeManager to check if the cached data is for today
            // NOTE(review): assumes daily_data is a non-empty array;
            // an empty/missing daily_data throws here — confirm the
            // cached shape always includes it.
            const cachedDate = new Date(parsedCache.daily_data[0].date);
            const isToday = this.timeManager.isToday(cachedDate);

            if (isToday) {
              return parsedCache;
            }
          }
        }
      }

      // Get safe end time that's never in the future
      const safeEnd = this.timeManager.getCurrentBusinessDayEnd();

      // Fetch and process current day data with safe end time
      const data = await fetchFn({
        start: todayBounds.start,
        end: safeEnd
      });

      if (!data) {
        return null;
      }

      // Cache the data with a shorter TTL for today's data
      if (this.redis?.isOpen) {
        const ttl = Math.min(
          this.options.redisTTL,
          60 * 5 // 5 minutes max for today's data
        );
        await this.redis.set(cacheKey, JSON.stringify(data), {
          EX: ttl,
        });
      }

      return data;
    } catch (error) {
      console.error('Error in getCurrentDay:', error);
      throw error;
    }
  }

  // Number of (whole) days between the business-day bounds of start and end.
  getDayCount(start, end) {
    // Calculate full days between dates using timeManager
    const startDay = this.timeManager.getDayBounds(start);
    const endDay = this.timeManager.getDayBounds(end);
    return Math.ceil((endDay.end - startDay.start) / (24 * 60 * 60 * 1000));
  }

  /**
   * Fetches and persists each business day in [start, end) that is not
   * already present in `existingData`; returns the newly fetched days.
   */
  async fetchMissingDays(start, end, existingData, fetchFn) {
    const existingDates = new Set(
      existingData.map((d) => this.timeManager.formatDate(d.date))
    );
    const missingData = [];

    let currentDate = new Date(start);
    while (currentDate < end) {
      const dayBounds = this.timeManager.getDayBounds(currentDate);
      const dateString = this.timeManager.formatDate(dayBounds.start);

      if (!existingDates.has(dateString)) {
        const data = await fetchFn({
          start: dayBounds.start,
          end: dayBounds.end,
        });

        await this.storeHistoricalDay(dayBounds.start, data);
        missingData.push(data);
      }

      // Move to next day using timeManager to ensure proper business day boundaries
      currentDate = new Date(dayBounds.end.getTime() + 1);
    }

    return missingData;
  }

  /**
   * Reads stored per-day documents whose `date` falls in
   * [startDay.start, endDay.start), sorted ascending.
   * Returns [] when Mongo is unavailable or the query fails.
   */
  async getHistoricalDays(start, end) {
    try {
      if (!this.mongodb) return [];

      const collection = this.mongodb.collection(this.options.collection);
      const startDay = this.timeManager.getDayBounds(start);
      const endDay = this.timeManager.getDayBounds(end);

      const records = await collection
        .find({
          date: {
            $gte: startDay.start,
            $lt: endDay.start,
          },
        })
        .sort({ date: 1 })
        .toArray();

      return records;
    } catch (error) {
      console.error('Error getting historical days:', error);
      return [];
    }
  }

  /**
   * Merges an array of per-day metrics objects into one aggregate:
   * counters are summed, the duration average is re-derived from the
   * answered-call-weighted totals, and daily_data is deduplicated by
   * date and sorted.
   */
  combineMetrics(metricsArray) {
    if (!metricsArray || metricsArray.length === 0) return null;
    if (metricsArray.length === 1) return metricsArray[0];

    const combined = {
      total: 0,
      by_direction: { inbound: 0, outbound: 0 },
      by_status: { answered: 0, missed: 0 },
      by_missed_reason: {},
      by_hour: Array(24).fill(0),
      by_users: {},
      daily_data: [],
      duration_distribution: [
        { range: '0-1m', count: 0 },
        { range: '1-5m', count: 0 },
        { range: '5-15m', count: 0 },
        { range: '15-30m', count: 0 },
        { range: '30m+', count: 0 }
      ],
      average_duration: 0
    };

    let totalAnswered = 0;
    let totalDuration = 0;

    metricsArray.forEach(metrics => {
      // Sum basic metrics
      combined.total += metrics.total;
      combined.by_direction.inbound += metrics.by_direction.inbound;
      combined.by_direction.outbound += metrics.by_direction.outbound;
      combined.by_status.answered += metrics.by_status.answered;
      combined.by_status.missed += metrics.by_status.missed;

      // Combine missed reasons
      Object.entries(metrics.by_missed_reason).forEach(([reason, count]) => {
        combined.by_missed_reason[reason] = (combined.by_missed_reason[reason] || 0) + count;
      });

      // Sum hourly data
      metrics.by_hour.forEach((count, hour) => {
        combined.by_hour[hour] += count;
      });

      // Combine user data
      Object.entries(metrics.by_users).forEach(([userId, userData]) => {
        if (!combined.by_users[userId]) {
          combined.by_users[userId] = {
            id: userData.id,
            name: userData.name,
            total: 0,
            answered: 0,
            missed: 0,
            total_duration: 0,
            average_duration: 0
          };
        }
        combined.by_users[userId].total += userData.total;
        combined.by_users[userId].answered += userData.answered;
        combined.by_users[userId].missed += userData.missed;
        combined.by_users[userId].total_duration += userData.total_duration || 0;
      });

      // Combine duration distribution
      metrics.duration_distribution.forEach((dist, index) => {
        combined.duration_distribution[index].count += dist.count;
      });

      // Accumulate for average duration calculation
      // (weighted by answered calls, since average_duration is per-day)
      if (metrics.average_duration && metrics.by_status.answered) {
        totalDuration += metrics.average_duration * metrics.by_status.answered;
        totalAnswered += metrics.by_status.answered;
      }

      // Merge daily data
      if (metrics.daily_data) {
        combined.daily_data.push(...metrics.daily_data);
      }
    });

    // Calculate final average duration
    if (totalAnswered > 0) {
      combined.average_duration = Math.round(totalDuration / totalAnswered);
    }

    // Calculate user averages
    Object.values(combined.by_users).forEach(user => {
      if (user.answered > 0) {
        user.average_duration = Math.round(user.total_duration / user.answered);
      }
    });

    // Sort and deduplicate daily data (last record for a date wins)
    combined.daily_data = Array.from(
      new Map(combined.daily_data.map(item => [item.date, item])).values()
    ).sort((a, b) => a.date.localeCompare(b.date));

    return combined;
  }
}
|
||||
15
inventory-server/dashboard/aircall-server/src/utils/db.js
Normal file
15
inventory-server/dashboard/aircall-server/src/utils/db.js
Normal file
@@ -0,0 +1,15 @@
|
||||
import { MongoClient } from 'mongodb';
|
||||
|
||||
// Connection settings, overridable via environment.
const MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017/dashboard';
const DB_NAME = process.env.MONGODB_DB || 'dashboard';

/**
 * Opens a MongoDB connection and returns a handle to the dashboard
 * database. Connection failures are logged and rethrown to the caller.
 *
 * @returns {Promise<object>} the selected Db handle
 */
export async function connectMongoDB() {
  try {
    const client = await MongoClient.connect(MONGODB_URI);
    console.log('Connected to MongoDB');
    return client.db(DB_NAME);
  } catch (err) {
    console.error('MongoDB connection error:', err);
    throw err;
  }
}
|
||||
@@ -0,0 +1,30 @@
|
||||
import winston from 'winston';
|
||||
import path from 'path';
|
||||
|
||||
/**
 * Creates a winston logger tagged with a service name.
 * Emits colorized text to the console and JSON to two per-service
 * files under logs/: one for errors only, one for everything.
 *
 * @param {string} service - service name used for tagging and file names
 * @returns {object} a configured winston Logger
 */
export function createLogger(service) {
  const logFile = (suffix) => path.join('logs', `${service}-${suffix}.log`);

  return winston.createLogger({
    level: process.env.LOG_LEVEL || 'info',
    format: winston.format.combine(
      winston.format.timestamp(),
      winston.format.json()
    ),
    defaultMeta: { service },
    transports: [
      // Human-readable console output.
      new winston.transports.Console({
        format: winston.format.combine(
          winston.format.colorize(),
          winston.format.simple()
        )
      }),
      // Errors only.
      new winston.transports.File({
        filename: logFile('error'),
        level: 'error'
      }),
      // Everything at the configured level.
      new winston.transports.File({
        filename: logFile('combined')
      })
    ]
  });
}
|
||||
23
inventory-server/dashboard/aircall-server/src/utils/redis.js
Normal file
23
inventory-server/dashboard/aircall-server/src/utils/redis.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import { createClient } from 'redis';
|
||||
|
||||
// Redis connection string, overridable via environment.
const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';

/**
 * Creates and connects a Redis client.
 * The 'error' listener is registered BEFORE connect(): previously it was
 * attached afterwards, so an error event emitted during connection (or
 * any error before the listener existed) would be an unhandled 'error'
 * event and crash the process.
 *
 * @returns {Promise<object>} a connected redis client
 */
export async function createRedisClient() {
  try {
    const client = createClient({
      url: REDIS_URL
    });

    client.on('error', (err) => {
      console.error('Redis error:', err);
    });

    await client.connect();
    console.log('Connected to Redis');

    return client;
  } catch (error) {
    console.error('Redis connection error:', error);
    throw error;
  }
}
|
||||
262
inventory-server/dashboard/aircall-server/src/utils/timeUtils.js
Normal file
262
inventory-server/dashboard/aircall-server/src/utils/timeUtils.js
Normal file
@@ -0,0 +1,262 @@
|
||||
class TimeManager {
  // Time-range keywords accepted by getDateRange()/isValidTimeRange().
  // NOTE(review): getPreviousPeriod() only handles a subset of these ranges.
  static ALLOWED_RANGES = ['today', 'yesterday', 'last2days', 'last7days', 'last30days', 'last90days',
    'previous7days', 'previous30days', 'previous90days'];

  // timezone: IANA zone name, used only by formatDate().
  // dayStartsAt: hour (in local business time) at which a "business day" begins.
  constructor(timezone = 'America/New_York', dayStartsAt = 1) {
    this.timezone = timezone;
    this.dayStartsAt = dayStartsAt;
  }

  // Returns { start, end } Date bounds of the business day containing `date`.
  // For today the window is clamped to "now"; past days get a full 24h window.
  // NOTE(review): the `+ 5` below hard-codes the EST offset (UTC-5). During
  // daylight-saving time Eastern is UTC-4, so every bound drifts by one hour —
  // confirm whether that is acceptable or a tz-aware conversion is needed.
  getDayBounds(date) {
    try {
      const now = new Date();
      const targetDate = new Date(date);

      // For today (compared by UTC calendar date)
      if (
        targetDate.getUTCFullYear() === now.getUTCFullYear() &&
        targetDate.getUTCMonth() === now.getUTCMonth() &&
        targetDate.getUTCDate() === now.getUTCDate()
      ) {
        // If current time is before day start (1 AM ET / 6 AM UTC),
        // use previous day's start until now
        const todayStart = new Date(Date.UTC(
          now.getUTCFullYear(),
          now.getUTCMonth(),
          now.getUTCDate(),
          this.dayStartsAt + 5,
          0,
          0,
          0
        ));

        if (now < todayStart) {
          const yesterdayStart = new Date(todayStart);
          yesterdayStart.setUTCDate(yesterdayStart.getUTCDate() - 1);
          return { start: yesterdayStart, end: now };
        }

        return { start: todayStart, end: now };
      }

      // For past days, use full 24-hour period
      const normalizedDate = new Date(Date.UTC(
        targetDate.getUTCFullYear(),
        targetDate.getUTCMonth(),
        targetDate.getUTCDate()
      ));

      const dayStart = new Date(normalizedDate);
      dayStart.setUTCHours(this.dayStartsAt + 5, 0, 0, 0);

      const dayEnd = new Date(dayStart);
      dayEnd.setUTCDate(dayEnd.getUTCDate() + 1);

      return { start: dayStart, end: dayEnd };
    } catch (error) {
      console.error('Error in getDayBounds:', error);
      throw new Error(`Failed to calculate day bounds: ${error.message}`);
    }
  }

  // Maps a named period ('today', 'last7days', 'previous30days', ...) to
  // { start, end } Date bounds. "lastN" windows end at the current moment;
  // "previousN" windows cover the N days immediately before the "lastN" window.
  // Throws for names outside the switch below.
  getDateRange(period) {
    try {
      const now = new Date();
      const todayBounds = this.getDayBounds(now);
      // `end` is "now" for all the open-ended "lastN" windows.
      const end = new Date();

      switch (period) {
        case 'today':
          return {
            start: todayBounds.start,
            end
          };
        case 'yesterday': {
          const yesterday = new Date(now);
          yesterday.setDate(yesterday.getDate() - 1);
          return this.getDayBounds(yesterday);
        }
        case 'last2days': {
          const twoDaysAgo = new Date(now);
          twoDaysAgo.setDate(twoDaysAgo.getDate() - 2);
          return this.getDayBounds(twoDaysAgo);
        }
        case 'last7days': {
          // 6 calendar days back + today = a 7-day window ending now.
          const start = new Date(now);
          start.setDate(start.getDate() - 6);
          return {
            start: this.getDayBounds(start).start,
            end
          };
        }
        case 'previous7days': {
          // The 7 days immediately before the last7days window.
          const end = new Date(now);
          end.setDate(end.getDate() - 7);
          const start = new Date(end);
          start.setDate(start.getDate() - 6);
          return {
            start: this.getDayBounds(start).start,
            end: this.getDayBounds(end).end
          };
        }
        case 'last30days': {
          const start = new Date(now);
          start.setDate(start.getDate() - 29);
          return {
            start: this.getDayBounds(start).start,
            end
          };
        }
        case 'previous30days': {
          const end = new Date(now);
          end.setDate(end.getDate() - 30);
          const start = new Date(end);
          start.setDate(start.getDate() - 29);
          return {
            start: this.getDayBounds(start).start,
            end: this.getDayBounds(end).end
          };
        }
        case 'last90days': {
          const start = new Date(now);
          start.setDate(start.getDate() - 89);
          return {
            start: this.getDayBounds(start).start,
            end
          };
        }
        case 'previous90days': {
          const end = new Date(now);
          end.setDate(end.getDate() - 90);
          const start = new Date(end);
          start.setDate(start.getDate() - 89);
          return {
            start: this.getDayBounds(start).start,
            end: this.getDayBounds(end).end
          };
        }
        default:
          throw new Error(`Unsupported time period: ${period}`);
      }
    } catch (error) {
      console.error('Error in getDateRange:', error);
      throw error;
    }
  }

  // Returns the comparison ("previous") period for `period`.
  // NOTE(review): the return type is inconsistent — 'today' yields the string
  // 'yesterday' (a period name) while every other case yields { start, end }
  // bounds; callers must handle both shapes. Also, 'last2days' and the
  // 'previousNdays' names from ALLOWED_RANGES are not handled and will throw.
  // NOTE(review): the windows here step back N then N again (e.g. -7/-7),
  // while getDateRange's 'previousNdays' uses -N/-(N-1) — confirm which
  // off-by-one is intended; the two disagree by one day.
  getPreviousPeriod(period) {
    try {
      const now = new Date();

      switch (period) {
        case 'today':
          return 'yesterday';
        case 'yesterday': {
          // Return bounds for 2 days ago
          const twoDaysAgo = new Date(now);
          twoDaysAgo.setDate(twoDaysAgo.getDate() - 2);
          return this.getDayBounds(twoDaysAgo);
        }
        case 'last7days': {
          // Return bounds for previous 7 days
          const end = new Date(now);
          end.setDate(end.getDate() - 7);
          const start = new Date(end);
          start.setDate(start.getDate() - 7);
          return {
            start: this.getDayBounds(start).start,
            end: this.getDayBounds(end).end
          };
        }
        case 'last30days': {
          const end = new Date(now);
          end.setDate(end.getDate() - 30);
          const start = new Date(end);
          start.setDate(start.getDate() - 30);
          return {
            start: this.getDayBounds(start).start,
            end: this.getDayBounds(end).end
          };
        }
        case 'last90days': {
          const end = new Date(now);
          end.setDate(end.getDate() - 90);
          const start = new Date(end);
          start.setDate(start.getDate() - 90);
          return {
            start: this.getDayBounds(start).start,
            end: this.getDayBounds(end).end
          };
        }
        default:
          throw new Error(`Unsupported time period: ${period}`);
      }
    } catch (error) {
      console.error('Error in getPreviousPeriod:', error);
      throw error;
    }
  }

  // End of the current business day: usually "now", unless we are still before
  // today's day-start, in which case yesterday's business day is still running.
  getCurrentBusinessDayEnd() {
    try {
      const now = new Date();
      const todayBounds = this.getDayBounds(now);

      // If current time is before day start (1 AM ET / 6 AM UTC),
      // then we're still in yesterday's business day
      const todayStart = new Date(Date.UTC(
        now.getUTCFullYear(),
        now.getUTCMonth(),
        now.getUTCDate(),
        this.dayStartsAt + 5,
        0,
        0,
        0
      ));

      if (now < todayStart) {
        const yesterdayBounds = this.getDayBounds(new Date(now.getTime() - 24 * 60 * 60 * 1000));
        return yesterdayBounds.end;
      }

      // Return the earlier of current time or today's end
      return now < todayBounds.end ? now : todayBounds.end;
    } catch (error) {
      console.error('Error in getCurrentBusinessDayEnd:', error);
      // Best-effort fallback: return "now" instead of propagating the error.
      return new Date();
    }
  }

  // True when `timeRange` is one of the names in ALLOWED_RANGES.
  isValidTimeRange(timeRange) {
    return TimeManager.ALLOWED_RANGES.includes(timeRange);
  }

  // True when `date` falls on today's UTC calendar date (not business day).
  isToday(date) {
    const now = new Date();
    const targetDate = new Date(date);
    return (
      targetDate.getUTCFullYear() === now.getUTCFullYear() &&
      targetDate.getUTCMonth() === now.getUTCMonth() &&
      targetDate.getUTCDate() === now.getUTCDate()
    );
  }

  // Formats `date` for display in the configured timezone; falls back to ISO
  // 8601 if the locale/timezone formatting fails (e.g. bad timezone name).
  formatDate(date) {
    try {
      return date.toLocaleString('en-US', {
        timeZone: this.timezone,
        year: 'numeric',
        month: '2-digit',
        day: '2-digit',
        hour: '2-digit',
        minute: '2-digit',
        second: '2-digit'
      });
    } catch (error) {
      console.error('Error formatting date:', error);
      return date.toISOString();
    }
  }
}
|
||||
|
||||
// Factory helper so callers can build a TimeManager without importing the class.
export function createTimeManager(timezone, dayStartsAt) {
  return new TimeManager(timezone, dayStartsAt);
}
|
||||
10
inventory-server/dashboard/auth-server/.env.example
Normal file
10
inventory-server/dashboard/auth-server/.env.example
Normal file
@@ -0,0 +1,10 @@
|
||||
# Server Configuration
|
||||
NODE_ENV=development
|
||||
PORT=3003
|
||||
|
||||
# Authentication
|
||||
JWT_SECRET=your-secret-key-here
|
||||
DASHBOARD_PASSWORD=your-dashboard-password-here
|
||||
|
||||
# Cookie Settings
|
||||
COOKIE_DOMAIN=localhost # In production: .kent.pw
|
||||
203
inventory-server/dashboard/auth-server/index.js
Normal file
203
inventory-server/dashboard/auth-server/index.js
Normal file
@@ -0,0 +1,203 @@
|
||||
// auth-server/index.js
const path = require('path');
require('dotenv').config({ path: path.join(__dirname, '.env') });
const crypto = require('crypto');
const cookieParser = require('cookie-parser');
const cors = require('cors');
const express = require('express');
const jwt = require('jsonwebtoken');
|
||||
|
||||
// Debug environment variables
console.log('Environment variables loaded from:', path.join(__dirname, '.env'));
console.log('Current directory:', __dirname);
// NOTE(review): dumping every env var *name* helps debugging but reveals which
// secrets exist in this environment — consider removing outside development.
console.log('Available env vars:', Object.keys(process.env));

const app = express();
const PORT = process.env.PORT || 3003;
const JWT_SECRET = process.env.JWT_SECRET;
const DASHBOARD_PASSWORD = process.env.DASHBOARD_PASSWORD;

// Validate required environment variables
// Fail fast at startup: without these the server cannot issue or verify sessions.
if (!JWT_SECRET || !DASHBOARD_PASSWORD) {
  console.error('Missing required environment variables:');
  if (!JWT_SECRET) console.error('- JWT_SECRET');
  if (!DASHBOARD_PASSWORD) console.error('- DASHBOARD_PASSWORD');
  process.exit(1);
}
|
||||
|
||||
// Middleware
app.use(express.json());   // parse JSON request bodies (login payload)
app.use(cookieParser());   // populate req.cookies for the token checks below
|
||||
|
||||
// Configure CORS
// Credentialed cross-origin access is allowed for the production dashboard,
// local dev, and any host on the 192.168.1.x LAN; everything else is rejected.
const corsOptions = {
  origin: (origin, callback) => {
    const allowedOrigins = [
      'http://localhost:3000',
      'https://dashboard.kent.pw'
    ];
    const lanPattern = /^http:\/\/192\.168\.1\.\d{1,3}(:\d+)?$/;

    console.log('CORS check for origin:', origin);

    // Requests without an Origin header (same-origin, curl) are allowed,
    // as are whitelisted origins and hosts on the local network.
    const permitted =
      !origin || allowedOrigins.includes(origin) || lanPattern.test(origin);

    if (permitted) {
      callback(null, true);
    } else {
      callback(new Error('Not allowed by CORS'));
    }
  },
  credentials: true,                                                   // allow cookies cross-origin
  methods: ['GET', 'POST', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization', 'Cookie', 'Accept'],
  exposedHeaders: ['Set-Cookie']
};
|
||||
|
||||
app.use(cors(corsOptions));
// Answer CORS preflight (OPTIONS) for every route with the same policy.
app.options('*', cors(corsOptions));
|
||||
|
||||
// Request logging
// Logs method/path for each request. Header and cookie *values* are deliberately
// not logged: the auth token travels in the Cookie header, and the previous
// version wrote it into the logs on every request.
app.use((req, res, next) => {
  console.log(`${new Date().toISOString()} ${req.method} ${req.url}`);
  console.log('Cookie names:', Object.keys(req.cookies || {}));
  next();
});
|
||||
|
||||
// Health check endpoint
// Liveness probe: confirms the process is up and reports the current server time.
app.get('/health', (_req, res) => {
  const payload = {
    status: 'ok',
    timestamp: new Date().toISOString()
  };
  res.json(payload);
});
|
||||
|
||||
// Auth endpoints
// POST /login — verifies the shared dashboard password and, on success, issues
// a 24-hour JWT in an httpOnly "token" cookie. Cookie attributes adapt to
// whether the request comes from the local network (http, lax) or production
// (https, none, scoped to .kent.pw).
// Security fixes vs the previous version: the request body (which contains the
// plaintext password) and the password lengths are no longer logged, and the
// comparison is constant-time via SHA-256 digests + crypto.timingSafeEqual.
app.post('/login', (req, res) => {
  console.log('Login attempt received');
  console.log('Origin:', req.headers.origin);

  const { password } = req.body;

  if (!password) {
    console.log('No password provided');
    return res.status(400).json({
      success: false,
      message: 'Password is required'
    });
  }

  // Hash both sides so the buffers are equal-length, then compare in constant
  // time — response timing no longer leaks how much of the password matched.
  const providedDigest = crypto.createHash('sha256').update(String(password)).digest();
  const expectedDigest = crypto.createHash('sha256').update(DASHBOARD_PASSWORD).digest();
  const passwordMatches = crypto.timingSafeEqual(providedDigest, expectedDigest);

  if (passwordMatches) {
    console.log('Password matched');
    const token = jwt.sign({ authorized: true }, JWT_SECRET, {
      expiresIn: '24h'
    });

    // Determine if request is from local network
    const isLocalNetwork = req.headers.origin?.includes('192.168.1.') || req.headers.origin?.includes('localhost');

    const cookieOptions = {
      httpOnly: true,
      secure: !isLocalNetwork, // Only use secure for non-local requests
      sameSite: isLocalNetwork ? 'lax' : 'none',
      path: '/',
      maxAge: 24 * 60 * 60 * 1000 // 24 hours
    };

    // Only set domain for production
    if (!isLocalNetwork) {
      cookieOptions.domain = '.kent.pw';
    }

    console.log('Setting cookie with options:', cookieOptions);
    res.cookie('token', token, cookieOptions);

    res.json({
      success: true,
      debug: {
        origin: req.headers.origin,
        cookieOptions
      }
    });
  } else {
    console.log('Password mismatch');
    res.status(401).json({
      success: false,
      message: 'Invalid password'
    });
  }
});
|
||||
|
||||
// GET /check — validates the JWT from the "token" cookie.
// Responds 200 {authenticated:true} for a valid token, 401 with an error code
// otherwise. Security fix vs the previous version: cookie/header *values*
// (i.e. the bearer token) and the decoded payload are no longer logged.
app.get('/check', (req, res) => {
  console.log('Auth check received');
  console.log('Cookie names:', Object.keys(req.cookies || {}));

  const token = req.cookies.token;

  if (!token) {
    console.log('No token found in cookies');
    return res.status(401).json({
      authenticated: false,
      error: 'no_token'
    });
  }

  try {
    jwt.verify(token, JWT_SECRET);
    console.log('Token verified successfully');
    res.json({ authenticated: true });
  } catch (err) {
    console.log('Token verification failed:', err.message);
    res.status(401).json({
      authenticated: false,
      error: 'invalid_token',
      message: err.message
    });
  }
});
|
||||
|
||||
// POST /logout — expires the auth cookie. The clearing options must mirror the
// ones used at login, otherwise the browser treats it as a different cookie.
app.post('/logout', (req, res) => {
  const origin = req.headers.origin;
  const local = Boolean(origin && (origin.includes('192.168.1.') || origin.includes('localhost')));

  const cookieOptions = {
    httpOnly: true,
    secure: !local,
    sameSite: local ? 'lax' : 'none',
    path: '/',
    domain: local ? undefined : '.kent.pw'
  };

  console.log('Clearing cookie with options:', cookieOptions);
  res.clearCookie('token', cookieOptions);
  res.json({ success: true });
});
|
||||
|
||||
// Error handling middleware
// Final express error handler: logs the error and returns a generic 500.
// NOTE(review): err.message is echoed to the client unconditionally — confirm
// that is acceptable, or gate the detail on NODE_ENV like the analytics server.
app.use((err, req, res, next) => {
  console.error('Server error:', err);
  res.status(500).json({
    success: false,
    message: 'Internal server error',
    error: err.message
  });
});
|
||||
|
||||
// Start server
app.listen(PORT, () => {
  console.log(`Auth server running on port ${PORT}`);
  console.log('Environment:', process.env.NODE_ENV);
  console.log('CORS origins:', corsOptions.origin);
  // Only the *lengths* of the secrets are logged (to confirm they loaded) —
  // never the values themselves.
  console.log('JWT_SECRET length:', JWT_SECRET?.length);
  console.log('DASHBOARD_PASSWORD length:', DASHBOARD_PASSWORD?.length);
});
|
||||
1044
inventory-server/dashboard/auth-server/package-lock.json
generated
Normal file
1044
inventory-server/dashboard/auth-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
22
inventory-server/dashboard/auth-server/package.json
Normal file
22
inventory-server/dashboard/auth-server/package.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "auth-server",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"cookie-parser": "^1.4.7",
|
||||
"cors": "^2.8.5",
|
||||
"date-fns": "^4.1.0",
|
||||
"date-fns-tz": "^3.2.0",
|
||||
"dotenv": "^16.4.7",
|
||||
"express": "^4.21.1",
|
||||
"express-session": "^1.18.1",
|
||||
"jsonwebtoken": "^9.0.2"
|
||||
}
|
||||
}
|
||||
1
inventory-server/dashboard/dashboard.conf
Symbolic link
1
inventory-server/dashboard/dashboard.conf
Symbolic link
@@ -0,0 +1 @@
|
||||
/etc/nginx/sites-enabled/dashboard.conf
|
||||
2506
inventory-server/dashboard/google-server/package-lock.json
generated
Normal file
2506
inventory-server/dashboard/google-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
21
inventory-server/dashboard/google-server/package.json
Normal file
21
inventory-server/dashboard/google-server/package.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "google-analytics-server",
|
||||
"version": "1.0.0",
|
||||
"description": "Google Analytics server for dashboard",
|
||||
"main": "server.js",
|
||||
"scripts": {
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@google-analytics/data": "^4.0.0",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.3.1",
|
||||
"express": "^4.18.2",
|
||||
"redis": "^4.6.11",
|
||||
"winston": "^3.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.2"
|
||||
}
|
||||
}
|
||||
254
inventory-server/dashboard/google-server/routes/analytics.js
Normal file
254
inventory-server/dashboard/google-server/routes/analytics.js
Normal file
@@ -0,0 +1,254 @@
|
||||
const express = require('express');
const { BetaAnalyticsDataClient } = require('@google-analytics/data');
const router = express.Router();
const logger = require('../utils/logger');

// Initialize GA4 client
// NOTE(review): JSON.parse throws at module-load time if
// GOOGLE_APPLICATION_CREDENTIALS_JSON is unset or malformed — confirm startup
// validates this env var before the routes are required.
const analyticsClient = new BetaAnalyticsDataClient({
  credentials: JSON.parse(process.env.GOOGLE_APPLICATION_CREDENTIALS_JSON)
});

const propertyId = process.env.GA_PROPERTY_ID;

// Cache durations
// Redis TTLs in seconds for each endpoint's response cache.
const CACHE_DURATIONS = {
  REALTIME_BASIC: 60, // 1 minute
  REALTIME_DETAILED: 300, // 5 minutes
  BASIC_METRICS: 3600, // 1 hour
  USER_BEHAVIOR: 3600 // 1 hour
};
|
||||
|
||||
// Basic metrics endpoint
// GET /metrics?startDate=… — daily GA4 metrics from startDate through today,
// served from Redis when a cached copy exists (TTL: BASIC_METRICS).
router.get('/metrics', async (req, res) => {
  const { startDate = '7daysAgo' } = req.query;
  const cacheKey = `analytics:basic_metrics:${startDate}`;

  try {
    // Serve from cache when possible to conserve GA4 quota.
    const cached = await req.redisClient.get(cacheKey);
    if (cached) {
      logger.info('Returning cached basic metrics data');
      return res.json({ success: true, data: JSON.parse(cached) });
    }

    // Fetch from GA4
    const reportRequest = {
      property: `properties/${propertyId}`,
      dateRanges: [{ startDate, endDate: 'today' }],
      dimensions: [{ name: 'date' }],
      metrics: [
        { name: 'activeUsers' },
        { name: 'newUsers' },
        { name: 'averageSessionDuration' },
        { name: 'screenPageViews' },
        { name: 'bounceRate' },
        { name: 'conversions' }
      ],
      returnPropertyQuota: true
    };
    const [report] = await analyticsClient.runReport(reportRequest);

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(report), {
      EX: CACHE_DURATIONS.BASIC_METRICS
    });

    res.json({ success: true, data: report });
  } catch (error) {
    logger.error('Error fetching basic metrics:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
// Realtime basic data endpoint
// GET /realtime/basic — active users now, over the last 5 minutes, and as a
// per-minute series, plus GA4 quota usage. Cached for REALTIME_BASIC (60s).
// Fixes vs previous version: the three independent reports now run in parallel
// (Promise.all) instead of sequentially, and the propertyQuota dereference is
// guarded so a missing quota object cannot crash the handler.
router.get('/realtime/basic', async (req, res) => {
  try {
    const cacheKey = 'analytics:realtime:basic';

    // Check Redis cache
    const cachedData = await req.redisClient.get(cacheKey);
    if (cachedData) {
      logger.info('Returning cached realtime basic data');
      return res.json({ success: true, data: JSON.parse(cachedData) });
    }

    // The three realtime reports are independent — issue them in parallel.
    const [[userResponse], [fiveMinResponse], [timeSeriesResponse]] = await Promise.all([
      // Active users (also carries quota info via returnPropertyQuota)
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        metrics: [{ name: 'activeUsers' }],
        returnPropertyQuota: true
      }),
      // Last 5 minutes
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        metrics: [{ name: 'activeUsers' }],
        minuteRanges: [{ startMinutesAgo: 5, endMinutesAgo: 0 }]
      }),
      // Per-minute time series
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        dimensions: [{ name: 'minutesAgo' }],
        metrics: [{ name: 'activeUsers' }]
      })
    ]);

    // propertyQuota is only present when GA returns it — guard the dereference.
    const quota = userResponse.propertyQuota || {};
    const response = {
      userResponse,
      fiveMinResponse,
      timeSeriesResponse,
      quotaInfo: {
        projectHourly: quota.tokensPerProjectPerHour,
        daily: quota.tokensPerDay,
        serverErrors: quota.serverErrorsPerProjectPerHour,
        thresholdedRequests: quota.potentiallyThresholdedRequestsPerHour
      }
    };

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(response), {
      EX: CACHE_DURATIONS.REALTIME_BASIC
    });

    res.json({ success: true, data: response });
  } catch (error) {
    logger.error('Error fetching realtime basic data:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
// Realtime detailed data endpoint
// GET /realtime/detailed — top pages, events, and device categories right now.
// Cached in Redis for REALTIME_DETAILED (5 min).
// Fix vs previous version: the three independent reports run in parallel.
router.get('/realtime/detailed', async (req, res) => {
  try {
    const cacheKey = 'analytics:realtime:detailed';

    // Check Redis cache
    const cachedData = await req.redisClient.get(cacheKey);
    if (cachedData) {
      logger.info('Returning cached realtime detailed data');
      return res.json({ success: true, data: JSON.parse(cachedData) });
    }

    const [[pageResponse], [eventResponse], [deviceResponse]] = await Promise.all([
      // Current pages
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        dimensions: [{ name: 'unifiedScreenName' }],
        metrics: [{ name: 'screenPageViews' }],
        orderBy: [{ metric: { metricName: 'screenPageViews' }, desc: true }],
        limit: 25
      }),
      // Events
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        dimensions: [{ name: 'eventName' }],
        metrics: [{ name: 'eventCount' }],
        orderBy: [{ metric: { metricName: 'eventCount' }, desc: true }],
        limit: 25
      }),
      // Device categories
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        dimensions: [{ name: 'deviceCategory' }],
        metrics: [{ name: 'activeUsers' }],
        orderBy: [{ metric: { metricName: 'activeUsers' }, desc: true }],
        limit: 10,
        returnPropertyQuota: true
      })
    ]);

    const response = {
      pageResponse,
      eventResponse,
      // NOTE(review): device-category data is published under the key
      // "sourceResponse" — the consumer apparently expects that name. Kept for
      // compatibility; confirm whether a sessionSource report was intended.
      sourceResponse: deviceResponse
    };

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(response), {
      EX: CACHE_DURATIONS.REALTIME_DETAILED
    });

    res.json({ success: true, data: response });
  } catch (error) {
    logger.error('Error fetching realtime detailed data:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
// User behavior endpoint
// GET /user-behavior?timeRange=N — page, device, and traffic-source breakdowns
// over the last N days (default 30). Cached in Redis for USER_BEHAVIOR (1h).
// Fix vs previous version: the three independent reports run in parallel.
router.get('/user-behavior', async (req, res) => {
  try {
    const { timeRange = '30' } = req.query;
    const cacheKey = `analytics:user_behavior:${timeRange}`;

    // Check Redis cache
    const cachedData = await req.redisClient.get(cacheKey);
    if (cachedData) {
      logger.info('Returning cached user behavior data');
      return res.json({ success: true, data: JSON.parse(cachedData) });
    }

    const dateRanges = [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }];

    const [[pageResponse], [deviceResponse], [sourceResponse]] = await Promise.all([
      // Page data
      analyticsClient.runReport({
        property: `properties/${propertyId}`,
        dateRanges,
        dimensions: [{ name: 'pagePath' }],
        metrics: [
          { name: 'screenPageViews' },
          { name: 'averageSessionDuration' },
          { name: 'bounceRate' },
          { name: 'sessions' }
        ],
        orderBy: [{
          metric: { metricName: 'screenPageViews' },
          desc: true
        }],
        limit: 25
      }),
      // Device data
      analyticsClient.runReport({
        property: `properties/${propertyId}`,
        dateRanges,
        dimensions: [{ name: 'deviceCategory' }],
        metrics: [
          { name: 'screenPageViews' },
          { name: 'sessions' }
        ]
      }),
      // Source data
      analyticsClient.runReport({
        property: `properties/${propertyId}`,
        dateRanges,
        dimensions: [{ name: 'sessionSource' }],
        metrics: [
          { name: 'sessions' },
          { name: 'conversions' }
        ],
        orderBy: [{
          metric: { metricName: 'sessions' },
          desc: true
        }],
        limit: 25,
        returnPropertyQuota: true
      })
    ]);

    const response = {
      pageResponse,
      deviceResponse,
      sourceResponse
    };

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(response), {
      EX: CACHE_DURATIONS.USER_BEHAVIOR
    });

    res.json({ success: true, data: response });
  } catch (error) {
    logger.error('Error fetching user behavior data:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});

module.exports = router;
|
||||
@@ -0,0 +1,91 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const analyticsService = require('../services/analytics.service');
|
||||
|
||||
// Basic metrics endpoint
// GET /metrics?startDate=… — thin HTTP wrapper over analyticsService.getBasicMetrics.
router.get('/metrics', async (req, res) => {
  const { startDate = '7daysAgo' } = req.query;
  console.log(`Fetching metrics with startDate: ${startDate}`);

  const respondError = (error) => {
    console.error('Metrics error:', {
      startDate: req.query.startDate,
      error: error.message,
      stack: error.stack
    });
    res.status(500).json({
      success: false,
      error: 'Failed to fetch metrics',
      details: error.message
    });
  };

  try {
    const data = await analyticsService.getBasicMetrics(startDate);
    res.json({ success: true, data });
  } catch (error) {
    respondError(error);
  }
});
|
||||
|
||||
// Realtime basic data endpoint
// GET /realtime/basic — thin HTTP wrapper over analyticsService.getRealTimeBasicData.
router.get('/realtime/basic', async (req, res) => {
  console.log('Fetching realtime basic data');

  const respondError = (error) => {
    console.error('Realtime basic error:', {
      error: error.message,
      stack: error.stack
    });
    res.status(500).json({
      success: false,
      error: 'Failed to fetch realtime basic data',
      details: error.message
    });
  };

  try {
    const data = await analyticsService.getRealTimeBasicData();
    res.json({ success: true, data });
  } catch (error) {
    respondError(error);
  }
});
|
||||
|
||||
// Realtime detailed data endpoint
// GET /realtime/detailed — thin HTTP wrapper over analyticsService.getRealTimeDetailedData.
router.get('/realtime/detailed', async (req, res) => {
  console.log('Fetching realtime detailed data');

  const respondError = (error) => {
    console.error('Realtime detailed error:', {
      error: error.message,
      stack: error.stack
    });
    res.status(500).json({
      success: false,
      error: 'Failed to fetch realtime detailed data',
      details: error.message
    });
  };

  try {
    const data = await analyticsService.getRealTimeDetailedData();
    res.json({ success: true, data });
  } catch (error) {
    respondError(error);
  }
});
|
||||
|
||||
// User behavior endpoint
// GET /user-behavior?timeRange=N — thin HTTP wrapper over analyticsService.getUserBehavior.
router.get('/user-behavior', async (req, res) => {
  const { timeRange = '30' } = req.query;
  console.log(`Fetching user behavior with timeRange: ${timeRange}`);

  const respondError = (error) => {
    console.error('User behavior error:', {
      timeRange: req.query.timeRange,
      error: error.message,
      stack: error.stack
    });
    res.status(500).json({
      success: false,
      error: 'Failed to fetch user behavior data',
      details: error.message
    });
  };

  try {
    const data = await analyticsService.getUserBehavior(timeRange);
    res.json({ success: true, data });
  } catch (error) {
    respondError(error);
  }
});

module.exports = router;
|
||||
65
inventory-server/dashboard/google-server/server.js
Normal file
65
inventory-server/dashboard/google-server/server.js
Normal file
@@ -0,0 +1,65 @@
|
||||
const express = require('express');
const cors = require('cors');
const { createClient } = require('redis');
const analyticsRoutes = require('./routes/analytics.routes');

const app = express();
const port = process.env.GOOGLE_ANALYTICS_PORT || 3007;

// Redis client setup
// Shared cache client; route handlers reach it via req.redisClient (attached below).
const redisClient = createClient({
  url: process.env.REDIS_URL || 'redis://localhost:6379'
});

redisClient.on('error', (err) => console.error('Redis Client Error:', err));
redisClient.on('connect', () => console.log('Redis Client Connected'));

// Connect to Redis
// Fire-and-forget connect: a failure is logged but the server still starts, so
// cache operations in the routes will reject until Redis becomes reachable.
(async () => {
  try {
    await redisClient.connect();
  } catch (err) {
    console.error('Redis connection error:', err);
  }
})();
|
||||
|
||||
// Middleware
app.use(cors());            // permissive CORS: no origin restrictions on this server
app.use(express.json());

// Make Redis client available in requests
app.use((req, res, next) => {
  req.redisClient = redisClient;
  next();
});

// Routes
app.use('/api/analytics', analyticsRoutes);
|
||||
|
||||
// Error handling middleware
// Final express error handler. Includes the full error object only in
// non-production environments; production clients get an empty object.
// Fix: the NODE_ENV condition was inverted — it previously leaked the full
// error (including stack/internals) to clients in production and hid it in dev.
app.use((err, req, res, next) => {
  console.error('Server error:', err);
  res.status(err.status || 500).json({
    success: false,
    message: err.message || 'Internal server error',
    error: process.env.NODE_ENV !== 'production' ? err : {}
  });
});
|
||||
|
||||
// Start server
app.listen(port, () => {
  console.log(`Google Analytics server running on port ${port}`);
});

// Handle graceful shutdown
// Close the Redis connection cleanly on termination signals.
// NOTE(review): the HTTP server itself is not closed before process.exit, so
// in-flight requests are dropped — consider server.close() first if that matters.
process.on('SIGTERM', async () => {
  console.log('SIGTERM received. Shutting down gracefully...');
  await redisClient.quit();
  process.exit(0);
});

process.on('SIGINT', async () => {
  console.log('SIGINT received. Shutting down gracefully...');
  await redisClient.quit();
  process.exit(0);
});
|
||||
@@ -0,0 +1,283 @@
|
||||
const { BetaAnalyticsDataClient } = require('@google-analytics/data');
|
||||
const { createClient } = require('redis');
|
||||
|
||||
class AnalyticsService {
|
||||
  // Builds the Redis cache client and the GA4 reporting client from env vars.
  // Throws (after logging) if the GA4 credentials cannot be parsed.
  constructor() {
    // Initialize Redis client
    this.redis = createClient({
      url: process.env.REDIS_URL || 'redis://localhost:6379'
    });

    this.redis.on('error', err => console.error('Redis Client Error:', err));
    // Connection is started but not awaited; early cache calls may reject
    // until the connect promise resolves.
    this.redis.connect().catch(err => console.error('Redis connection error:', err));

    try {
      // Initialize GA4 client
      // GOOGLE_APPLICATION_CREDENTIALS_JSON may arrive as a JSON string (env
      // var) or as an already-parsed object — handle both.
      const credentials = process.env.GOOGLE_APPLICATION_CREDENTIALS_JSON;
      this.analyticsClient = new BetaAnalyticsDataClient({
        credentials: typeof credentials === 'string' ? JSON.parse(credentials) : credentials
      });

      this.propertyId = process.env.GA_PROPERTY_ID;
    } catch (error) {
      console.error('Failed to initialize GA4 client:', error);
      throw error;
    }
  }
|
||||
|
||||
  // Cache durations
  // Redis TTLs in seconds for each data category served by this class.
  CACHE_DURATIONS = {
    REALTIME_BASIC: 60, // 1 minute
    REALTIME_DETAILED: 300, // 5 minutes
    BASIC_METRICS: 3600, // 1 hour
    USER_BEHAVIOR: 3600 // 1 hour
  };
|
||||
|
||||
async getBasicMetrics(startDate = '7daysAgo') {
|
||||
const cacheKey = `analytics:basic_metrics:${startDate}`;
|
||||
|
||||
try {
|
||||
// Try Redis first
|
||||
const cachedData = await this.redis.get(cacheKey);
|
||||
if (cachedData) {
|
||||
console.log('Analytics metrics found in Redis cache');
|
||||
return JSON.parse(cachedData);
|
||||
}
|
||||
|
||||
// Fetch from GA4
|
||||
console.log('Fetching fresh metrics data from GA4');
|
||||
const [response] = await this.analyticsClient.runReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dateRanges: [{ startDate, endDate: 'today' }],
|
||||
dimensions: [{ name: 'date' }],
|
||||
metrics: [
|
||||
{ name: 'activeUsers' },
|
||||
{ name: 'newUsers' },
|
||||
{ name: 'averageSessionDuration' },
|
||||
{ name: 'screenPageViews' },
|
||||
{ name: 'bounceRate' },
|
||||
{ name: 'conversions' }
|
||||
],
|
||||
returnPropertyQuota: true
|
||||
});
|
||||
|
||||
// Cache the response
|
||||
await this.redis.set(cacheKey, JSON.stringify(response), {
|
||||
EX: this.CACHE_DURATIONS.BASIC_METRICS
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
console.error('Error fetching analytics metrics:', {
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getRealTimeBasicData() {
|
||||
const cacheKey = 'analytics:realtime:basic';
|
||||
|
||||
try {
|
||||
// Try Redis first
|
||||
const cachedData = await this.redis.get(cacheKey);
|
||||
if (cachedData) {
|
||||
console.log('Realtime basic data found in Redis cache');
|
||||
return JSON.parse(cachedData);
|
||||
}
|
||||
|
||||
console.log('Fetching fresh realtime data from GA4');
|
||||
|
||||
// Fetch active users
|
||||
const [userResponse] = await this.analyticsClient.runRealtimeReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
metrics: [{ name: 'activeUsers' }],
|
||||
returnPropertyQuota: true
|
||||
});
|
||||
|
||||
// Fetch last 5 minutes
|
||||
const [fiveMinResponse] = await this.analyticsClient.runRealtimeReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
metrics: [{ name: 'activeUsers' }],
|
||||
minuteRanges: [{ startMinutesAgo: 5, endMinutesAgo: 0 }]
|
||||
});
|
||||
|
||||
// Fetch time series data
|
||||
const [timeSeriesResponse] = await this.analyticsClient.runRealtimeReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dimensions: [{ name: 'minutesAgo' }],
|
||||
metrics: [{ name: 'activeUsers' }]
|
||||
});
|
||||
|
||||
const response = {
|
||||
userResponse,
|
||||
fiveMinResponse,
|
||||
timeSeriesResponse,
|
||||
quotaInfo: {
|
||||
projectHourly: userResponse.propertyQuota.tokensPerProjectPerHour,
|
||||
daily: userResponse.propertyQuota.tokensPerDay,
|
||||
serverErrors: userResponse.propertyQuota.serverErrorsPerProjectPerHour,
|
||||
thresholdedRequests: userResponse.propertyQuota.potentiallyThresholdedRequestsPerHour
|
||||
}
|
||||
};
|
||||
|
||||
// Cache the response
|
||||
await this.redis.set(cacheKey, JSON.stringify(response), {
|
||||
EX: this.CACHE_DURATIONS.REALTIME_BASIC
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
console.error('Error fetching realtime basic data:', {
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getRealTimeDetailedData() {
|
||||
const cacheKey = 'analytics:realtime:detailed';
|
||||
|
||||
try {
|
||||
// Try Redis first
|
||||
const cachedData = await this.redis.get(cacheKey);
|
||||
if (cachedData) {
|
||||
console.log('Realtime detailed data found in Redis cache');
|
||||
return JSON.parse(cachedData);
|
||||
}
|
||||
|
||||
console.log('Fetching fresh realtime detailed data from GA4');
|
||||
|
||||
// Fetch current pages
|
||||
const [pageResponse] = await this.analyticsClient.runRealtimeReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dimensions: [{ name: 'unifiedScreenName' }],
|
||||
metrics: [{ name: 'screenPageViews' }],
|
||||
orderBy: [{ metric: { metricName: 'screenPageViews' }, desc: true }],
|
||||
limit: 25
|
||||
});
|
||||
|
||||
// Fetch events
|
||||
const [eventResponse] = await this.analyticsClient.runRealtimeReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dimensions: [{ name: 'eventName' }],
|
||||
metrics: [{ name: 'eventCount' }],
|
||||
orderBy: [{ metric: { metricName: 'eventCount' }, desc: true }],
|
||||
limit: 25
|
||||
});
|
||||
|
||||
// Fetch device categories
|
||||
const [deviceResponse] = await this.analyticsClient.runRealtimeReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dimensions: [{ name: 'deviceCategory' }],
|
||||
metrics: [{ name: 'activeUsers' }],
|
||||
orderBy: [{ metric: { metricName: 'activeUsers' }, desc: true }],
|
||||
limit: 10,
|
||||
returnPropertyQuota: true
|
||||
});
|
||||
|
||||
const response = {
|
||||
pageResponse,
|
||||
eventResponse,
|
||||
sourceResponse: deviceResponse
|
||||
};
|
||||
|
||||
// Cache the response
|
||||
await this.redis.set(cacheKey, JSON.stringify(response), {
|
||||
EX: this.CACHE_DURATIONS.REALTIME_DETAILED
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
console.error('Error fetching realtime detailed data:', {
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getUserBehavior(timeRange = '30') {
|
||||
const cacheKey = `analytics:user_behavior:${timeRange}`;
|
||||
|
||||
try {
|
||||
// Try Redis first
|
||||
const cachedData = await this.redis.get(cacheKey);
|
||||
if (cachedData) {
|
||||
console.log('User behavior data found in Redis cache');
|
||||
return JSON.parse(cachedData);
|
||||
}
|
||||
|
||||
console.log('Fetching fresh user behavior data from GA4');
|
||||
|
||||
// Fetch page data
|
||||
const [pageResponse] = await this.analyticsClient.runReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dateRanges: [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }],
|
||||
dimensions: [{ name: 'pagePath' }],
|
||||
metrics: [
|
||||
{ name: 'screenPageViews' },
|
||||
{ name: 'averageSessionDuration' },
|
||||
{ name: 'bounceRate' },
|
||||
{ name: 'sessions' }
|
||||
],
|
||||
orderBy: [{
|
||||
metric: { metricName: 'screenPageViews' },
|
||||
desc: true
|
||||
}],
|
||||
limit: 25
|
||||
});
|
||||
|
||||
// Fetch device data
|
||||
const [deviceResponse] = await this.analyticsClient.runReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dateRanges: [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }],
|
||||
dimensions: [{ name: 'deviceCategory' }],
|
||||
metrics: [
|
||||
{ name: 'screenPageViews' },
|
||||
{ name: 'sessions' }
|
||||
]
|
||||
});
|
||||
|
||||
// Fetch source data
|
||||
const [sourceResponse] = await this.analyticsClient.runReport({
|
||||
property: `properties/${this.propertyId}`,
|
||||
dateRanges: [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }],
|
||||
dimensions: [{ name: 'sessionSource' }],
|
||||
metrics: [
|
||||
{ name: 'sessions' },
|
||||
{ name: 'conversions' }
|
||||
],
|
||||
orderBy: [{
|
||||
metric: { metricName: 'sessions' },
|
||||
desc: true
|
||||
}],
|
||||
limit: 25,
|
||||
returnPropertyQuota: true
|
||||
});
|
||||
|
||||
const response = {
|
||||
pageResponse,
|
||||
deviceResponse,
|
||||
sourceResponse
|
||||
};
|
||||
|
||||
// Cache the response
|
||||
await this.redis.set(cacheKey, JSON.stringify(response), {
|
||||
EX: this.CACHE_DURATIONS.USER_BEHAVIOR
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
console.error('Error fetching user behavior data:', {
|
||||
error: error.message,
|
||||
stack: error.stack
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new AnalyticsService();
|
||||
35
inventory-server/dashboard/google-server/utils/logger.js
Normal file
35
inventory-server/dashboard/google-server/utils/logger.js
Normal file
@@ -0,0 +1,35 @@
|
||||
const winston = require('winston');
const path = require('path');

// Rotating file transport shared by both sinks: 10MB per file, 5 files kept,
// written under ../logs/pm2 relative to this module.
const fileTransport = (filename, extra = {}) =>
  new winston.transports.File({
    filename: path.join(__dirname, `../logs/pm2/${filename}`),
    maxsize: 10485760, // 10MB
    maxFiles: 5,
    ...extra
  });

// JSON-formatted, timestamped logger. Level comes from LOG_LEVEL (default
// 'info'); errors additionally go to their own file.
const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || 'info',
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.json()
  ),
  transports: [
    fileTransport('error.log', { level: 'error' }),
    fileTransport('combined.log')
  ]
});

// Add console transport in development
if (process.env.NODE_ENV !== 'production') {
  logger.add(new winston.transports.Console({
    format: winston.format.combine(
      winston.format.colorize(),
      winston.format.simple()
    )
  }));
}

module.exports = logger;
|
||||
1036
inventory-server/dashboard/gorgias-server/package-lock.json
generated
Normal file
1036
inventory-server/dashboard/gorgias-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
19
inventory-server/dashboard/gorgias-server/package.json
Normal file
19
inventory-server/dashboard/gorgias-server/package.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"name": "gorgias-server",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"axios": "^1.7.9",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.4.7",
|
||||
"express": "^4.21.2",
|
||||
"redis": "^4.7.0"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
const express = require('express');
const router = express.Router();
const gorgiasService = require('../services/gorgias.service');

/**
 * POST /stats/:name — proxy a Gorgias statistics query.
 * Body is forwarded verbatim as the statistic's filters; upstream API errors
 * are translated to matching HTTP statuses with a descriptive payload.
 */
router.post('/stats/:name', async (req, res) => {
  const { name } = req.params;
  const filters = req.body;

  try {
    console.log(`Fetching ${name} statistics with filters:`, filters);

    // Defensive check — Express won't normally match this route without :name.
    if (!name) {
      return res.status(400).json({
        error: 'Missing statistic name',
        details: 'The name parameter is required'
      });
    }

    const data = await gorgiasService.getStatistics(name, filters);

    if (!data) {
      return res.status(404).json({
        error: 'No data found',
        details: `No statistics found for ${name}`
      });
    }

    return res.json({ data });
  } catch (error) {
    console.error('Statistics error:', {
      name: req.params.name,
      filters: req.body,
      error: error.message,
      stack: error.stack,
      response: error.response?.data
    });

    // Map known upstream failure modes onto matching HTTP statuses.
    const status = error.response?.status;
    if (status === 401) {
      return res.status(401).json({
        error: 'Authentication failed',
        details: 'Invalid Gorgias API credentials'
      });
    }
    if (status === 404) {
      return res.status(404).json({
        error: 'Not found',
        details: `Statistics type '${req.params.name}' not found`
      });
    }
    if (status === 400) {
      return res.status(400).json({
        error: 'Invalid request',
        details: error.response?.data?.message || 'The request was invalid',
        data: error.response?.data
      });
    }

    return res.status(500).json({
      error: 'Failed to fetch statistics',
      details: error.response?.data?.message || error.message,
      data: error.response?.data
    });
  }
});

/**
 * GET /tickets — list tickets; query-string parameters are forwarded to the
 * service layer unchanged.
 */
router.get('/tickets', async (req, res) => {
  try {
    const data = await gorgiasService.getTickets(req.query);
    return res.json(data);
  } catch (error) {
    console.error('Tickets error:', {
      params: req.query,
      error: error.message,
      response: error.response?.data
    });

    const status = error.response?.status;
    if (status === 401) {
      return res.status(401).json({
        error: 'Authentication failed',
        details: 'Invalid Gorgias API credentials'
      });
    }
    if (status === 400) {
      return res.status(400).json({
        error: 'Invalid request',
        details: error.response?.data?.message || 'The request was invalid',
        data: error.response?.data
      });
    }

    return res.status(500).json({
      error: 'Failed to fetch tickets',
      details: error.response?.data?.message || error.message,
      data: error.response?.data
    });
  }
});

/**
 * GET /satisfaction — customer-satisfaction survey data; any failure maps to
 * a single generic 500.
 */
router.get('/satisfaction', async (req, res) => {
  try {
    const data = await gorgiasService.getCustomerSatisfaction(req.query);
    return res.json(data);
  } catch (error) {
    console.error('Satisfaction error:', error);
    return res.status(500).json({
      error: 'Failed to fetch customer satisfaction',
      details: error.response?.data || error.message
    });
  }
});

module.exports = router;
|
||||
31
inventory-server/dashboard/gorgias-server/server.js
Normal file
31
inventory-server/dashboard/gorgias-server/server.js
Normal file
@@ -0,0 +1,31 @@
|
||||
const express = require('express');
const cors = require('cors');
const path = require('path');

// Load environment from the .env sitting next to this file (not the CWD).
require('dotenv').config({
  path: path.resolve(__dirname, '.env')
});

const gorgiasRoutes = require('./routes/gorgias.routes');

const app = express();
const port = process.env.PORT || 3006;

// Global middleware: CORS for the dashboard frontend, JSON body parsing.
app.use(cors());
app.use(express.json());

// All Gorgias endpoints are mounted under /api/gorgias.
app.use('/api/gorgias', gorgiasRoutes);

// Last-resort error handler: log the stack, return a generic 500.
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({ error: 'Something went wrong!' });
});

// Start server
app.listen(port, () => {
  console.log(`Gorgias API server running on port ${port}`);
});

module.exports = app;
|
||||
@@ -0,0 +1,119 @@
|
||||
const axios = require('axios');
const { createClient } = require('redis');

/**
 * Thin Gorgias REST client with a 5-minute Redis read-through cache.
 * Credentials and the account subdomain come from environment variables
 * (GORGIAS_API_USERNAME, GORGIAS_API_KEY, GORGIAS_DOMAIN, REDIS_URL).
 */
class GorgiasService {
  constructor() {
    // Redis connects eagerly; failures are logged but not fatal — later
    // cache reads will reject and the caller falls through to the API.
    this.redis = createClient({
      url: process.env.REDIS_URL
    });

    this.redis.on('error', err => console.error('Redis Client Error:', err));
    this.redis.connect().catch(err => console.error('Redis connection error:', err));

    // Create base64 encoded auth string
    const auth = Buffer.from(`${process.env.GORGIAS_API_USERNAME}:${process.env.GORGIAS_API_KEY}`).toString('base64');

    this.apiClient = axios.create({
      baseURL: `https://${process.env.GORGIAS_DOMAIN}.gorgias.com/api`,
      headers: {
        'Authorization': `Basic ${auth}`,
        'Content-Type': 'application/json'
      }
    });
  }

  /**
   * Fetch a named Gorgias statistic (POST /stats/{name}), normalising date
   * filters to full UTC days and caching the result for 5 minutes.
   *
   * @param {string} name statistic endpoint name.
   * @param {object} filters request body; *_date fields are promoted to
   *   *_datetime bounds spanning the whole UTC day.
   * @returns {Promise<object>} the statistic payload.
   * @throws re-throws any Redis/API error after logging it.
   */
  async getStatistics(name, filters = {}) {
    const cacheKey = `gorgias:stats:${name}:${JSON.stringify(filters)}`;

    try {
      // Try Redis first
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log(`Statistics ${name} found in Redis cache`);
        return JSON.parse(cachedData);
      }

      console.log(`Fetching ${name} statistics with filters:`, filters);

      // FIX: the old normalisation built `new Date(undefined)` (an Invalid
      // Date whose toISOString() throws a RangeError) whenever neither
      // *_datetime nor *_date was supplied, and it also re-floored a
      // caller-provided start_datetime whenever only the end bound was
      // missing. Normalise each bound independently, and only when a valid
      // source value exists.
      if (!filters.start_datetime || !filters.end_datetime) {
        const normalized = { ...filters };
        if (!normalized.start_datetime && normalized.start_date) {
          const start = new Date(normalized.start_date);
          if (!Number.isNaN(start.getTime())) {
            start.setUTCHours(0, 0, 0, 0);
            normalized.start_datetime = start.toISOString();
          }
        }
        if (!normalized.end_datetime && normalized.end_date) {
          const end = new Date(normalized.end_date);
          if (!Number.isNaN(end.getTime())) {
            end.setUTCHours(23, 59, 59, 999);
            normalized.end_datetime = end.toISOString();
          }
        }
        filters = normalized;
      }

      // Fetch from API
      const response = await this.apiClient.post(`/stats/${name}`, filters);
      const data = response.data;

      // Save to Redis with 5 minute expiry
      await this.redis.set(cacheKey, JSON.stringify(data), {
        EX: 300 // 5 minutes
      });

      return data;
    } catch (error) {
      console.error(`Error in getStatistics for ${name}:`, {
        error: error.message,
        filters,
        response: error.response?.data
      });
      throw error;
    }
  }

  /**
   * List tickets (GET /tickets), converting start_date/end_date query params
   * into full-UTC-day *_datetime bounds. Results are cached for 5 minutes.
   *
   * @param {object} params query parameters forwarded to the API.
   * @returns {Promise<object>} the tickets payload.
   * @throws re-throws any Redis/API error after logging it.
   */
  async getTickets(params = {}) {
    const cacheKey = `gorgias:tickets:${JSON.stringify(params)}`;

    try {
      // Try Redis first
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('Tickets found in Redis cache');
        return JSON.parse(cachedData);
      }

      // Convert dates to UTC midnight
      const formattedParams = { ...params };
      if (params.start_date) {
        const start = new Date(params.start_date);
        start.setUTCHours(0, 0, 0, 0);
        formattedParams.start_datetime = start.toISOString();
        delete formattedParams.start_date;
      }
      if (params.end_date) {
        const end = new Date(params.end_date);
        end.setUTCHours(23, 59, 59, 999);
        formattedParams.end_datetime = end.toISOString();
        delete formattedParams.end_date;
      }

      // Fetch from API
      const response = await this.apiClient.get('/tickets', { params: formattedParams });
      const data = response.data;

      // Save to Redis with 5 minute expiry
      await this.redis.set(cacheKey, JSON.stringify(data), {
        EX: 300 // 5 minutes
      });

      return data;
    } catch (error) {
      console.error('Error fetching tickets:', {
        error: error.message,
        params,
        response: error.response?.data
      });
      throw error;
    }
  }
}

module.exports = new GorgiasService();
|
||||
1966
inventory-server/dashboard/klaviyo-server/package-lock.json
generated
Normal file
1966
inventory-server/dashboard/klaviyo-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
25
inventory-server/dashboard/klaviyo-server/package.json
Normal file
25
inventory-server/dashboard/klaviyo-server/package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "klaviyo-server",
|
||||
"version": "1.0.0",
|
||||
"description": "Klaviyo API integration server",
|
||||
"main": "server.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.4.7",
|
||||
"esm": "^3.2.25",
|
||||
"express": "^4.18.2",
|
||||
"express-rate-limit": "^7.5.0",
|
||||
"ioredis": "^5.4.1",
|
||||
"luxon": "^3.5.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"recharts": "^2.15.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
import express from 'express';
|
||||
import { CampaignsService } from '../services/campaigns.service.js';
|
||||
import { TimeManager } from '../utils/time.utils.js';
|
||||
|
||||
/**
 * Build the Express router for Klaviyo campaign endpoints.
 *
 * @param {string} apiKey Klaviyo private API key.
 * @param {string} apiRevision Klaviyo API revision header value.
 * @returns {import('express').Router} router with '/' and '/:timeRange'.
 */
export function createCampaignsRouter(apiKey, apiRevision) {
  const router = express.Router();
  // FIX: dropped the unused `timeManager` instance the original constructed —
  // nothing in this router referenced it.
  const campaignsService = new CampaignsService(apiKey, apiRevision);

  // GET / — list campaigns with optional paging/sorting/status/date filters.
  router.get('/', async (req, res) => {
    try {
      const params = {
        pageSize: parseInt(req.query.pageSize) || 50,
        sort: req.query.sort || '-send_time',
        status: req.query.status,
        startDate: req.query.startDate,
        endDate: req.query.endDate,
        pageCursor: req.query.pageCursor
      };

      console.log('[Campaigns Route] Fetching campaigns with params:', params);
      const data = await campaignsService.getCampaigns(params);
      console.log('[Campaigns Route] Success:', {
        count: data.data?.length || 0
      });
      res.json(data);
    } catch (error) {
      console.error('[Campaigns Route] Error:', error);
      res.status(500).json({
        status: 'error',
        message: error.message,
        details: error.response?.data || null
      });
    }
  });

  // GET /:timeRange — campaigns for a named range, or 'custom' with explicit
  // startDate/endDate query parameters.
  router.get('/:timeRange', async (req, res) => {
    try {
      const { timeRange } = req.params;
      const { status } = req.query;

      let result;
      if (timeRange === 'custom') {
        const { startDate, endDate } = req.query;
        if (!startDate || !endDate) {
          return res.status(400).json({ error: 'Custom range requires startDate and endDate' });
        }

        result = await campaignsService.getCampaigns({
          startDate,
          endDate,
          status
        });
      } else {
        result = await campaignsService.getCampaignsByTimeRange(
          timeRange,
          { status }
        );
      }

      res.json(result);
    } catch (error) {
      console.error("[Campaigns Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  return router;
}
|
||||
@@ -0,0 +1,480 @@
|
||||
import express from 'express';
|
||||
import { EventsService } from '../services/events.service.js';
|
||||
import { TimeManager } from '../utils/time.utils.js';
|
||||
import { RedisService } from '../services/redis.service.js';
|
||||
|
||||
// Klaviyo metric IDs, duplicated from events.service.js so this router can
// reference them directly.
// NOTE(review): these IDs look account-specific — keep them in sync with the
// copy in the events service.
const METRIC_IDS = {
  PLACED_ORDER: 'Y8cqcF',
  SHIPPED_ORDER: 'VExpdL',
  ACCOUNT_CREATED: 'TeeypV',
  CANCELED_ORDER: 'YjVMNg',
  NEW_BLOG_POST: 'YcxeDr',
  PAYMENT_REFUNDED: 'R7XUYh'
};
|
||||
|
||||
export function createEventsRouter(apiKey, apiRevision) {
|
||||
const router = express.Router();
|
||||
const timeManager = new TimeManager();
|
||||
const eventsService = new EventsService(apiKey, apiRevision);
|
||||
const redisService = new RedisService();
|
||||
|
||||
// Get events with optional filtering
|
||||
router.get('/', async (req, res) => {
|
||||
try {
|
||||
const params = {
|
||||
pageSize: parseInt(req.query.pageSize) || 50,
|
||||
sort: req.query.sort || '-datetime',
|
||||
metricId: req.query.metricId,
|
||||
startDate: req.query.startDate,
|
||||
endDate: req.query.endDate,
|
||||
pageCursor: req.query.pageCursor,
|
||||
fields: {}
|
||||
};
|
||||
|
||||
// Parse fields parameter if provided
|
||||
if (req.query.fields) {
|
||||
try {
|
||||
params.fields = JSON.parse(req.query.fields);
|
||||
} catch (e) {
|
||||
console.warn('[Events Route] Invalid fields parameter:', e);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('[Events Route] Fetching events with params:', params);
|
||||
const data = await eventsService.getEvents(params);
|
||||
console.log('[Events Route] Success:', {
|
||||
count: data.data?.length || 0,
|
||||
included: data.included?.length || 0
|
||||
});
|
||||
res.json(data);
|
||||
} catch (error) {
|
||||
console.error('[Events Route] Error:', error);
|
||||
res.status(500).json({
|
||||
status: 'error',
|
||||
message: error.message,
|
||||
details: error.response?.data || null
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Get events by time range
|
||||
router.get('/by-time/:timeRange', async (req, res) => {
|
||||
try {
|
||||
const { timeRange } = req.params;
|
||||
const { metricId, startDate, endDate } = req.query;
|
||||
|
||||
let result;
|
||||
if (timeRange === 'custom') {
|
||||
if (!startDate || !endDate) {
|
||||
return res.status(400).json({ error: 'Custom range requires startDate and endDate' });
|
||||
}
|
||||
|
||||
const range = timeManager.getCustomRange(startDate, endDate);
|
||||
if (!range) {
|
||||
return res.status(400).json({ error: 'Invalid date range' });
|
||||
}
|
||||
|
||||
result = await eventsService.getEvents({
|
||||
metricId,
|
||||
startDate: range.start.toISO(),
|
||||
endDate: range.end.toISO()
|
||||
});
|
||||
} else {
|
||||
result = await eventsService.getEventsByTimeRange(
|
||||
timeRange,
|
||||
{ metricId }
|
||||
);
|
||||
}
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error("[Events Route] Error:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get comprehensive statistics for a time period
|
||||
router.get('/stats', async (req, res) => {
|
||||
try {
|
||||
const { timeRange, startDate, endDate } = req.query;
|
||||
console.log('[Events Route] Stats request:', {
|
||||
timeRange,
|
||||
startDate,
|
||||
endDate
|
||||
});
|
||||
|
||||
let range;
|
||||
if (startDate && endDate) {
|
||||
range = timeManager.getCustomRange(startDate, endDate);
|
||||
} else if (timeRange) {
|
||||
range = timeManager.getDateRange(timeRange);
|
||||
} else {
|
||||
return res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
|
||||
}
|
||||
|
||||
if (!range) {
|
||||
return res.status(400).json({ error: 'Invalid time range' });
|
||||
}
|
||||
|
||||
const params = {
|
||||
timeRange,
|
||||
startDate: range.start.toISO(),
|
||||
endDate: range.end.toISO()
|
||||
};
|
||||
|
||||
console.log('[Events Route] Calculating period stats with params:', params);
|
||||
const stats = await eventsService.calculatePeriodStats(params);
|
||||
console.log('[Events Route] Stats response:', {
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO()
|
||||
},
|
||||
shippedCount: stats?.shipping?.shippedCount,
|
||||
totalOrders: stats?.orderCount
|
||||
});
|
||||
|
||||
res.json({
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO(),
|
||||
displayStart: timeManager.formatForDisplay(range.start),
|
||||
displayEnd: timeManager.formatForDisplay(range.end)
|
||||
},
|
||||
stats
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("[Events Route] Error:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Add new route for smart revenue projection
|
||||
router.get('/projection', async (req, res) => {
|
||||
try {
|
||||
const { timeRange, startDate, endDate } = req.query;
|
||||
console.log('[Events Route] Projection request:', {
|
||||
timeRange,
|
||||
startDate,
|
||||
endDate
|
||||
});
|
||||
|
||||
let range;
|
||||
if (startDate && endDate) {
|
||||
range = timeManager.getCustomRange(startDate, endDate);
|
||||
} else if (timeRange) {
|
||||
range = timeManager.getDateRange(timeRange);
|
||||
} else {
|
||||
return res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
|
||||
}
|
||||
|
||||
if (!range) {
|
||||
return res.status(400).json({ error: 'Invalid time range' });
|
||||
}
|
||||
|
||||
const params = {
|
||||
timeRange,
|
||||
startDate: range.start.toISO(),
|
||||
endDate: range.end.toISO()
|
||||
};
|
||||
|
||||
// Try to get from cache first with a short TTL
|
||||
const cacheKey = redisService._getCacheKey('projection', params);
|
||||
const cachedData = await redisService.get(cacheKey);
|
||||
|
||||
if (cachedData) {
|
||||
console.log('[Events Route] Cache hit for projection');
|
||||
return res.json(cachedData);
|
||||
}
|
||||
|
||||
console.log('[Events Route] Calculating smart projection with params:', params);
|
||||
const projection = await eventsService.calculateSmartProjection(params);
|
||||
|
||||
// Cache the results with a short TTL (5 minutes)
|
||||
await redisService.set(cacheKey, projection, 300);
|
||||
|
||||
res.json(projection);
|
||||
} catch (error) {
|
||||
console.error("[Events Route] Error calculating projection:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Add new route for detailed stats
|
||||
router.get('/stats/details', async (req, res) => {
|
||||
try {
|
||||
const { timeRange, startDate, endDate, metric, daily = false } = req.query;
|
||||
|
||||
let range;
|
||||
if (startDate && endDate) {
|
||||
range = timeManager.getCustomRange(startDate, endDate);
|
||||
} else if (timeRange) {
|
||||
range = timeManager.getDateRange(timeRange);
|
||||
} else {
|
||||
return res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
|
||||
}
|
||||
|
||||
if (!range) {
|
||||
return res.status(400).json({ error: 'Invalid time range' });
|
||||
}
|
||||
|
||||
const params = {
|
||||
timeRange,
|
||||
startDate: range.start.toISO(),
|
||||
endDate: range.end.toISO(),
|
||||
metric,
|
||||
daily: daily === 'true' || daily === true
|
||||
};
|
||||
|
||||
// Try to get from cache first
|
||||
const cacheKey = redisService._getCacheKey('stats:details', params);
|
||||
const cachedData = await redisService.get(cacheKey);
|
||||
|
||||
if (cachedData) {
|
||||
console.log('[Events Route] Cache hit for detailed stats');
|
||||
return res.json({
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO(),
|
||||
displayStart: timeManager.formatForDisplay(range.start),
|
||||
displayEnd: timeManager.formatForDisplay(range.end)
|
||||
},
|
||||
stats: cachedData
|
||||
});
|
||||
}
|
||||
|
||||
const stats = await eventsService.calculateDetailedStats(params);
|
||||
|
||||
// Cache the results
|
||||
const ttl = redisService._getTTL(timeRange);
|
||||
await redisService.set(cacheKey, stats, ttl);
|
||||
|
||||
res.json({
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO(),
|
||||
displayStart: timeManager.formatForDisplay(range.start),
|
||||
displayEnd: timeManager.formatForDisplay(range.end)
|
||||
},
|
||||
stats
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("[Events Route] Error:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get product statistics for a time period
|
||||
router.get('/products', async (req, res) => {
|
||||
try {
|
||||
const { timeRange, startDate, endDate } = req.query;
|
||||
|
||||
let range;
|
||||
if (startDate && endDate) {
|
||||
range = timeManager.getCustomRange(startDate, endDate);
|
||||
} else if (timeRange) {
|
||||
range = timeManager.getDateRange(timeRange);
|
||||
} else {
|
||||
return res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
|
||||
}
|
||||
|
||||
if (!range) {
|
||||
return res.status(400).json({ error: 'Invalid time range' });
|
||||
}
|
||||
|
||||
const params = {
|
||||
timeRange,
|
||||
startDate: range.start.toISO(),
|
||||
endDate: range.end.toISO()
|
||||
};
|
||||
|
||||
// Try to get from cache first
|
||||
const cacheKey = redisService._getCacheKey('events', params);
|
||||
const cachedData = await redisService.getEventData('products', params);
|
||||
|
||||
if (cachedData) {
|
||||
console.log('[Events Route] Cache hit for products');
|
||||
return res.json({
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO(),
|
||||
displayStart: timeManager.formatForDisplay(range.start),
|
||||
displayEnd: timeManager.formatForDisplay(range.end)
|
||||
},
|
||||
stats: {
|
||||
products: cachedData
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const stats = await eventsService.calculatePeriodStats(params);
|
||||
|
||||
res.json({
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO(),
|
||||
displayStart: timeManager.formatForDisplay(range.start),
|
||||
displayEnd: timeManager.formatForDisplay(range.end)
|
||||
},
|
||||
stats
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("[Events Route] Error:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get event feed (multiple event types sorted by time)
|
||||
router.get('/feed', async (req, res) => {
|
||||
try {
|
||||
const { timeRange, startDate, endDate, metricIds } = req.query;
|
||||
|
||||
let range;
|
||||
if (startDate && endDate) {
|
||||
range = timeManager.getCustomRange(startDate, endDate);
|
||||
} else if (timeRange) {
|
||||
range = timeManager.getDateRange(timeRange);
|
||||
} else {
|
||||
return res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
|
||||
}
|
||||
|
||||
if (!range) {
|
||||
return res.status(400).json({ error: 'Invalid time range' });
|
||||
}
|
||||
|
||||
const params = {
|
||||
timeRange,
|
||||
startDate: range.start.toISO(),
|
||||
endDate: range.end.toISO(),
|
||||
metricIds: metricIds ? JSON.parse(metricIds) : null
|
||||
};
|
||||
|
||||
const result = await eventsService.getMultiMetricEvents(params);
|
||||
|
||||
res.json({
|
||||
timeRange: {
|
||||
start: range.start.toISO(),
|
||||
end: range.end.toISO(),
|
||||
displayStart: timeManager.formatForDisplay(range.start),
|
||||
displayEnd: timeManager.formatForDisplay(range.end)
|
||||
},
|
||||
...result
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("[Events Route] Error:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get aggregated events data, bucketed by the requested interval (default: day).
router.get('/aggregate', async (req, res) => {
  try {
    const { timeRange, startDate, endDate, interval = 'day', metricId, property } = req.query;

    // Explicit start/end dates win over a named time range.
    const hasCustomDates = Boolean(startDate && endDate);
    if (!hasCustomDates && !timeRange) {
      return res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
    }

    const range = hasCustomDates
      ? timeManager.getCustomRange(startDate, endDate)
      : timeManager.getDateRange(timeRange);

    if (!range) {
      return res.status(400).json({ error: 'Invalid time range' });
    }

    // Fetch the raw events, then fold them into interval buckets.
    const result = await eventsService.getEvents({
      timeRange,
      startDate: range.start.toISO(),
      endDate: range.end.toISO(),
      metricId,
      interval,
      property
    });
    const groupedData = timeManager.groupEventsByInterval(result.data, interval, property);

    res.json({
      timeRange: {
        start: range.start.toISO(),
        end: range.end.toISO(),
        displayStart: timeManager.formatForDisplay(range.start),
        displayEnd: timeManager.formatForDisplay(range.end)
      },
      data: groupedData
    });
  } catch (error) {
    console.error("[Events Route] Error:", error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// Resolve a named or custom time period into concrete start/end timestamps.
router.get("/dateRange", async (req, res) => {
  try {
    const { timeRange, startDate, endDate } = req.query;

    // Custom dates take precedence; otherwise use the named range (default: today).
    const range = (startDate && endDate)
      ? timeManager.getCustomRange(startDate, endDate)
      : timeManager.getDateRange(timeRange || 'today');

    if (!range) {
      return res.status(400).json({
        error: "Invalid time range parameters"
      });
    }

    res.json({
      start: range.start.toISO(),
      end: range.end.toISO(),
      displayStart: timeManager.formatForDisplay(range.start),
      displayEnd: timeManager.formatForDisplay(range.end)
    });
  } catch (error) {
    console.error('Error getting date range:', error);
    res.status(500).json({
      error: "Failed to get date range"
    });
  }
});
|
||||
|
||||
// Invalidate cached data for a specific time range.
router.post("/clearCache", async (req, res) => {
  try {
    // The body may carry a named range or explicit start/end dates;
    // forward the selector unchanged to the cache layer.
    const { timeRange, startDate, endDate } = req.body;
    const selector = { timeRange, startDate, endDate };
    await redisService.clearCache(selector);
    res.json({ message: "Cache cleared successfully" });
  } catch (error) {
    console.error('Error clearing cache:', error);
    res.status(500).json({ error: "Failed to clear cache" });
  }
});
|
||||
|
||||
// Batch metrics endpoint: fetch several metrics for one time window in a single call.
router.get('/batch', async (req, res) => {
  try {
    const { timeRange, startDate, endDate, metrics } = req.query;

    // metrics arrives as a JSON-encoded array in the query string.
    // Guard the parse so malformed client input yields a 400 instead of a 500.
    let metricsList = [];
    if (metrics) {
      try {
        metricsList = JSON.parse(metrics);
      } catch {
        return res.status(400).json({ error: 'Invalid metrics: must be a JSON array' });
      }
    }

    // For custom ranges the explicit dates are forwarded; otherwise the
    // named range is resolved downstream by the service.
    const params = timeRange === 'custom'
      ? { startDate, endDate, metrics: metricsList }
      : { timeRange, metrics: metricsList };

    const results = await eventsService.getBatchMetrics(params);

    res.json(results);
  } catch (error) {
    console.error('[Events Route] Error in batch request:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
return router;
|
||||
}
|
||||
17
inventory-server/dashboard/klaviyo-server/routes/index.js
Normal file
17
inventory-server/dashboard/klaviyo-server/routes/index.js
Normal file
@@ -0,0 +1,17 @@
|
||||
import express from 'express';
|
||||
import { createEventsRouter } from './events.routes.js';
|
||||
import { createMetricsRoutes } from './metrics.routes.js';
|
||||
import { createCampaignsRouter } from './campaigns.routes.js';
|
||||
import { createReportingRouter } from './reporting.routes.js';
|
||||
|
||||
/**
 * Build the top-level API router, mounting one sub-router per resource.
 * Every factory receives the same Klaviyo credentials.
 */
export function createApiRouter(apiKey, apiRevision) {
  const router = express.Router();

  // Mount routers: path -> factory.
  const mounts = [
    ['/events', createEventsRouter],
    ['/metrics', createMetricsRoutes],
    ['/campaigns', createCampaignsRouter],
    ['/reporting', createReportingRouter]
  ];
  for (const [path, factory] of mounts) {
    router.use(path, factory(apiKey, apiRevision));
  }

  return router;
}
|
||||
@@ -0,0 +1,29 @@
|
||||
import express from 'express';
|
||||
import { MetricsService } from '../services/metrics.service.js';
|
||||
|
||||
/**
 * Build the metrics sub-router.
 *
 * The router is created inside the factory (rather than at module level) so
 * that each call returns an independent router and repeated calls do not
 * stack duplicate route handlers onto a single shared instance — this also
 * matches the other route factories in this package.
 */
export function createMetricsRoutes(apiKey, apiRevision) {
  const router = express.Router();
  const metricsService = new MetricsService(apiKey, apiRevision);

  // Get all metrics
  router.get('/', async (req, res) => {
    try {
      console.log('[Metrics Route] Fetching metrics');
      const data = await metricsService.getMetrics();
      console.log('[Metrics Route] Success:', {
        count: data.data?.length || 0
      });
      res.json(data);
    } catch (error) {
      console.error('[Metrics Route] Error:', error);
      // Surface upstream API details when available to aid debugging.
      res.status(500).json({
        status: 'error',
        message: error.message,
        details: error.response?.data || null
      });
    }
  });

  return router;
}
|
||||
@@ -0,0 +1,29 @@
|
||||
import express from 'express';
|
||||
import { ReportingService } from '../services/reporting.service.js';
|
||||
import { TimeManager } from '../utils/time.utils.js';
|
||||
|
||||
/**
 * Build the reporting sub-router.
 * Exposes GET /campaigns/:timeRange with an optional ?channel= filter.
 */
export function createReportingRouter(apiKey, apiRevision) {
  const router = express.Router();
  const reportingService = new ReportingService(apiKey, apiRevision);
  const timeManager = new TimeManager();

  // Get campaign reports by time range
  router.get('/campaigns/:timeRange', async (req, res) => {
    try {
      const { timeRange } = req.params;
      const { channel } = req.query;

      const reports = await reportingService.getCampaignReports({ timeRange, channel });

      res.json(reports);
    } catch (error) {
      console.error('[ReportingRoutes] Error fetching campaign reports:', error);
      res.status(500).json({ error: error.message });
    }
  });

  return router;
}
|
||||
78
inventory-server/dashboard/klaviyo-server/server.js
Normal file
78
inventory-server/dashboard/klaviyo-server/server.js
Normal file
@@ -0,0 +1,78 @@
|
||||
import express from 'express';
|
||||
import cors from 'cors';
|
||||
import dotenv from 'dotenv';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import { createApiRouter } from './routes/index.js';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// Get directory name in ES modules (no __dirname is defined under ESM).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Load environment variables from the .env file next to this script.
// Must run before anything below reads process.env.
const envPath = path.resolve(__dirname, '.env');
console.log('[Server] Loading .env file from:', envPath);
dotenv.config({ path: envPath });

// Debug environment variables (without exposing sensitive data)
console.log('[Server] Environment variables loaded:', {
  REDIS_HOST: process.env.REDIS_HOST || '(not set)',
  REDIS_PORT: process.env.REDIS_PORT || '(not set)',
  REDIS_USERNAME: process.env.REDIS_USERNAME || '(not set)',
  REDIS_PASSWORD: process.env.REDIS_PASSWORD ? '(set)' : '(not set)',
  NODE_ENV: process.env.NODE_ENV || '(not set)',
});

const app = express();
const port = process.env.KLAVIYO_PORT || 3004;

// Rate limiting for reporting endpoints: 10 requests per IP per 10-minute window.
const reportingLimiter = rateLimit({
  windowMs: 10 * 60 * 1000, // 10 minutes
  max: 10, // limit each IP to 10 requests per windowMs
  message: 'Too many requests to reporting endpoint, please try again later',
  keyGenerator: (req) => {
    // Use a combination of IP and endpoint for more granular control
    return `${req.ip}-reporting`;
  },
  skip: (req) => {
    // Only apply to campaign-values-reports endpoint
    return !req.path.includes('campaign-values-reports');
  }
});

// Middleware
app.use(cors());
app.use(express.json());

// Debug middleware to log all requests
app.use((req, res, next) => {
  console.log(`[${new Date().toISOString()}] ${req.method} ${req.url}`);
  next();
});

// Apply rate limiting to reporting endpoints (registered before the API router
// so the limiter sees the request first).
app.use('/api/klaviyo/reporting', reportingLimiter);

// Create and mount API routes.
// NOTE(review): if KLAVIYO_API_KEY is unset, routers are still built with an
// undefined key and every upstream call will fail — consider failing fast here.
const apiRouter = createApiRouter(
  process.env.KLAVIYO_API_KEY,
  process.env.KLAVIYO_API_REVISION || '2024-02-15'
);
app.use('/api/klaviyo', apiRouter);

// Error handling middleware — must be registered after all routes.
app.use((err, req, res, next) => {
  console.error('Unhandled error:', err);
  res.status(500).json({
    status: 'error',
    message: 'Internal server error',
    // Only leak error details outside production-like environments.
    details: process.env.NODE_ENV === 'development' ? err.message : undefined
  });
});

// Start server
app.listen(port, '0.0.0.0', () => {
  console.log(`Klaviyo server listening at http://0.0.0.0:${port}`);
});
|
||||
@@ -0,0 +1,206 @@
|
||||
import fetch from 'node-fetch';
|
||||
import { TimeManager } from '../utils/time.utils.js';
|
||||
import { RedisService } from './redis.service.js';
|
||||
|
||||
/**
 * Fetches campaigns from the Klaviyo REST API with cursor pagination,
 * Redis caching, and in-flight request de-duplication.
 */
export class CampaignsService {
  /**
   * @param {string} apiKey Klaviyo private API key.
   * @param {string} apiRevision Value for Klaviyo's `revision` header.
   */
  constructor(apiKey, apiRevision) {
    this.apiKey = apiKey;
    this.apiRevision = apiRevision;
    this.baseUrl = 'https://a.klaviyo.com/api';
    this.timeManager = new TimeManager();
    this.redisService = new RedisService();
  }

  /**
   * Fetch all campaigns matching `params`, following pagination to the end.
   * Checks Redis first; concurrent identical calls share a single in-flight
   * promise keyed by the serialized params.
   * Returns { data: transformedCampaigns, meta: { total_count, page_count } }.
   */
  async getCampaigns(params = {}) {
    try {
      // Add request debouncing: identical concurrent requests share one promise.
      const requestKey = JSON.stringify(params);
      if (this._pendingRequests && this._pendingRequests[requestKey]) {
        return this._pendingRequests[requestKey];
      }

      // Try to get from cache first
      const cacheKey = this.redisService._getCacheKey('campaigns', params);
      let cachedData = null;
      try {
        cachedData = await this.redisService.get(`${cacheKey}:raw`);
        if (cachedData) {
          return cachedData;
        }
      } catch (cacheError) {
        // Cache failures are non-fatal; fall through to a live fetch.
        console.warn('[CampaignsService] Cache error:', cacheError);
      }

      this._pendingRequests = this._pendingRequests || {};
      this._pendingRequests[requestKey] = (async () => {
        let allCampaigns = [];
        let nextCursor = params.pageCursor;
        let pageCount = 0;

        // An explicit params.filter overrides the generated one.
        const filter = params.filter || this._buildFilter(params);

        // Walk Klaviyo's cursor-based pagination until no next page remains.
        do {
          const queryParams = new URLSearchParams();
          if (filter) {
            queryParams.append('filter', filter);
          }
          queryParams.append('sort', params.sort || '-send_time');

          if (nextCursor) {
            queryParams.append('page[cursor]', nextCursor);
          }

          const url = `${this.baseUrl}/campaigns?${queryParams.toString()}`;

          try {
            const response = await fetch(url, {
              method: 'GET',
              headers: {
                'Accept': 'application/json',
                'Content-Type': 'application/json',
                'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
                'revision': this.apiRevision
              }
            });

            if (!response.ok) {
              const errorData = await response.json();
              console.error('[CampaignsService] API Error:', errorData);
              throw new Error(`Klaviyo API error: ${response.status} ${response.statusText}`);
            }

            const responseData = await response.json();
            allCampaigns = allCampaigns.concat(responseData.data || []);
            pageCount++;

            // The next cursor is embedded in the "next" link's query string.
            nextCursor = responseData.links?.next ?
              new URL(responseData.links.next).searchParams.get('page[cursor]') : null;

            if (nextCursor) {
              // Brief pause between pages to stay within upstream rate limits.
              await new Promise(resolve => setTimeout(resolve, 50));
            }
          } catch (fetchError) {
            console.error('[CampaignsService] Fetch error:', fetchError);
            throw fetchError;
          }

        } while (nextCursor);

        const transformedCampaigns = this._transformCampaigns(allCampaigns);

        const result = {
          data: transformedCampaigns,
          meta: {
            total_count: transformedCampaigns.length,
            page_count: pageCount
          }
        };

        try {
          const ttl = this.redisService._getTTL(params.timeRange);
          await this.redisService.set(`${cacheKey}:raw`, result, ttl);
        } catch (cacheError) {
          console.warn('[CampaignsService] Cache set error:', cacheError);
        }

        // NOTE(review): if the fetch loop above throws, this delete never runs
        // and the rejected promise stays in _pendingRequests, so later callers
        // with the same params receive the stale rejection — verify intended.
        delete this._pendingRequests[requestKey];
        return result;
      })();

      return await this._pendingRequests[requestKey];
    } catch (error) {
      console.error('[CampaignsService] Error fetching campaigns:', error);
      throw error;
    }
  }

  // Build a Klaviyo filter expression from date/status/custom filter params.
  // Returns null when no clauses apply.
  _buildFilter(params) {
    const filters = [];

    if (params.startDate && params.endDate) {
      const startUtc = this.timeManager.formatForAPI(params.startDate);
      const endUtc = this.timeManager.formatForAPI(params.endDate);

      filters.push(`greater-or-equal(send_time,${startUtc})`);
      filters.push(`less-than(send_time,${endUtc})`);
    }

    if (params.status) {
      filters.push(`equals(status,"${params.status}")`);
    }

    if (params.customFilters) {
      filters.push(...params.customFilters);
    }

    // Multiple clauses combine with and(); a single clause is used bare.
    return filters.length > 0 ? (filters.length > 1 ? `and(${filters.join(',')})` : filters[0]) : null;
  }

  /**
   * Resolve a named time range (e.g. 'last30days') into concrete dates and
   * delegate to getCampaigns. Throws if the range name is unknown.
   */
  async getCampaignsByTimeRange(timeRange, options = {}) {
    const range = this.timeManager.getDateRange(timeRange);
    if (!range) {
      throw new Error('Invalid time range specified');
    }

    const params = {
      timeRange,
      startDate: range.start.toISO(),
      endDate: range.end.toISO(),
      ...options
    };

    // Try to get from cache first
    // NOTE(review): getCampaigns performs the same cache lookup again with the
    // same key, so this pre-check is redundant — verify before removing.
    const cacheKey = this.redisService._getCacheKey('campaigns', params);
    let cachedData = null;
    try {
      cachedData = await this.redisService.get(`${cacheKey}:raw`);
      if (cachedData) {
        return cachedData;
      }
    } catch (cacheError) {
      console.warn('[CampaignsService] Cache error:', cacheError);
    }

    return this.getCampaigns(params);
  }

  // Normalize raw Klaviyo campaign objects into the flat shape the dashboard
  // consumes; missing stats default to 0.
  _transformCampaigns(campaigns) {
    if (!Array.isArray(campaigns)) {
      console.warn('[CampaignsService] Campaigns is not an array:', campaigns);
      return [];
    }

    return campaigns.map(campaign => {
      try {
        const stats = campaign.attributes?.campaign_message?.stats || {};

        return {
          id: campaign.id,
          name: campaign.attributes?.name || "Unnamed Campaign",
          subject: campaign.attributes?.campaign_message?.subject || "",
          send_time: campaign.attributes?.send_time,
          stats: {
            delivery_rate: stats.delivery_rate || 0,
            delivered: stats.delivered || 0,
            recipients: stats.recipients || 0,
            open_rate: stats.open_rate || 0,
            opens_unique: stats.opens_unique || 0,
            opens: stats.opens || 0,
            clicks_unique: stats.clicks_unique || 0,
            click_rate: stats.click_rate || 0,
            click_to_open_rate: stats.click_to_open_rate || 0,
            conversion_value: stats.conversion_value || 0,
            conversion_uniques: stats.conversion_uniques || 0
          }
        };
      } catch (error) {
        // A single malformed campaign must not break the whole batch.
        console.error('[CampaignsService] Error transforming campaign:', error, campaign);
        return {
          id: campaign.id || 'unknown',
          name: 'Error Processing Campaign',
          stats: {}
        };
      }
    });
  }
}
|
||||
2202
inventory-server/dashboard/klaviyo-server/services/events.service.js
Normal file
2202
inventory-server/dashboard/klaviyo-server/services/events.service.js
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,38 @@
|
||||
import fetch from 'node-fetch';
|
||||
|
||||
/** Thin wrapper around Klaviyo's /metrics endpoint. */
export class MetricsService {
  constructor(apiKey, apiRevision) {
    this.apiKey = apiKey;
    this.apiRevision = apiRevision;
    this.baseUrl = 'https://a.klaviyo.com/api';
  }

  /**
   * Fetch all metrics, sorted alphabetically by metric name (in place).
   * Throws on any non-2xx response from the Klaviyo API.
   */
  async getMetrics() {
    const url = `${this.baseUrl}/metrics/`;
    const headers = {
      'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
      'revision': this.apiRevision,
      'Content-Type': 'application/json',
      'Accept': 'application/json'
    };

    try {
      const response = await fetch(url, { headers });

      if (!response.ok) {
        const errorData = await response.json();
        console.error('[MetricsService] API Error:', errorData);
        throw new Error(`Klaviyo API error: ${response.status} ${response.statusText}`);
      }

      const data = await response.json();

      // Sort the results by name before returning
      if (data.data) {
        data.data.sort((a, b) => a.attributes.name.localeCompare(b.attributes.name));
      }

      return data;
    } catch (error) {
      console.error('[MetricsService] Error fetching metrics:', error);
      throw error;
    }
  }
}
|
||||
@@ -0,0 +1,262 @@
|
||||
import Redis from 'ioredis';
|
||||
import { TimeManager } from '../utils/time.utils.js';
|
||||
import dotenv from 'dotenv';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// Get directory name in ES modules
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Load environment variables again (redundant but safe)
|
||||
const envPath = path.resolve(__dirname, '../.env');
|
||||
console.log('[RedisService] Loading .env file from:', envPath);
|
||||
dotenv.config({ path: envPath });
|
||||
|
||||
/**
 * Wraps an ioredis client with JSON (de)serialization, a per-time-range TTL
 * policy, and cache-key construction for the Klaviyo dashboard. All cache
 * operations degrade to no-ops when the connection is down — a cache failure
 * must never break a request.
 */
export class RedisService {
  constructor() {
    this.timeManager = new TimeManager();
    this.DEFAULT_TTL = 5 * 60; // 5 minutes default TTL
    // Connection state, flipped by the event handlers wired in _initializeRedis.
    this.isConnected = false;
    this._initializeRedis();
  }

  // Create the ioredis client from environment variables and wire up
  // connection-state logging. Never throws: on failure the service simply
  // stays disconnected.
  _initializeRedis() {
    try {
      // Debug: Print all environment variables we're looking for
      console.log('[RedisService] Environment variables state:', {
        REDIS_HOST: process.env.REDIS_HOST ? '(set)' : '(not set)',
        REDIS_PORT: process.env.REDIS_PORT ? '(set)' : '(not set)',
        REDIS_USERNAME: process.env.REDIS_USERNAME ? '(set)' : '(not set)',
        REDIS_PASSWORD: process.env.REDIS_PASSWORD ? '(set)' : '(not set)',
      });

      // Log Redis configuration (without password)
      const host = process.env.REDIS_HOST || 'localhost';
      const port = parseInt(process.env.REDIS_PORT) || 6379;
      const username = process.env.REDIS_USERNAME || 'default';
      const password = process.env.REDIS_PASSWORD;

      console.log('[RedisService] Initializing Redis with config:', {
        host,
        port,
        username,
        hasPassword: !!password
      });

      const config = {
        host,
        port,
        username,
        // Linear backoff capped at 2 s between reconnect attempts.
        retryStrategy: (times) => {
          const delay = Math.min(times * 50, 2000);
          return delay;
        },
        maxRetriesPerRequest: 3,
        enableReadyCheck: true,
        connectTimeout: 10000,
        showFriendlyErrorStack: true,
        // NOTE(review): 'retryUnfulfilled' and 'maxRetryAttempts' do not appear
        // to be documented ioredis options — verify they have any effect.
        retryUnfulfilled: true,
        maxRetryAttempts: 5
      };

      // Only add password if it exists
      if (password) {
        console.log('[RedisService] Adding password to config');
        config.password = password;
      } else {
        console.warn('[RedisService] No Redis password found in environment variables!');
      }

      this.client = new Redis(config);

      // Handle connection events
      this.client.on('connect', () => {
        console.log('[RedisService] Connected to Redis');
        this.isConnected = true;
      });

      this.client.on('ready', () => {
        console.log('[RedisService] Redis is ready');
        this.isConnected = true;
      });

      this.client.on('error', (err) => {
        console.error('[RedisService] Redis error:', err);
        this.isConnected = false;
        // Log more details about the error
        if (err.code === 'WRONGPASS') {
          console.error('[RedisService] Authentication failed. Please check your Redis password.');
        }
      });

      this.client.on('close', () => {
        console.log('[RedisService] Redis connection closed');
        this.isConnected = false;
      });

      this.client.on('reconnecting', (params) => {
        console.log('[RedisService] Reconnecting to Redis:', params);
      });

    } catch (error) {
      console.error('[RedisService] Error initializing Redis:', error);
      this.isConnected = false;
    }
  }

  // Read and JSON-decode a key. Returns null when disconnected, on a miss,
  // or on any error.
  async get(key) {
    if (!this.isConnected) {
      return null;
    }

    try {
      const data = await this.client.get(key);
      return data ? JSON.parse(data) : null;
    } catch (error) {
      console.error('[RedisService] Error getting data:', error);
      return null;
    }
  }

  // JSON-encode and store a value with a TTL in seconds. Silently no-ops
  // when disconnected or on error.
  async set(key, data, ttl = this.DEFAULT_TTL) {
    if (!this.isConnected) {
      return;
    }

    try {
      await this.client.setex(key, ttl, JSON.stringify(data));
    } catch (error) {
      console.error('[RedisService] Error setting data:', error);
    }
  }

  // Helper to generate cache keys.
  // Key layout (segments appended in order, each optional):
  //   klaviyo:{type}[:{metric}[:daily][:{filterHash}]]
  //          [:{cacheKey} | :{timeRange}[:{metricId}][:prev]
  //           | :custom:{startDate}:{endDate}[:{metricId}][:prev]]
  //          [:{orderType}]
  _getCacheKey(type, params = {}) {
    const {
      timeRange,
      startDate,
      endDate,
      metricId,
      metric,
      daily,
      cacheKey,
      isPreviousPeriod,
      customFilters
    } = params;

    let key = `klaviyo:${type}`;

    // Handle "stats:details" for daily or metric-based keys
    if (type === 'stats:details') {
      // Add metric to key
      key += `:${metric || 'all'}`;

      // Add daily flag if present
      if (daily) {
        key += ':daily';
      }

      // Add custom filters hash if present
      // (alphanumeric squeeze of the concatenated filter strings — collisions
      // are possible but unlikely in practice)
      if (customFilters?.length) {
        const filterHash = customFilters.join('').replace(/[^a-zA-Z0-9]/g, '');
        key += `:${filterHash}`;
      }
    }

    // If a specific cache key is provided, use it (highest priority)
    if (cacheKey) {
      key += `:${cacheKey}`;
    }
    // Otherwise, build a default cache key
    else if (timeRange) {
      key += `:${timeRange}`;
      if (metricId) {
        key += `:${metricId}`;
      }
      if (isPreviousPeriod) {
        key += ':prev';
      }
    } else if (startDate && endDate) {
      // For custom date ranges, include both dates in the key
      key += `:custom:${startDate}:${endDate}`;
      if (metricId) {
        key += `:${metricId}`;
      }
      if (isPreviousPeriod) {
        key += ':prev';
      }
    }

    // Add order type to key if present
    if (['pre_orders', 'local_pickup', 'on_hold'].includes(metric)) {
      key += `:${metric}`;
    }

    return key;
  }


  // Get TTL based on time range — fresher ranges expire sooner.
  _getTTL(timeRange) {
    const TTL_MAP = {
      'today': 2 * 60, // 2 minutes
      'yesterday': 30 * 60, // 30 minutes
      'thisWeek': 5 * 60, // 5 minutes
      'lastWeek': 60 * 60, // 1 hour
      'thisMonth': 10 * 60, // 10 minutes
      'lastMonth': 2 * 60 * 60, // 2 hours
      'last7days': 5 * 60, // 5 minutes
      'last30days': 15 * 60, // 15 minutes
      'custom': 15 * 60 // 15 minutes
    };
    return TTL_MAP[timeRange] || this.DEFAULT_TTL;
  }

  // Fetch cached event data of the given sub-type under the events key space.
  // Returns null when disconnected, on a miss, or on error.
  async getEventData(type, params) {
    if (!this.isConnected) {
      return null;
    }

    try {
      const baseKey = this._getCacheKey('events', params);
      const data = await this.get(`${baseKey}:${type}`);
      return data;
    } catch (error) {
      console.error('[RedisService] Error getting event data:', error);
      return null;
    }
  }

  // Store event data of the given sub-type with a TTL derived from the range.
  async cacheEventData(type, params, data) {
    if (!this.isConnected) {
      return;
    }

    try {
      const ttl = this._getTTL(params.timeRange);
      const baseKey = this._getCacheKey('events', params);

      // Cache raw event data
      await this.set(`${baseKey}:${type}`, data, ttl);
    } catch (error) {
      console.error('[RedisService] Error caching event data:', error);
    }
  }

  // Delete every key matching the events prefix for the given params.
  // NOTE(review): KEYS is O(N) and blocks the Redis server; consider SCAN
  // if the keyspace grows large.
  async clearCache(params = {}) {
    if (!this.isConnected) {
      return;
    }

    try {
      const pattern = this._getCacheKey('events', params) + '*';
      const keys = await this.client.keys(pattern);
      if (keys.length > 0) {
        await this.client.del(...keys);
      }
    } catch (error) {
      console.error('[RedisService] Error clearing cache:', error);
    }
  }
}
|
||||
@@ -0,0 +1,254 @@
|
||||
import fetch from 'node-fetch';
|
||||
import { TimeManager } from '../utils/time.utils.js';
|
||||
import { RedisService } from './redis.service.js';
|
||||
|
||||
const METRIC_IDS = {
|
||||
PLACED_ORDER: 'Y8cqcF'
|
||||
};
|
||||
|
||||
export class ReportingService {
|
||||
  // Capture Klaviyo credentials and shared helpers. _pendingReportRequest
  // holds the single in-flight report promise used by getCampaignReports
  // for request de-duplication (null when no request is in flight).
  constructor(apiKey, apiRevision) {
    this.apiKey = apiKey;
    this.apiRevision = apiRevision;
    this.baseUrl = 'https://a.klaviyo.com/api';
    this.timeManager = new TimeManager();
    this.redisService = new RedisService();
    this._pendingReportRequest = null;
  }
|
||||
|
||||
  /**
   * Fetch campaign performance reports from Klaviyo's campaign-values-reports
   * endpoint, enrich each row with campaign details (name, subject, send time),
   * sort by send time descending, and cache the result for 10 minutes.
   * Returns { data: [...] }.
   */
  async getCampaignReports(params = {}) {
    try {
      // Check if there's a pending request.
      // NOTE(review): a single shared promise is reused regardless of params,
      // so concurrent calls with different timeRange/channel receive the first
      // call's results — verify this is intended.
      if (this._pendingReportRequest) {
        console.log('[ReportingService] Using pending campaign report request');
        return this._pendingReportRequest;
      }

      // Try to get from cache first
      const cacheKey = this.redisService._getCacheKey('campaign_reports', params);
      let cachedData = null;
      try {
        cachedData = await this.redisService.get(`${cacheKey}:raw`);
        if (cachedData) {
          console.log('[ReportingService] Using cached campaign report data');
          return cachedData;
        }
      } catch (cacheError) {
        // Cache failures are non-fatal; fall through to a live fetch.
        console.warn('[ReportingService] Cache error:', cacheError);
      }

      // Create new request promise
      this._pendingReportRequest = (async () => {
        console.log('[ReportingService] Fetching fresh campaign report data');

        const range = this.timeManager.getDateRange(params.timeRange || 'last30days');

        // Determine which channels to fetch based on params
        const channelsToFetch = params.channel === 'all' || !params.channel
          ? ['email', 'sms']
          : [params.channel];

        const allResults = [];

        // Fetch each channel (Klaviyo requires one report request per channel filter).
        for (const channel of channelsToFetch) {
          const payload = {
            data: {
              type: "campaign-values-report",
              attributes: {
                timeframe: {
                  start: range.start.toISO(),
                  end: range.end.toISO()
                },
                statistics: [
                  "delivery_rate",
                  "delivered",
                  "recipients",
                  "open_rate",
                  "opens_unique",
                  "opens",
                  "click_rate",
                  "clicks_unique",
                  "click_to_open_rate",
                  "conversion_value",
                  "conversion_uniques"
                ],
                conversion_metric_id: METRIC_IDS.PLACED_ORDER,
                filter: `equals(send_channel,"${channel}")`
              }
            }
          };

          const response = await fetch(`${this.baseUrl}/campaign-values-reports`, {
            method: 'POST',
            headers: {
              'Accept': 'application/json',
              'Content-Type': 'application/json',
              'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
              'revision': this.apiRevision
            },
            body: JSON.stringify(payload)
          });

          if (!response.ok) {
            const errorData = await response.json();
            console.error('[ReportingService] API Error:', errorData);
            throw new Error(`Klaviyo API error: ${response.status} ${response.statusText}`);
          }

          const reportData = await response.json();
          console.log(`[ReportingService] Raw ${channel} report data:`, JSON.stringify(reportData, null, 2));

          // Get campaign IDs from the report
          const campaignIds = reportData.data?.attributes?.results?.map(result =>
            result.groupings?.campaign_id
          ).filter(Boolean) || [];

          if (campaignIds.length > 0) {
            // Get campaign details including send time and subject lines
            const campaignDetails = await this.getCampaignDetails(campaignIds);

            // Process results for this channel
            const channelResults = reportData.data.attributes.results.map(result => {
              const campaignId = result.groupings.campaign_id;
              // NOTE(review): getCampaignDetails drops campaigns whose fetch
              // failed, so `details` can be undefined here and the property
              // accesses below would throw — verify/guard.
              const details = campaignDetails.find(detail => detail.id === campaignId);

              return {
                id: campaignId,
                name: details.attributes.name,
                subject: details.attributes.subject,
                send_time: details.attributes.send_time,
                channel: channel, // Use the channel we're currently processing
                stats: {
                  delivery_rate: result.statistics.delivery_rate,
                  delivered: result.statistics.delivered,
                  recipients: result.statistics.recipients,
                  open_rate: result.statistics.open_rate,
                  opens_unique: result.statistics.opens_unique,
                  opens: result.statistics.opens,
                  click_rate: result.statistics.click_rate,
                  clicks_unique: result.statistics.clicks_unique,
                  click_to_open_rate: result.statistics.click_to_open_rate,
                  conversion_value: result.statistics.conversion_value,
                  conversion_uniques: result.statistics.conversion_uniques
                }
              };
            });

            allResults.push(...channelResults);
          }
        }

        // Sort all results by date
        const enrichedData = {
          data: allResults.sort((a, b) => {
            const dateA = new Date(a.send_time);
            const dateB = new Date(b.send_time);
            return dateB - dateA; // Sort by date descending
          })
        };

        console.log('[ReportingService] Enriched data:', JSON.stringify(enrichedData, null, 2));

        // Cache the enriched response for 10 minutes
        try {
          await this.redisService.set(`${cacheKey}:raw`, enrichedData, 600);
        } catch (cacheError) {
          console.warn('[ReportingService] Cache set error:', cacheError);
        }

        return enrichedData;
      })();

      // Await the shared promise, then clear it so the next call starts fresh.
      const result = await this._pendingReportRequest;
      this._pendingReportRequest = null;
      return result;

    } catch (error) {
      console.error('[ReportingService] Error fetching campaign reports:', error);
      // Clear the pending slot so a failed request does not poison later calls.
      this._pendingReportRequest = null;
      throw error;
    }
  }
|
||||
|
||||
async getCampaignDetails(campaignIds = []) {
|
||||
if (!Array.isArray(campaignIds) || campaignIds.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const fetchWithTimeout = async (campaignId, retries = 3) => {
|
||||
for (let i = 0; i < retries; i++) {
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), 10000); // 10 second timeout
|
||||
|
||||
const response = await fetch(
|
||||
`${this.baseUrl}/campaigns/${campaignId}?include=campaign-messages`,
|
||||
{
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
|
||||
'revision': this.apiRevision
|
||||
},
|
||||
signal: controller.signal
|
||||
}
|
||||
);
|
||||
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch campaign ${campaignId}: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
if (!data.data) {
|
||||
throw new Error(`Invalid response for campaign ${campaignId}`);
|
||||
}
|
||||
|
||||
const message = data.included?.find(item => item.type === 'campaign-message');
|
||||
|
||||
console.log('[ReportingService] Campaign details for ID:', campaignId, {
|
||||
send_channel: data.data.attributes.send_channel,
|
||||
raw_attributes: data.data.attributes
|
||||
});
|
||||
|
||||
return {
|
||||
id: data.data.id,
|
||||
type: data.data.type,
|
||||
attributes: {
|
||||
...data.data.attributes,
|
||||
name: data.data.attributes.name,
|
||||
send_time: data.data.attributes.send_time,
|
||||
subject: message?.attributes?.content?.subject,
|
||||
send_channel: data.data.attributes.send_channel || 'email'
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
if (i === retries - 1) throw error;
|
||||
await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); // Exponential backoff
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Process in smaller chunks to avoid overwhelming the API
|
||||
const chunkSize = 10;
|
||||
const campaignDetails = [];
|
||||
|
||||
for (let i = 0; i < campaignIds.length; i += chunkSize) {
|
||||
const chunk = campaignIds.slice(i, i + chunkSize);
|
||||
const results = await Promise.all(
|
||||
chunk.map(id => fetchWithTimeout(id).catch(error => {
|
||||
console.error(`Failed to fetch campaign ${id}:`, error);
|
||||
return null;
|
||||
}))
|
||||
);
|
||||
campaignDetails.push(...results.filter(Boolean));
|
||||
|
||||
if (i + chunkSize < campaignIds.length) {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000)); // 1 second delay between chunks
|
||||
}
|
||||
}
|
||||
|
||||
return campaignDetails;
|
||||
}
|
||||
}
|
||||
448
inventory-server/dashboard/klaviyo-server/utils/time.utils.js
Normal file
448
inventory-server/dashboard/klaviyo-server/utils/time.utils.js
Normal file
@@ -0,0 +1,448 @@
|
||||
import { DateTime } from 'luxon';
|
||||
|
||||
export class TimeManager {
  /**
   * Centralizes business-day / week / month date math in Eastern time.
   *
   * @param {number} dayStartHour - Hour (0-23, Eastern time) at which the
   *   business day begins. Defaults to 1 (1 AM).
   */
  constructor(dayStartHour = 1) {
    this.timezone = 'America/New_York';
    this.dayStartHour = dayStartHour; // Hour (0-23) when the business day starts
    this.weekStartDay = 7; // Luxon weekday numbering: 1 = Monday ... 7 = Sunday
  }

  /**
   * Get the start of the current business day.
   * If the current time is before dayStartHour, the business day began
   * yesterday, so return the previous calendar day at dayStartHour.
   */
  getDayStart(dt = this.getNow()) {
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid datetime provided to getDayStart");
      return this.getNow();
    }
    const dayStart = dt.set({ hour: this.dayStartHour, minute: 0, second: 0, millisecond: 0 });
    return dt.hour < this.dayStartHour ? dayStart.minus({ days: 1 }) : dayStart;
  }

  /**
   * Get the end of the current business day: one minute before
   * dayStartHour on the following day.
   * NOTE(review): subtracting a whole minute leaves the final ~60s of the
   * day outside [start, end] — confirm callers treat `end` as inclusive.
   */
  getDayEnd(dt = this.getNow()) {
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid datetime provided to getDayEnd");
      return this.getNow();
    }
    const nextDay = this.getDayStart(dt).plus({ days: 1 });
    return nextDay.minus({ minutes: 1 });
  }

  /**
   * Get the start of the week containing the given date.
   * Weeks start on Sunday (weekStartDay = 7) at dayStartHour.
   */
  getWeekStart(dt = this.getNow()) {
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid datetime provided to getWeekStart");
      return this.getNow();
    }
    // Set to start of week (Sunday) and adjust hour.
    const weekStart = dt.set({ weekday: this.weekStartDay }).startOf('day');
    // If the week start time would be after the given time, go back a week.
    if (weekStart > dt) {
      return weekStart.minus({ weeks: 1 }).set({ hour: this.dayStartHour });
    }
    return weekStart.set({ hour: this.dayStartHour });
  }

  /**
   * Convert any date input (Luxon DateTime, JS Date, or ISO string) to a
   * Luxon DateTime in Eastern time. Returns null for falsy/invalid input.
   */
  toDateTime(date) {
    if (!date) return null;

    if (date instanceof DateTime) {
      return date.setZone(this.timezone);
    }

    // If it's an ISO string or Date object, parse it.
    const dt = DateTime.fromISO(date instanceof Date ? date.toISOString() : date);
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid date input:", date);
      return null;
    }

    return dt.setZone(this.timezone);
  }

  /**
   * Format a date for API requests (UTC ISO string). Returns null for
   * invalid input.
   */
  formatForAPI(date) {
    if (!date) return null;

    // Parse the input date into Eastern time first.
    const dt = this.toDateTime(date);
    if (!dt || !dt.isValid) {
      console.error("[TimeManager] Invalid date for API:", date);
      return null;
    }

    // Convert to UTC for the API request.
    const utc = dt.toUTC();

    console.log("[TimeManager] API date conversion:", {
      input: date,
      eastern: dt.toISO(),
      utc: utc.toISO(),
      offset: dt.offset
    });

    return utc.toISO();
  }

  /**
   * Format a date for display (in Eastern time), e.g. "Jan 5, 2024 3:07 PM".
   * Returns '' for invalid input.
   */
  formatForDisplay(date) {
    const dt = this.toDateTime(date);
    if (!dt || !dt.isValid) return '';
    return dt.toFormat('LLL d, yyyy h:mm a');
  }

  /**
   * True when both dates parse and the end is strictly after the start.
   */
  isValidDateRange(start, end) {
    const startDt = this.toDateTime(start);
    const endDt = this.toDateTime(end);
    return startDt && endDt && endDt > startDt;
  }

  /**
   * Get the current time in Eastern timezone.
   */
  getNow() {
    return DateTime.now().setZone(this.timezone);
  }

  /**
   * Get a date range for the last N hours (no business-day alignment).
   */
  getLastNHours(hours) {
    const now = this.getNow();
    return {
      start: now.minus({ hours }),
      end: now
    };
  }

  /**
   * Get a date range for the last N days, aligned with the custom
   * business-day start time.
   */
  getLastNDays(days) {
    const now = this.getNow();
    const dayStart = this.getDayStart(now);
    return {
      start: dayStart.minus({ days }),
      end: this.getDayEnd(now)
    };
  }

  /**
   * Get a {start, end} DateTime range for a named period. All ranges align
   * with the custom business-day start time. Returns null for 'custom'
   * (use getCustomRange) or an unknown period.
   *
   * @param {string} period - e.g. 'today', 'last7days', 'thisMonth'; a
   *   'previous' prefix is normalized to 'last'.
   */
  getDateRange(period) {
    const now = this.getNow();

    // Normalize period to handle both 'last' and 'previous' prefixes.
    const normalizedPeriod = period.startsWith('previous') ? period.replace('previous', 'last') : period;

    switch (normalizedPeriod) {
      case 'custom': {
        // Custom ranges are handled separately via getCustomRange.
        console.warn('[TimeManager] Custom ranges should use getCustomRange method');
        return null;
      }
      case 'today': {
        const dayStart = this.getDayStart(now);
        return {
          start: dayStart,
          end: this.getDayEnd(now)
        };
      }
      case 'yesterday': {
        const yesterday = now.minus({ days: 1 });
        return {
          start: this.getDayStart(yesterday),
          end: this.getDayEnd(yesterday)
        };
      }
      case 'last7days': {
        // Include today and the previous 6 days.
        const dayStart = this.getDayStart(now);
        const weekStart = dayStart.minus({ days: 6 });
        return {
          start: weekStart,
          end: this.getDayEnd(now)
        };
      }
      case 'last30days': {
        // Include today and the previous 29 days.
        const dayStart = this.getDayStart(now);
        const monthStart = dayStart.minus({ days: 29 });
        return {
          start: monthStart,
          end: this.getDayEnd(now)
        };
      }
      case 'last90days': {
        // Include today and the previous 89 days.
        const dayStart = this.getDayStart(now);
        const start = dayStart.minus({ days: 89 });
        return {
          start,
          end: this.getDayEnd(now)
        };
      }
      case 'thisWeek': {
        // From the start of the week (Sunday, custom hour) through today.
        const weekStart = this.getWeekStart(now);
        return {
          start: weekStart,
          end: this.getDayEnd(now)
        };
      }
      case 'lastWeek': {
        const lastWeek = now.minus({ weeks: 1 });
        const weekStart = this.getWeekStart(lastWeek);
        const weekEnd = weekStart.plus({ days: 6 }); // 6 days after start = Saturday
        return {
          start: weekStart,
          end: this.getDayEnd(weekEnd)
        };
      }
      case 'thisMonth': {
        const dayStart = this.getDayStart(now);
        const monthStart = dayStart.startOf('month').set({ hour: this.dayStartHour });
        return {
          start: monthStart,
          end: this.getDayEnd(now)
        };
      }
      case 'lastMonth': {
        const lastMonth = now.minus({ months: 1 });
        const monthStart = lastMonth.startOf('month').set({ hour: this.dayStartHour });
        const monthEnd = monthStart.plus({ months: 1 }).minus({ days: 1 });
        return {
          start: monthStart,
          end: this.getDayEnd(monthEnd)
        };
      }
      default:
        console.warn(`[TimeManager] Unknown period: ${period}`);
        return null;
    }
  }

  /**
   * Format a duration in milliseconds as "HHh MMm SSs" (zero-padded).
   *
   * BUGFIX: previously used DateTime.fromMillis(ms).toFormat(...), which
   * interprets ms as an epoch *timestamp* and formats the resulting
   * time-of-day in the local zone — e.g. formatDuration(0) yielded the
   * local wall-clock time at the Unix epoch, not "00h 00m 00s".
   */
  formatDuration(ms) {
    const totalSeconds = Math.floor(ms / 1000);
    const hours = Math.floor(totalSeconds / 3600);
    const minutes = Math.floor((totalSeconds % 3600) / 60);
    const seconds = totalSeconds % 60;
    const pad = (n) => String(n).padStart(2, '0');
    return `${pad(hours)}h ${pad(minutes)}m ${pad(seconds)}s`;
  }

  /**
   * Get a relative time string (e.g. "2 hours ago"). Returns '' for
   * invalid input.
   */
  getRelativeTime(date) {
    const dt = this.toDateTime(date);
    if (!dt) return '';
    return dt.toRelative();
  }

  /**
   * Get a custom date range using the exact dates/times provided.
   *
   * @param {string|Date} startDate - ISO string or Date for range start
   * @param {string|Date} endDate - ISO string or Date for range end
   * @returns {?{start: DateTime, end: DateTime}} null when either date is
   *   missing/invalid or the end precedes the start.
   */
  getCustomRange(startDate, endDate) {
    if (!startDate || !endDate) {
      console.error("[TimeManager] Custom range requires both start and end dates");
      return null;
    }

    const start = this.toDateTime(startDate);
    const end = this.toDateTime(endDate);

    if (!start || !end || !start.isValid || !end.isValid) {
      console.error("[TimeManager] Invalid dates provided for custom range");
      return null;
    }

    // Validate the range.
    if (end < start) {
      console.error("[TimeManager] End date must be after start date");
      return null;
    }

    return {
      start,
      end
    };
  }

  /**
   * Get the period immediately preceding the given named period (for
   * period-over-period comparisons). The previous period always ends one
   * millisecond before the current one starts.
   *
   * @param {string} period - The current period name.
   * @param {DateTime} [now] - The reference datetime (defaults to now).
   * @returns {?{start: DateTime, end: DateTime}} null for unknown periods.
   */
  getPreviousPeriod(period, now = this.getNow()) {
    const normalizedPeriod = period.startsWith('previous') ? period.replace('previous', 'last') : period;

    switch (normalizedPeriod) {
      case 'today': {
        const yesterday = now.minus({ days: 1 });
        return {
          start: this.getDayStart(yesterday),
          end: this.getDayEnd(yesterday)
        };
      }
      case 'yesterday': {
        const twoDaysAgo = now.minus({ days: 2 });
        return {
          start: this.getDayStart(twoDaysAgo),
          end: this.getDayEnd(twoDaysAgo)
        };
      }
      case 'last7days': {
        const dayStart = this.getDayStart(now);
        const currentStart = dayStart.minus({ days: 6 });
        const prevEnd = currentStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.minus({ days: 6 });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'last30days': {
        const dayStart = this.getDayStart(now);
        const currentStart = dayStart.minus({ days: 29 });
        const prevEnd = currentStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.minus({ days: 29 });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'last90days': {
        const dayStart = this.getDayStart(now);
        const currentStart = dayStart.minus({ days: 89 });
        const prevEnd = currentStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.minus({ days: 89 });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'thisWeek': {
        const weekStart = this.getWeekStart(now);
        const prevEnd = weekStart.minus({ milliseconds: 1 });
        const prevStart = this.getWeekStart(prevEnd);
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'lastWeek': {
        const lastWeekStart = this.getWeekStart(now.minus({ weeks: 1 }));
        const prevEnd = lastWeekStart.minus({ milliseconds: 1 });
        const prevStart = this.getWeekStart(prevEnd);
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'thisMonth': {
        const monthStart = now.startOf('month').set({ hour: this.dayStartHour });
        const prevEnd = monthStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.startOf('month').set({ hour: this.dayStartHour });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'lastMonth': {
        const lastMonthStart = now.minus({ months: 1 }).startOf('month').set({ hour: this.dayStartHour });
        const prevEnd = lastMonthStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.startOf('month').set({ hour: this.dayStartHour });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      default:
        console.warn(`[TimeManager] No previous period defined for: ${period}`);
        return null;
    }
  }

  /**
   * Group Klaviyo events into time buckets and aggregate a count (and,
   * optionally, a summed numeric property) per bucket.
   *
   * @param {Array} events - Events with attributes.datetime (ISO string).
   * @param {string} [interval='day'] - 'hour' | 'day' | 'week' | 'month'.
   * @param {?string} [property=null] - Property to sum per bucket; the
   *   special value '$value' sums event.attributes.value instead.
   * @returns {Array<{datetime: string, count: number, value: number}>}
   *   buckets sorted by datetime ascending.
   */
  groupEventsByInterval(events, interval = 'day', property = null) {
    if (!events?.length) return [];

    const groupedData = new Map();
    // NOTE(review): event datetimes are bucketed in the parse-result zone,
    // not forced to Eastern — confirm this matches caller expectations.

    for (const event of events) {
      const datetime = DateTime.fromISO(event.attributes.datetime);
      let groupKey;

      switch (interval) {
        case 'hour':
          groupKey = datetime.startOf('hour').toISO();
          break;
        case 'day':
          groupKey = datetime.startOf('day').toISO();
          break;
        case 'week':
          groupKey = datetime.startOf('week').toISO();
          break;
        case 'month':
          groupKey = datetime.startOf('month').toISO();
          break;
        default:
          groupKey = datetime.startOf('day').toISO();
      }

      const existingGroup = groupedData.get(groupKey) || {
        datetime: groupKey,
        count: 0,
        value: 0
      };

      existingGroup.count++;

      if (property) {
        // Extract the property value from the event.
        const props = event.attributes?.event_properties || event.attributes?.properties || {};
        let value = 0;

        if (property === '$value') {
          // Special case for $value - use the event's monetary value.
          value = Number(event.attributes?.value || 0);
        } else {
          // Otherwise read from the event's properties bag.
          value = Number(props[property] || 0);
        }

        existingGroup.value = (existingGroup.value || 0) + value;
      }

      groupedData.set(groupKey, existingGroup);
    }

    // Convert to array and sort by datetime ascending.
    return Array.from(groupedData.values())
      .sort((a, b) => DateTime.fromISO(a.datetime) - DateTime.fromISO(b.datetime));
  }
}
|
||||
935
inventory-server/dashboard/meta-server/package-lock.json
generated
Normal file
935
inventory-server/dashboard/meta-server/package-lock.json
generated
Normal file
@@ -0,0 +1,935 @@
|
||||
{
|
||||
"name": "meta-server",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "meta-server",
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"axios": "^1.7.9",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.4.7",
|
||||
"express": "^4.21.2"
|
||||
}
|
||||
},
|
||||
"node_modules/accepts": {
|
||||
"version": "1.3.8",
|
||||
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
|
||||
"integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"mime-types": "~2.1.34",
|
||||
"negotiator": "0.6.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/array-flatten": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
||||
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/asynckit": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/body-parser": {
|
||||
"version": "1.20.3",
|
||||
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
|
||||
"integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"bytes": "3.1.2",
|
||||
"content-type": "~1.0.5",
|
||||
"debug": "2.6.9",
|
||||
"depd": "2.0.0",
|
||||
"destroy": "1.2.0",
|
||||
"http-errors": "2.0.0",
|
||||
"iconv-lite": "0.4.24",
|
||||
"on-finished": "2.4.1",
|
||||
"qs": "6.13.0",
|
||||
"raw-body": "2.5.2",
|
||||
"type-is": "~1.6.18",
|
||||
"unpipe": "1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8",
|
||||
"npm": "1.2.8000 || >= 1.4.16"
|
||||
}
|
||||
},
|
||||
"node_modules/bytes": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/call-bind-apply-helpers": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz",
|
||||
"integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"function-bind": "^1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/call-bound": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz",
|
||||
"integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"call-bind-apply-helpers": "^1.0.1",
|
||||
"get-intrinsic": "^1.2.6"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/combined-stream": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"delayed-stream": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/content-disposition": {
|
||||
"version": "0.5.4",
|
||||
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
|
||||
"integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safe-buffer": "5.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/content-type": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
|
||||
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/cookie": {
|
||||
"version": "0.7.1",
|
||||
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz",
|
||||
"integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/cookie-signature": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
|
||||
"integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/cors": {
|
||||
"version": "2.8.5",
|
||||
"resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
|
||||
"integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"object-assign": "^4",
|
||||
"vary": "^1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/delayed-stream": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/depd": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||
"integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/destroy": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
|
||||
"integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8",
|
||||
"npm": "1.2.8000 || >= 1.4.16"
|
||||
}
|
||||
},
|
||||
"node_modules/dotenv": {
|
||||
"version": "16.4.7",
|
||||
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
|
||||
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://dotenvx.com"
|
||||
}
|
||||
},
|
||||
"node_modules/dunder-proto": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"call-bind-apply-helpers": "^1.0.1",
|
||||
"es-errors": "^1.3.0",
|
||||
"gopd": "^1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/ee-first": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
|
||||
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/encodeurl": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
|
||||
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/es-define-property": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
|
||||
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-errors": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
|
||||
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-object-atoms": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz",
|
||||
"integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-html": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||
"integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/etag": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
|
||||
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/express": {
|
||||
"version": "4.21.2",
|
||||
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
|
||||
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"accepts": "~1.3.8",
|
||||
"array-flatten": "1.1.1",
|
||||
"body-parser": "1.20.3",
|
||||
"content-disposition": "0.5.4",
|
||||
"content-type": "~1.0.4",
|
||||
"cookie": "0.7.1",
|
||||
"cookie-signature": "1.0.6",
|
||||
"debug": "2.6.9",
|
||||
"depd": "2.0.0",
|
||||
"encodeurl": "~2.0.0",
|
||||
"escape-html": "~1.0.3",
|
||||
"etag": "~1.8.1",
|
||||
"finalhandler": "1.3.1",
|
||||
"fresh": "0.5.2",
|
||||
"http-errors": "2.0.0",
|
||||
"merge-descriptors": "1.0.3",
|
||||
"methods": "~1.1.2",
|
||||
"on-finished": "2.4.1",
|
||||
"parseurl": "~1.3.3",
|
||||
"path-to-regexp": "0.1.12",
|
||||
"proxy-addr": "~2.0.7",
|
||||
"qs": "6.13.0",
|
||||
"range-parser": "~1.2.1",
|
||||
"safe-buffer": "5.2.1",
|
||||
"send": "0.19.0",
|
||||
"serve-static": "1.16.2",
|
||||
"setprototypeof": "1.2.0",
|
||||
"statuses": "2.0.1",
|
||||
"type-is": "~1.6.18",
|
||||
"utils-merge": "1.0.1",
|
||||
"vary": "~1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.10.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/express"
|
||||
}
|
||||
},
|
||||
"node_modules/finalhandler": {
|
||||
"version": "1.3.1",
|
||||
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz",
|
||||
"integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "2.6.9",
|
||||
"encodeurl": "~2.0.0",
|
||||
"escape-html": "~1.0.3",
|
||||
"on-finished": "2.4.1",
|
||||
"parseurl": "~1.3.3",
|
||||
"statuses": "2.0.1",
|
||||
"unpipe": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/follow-redirects": {
|
||||
"version": "1.15.9",
|
||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
|
||||
"integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "individual",
|
||||
"url": "https://github.com/sponsors/RubenVerborgh"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"debug": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/forwarded": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
||||
"integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/fresh": {
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
|
||||
"integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/function-bind": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
|
||||
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/get-intrinsic": {
|
||||
"version": "1.2.6",
|
||||
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.6.tgz",
|
||||
"integrity": "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"call-bind-apply-helpers": "^1.0.1",
|
||||
"dunder-proto": "^1.0.0",
|
||||
"es-define-property": "^1.0.1",
|
||||
"es-errors": "^1.3.0",
|
||||
"es-object-atoms": "^1.0.0",
|
||||
"function-bind": "^1.1.2",
|
||||
"gopd": "^1.2.0",
|
||||
"has-symbols": "^1.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"math-intrinsics": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/gopd": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
|
||||
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-symbols": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
|
||||
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/hasown": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"function-bind": "^1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/http-errors": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
|
||||
"integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"depd": "2.0.0",
|
||||
"inherits": "2.0.4",
|
||||
"setprototypeof": "1.2.0",
|
||||
"statuses": "2.0.1",
|
||||
"toidentifier": "1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safer-buffer": ">= 2.1.2 < 3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/inherits": {
|
||||
"version": "2.0.4",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/ipaddr.js": {
|
||||
"version": "1.9.1",
|
||||
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
|
||||
"integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/math-intrinsics": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
|
||||
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/media-typer": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||
"integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/merge-descriptors": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
|
||||
"integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/methods": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
|
||||
"integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/mime": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
|
||||
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/mime-db": {
|
||||
"version": "1.52.0",
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/mime-types": {
|
||||
"version": "2.1.35",
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/negotiator": {
|
||||
"version": "0.6.3",
|
||||
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
|
||||
"integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/object-assign": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
|
||||
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/object-inspect": {
|
||||
"version": "1.13.3",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz",
|
||||
"integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/on-finished": {
|
||||
"version": "2.4.1",
|
||||
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
|
||||
"integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ee-first": "1.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/parseurl": {
|
||||
"version": "1.3.3",
|
||||
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
|
||||
"integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/path-to-regexp": {
|
||||
"version": "0.1.12",
|
||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
|
||||
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/proxy-addr": {
|
||||
"version": "2.0.7",
|
||||
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
|
||||
"integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"forwarded": "0.2.0",
|
||||
"ipaddr.js": "1.9.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/proxy-from-env": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/qs": {
|
||||
"version": "6.13.0",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
|
||||
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"side-channel": "^1.0.6"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/range-parser": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
|
||||
"integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/raw-body": {
|
||||
"version": "2.5.2",
|
||||
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
|
||||
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"bytes": "3.1.2",
|
||||
"http-errors": "2.0.0",
|
||||
"iconv-lite": "0.4.24",
|
||||
"unpipe": "1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/send": {
|
||||
"version": "0.19.0",
|
||||
"resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz",
|
||||
"integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "2.6.9",
|
||||
"depd": "2.0.0",
|
||||
"destroy": "1.2.0",
|
||||
"encodeurl": "~1.0.2",
|
||||
"escape-html": "~1.0.3",
|
||||
"etag": "~1.8.1",
|
||||
"fresh": "0.5.2",
|
||||
"http-errors": "2.0.0",
|
||||
"mime": "1.6.0",
|
||||
"ms": "2.1.3",
|
||||
"on-finished": "2.4.1",
|
||||
"range-parser": "~1.2.1",
|
||||
"statuses": "2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/send/node_modules/encodeurl": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
|
||||
"integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/send/node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/serve-static": {
|
||||
"version": "1.16.2",
|
||||
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
|
||||
"integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"encodeurl": "~2.0.0",
|
||||
"escape-html": "~1.0.3",
|
||||
"parseurl": "~1.3.3",
|
||||
"send": "0.19.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/setprototypeof": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
|
||||
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/side-channel": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
|
||||
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"object-inspect": "^1.13.3",
|
||||
"side-channel-list": "^1.0.0",
|
||||
"side-channel-map": "^1.0.1",
|
||||
"side-channel-weakmap": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/side-channel-list": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
|
||||
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"object-inspect": "^1.13.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/side-channel-map": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
|
||||
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"call-bound": "^1.0.2",
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.5",
|
||||
"object-inspect": "^1.13.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/side-channel-weakmap": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
|
||||
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"call-bound": "^1.0.2",
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.5",
|
||||
"object-inspect": "^1.13.3",
|
||||
"side-channel-map": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/statuses": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/toidentifier": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
|
||||
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/type-is": {
|
||||
"version": "1.6.18",
|
||||
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
||||
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"media-typer": "0.3.0",
|
||||
"mime-types": "~2.1.24"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/unpipe": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
|
||||
"integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/utils-merge": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||
"integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/vary": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
|
||||
"integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
20
inventory-server/dashboard/meta-server/package.json
Normal file
20
inventory-server/dashboard/meta-server/package.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "meta-server",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"dependencies": {
|
||||
"axios": "^1.7.9",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.4.7",
|
||||
"express": "^4.21.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
const express = require('express');
const router = express.Router();
const {
  fetchCampaigns,
  fetchAccountInsights,
  updateCampaignBudget,
  updateCampaignStatus,
} = require('../services/meta.service');

/**
 * Log a failed Meta API call and reply with a 500, preferring the
 * structured error message from Meta's response payload when present.
 * Shared by every route below to avoid repeating the same boilerplate.
 */
const sendMetaError = (res, logLabel, publicMessage, error) => {
  console.error(`${logLabel}:`, error);
  res.status(500).json({
    error: publicMessage,
    details: error.response?.data?.error?.message || error.message,
  });
};

// Get all campaigns with insights for a date range (?since=...&until=...).
router.get('/campaigns', async (req, res) => {
  try {
    const { since, until } = req.query;

    if (!since || !until) {
      return res.status(400).json({ error: 'Date range is required (since, until)' });
    }

    const campaigns = await fetchCampaigns(since, until);
    res.json(campaigns);
  } catch (error) {
    sendMetaError(res, 'Campaign fetch error', 'Failed to fetch campaigns', error);
  }
});

// Get account-level aggregate insights for a date range.
router.get('/account-insights', async (req, res) => {
  try {
    const { since, until } = req.query;

    if (!since || !until) {
      return res.status(400).json({ error: 'Date range is required (since, until)' });
    }

    const insights = await fetchAccountInsights(since, until);
    res.json(insights);
  } catch (error) {
    sendMetaError(res, 'Account insights fetch error', 'Failed to fetch account insights', error);
  }
});

// Update a campaign's daily budget. Body: { budget } in whole currency
// units; numeric strings are accepted for backward compatibility.
router.patch('/campaigns/:campaignId/budget', async (req, res) => {
  try {
    const { campaignId } = req.params;
    const { budget } = req.body;

    if (budget === undefined || budget === null || budget === '') {
      return res.status(400).json({ error: 'Budget is required' });
    }

    // The service multiplies by 100 before sending to Meta, so a
    // non-numeric or non-positive value would previously have produced an
    // opaque upstream 500; reject it here with a clear 400 instead.
    const numericBudget = Number(budget);
    if (!Number.isFinite(numericBudget) || numericBudget <= 0) {
      return res.status(400).json({ error: 'Budget must be a positive number' });
    }

    const result = await updateCampaignBudget(campaignId, numericBudget);
    res.json(result);
  } catch (error) {
    sendMetaError(res, 'Budget update error', 'Failed to update campaign budget', error);
  }
});

// Pause or unpause a campaign. :action must be 'pause' or 'unpause'.
router.post('/campaigns/:campaignId/:action', async (req, res) => {
  try {
    const { campaignId, action } = req.params;

    if (!['pause', 'unpause'].includes(action)) {
      return res.status(400).json({ error: 'Invalid action. Use "pause" or "unpause"' });
    }

    const result = await updateCampaignStatus(campaignId, action);
    res.json(result);
  } catch (error) {
    sendMetaError(res, 'Status update error', 'Failed to update campaign status', error);
  }
});

module.exports = router;
|
||||
31
inventory-server/dashboard/meta-server/server.js
Normal file
31
inventory-server/dashboard/meta-server/server.js
Normal file
@@ -0,0 +1,31 @@
|
||||
const express = require('express');
const cors = require('cors');
const path = require('path');

// Resolve the .env file relative to this script so the server can be
// launched from any working directory.
require('dotenv').config({ path: path.resolve(__dirname, '.env') });

const campaignRoutes = require('./routes/campaigns.routes');

const app = express();
const port = process.env.PORT || 3005;

// Global middleware: allow cross-origin requests and parse JSON bodies.
app.use(cors());
app.use(express.json());

// Mount the Meta (Facebook) campaign routes under /api/meta.
app.use('/api/meta', campaignRoutes);

// Catch-all error handler: log the stack, hide internals from clients.
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({ error: 'Something went wrong!' });
});

// Start listening; exported for programmatic use (e.g. tests).
app.listen(port, () => {
  console.log(`Meta API server running on port ${port}`);
});

module.exports = app;
|
||||
@@ -0,0 +1,99 @@
|
||||
const { default: axios } = require('axios');
|
||||
|
||||
const META_API_VERSION = process.env.META_API_VERSION || 'v21.0';
|
||||
const META_API_BASE_URL = `https://graph.facebook.com/${META_API_VERSION}`;
|
||||
const META_ACCESS_TOKEN = process.env.META_ACCESS_TOKEN;
|
||||
const AD_ACCOUNT_ID = process.env.META_AD_ACCOUNT_ID;
|
||||
|
||||
const metaApiRequest = async (endpoint, params = {}) => {
|
||||
try {
|
||||
const response = await axios.get(`${META_API_BASE_URL}/${endpoint}`, {
|
||||
params: {
|
||||
access_token: META_ACCESS_TOKEN,
|
||||
time_zone: 'America/New_York',
|
||||
...params,
|
||||
},
|
||||
});
|
||||
return response.data;
|
||||
} catch (error) {
|
||||
console.error('Meta API Error:', {
|
||||
message: error.message,
|
||||
response: error.response?.data,
|
||||
endpoint,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const fetchCampaigns = async (since, until) => {
|
||||
const campaigns = await metaApiRequest(`act_${AD_ACCOUNT_ID}/campaigns`, {
|
||||
fields: [
|
||||
'id',
|
||||
'name',
|
||||
'status',
|
||||
'objective',
|
||||
'daily_budget',
|
||||
'lifetime_budget',
|
||||
'adsets{daily_budget,lifetime_budget}',
|
||||
`insights.time_range({'since':'${since}','until':'${until}'}).level(campaign){
|
||||
spend,
|
||||
impressions,
|
||||
clicks,
|
||||
ctr,
|
||||
reach,
|
||||
frequency,
|
||||
cpm,
|
||||
cpc,
|
||||
actions,
|
||||
action_values,
|
||||
cost_per_action_type
|
||||
}`,
|
||||
].join(','),
|
||||
limit: 100,
|
||||
});
|
||||
|
||||
return campaigns.data.filter(c => c.insights?.data?.[0]?.spend > 0);
|
||||
};
|
||||
|
||||
const fetchAccountInsights = async (since, until) => {
|
||||
const accountInsights = await metaApiRequest(`act_${AD_ACCOUNT_ID}/insights`, {
|
||||
fields: 'reach,spend,impressions,clicks,ctr,cpm,actions,action_values',
|
||||
time_range: JSON.stringify({ since, until }),
|
||||
});
|
||||
|
||||
return accountInsights.data[0] || null;
|
||||
};
|
||||
|
||||
const updateCampaignBudget = async (campaignId, budget) => {
|
||||
try {
|
||||
const response = await axios.post(`${META_API_BASE_URL}/${campaignId}`, {
|
||||
access_token: META_ACCESS_TOKEN,
|
||||
daily_budget: budget * 100, // Convert to cents
|
||||
});
|
||||
return response.data;
|
||||
} catch (error) {
|
||||
console.error('Update campaign budget error:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const updateCampaignStatus = async (campaignId, action) => {
|
||||
try {
|
||||
const status = action === 'pause' ? 'PAUSED' : 'ACTIVE';
|
||||
const response = await axios.post(`${META_API_BASE_URL}/${campaignId}`, {
|
||||
access_token: META_ACCESS_TOKEN,
|
||||
status,
|
||||
});
|
||||
return response.data;
|
||||
} catch (error) {
|
||||
console.error('Update campaign status error:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
fetchCampaigns,
|
||||
fetchAccountInsights,
|
||||
updateCampaignBudget,
|
||||
updateCampaignStatus,
|
||||
};
|
||||
24
inventory-server/dashboard/package-lock.json
generated
Normal file
24
inventory-server/dashboard/package-lock.json
generated
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "dashboard",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"dotenv": "^16.4.7"
|
||||
}
|
||||
},
|
||||
"node_modules/dotenv": {
|
||||
"version": "16.4.7",
|
||||
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
|
||||
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://dotenvx.com"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
5
inventory-server/dashboard/package.json
Normal file
5
inventory-server/dashboard/package.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"dotenv": "^16.4.7"
|
||||
}
|
||||
}
|
||||
13
inventory-server/dashboard/typeform-server/.env.example
Normal file
13
inventory-server/dashboard/typeform-server/.env.example
Normal file
@@ -0,0 +1,13 @@
|
||||
# Server Configuration
|
||||
NODE_ENV=development
|
||||
TYPEFORM_PORT=3008
|
||||
|
||||
# Redis Configuration
|
||||
REDIS_URL=redis://localhost:6379
|
||||
|
||||
# Typeform API Configuration
|
||||
TYPEFORM_ACCESS_TOKEN=your_typeform_access_token_here
|
||||
|
||||
# Optional: Form IDs (if you want to store them in env)
|
||||
TYPEFORM_FORM_ID_1=your_first_form_id
|
||||
TYPEFORM_FORM_ID_2=your_second_form_id
|
||||
1411
inventory-server/dashboard/typeform-server/package-lock.json
generated
Normal file
1411
inventory-server/dashboard/typeform-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
20
inventory-server/dashboard/typeform-server/package.json
Normal file
20
inventory-server/dashboard/typeform-server/package.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "typeform-server",
|
||||
"version": "1.0.0",
|
||||
"description": "Typeform API integration server",
|
||||
"main": "server.js",
|
||||
"scripts": {
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^1.6.2",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.3.1",
|
||||
"express": "^4.18.2",
|
||||
"redis": "^4.6.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,121 @@
|
||||
const express = require('express');
const router = express.Router();
const typeformService = require('../services/typeform.service');

/**
 * Map a failed Typeform API call onto an HTTP response.
 * Translates upstream 401/404/400 statuses into matching client-facing
 * errors, and falls back to a 500 carrying `fallbackMessage` otherwise.
 * Extracted so both routes share one copy of this mapping (the insights
 * route now also maps upstream 400s, consistent with the responses route).
 */
function respondWithTypeformError(res, error, formId, fallbackMessage) {
  const status = error.response?.status;

  if (status === 401) {
    return res.status(401).json({
      error: 'Authentication failed',
      details: 'Invalid Typeform API credentials'
    });
  }

  if (status === 404) {
    return res.status(404).json({
      error: 'Not found',
      details: `Form '${formId}' not found`
    });
  }

  if (status === 400) {
    return res.status(400).json({
      error: 'Invalid request',
      details: error.response?.data?.message || 'The request was invalid',
      data: error.response?.data
    });
  }

  return res.status(500).json({
    error: fallbackMessage,
    details: error.response?.data?.message || error.message,
    data: error.response?.data
  });
}

// Get form responses, optionally filtered via the query string.
router.get('/forms/:formId/responses', async (req, res) => {
  try {
    const { formId } = req.params;
    const filters = req.query;

    console.log(`Fetching responses for form ${formId} with filters:`, filters);

    // Defensive: Express will not match this route without a formId, but
    // keep the guard in case the routing ever changes.
    if (!formId) {
      return res.status(400).json({
        error: 'Missing form ID',
        details: 'The form ID parameter is required'
      });
    }

    const data = await typeformService.getFormResponsesWithFilters(formId, filters);

    if (!data) {
      return res.status(404).json({
        error: 'No data found',
        details: `No responses found for form ${formId}`
      });
    }

    res.json(data);
  } catch (error) {
    console.error('Form responses error:', {
      formId: req.params.formId,
      filters: req.query,
      error: error.message,
      stack: error.stack,
      response: error.response?.data
    });

    respondWithTypeformError(res, error, req.params.formId, 'Failed to fetch form responses');
  }
});

// Get form insights (aggregate metrics for a form).
router.get('/forms/:formId/insights', async (req, res) => {
  try {
    const { formId } = req.params;

    if (!formId) {
      return res.status(400).json({
        error: 'Missing form ID',
        details: 'The form ID parameter is required'
      });
    }

    const data = await typeformService.getFormInsights(formId);

    if (!data) {
      return res.status(404).json({
        error: 'No data found',
        details: `No insights found for form ${formId}`
      });
    }

    res.json(data);
  } catch (error) {
    console.error('Form insights error:', {
      formId: req.params.formId,
      error: error.message,
      response: error.response?.data
    });

    respondWithTypeformError(res, error, req.params.formId, 'Failed to fetch form insights');
  }
});

module.exports = router;
|
||||
31
inventory-server/dashboard/typeform-server/server.js
Normal file
31
inventory-server/dashboard/typeform-server/server.js
Normal file
@@ -0,0 +1,31 @@
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const path = require('path');
|
||||
require('dotenv').config({
|
||||
path: path.resolve(__dirname, '.env')
|
||||
});
|
||||
|
||||
const app = express();
|
||||
const port = process.env.TYPEFORM_PORT || 3008;
|
||||
|
||||
app.use(cors());
|
||||
app.use(express.json());
|
||||
|
||||
// Import routes
|
||||
const typeformRoutes = require('./routes/typeform.routes');
|
||||
|
||||
// Use routes
|
||||
app.use('/api/typeform', typeformRoutes);
|
||||
|
||||
// Error handling middleware
|
||||
app.use((err, req, res, next) => {
|
||||
console.error(err.stack);
|
||||
res.status(500).json({ error: 'Something went wrong!' });
|
||||
});
|
||||
|
||||
// Start server
|
||||
app.listen(port, () => {
|
||||
console.log(`Typeform API server running on port ${port}`);
|
||||
});
|
||||
|
||||
module.exports = app;
|
||||
@@ -0,0 +1,142 @@
|
||||
const axios = require('axios');
|
||||
const { createClient } = require('redis');
|
||||
|
||||
class TypeformService {
|
||||
constructor() {
|
||||
this.redis = createClient({
|
||||
url: process.env.REDIS_URL
|
||||
});
|
||||
|
||||
this.redis.on('error', err => console.error('Redis Client Error:', err));
|
||||
this.redis.connect().catch(err => console.error('Redis connection error:', err));
|
||||
|
||||
const token = process.env.TYPEFORM_ACCESS_TOKEN;
|
||||
console.log('Initializing Typeform client with token:', token ? `${token.slice(0, 10)}...` : 'missing');
|
||||
|
||||
this.apiClient = axios.create({
|
||||
baseURL: 'https://api.typeform.com',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${token}`,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
});
|
||||
|
||||
// Test the token
|
||||
this.testConnection();
|
||||
}
|
||||
|
||||
async testConnection() {
|
||||
try {
|
||||
const response = await this.apiClient.get('/forms');
|
||||
console.log('Typeform connection test successful:', {
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Typeform connection test failed:', {
|
||||
error: error.message,
|
||||
response: error.response?.data,
|
||||
status: error.response?.status,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async getFormResponses(formId, params = {}) {
|
||||
const cacheKey = `typeform:responses:${formId}:${JSON.stringify(params)}`;
|
||||
|
||||
try {
|
||||
// Try Redis first
|
||||
const cachedData = await this.redis.get(cacheKey);
|
||||
if (cachedData) {
|
||||
console.log(`Form responses for ${formId} found in Redis cache`);
|
||||
return JSON.parse(cachedData);
|
||||
}
|
||||
|
||||
// Fetch from API
|
||||
const response = await this.apiClient.get(`/forms/${formId}/responses`, { params });
|
||||
const data = response.data;
|
||||
|
||||
// Save to Redis with 5 minute expiry
|
||||
await this.redis.set(cacheKey, JSON.stringify(data), {
|
||||
EX: 300 // 5 minutes
|
||||
});
|
||||
|
||||
return data;
|
||||
} catch (error) {
|
||||
console.error(`Error fetching form responses for ${formId}:`, {
|
||||
error: error.message,
|
||||
params,
|
||||
response: error.response?.data
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getFormInsights(formId) {
|
||||
const cacheKey = `typeform:insights:${formId}`;
|
||||
|
||||
try {
|
||||
// Try Redis first
|
||||
const cachedData = await this.redis.get(cacheKey);
|
||||
if (cachedData) {
|
||||
console.log(`Form insights for ${formId} found in Redis cache`);
|
||||
return JSON.parse(cachedData);
|
||||
}
|
||||
|
||||
// Log the request details
|
||||
console.log(`Fetching insights for form ${formId}...`, {
|
||||
url: `/insights/${formId}/summary`,
|
||||
headers: this.apiClient.defaults.headers
|
||||
});
|
||||
|
||||
// Fetch from API
|
||||
const response = await this.apiClient.get(`/insights/${formId}/summary`);
|
||||
console.log('Typeform insights response:', {
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
data: response.data
|
||||
});
|
||||
const data = response.data;
|
||||
|
||||
// Save to Redis with 5 minute expiry
|
||||
await this.redis.set(cacheKey, JSON.stringify(data), {
|
||||
EX: 300 // 5 minutes
|
||||
});
|
||||
|
||||
return data;
|
||||
} catch (error) {
|
||||
console.error(`Error fetching form insights for ${formId}:`, {
|
||||
error: error.message,
|
||||
response: error.response?.data,
|
||||
status: error.response?.status,
|
||||
headers: error.response?.headers,
|
||||
requestUrl: `/insights/${formId}/summary`,
|
||||
requestHeaders: this.apiClient.defaults.headers
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getFormResponsesWithFilters(formId, { since, until, pageSize = 25, ...otherParams } = {}) {
|
||||
try {
|
||||
const params = {
|
||||
page_size: pageSize,
|
||||
...otherParams
|
||||
};
|
||||
|
||||
if (since) {
|
||||
params.since = new Date(since).toISOString();
|
||||
}
|
||||
if (until) {
|
||||
params.until = new Date(until).toISOString();
|
||||
}
|
||||
|
||||
return await this.getFormResponses(formId, params);
|
||||
} catch (error) {
|
||||
console.error('Error in getFormResponsesWithFilters:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new TypeformService();
|
||||
@@ -1,6 +1,169 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// Forecasting: summarize sales for products received in a period by brand
|
||||
router.get('/forecast', async (req, res) => {
|
||||
try {
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
const brand = (req.query.brand || '').toString();
|
||||
const titleSearch = (req.query.search || req.query.q || '').toString().trim() || null;
|
||||
const startDateStr = req.query.startDate;
|
||||
const endDateStr = req.query.endDate;
|
||||
|
||||
if (!brand) {
|
||||
return res.status(400).json({ error: 'Missing required parameter: brand' });
|
||||
}
|
||||
|
||||
// Default to last 30 days if no dates provided
|
||||
const endDate = endDateStr ? new Date(endDateStr) : new Date();
|
||||
const startDate = startDateStr ? new Date(startDateStr) : new Date(endDate.getTime() - 29 * 24 * 60 * 60 * 1000);
|
||||
|
||||
// Normalize to date boundaries for consistency
|
||||
const startISO = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate())).toISOString();
|
||||
const endISO = new Date(Date.UTC(endDate.getUTCFullYear(), endDate.getUTCMonth(), endDate.getUTCDate())).toISOString();
|
||||
|
||||
const sql = `
|
||||
WITH params AS (
|
||||
SELECT
|
||||
$1::date AS start_date,
|
||||
$2::date AS end_date,
|
||||
$3::text AS brand,
|
||||
$4::text AS title_search,
|
||||
(($2::date - $1::date) + 1)::int AS days
|
||||
),
|
||||
category_path AS (
|
||||
WITH RECURSIVE cp AS (
|
||||
SELECT c.cat_id, c.name, c.parent_id, c.name::text AS path
|
||||
FROM categories c WHERE c.parent_id IS NULL
|
||||
UNION ALL
|
||||
SELECT c.cat_id, c.name, c.parent_id, (cp.path || ' > ' || c.name)::text
|
||||
FROM categories c
|
||||
JOIN cp ON c.parent_id = cp.cat_id
|
||||
)
|
||||
SELECT * FROM cp
|
||||
),
|
||||
product_first_received AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
COALESCE(p.first_received::date, MIN(r.received_date)::date) AS first_received_date
|
||||
FROM products p
|
||||
LEFT JOIN receivings r ON r.pid = p.pid
|
||||
GROUP BY p.pid, p.first_received
|
||||
),
|
||||
recent_products AS (
|
||||
SELECT p.pid
|
||||
FROM products p
|
||||
JOIN product_first_received fr ON fr.pid = p.pid
|
||||
JOIN params pr ON 1=1
|
||||
WHERE p.visible = true
|
||||
AND COALESCE(p.brand,'Unbranded') = pr.brand
|
||||
AND fr.first_received_date BETWEEN pr.start_date AND pr.end_date
|
||||
AND (pr.title_search IS NULL OR p.title ILIKE '%' || pr.title_search || '%')
|
||||
),
|
||||
product_pick_category AS (
|
||||
(
|
||||
SELECT DISTINCT ON (pc.pid)
|
||||
pc.pid,
|
||||
c.name AS category_name,
|
||||
COALESCE(cp.path, c.name) AS path
|
||||
FROM product_categories pc
|
||||
JOIN categories c ON c.cat_id = pc.cat_id AND (c.type IS NULL OR c.type NOT IN (20,21))
|
||||
LEFT JOIN category_path cp ON cp.cat_id = c.cat_id
|
||||
WHERE pc.pid IN (SELECT pid FROM recent_products)
|
||||
AND (cp.path IS NULL OR (
|
||||
cp.path NOT ILIKE '%Black Friday%'
|
||||
AND cp.path NOT ILIKE '%Deals%'
|
||||
))
|
||||
AND COALESCE(c.name, '') NOT IN ('Black Friday', 'Deals')
|
||||
ORDER BY pc.pid, length(COALESCE(cp.path,'')) DESC
|
||||
)
|
||||
UNION ALL
|
||||
(
|
||||
SELECT
|
||||
rp.pid,
|
||||
'Uncategorized'::text AS category_name,
|
||||
'Uncategorized'::text AS path
|
||||
FROM recent_products rp
|
||||
WHERE NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM product_categories pc
|
||||
JOIN categories c ON c.cat_id = pc.cat_id AND (c.type IS NULL OR c.type NOT IN (20,21))
|
||||
LEFT JOIN category_path cp ON cp.cat_id = c.cat_id
|
||||
WHERE pc.pid = rp.pid
|
||||
AND (cp.path IS NULL OR (
|
||||
cp.path NOT ILIKE '%Black Friday%'
|
||||
AND cp.path NOT ILIKE '%Deals%'
|
||||
))
|
||||
AND COALESCE(c.name, '') NOT IN ('Black Friday', 'Deals')
|
||||
)
|
||||
)
|
||||
),
|
||||
product_sales AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
p.title,
|
||||
p.sku,
|
||||
COALESCE(p.stock_quantity, 0) AS stock_quantity,
|
||||
COALESCE(p.price, 0) AS price,
|
||||
COALESCE(SUM(o.quantity), 0) AS total_sold
|
||||
FROM recent_products rp
|
||||
JOIN products p ON p.pid = rp.pid
|
||||
LEFT JOIN params pr ON true
|
||||
LEFT JOIN orders o ON o.pid = p.pid
|
||||
AND o.date::date BETWEEN pr.start_date AND pr.end_date
|
||||
AND (o.canceled IS DISTINCT FROM TRUE)
|
||||
GROUP BY p.pid, p.title, p.sku, p.stock_quantity, p.price
|
||||
)
|
||||
SELECT
|
||||
ppc.category_name,
|
||||
ppc.path,
|
||||
COUNT(ps.pid) AS num_products,
|
||||
SUM(ps.total_sold) AS total_sold,
|
||||
ROUND(AVG(COALESCE(ps.total_sold,0) / NULLIF(pr.days,0)), 2) AS avg_daily_sales,
|
||||
ROUND(AVG(COALESCE(ps.total_sold,0)), 2) AS avg_total_sold,
|
||||
MIN(ps.total_sold) AS min_total_sold,
|
||||
MAX(ps.total_sold) AS max_total_sold,
|
||||
JSON_AGG(
|
||||
JSON_BUILD_OBJECT(
|
||||
'pid', ps.pid,
|
||||
'title', ps.title,
|
||||
'sku', ps.sku,
|
||||
'total_sold', ps.total_sold,
|
||||
'categoryPath', ppc.path
|
||||
)
|
||||
) AS products
|
||||
FROM product_sales ps
|
||||
JOIN product_pick_category ppc ON ppc.pid = ps.pid
|
||||
JOIN params pr ON true
|
||||
GROUP BY ppc.category_name, ppc.path
|
||||
HAVING SUM(ps.total_sold) >= 0
|
||||
ORDER BY (ppc.category_name = 'Uncategorized') ASC, avg_total_sold DESC NULLS LAST
|
||||
LIMIT 200;
|
||||
`;
|
||||
|
||||
const { rows } = await pool.query(sql, [startISO, endISO, brand, titleSearch]);
|
||||
|
||||
// Normalize/shape response keys to match front-end expectations
|
||||
const shaped = rows.map(r => ({
|
||||
category_name: r.category_name,
|
||||
path: r.path,
|
||||
avg_daily_sales: Number(r.avg_daily_sales) || 0,
|
||||
total_sold: Number(r.total_sold) || 0,
|
||||
num_products: Number(r.num_products) || 0,
|
||||
avgTotalSold: Number(r.avg_total_sold) || 0,
|
||||
minSold: Number(r.min_total_sold) || 0,
|
||||
maxSold: Number(r.max_total_sold) || 0,
|
||||
products: Array.isArray(r.products) ? r.products : []
|
||||
}));
|
||||
|
||||
res.json(shaped);
|
||||
} catch (error) {
|
||||
console.error('Error fetching forecast data:', error);
|
||||
res.status(500).json({ error: 'Failed to fetch forecast data' });
|
||||
}
|
||||
});
|
||||
|
||||
// Get overall analytics stats
|
||||
router.get('/stats', async (req, res) => {
|
||||
try {
|
||||
@@ -608,4 +771,4 @@ router.get('/categories', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
module.exports = router;
|
||||
|
||||
@@ -107,10 +107,10 @@ router.get('/stats', async (req, res) => {
|
||||
// Get overall cost metrics from purchase orders
|
||||
const { rows: [overallCostMetrics] } = await pool.query(`
|
||||
SELECT
|
||||
ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
|
||||
ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
|
||||
ROUND((SUM(ordered * po_cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
|
||||
ROUND(SUM(ordered * po_cost_price)::numeric, 3) as total_spend
|
||||
FROM purchase_orders
|
||||
WHERE cost_price IS NOT NULL
|
||||
WHERE po_cost_price IS NOT NULL
|
||||
AND ordered > 0
|
||||
AND vendor IS NOT NULL AND vendor != ''
|
||||
`);
|
||||
@@ -261,10 +261,10 @@ router.get('/', async (req, res) => {
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
vendor,
|
||||
ROUND((SUM(ordered * cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
|
||||
ROUND(SUM(ordered * cost_price)::numeric, 3) as total_spend
|
||||
ROUND((SUM(ordered * po_cost_price)::numeric / NULLIF(SUM(ordered), 0)), 2) as avg_unit_cost,
|
||||
ROUND(SUM(ordered * po_cost_price)::numeric, 3) as total_spend
|
||||
FROM purchase_orders
|
||||
WHERE cost_price IS NOT NULL AND ordered > 0
|
||||
WHERE po_cost_price IS NOT NULL AND ordered > 0
|
||||
GROUP BY vendor
|
||||
) po ON vm.vendor_name = po.vendor
|
||||
${whereClause}
|
||||
|
||||
6
inventory/package-lock.json
generated
6
inventory/package-lock.json
generated
@@ -3763,9 +3763,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/caniuse-lite": {
|
||||
"version": "1.0.30001700",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001700.tgz",
|
||||
"integrity": "sha512-2S6XIXwaE7K7erT8dY+kLQcpa5ms63XlRkMkReXjle+kf6c5g38vyMl+Z5y8dSxOFDhcFe+nxnn261PLxBSQsQ==",
|
||||
"version": "1.0.30001739",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001739.tgz",
|
||||
"integrity": "sha512-y+j60d6ulelrNSwpPyrHdl+9mJnQzHBr08xm48Qno0nSk4h3Qojh+ziv2qE6rXf4k3tadF4o1J/1tAbVm1NtnA==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
|
||||
@@ -7,7 +7,8 @@
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
"preview": "vite preview",
|
||||
"mount": "../mountremote.command"
|
||||
},
|
||||
"dependencies": {
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
|
||||
@@ -103,14 +103,7 @@ function App() {
|
||||
}>
|
||||
{/* Core inventory app routes - will be lazy loaded */}
|
||||
<Route index element={
|
||||
<Protected page="dashboard" fallback={<FirstAccessiblePage />}>
|
||||
<Suspense fallback={<PageLoading />}>
|
||||
<Overview />
|
||||
</Suspense>
|
||||
</Protected>
|
||||
} />
|
||||
<Route path="/" element={
|
||||
<Protected page="dashboard">
|
||||
<Protected page="overview" fallback={<FirstAccessiblePage />}>
|
||||
<Suspense fallback={<PageLoading />}>
|
||||
<Overview />
|
||||
</Suspense>
|
||||
|
||||
@@ -1,67 +1,162 @@
|
||||
# Permission System Documentation
|
||||
|
||||
This document outlines the simplified permission system implemented in the Inventory Manager application.
|
||||
This document outlines the permission system implemented in the Inventory Manager application.
|
||||
|
||||
## Permission Structure
|
||||
|
||||
Permissions follow this naming convention:
|
||||
|
||||
- Page access: `access:{page_name}`
|
||||
- Actions: `{action}:{resource}`
|
||||
- Settings sections: `settings:{section_name}`
|
||||
- Admin features: `admin:{feature}`
|
||||
|
||||
Examples:
|
||||
- `access:products` - Can access the Products page
|
||||
- `create:products` - Can create new products
|
||||
- `edit:users` - Can edit user accounts
|
||||
- `settings:user_management` - Can access User Management settings
|
||||
- `admin:debug` - Can see debug information
|
||||
|
||||
## Permission Component
|
||||
## Permission Components
|
||||
|
||||
### Protected
|
||||
### PermissionGuard
|
||||
|
||||
The core component that conditionally renders content based on permissions.
|
||||
|
||||
```tsx
|
||||
<Protected
|
||||
permission="create:products"
|
||||
<PermissionGuard
|
||||
permission="settings:user_management"
|
||||
fallback={<p>No permission</p>}
|
||||
>
|
||||
<button>Create Product</button>
|
||||
</Protected>
|
||||
<button>Manage Users</button>
|
||||
</PermissionGuard>
|
||||
```
|
||||
|
||||
Options:
|
||||
- `permission`: Single permission code (e.g., "create:products")
|
||||
- `page`: Page name (checks `access:{page}` permission)
|
||||
- `resource` + `action`: Resource and action (checks `{action}:{resource}` permission)
|
||||
- `permission`: Single permission code
|
||||
- `anyPermissions`: Array of permissions (ANY match grants access)
|
||||
- `allPermissions`: Array of permissions (ALL required)
|
||||
- `adminOnly`: For admin-only sections
|
||||
- `page`: Page name (checks `access:{page}` permission)
|
||||
- `fallback`: Content to show if permission check fails
|
||||
|
||||
### RequireAuth
|
||||
### PermissionProtectedRoute
|
||||
|
||||
Used for basic authentication checks (is user logged in?).
|
||||
Protects entire pages based on page access permissions.
|
||||
|
||||
```tsx
|
||||
<Route element={
|
||||
<RequireAuth>
|
||||
<MainLayout />
|
||||
</RequireAuth>
|
||||
}>
|
||||
{/* Protected routes */}
|
||||
</Route>
|
||||
<Route path="/products" element={
|
||||
<PermissionProtectedRoute page="products">
|
||||
<Products />
|
||||
</PermissionProtectedRoute>
|
||||
} />
|
||||
```
|
||||
|
||||
## Common Permission Codes
|
||||
### ProtectedSection
|
||||
|
||||
Protects sections within a page based on action permissions.
|
||||
|
||||
```tsx
|
||||
<ProtectedSection page="products" action="create">
|
||||
<button>Add Product</button>
|
||||
</ProtectedSection>
|
||||
```
|
||||
|
||||
### PermissionButton
|
||||
|
||||
Button that automatically handles permissions.
|
||||
|
||||
```tsx
|
||||
<PermissionButton
|
||||
page="products"
|
||||
action="create"
|
||||
onClick={handleCreateProduct}
|
||||
>
|
||||
Add Product
|
||||
</PermissionButton>
|
||||
```
|
||||
|
||||
### SettingsSection
|
||||
|
||||
Specific component for settings with built-in permission checks.
|
||||
|
||||
```tsx
|
||||
<SettingsSection
|
||||
title="System Settings"
|
||||
description="Configure global settings"
|
||||
permission="settings:global"
|
||||
>
|
||||
{/* Settings content */}
|
||||
</SettingsSection>
|
||||
```
|
||||
|
||||
## Permission Hooks
|
||||
|
||||
### usePermissions
|
||||
|
||||
Core hook for checking any permission.
|
||||
|
||||
```tsx
|
||||
const { hasPermission, hasPageAccess, isAdmin } = usePermissions();
|
||||
if (hasPermission('settings:user_management')) {
|
||||
// Can access user management
|
||||
}
|
||||
```
|
||||
|
||||
### usePagePermission
|
||||
|
||||
Specialized hook for page-level permissions.
|
||||
|
||||
```tsx
|
||||
const { canView, canCreate, canEdit, canDelete } = usePagePermission('products');
|
||||
if (canView()) {
|
||||
// Can view products
|
||||
}
|
||||
```
|
||||
|
||||
## Database Schema
|
||||
|
||||
Permissions are stored in the database:
|
||||
- `permissions` table: Stores all available permissions
|
||||
- `user_permissions` junction table: Maps permissions to users
|
||||
|
||||
Admin users automatically have all permissions.
|
||||
|
||||
## Implemented Permission Codes
|
||||
|
||||
### Page Access Permissions
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `access:dashboard` | Access to Dashboard page |
|
||||
| `access:overview` | Access to Overview page |
|
||||
| `access:products` | Access to Products page |
|
||||
| `create:products` | Create new products |
|
||||
| `edit:products` | Edit existing products |
|
||||
| `delete:products` | Delete products |
|
||||
| `view:users` | View user accounts |
|
||||
| `edit:users` | Edit user accounts |
|
||||
| `manage:permissions` | Assign permissions to users |
|
||||
| `access:categories` | Access to Categories page |
|
||||
| `access:brands` | Access to Brands page |
|
||||
| `access:vendors` | Access to Vendors page |
|
||||
| `access:purchase_orders` | Access to Purchase Orders page |
|
||||
| `access:analytics` | Access to Analytics page |
|
||||
| `access:forecasting` | Access to Forecasting page |
|
||||
| `access:import` | Access to Import page |
|
||||
| `access:settings` | Access to Settings page |
|
||||
| `access:chat` | Access to Chat Archive page |
|
||||
|
||||
### Settings Permissions
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `settings:global` | Access to Global Settings section |
|
||||
| `settings:products` | Access to Product Settings section |
|
||||
| `settings:vendors` | Access to Vendor Settings section |
|
||||
| `settings:data_management` | Access to Data Management settings |
|
||||
| `settings:calculation_settings` | Access to Calculation Settings |
|
||||
| `settings:library_management` | Access to Image Library Management |
|
||||
| `settings:performance_metrics` | Access to Performance Metrics |
|
||||
| `settings:prompt_management` | Access to AI Prompt Management |
|
||||
| `settings:stock_management` | Access to Stock Management |
|
||||
| `settings:templates` | Access to Template Management |
|
||||
| `settings:user_management` | Access to User Management |
|
||||
|
||||
### Admin Permissions
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `admin:debug` | Can see debug information and features |
|
||||
|
||||
## Implementation Examples
|
||||
|
||||
@@ -70,35 +165,40 @@ Used for basic authentication checks (is user logged in?).
|
||||
In `App.tsx`:
|
||||
```tsx
|
||||
<Route path="/products" element={
|
||||
<Protected page="products" fallback={<Navigate to="/" />}>
|
||||
<PermissionProtectedRoute page="products">
|
||||
<Products />
|
||||
</Protected>
|
||||
</PermissionProtectedRoute>
|
||||
} />
|
||||
```
|
||||
|
||||
### Component Level Protection
|
||||
|
||||
```tsx
|
||||
<Protected permission="edit:products">
|
||||
<form>
|
||||
{/* Form fields */}
|
||||
<button type="submit">Save Changes</button>
|
||||
</form>
|
||||
</Protected>
|
||||
const { hasPermission } = usePermissions();
|
||||
|
||||
function handleAction() {
|
||||
if (!hasPermission('settings:user_management')) {
|
||||
toast.error("You don't have permission");
|
||||
return;
|
||||
}
|
||||
// Action logic
|
||||
}
|
||||
```
|
||||
|
||||
### Button Protection
|
||||
### UI Element Protection
|
||||
|
||||
```tsx
|
||||
<Button
|
||||
onClick={handleDelete}
|
||||
disabled={!hasPermission('delete:products')}
|
||||
>
|
||||
Delete
|
||||
</Button>
|
||||
<PermissionGuard permission="settings:user_management">
|
||||
<button onClick={handleManageUsers}>
|
||||
Manage Users
|
||||
</button>
|
||||
</PermissionGuard>
|
||||
```
|
||||
|
||||
// With Protected component
|
||||
<Protected permission="delete:products" fallback={null}>
|
||||
<Button onClick={handleDelete}>Delete</Button>
|
||||
</Protected>
|
||||
```
|
||||
## Notes
|
||||
|
||||
- **Page Access**: These permissions control which pages a user can navigate to
|
||||
- **Settings Access**: These permissions control access to different sections within the Settings page
|
||||
- **Admin Features**: Special permissions for administrative functions
|
||||
- **CRUD Operations**: The application currently focuses on viewing and managing data rather than creating/editing/deleting individual records
|
||||
- **User Management**: User CRUD operations are handled through the settings interface rather than dedicated user management pages
|
||||
1488
inventory/src/components/dashboard/FinancialOverview.tsx
Normal file
1488
inventory/src/components/dashboard/FinancialOverview.tsx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,76 @@
|
||||
import { format, addDays, addMonths } from "date-fns";
|
||||
import { Calendar as CalendarIcon, Info } from "lucide-react";
|
||||
import { DateRange } from "react-day-picker";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Calendar } from "@/components/ui/calendar";
|
||||
import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
interface DateRangePickerQuickProps {
|
||||
value: DateRange;
|
||||
onChange: (range: DateRange | undefined) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function DateRangePickerQuick({ value, onChange, className }: DateRangePickerQuickProps) {
|
||||
return (
|
||||
<div className={cn("grid gap-2", className)}>
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<Button
|
||||
id="date"
|
||||
variant={"outline"}
|
||||
className={cn(
|
||||
"h-8 w-[300px] justify-start text-left font-normal",
|
||||
!value && "text-muted-foreground"
|
||||
)}
|
||||
>
|
||||
<CalendarIcon className="mr-2 h-4 w-4" />
|
||||
{value?.from ? (
|
||||
value.to ? (
|
||||
<>
|
||||
{format(value.from, "LLL dd, y")} -{" "}
|
||||
{format(value.to, "LLL dd, y")}
|
||||
</>
|
||||
) : (
|
||||
format(value.from, "LLL dd, y")
|
||||
)
|
||||
) : (
|
||||
<span>Pick a date range</span>
|
||||
)}
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-auto p-2" align="start">
|
||||
<div className="flex justify-center"><Badge variant="secondary"><Info className="mr-1 h-3 w-3" /> Only products received during the selected date range will be shown</Badge></div>
|
||||
<div className="space-y-2">
|
||||
<Calendar
|
||||
initialFocus
|
||||
mode="range"
|
||||
defaultMonth={value?.from}
|
||||
selected={value}
|
||||
onSelect={(range) => {
|
||||
if (range) onChange(range);
|
||||
}}
|
||||
numberOfMonths={2}
|
||||
/>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Button size="sm" variant="outline" onClick={() => onChange({ from: addDays(addMonths(new Date(), -1), 1), to: new Date() })}>
|
||||
Last Month
|
||||
</Button>
|
||||
<Button size="sm" variant="outline" onClick={() => onChange({ from: addDays(addMonths(new Date(), -3), 1), to: new Date() })}>
|
||||
Last 3 Months
|
||||
</Button>
|
||||
<Button size="sm" variant="outline" onClick={() => onChange({ from: addDays(addMonths(new Date(), -6), 1), to: new Date() })}>
|
||||
Last 6 Months
|
||||
</Button>
|
||||
<Button size="sm" variant="outline" onClick={() => onChange({ from: addDays(addMonths(new Date(), -12), 1), to: new Date() })}>
|
||||
Last Year
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
956
inventory/src/components/forecasting/QuickOrderBuilder.tsx
Normal file
956
inventory/src/components/forecasting/QuickOrderBuilder.tsx
Normal file
@@ -0,0 +1,956 @@
|
||||
import { useEffect, useMemo, useRef, useState, useTransition, useCallback, memo } from "react";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Textarea } from "@/components/ui/textarea";
|
||||
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
|
||||
import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table";
|
||||
import { Code } from "@/components/ui/code";
|
||||
import * as XLSX from "xlsx";
|
||||
import { toast } from "sonner";
|
||||
import { X as XIcon } from "lucide-react";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
|
||||
export interface CategorySummary {
|
||||
category: string;
|
||||
categoryPath: string;
|
||||
avgTotalSold: number;
|
||||
minSold: number;
|
||||
maxSold: number;
|
||||
}
|
||||
|
||||
type ParsedRow = {
|
||||
product: string;
|
||||
sku?: string;
|
||||
categoryHint?: string;
|
||||
moq?: number;
|
||||
upc?: string;
|
||||
};
|
||||
|
||||
type OrderRow = ParsedRow & {
|
||||
matchedCategoryPath?: string;
|
||||
matchedCategoryName?: string;
|
||||
baseSuggestion?: number; // from category avg
|
||||
finalQty: number; // adjusted for MOQ
|
||||
};
|
||||
|
||||
type HeaderMap = {
|
||||
// Stores generated column ids like "col-0" instead of raw header text
|
||||
product?: string;
|
||||
sku?: string;
|
||||
categoryHint?: string;
|
||||
moq?: string;
|
||||
upc?: string;
|
||||
};
|
||||
|
||||
const PRODUCT_HEADER_SYNONYMS = [
|
||||
"product",
|
||||
"name",
|
||||
"title",
|
||||
"description",
|
||||
"item",
|
||||
"item name",
|
||||
"sku description",
|
||||
"product name",
|
||||
];
|
||||
|
||||
const SKU_HEADER_SYNONYMS = [
|
||||
"sku",
|
||||
"item#",
|
||||
"item number",
|
||||
"supplier #",
|
||||
"supplier no",
|
||||
"supplier_no",
|
||||
"product code",
|
||||
];
|
||||
|
||||
const CATEGORY_HEADER_SYNONYMS = [
|
||||
"category",
|
||||
"categories",
|
||||
"line",
|
||||
"collection",
|
||||
"type",
|
||||
];
|
||||
|
||||
const MOQ_HEADER_SYNONYMS = [
|
||||
"moq",
|
||||
"min qty",
|
||||
"min. order qty",
|
||||
"min order qty",
|
||||
"qty per unit",
|
||||
"unit qty",
|
||||
"inner pack",
|
||||
"case pack",
|
||||
"pack",
|
||||
];
|
||||
|
||||
const UPC_HEADER_SYNONYMS = [
|
||||
"upc",
|
||||
"barcode",
|
||||
"bar code",
|
||||
"ean",
|
||||
"jan",
|
||||
"upc code",
|
||||
];
|
||||
|
||||
function normalizeHeader(h: string) {
|
||||
return h.trim().toLowerCase();
|
||||
}
|
||||
|
||||
function autoMapHeaderNames(headers: string[]): { product?: string; sku?: string; categoryHint?: string; moq?: string; upc?: string } {
|
||||
const norm = headers.map((h) => normalizeHeader(h));
|
||||
const findFirst = (syns: string[]) => {
|
||||
for (const s of syns) {
|
||||
const idx = norm.findIndex((h) => h === s || h.includes(s));
|
||||
if (idx >= 0) return headers[idx];
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
return {
|
||||
product: findFirst(PRODUCT_HEADER_SYNONYMS) || headers[0],
|
||||
sku: findFirst(SKU_HEADER_SYNONYMS),
|
||||
categoryHint: findFirst(CATEGORY_HEADER_SYNONYMS),
|
||||
moq: findFirst(MOQ_HEADER_SYNONYMS),
|
||||
upc: findFirst(UPC_HEADER_SYNONYMS),
|
||||
};
|
||||
}
|
||||
|
||||
function detectDelimiter(text: string): string {
|
||||
// Very simple heuristic: prefer tab, then comma, then semicolon
|
||||
const lines = text.split(/\r?\n/).slice(0, 5);
|
||||
const counts = { "\t": 0, ",": 0, ";": 0 } as Record<string, number>;
|
||||
for (const line of lines) {
|
||||
counts["\t"] += (line.match(/\t/g) || []).length;
|
||||
counts[","] += (line.match(/,/g) || []).length;
|
||||
counts[";"] += (line.match(/;/g) || []).length;
|
||||
}
|
||||
return Object.entries(counts).sort((a, b) => b[1] - a[1])[0][0];
|
||||
}
|
||||
|
||||
function parsePasted(text: string): { headers: string[]; rows: string[][] } {
|
||||
const delimiter = detectDelimiter(text);
|
||||
const lines = text
|
||||
.split(/\r?\n/)
|
||||
.map((l) => l.trim())
|
||||
.filter(Boolean);
|
||||
if (lines.length === 0) return { headers: [], rows: [] };
|
||||
const headers = lines[0].split(delimiter).map((s) => s.trim());
|
||||
const rows = lines.slice(1).map((l) => {
|
||||
const parts = l.split(delimiter).map((s) => s.trim());
|
||||
// Preserve empty trailing columns by padding to headers length
|
||||
while (parts.length < headers.length) parts.push("");
|
||||
return parts;
|
||||
});
|
||||
return { headers, rows };
|
||||
}
|
||||
|
||||
function toIntOrUndefined(v: any): number | undefined {
|
||||
if (v === null || v === undefined) return undefined;
|
||||
const n = Number(String(v).replace(/[^0-9.-]/g, ""));
|
||||
return Number.isFinite(n) && n > 0 ? Math.round(n) : undefined;
|
||||
}
|
||||
|
||||
function scoreCategoryMatch(catText: string, name: string, hint?: string): number {
|
||||
const base = catText.toLowerCase();
|
||||
const tokens = (name || "")
|
||||
.toLowerCase()
|
||||
.split(/[^a-z0-9]+/)
|
||||
.filter((t) => t.length >= 3);
|
||||
let score = 0;
|
||||
for (const t of tokens) {
|
||||
if (base.includes(t)) score += 2;
|
||||
}
|
||||
if (hint) {
|
||||
const h = hint.toLowerCase();
|
||||
if (base.includes(h)) score += 5;
|
||||
}
|
||||
return score;
|
||||
}
|
||||
|
||||
function suggestFromCategory(avgTotalSold?: number, scalePct: number = 100): number {
|
||||
const scaled = (avgTotalSold || 0) * (isFinite(scalePct) ? scalePct : 100) / 100;
|
||||
const base = Math.max(1, Math.round(scaled));
|
||||
return base;
|
||||
}
|
||||
|
||||
function applyMOQ(qty: number, moq?: number): number {
|
||||
if (!moq || moq <= 1) return Math.max(0, qty);
|
||||
if (qty <= 0) return 0;
|
||||
const mult = Math.ceil(qty / moq);
|
||||
return mult * moq;
|
||||
}
|
||||
|
||||
/**
 * QuickOrderBuilder
 *
 * Builds a purchase-order draft from a pasted table or an uploaded
 * spreadsheet (xlsx/xls/csv/tsv):
 *  1. Parse the input into headers + raw string rows.
 *  2. Let the user map columns to roles (product, UPC, SKU, category hint, MOQ).
 *  3. Match each row to a category and suggest an order quantity from the
 *     category's avgTotalSold, scaled by a user percentage and rounded up
 *     to the row's MOQ.
 *  4. Export the result as JSON ({ upc, quantity } items) for a supplier.
 *
 * The whole working state is persisted to localStorage so a page refresh
 * restores the draft (including a snapshot of the categories used).
 */
export function QuickOrderBuilder({
  categories,
  brand,
}: {
  categories: CategorySummary[]; // category summaries used for matching and quantity suggestions
  brand?: string; // used only to pre-select a supplier whose label equals the brand
}) {
  const fileInputRef = useRef<HTMLInputElement | null>(null);

  // Raw input + parse results.
  const [pasted, setPasted] = useState("");
  const [headers, setHeaders] = useState<string[]>([]);
  const [rawRows, setRawRows] = useState<string[][]>([]);
  // Column-role mapping, keyed by generated column ids ("col-0", "col-1", ...).
  const [headerMap, setHeaderMap] = useState<HeaderMap>({});
  // Rows with category match + suggested/final quantities.
  const [orderRows, setOrderRows] = useState<OrderRow[]>([]);
  const [showJson, setShowJson] = useState(false);
  const [selectedSupplierId, setSelectedSupplierId] = useState<string | undefined>(undefined);
  // scaleInput is the raw text field; scalePct is the debounced numeric value.
  const [scalePct, setScalePct] = useState<number>(100);
  const [scaleInput, setScaleInput] = useState<string>("100");
  const [showExcludedOnly, setShowExcludedOnly] = useState<boolean>(false);
  const [parsed, setParsed] = useState<boolean>(false);
  const [showMapping, setShowMapping] = useState<boolean>(false);
  const [, startTransition] = useTransition();
  // Snapshot of categories restored from a saved draft, used when the prop is empty.
  const [initialCategories, setInitialCategories] = useState<CategorySummary[] | null>(null);

  // Local storage draft persistence
  const DRAFT_KEY = "quickOrderBuilderDraft";
  // Guards the save-effect while a restore is in flight so the restore
  // doesn't immediately overwrite the stored draft mid-way.
  const restoringRef = useRef(false);

  // Load suppliers from existing endpoint used elsewhere in the app
  const { data: fieldOptions } = useQuery({
    queryKey: ["field-options"],
    queryFn: async () => {
      const res = await fetch("/api/import/field-options");
      if (!res.ok) throw new Error("Failed to load field options");
      return res.json();
    },
  });
  const supplierOptions: { label: string; value: string }[] = fieldOptions?.suppliers || [];

  // Default supplier to the brand name if an exact label match exists
  useEffect(() => {
    if (!supplierOptions?.length) return;
    if (selectedSupplierId) return; // never override a manual choice
    if (brand) {
      const match = supplierOptions.find((s) => s.label?.toLowerCase?.() === brand.toLowerCase());
      if (match) setSelectedSupplierId(String(match.value));
    }
  }, [supplierOptions, brand, selectedSupplierId]);

  // Restore draft on mount
  useEffect(() => {
    try {
      const raw = localStorage.getItem(DRAFT_KEY);
      if (!raw) return;
      const draft = JSON.parse(raw);
      restoringRef.current = true;
      setPasted(draft.pasted ?? "");
      setHeaders(Array.isArray(draft.headers) ? draft.headers : []);
      setRawRows(Array.isArray(draft.rawRows) ? draft.rawRows : []);
      setHeaderMap(draft.headerMap ?? {});
      setOrderRows(Array.isArray(draft.orderRows) ? draft.orderRows : []);
      setSelectedSupplierId(draft.selectedSupplierId ?? undefined);
      const restoredScale = typeof draft.scalePct === 'number' ? draft.scalePct : 100;
      setScalePct(restoredScale);
      setScaleInput(String(restoredScale));
      setParsed(Array.isArray(draft.headers) && draft.headers.length > 0);
      // Show the mapping UI only when no suggestions were built yet.
      setShowMapping(!(Array.isArray(draft.orderRows) && draft.orderRows.length > 0));
      if (Array.isArray(draft.categoriesSnapshot)) {
        setInitialCategories(draft.categoriesSnapshot);
      }
      // brand is passed via props; we don't override it here
    } catch (e) {
      console.warn("Failed to restore draft", e);
    } finally {
      // Defer toggling off to next tick to allow state batching
      setTimeout(() => { restoringRef.current = false; }, 0);
    }
  }, []);

  // Save draft on changes
  useEffect(() => {
    if (restoringRef.current) return; // skip while the restore effect is still applying state
    const draft = {
      pasted,
      headers,
      rawRows,
      headerMap,
      orderRows,
      selectedSupplierId,
      scalePct,
      brand,
      categoriesSnapshot: categories,
    };
    // NOTE(review): `categories` is written into the draft but is not in the
    // dependency list, so a categories-only change won't trigger a save — confirm intended.
    try {
      localStorage.setItem(DRAFT_KEY, JSON.stringify(draft));
    } catch (e) {
      // ignore storage quota errors silently
    }
  }, [pasted, headers, rawRows, headerMap, orderRows, selectedSupplierId, scalePct, brand]);

  // Debounce scale input -> numeric scalePct
  useEffect(() => {
    const handle = setTimeout(() => {
      // Clamp to [1, 500]; non-numeric input becomes 1.
      const v = Math.max(1, Math.min(500, Math.round(Number(scaleInput) || 0)));
      setScalePct(v);
    }, 500);
    return () => clearTimeout(handle);
  }, [scaleInput]);

  // Prefer live prop categories; fall back to the restored draft snapshot.
  const effectiveCategories = (categories && categories.length > 0) ? categories : (initialCategories || []);

  const categoryOptions = useMemo(() => {
    const arr = (effectiveCategories || [])
      .map((c) => ({
        value: c.categoryPath || c.category,
        label: c.categoryPath ? `${c.category} — ${c.categoryPath}` : c.category,
      }))
      .filter((o) => !!o.value && String(o.value).trim() !== "");
    // dedupe by value to avoid duplicate Select values
    const dedup = new Map<string, string>();
    for (const o of arr) {
      if (!dedup.has(o.value)) dedup.set(o.value, o.label);
    }
    return Array.from(dedup.entries()).map(([value, label]) => ({ value, label }));
  }, [effectiveCategories]);

  // Lookup from category key (categoryPath, or category name as fallback) to its summary.
  const categoryByKey = useMemo(() => {
    const map = new Map<string, CategorySummary>();
    for (const c of effectiveCategories || []) {
      map.set(c.categoryPath || c.category, c);
    }
    return map;
  }, [effectiveCategories]);

  // Build header option list with generated ids so values are never empty and keys are unique
  const headerOptions = useMemo(
    () => headers.map((h, i) => ({ id: `col-${i}`, index: i, label: h && h.trim() ? h : `Column ${i + 1}` })),
    [headers]
  );
  const idToIndex = useMemo(() => new Map(headerOptions.map((o) => [o.id, o.index])), [headerOptions]);

  // Map a detected header NAME back to its generated "col-N" id.
  // NOTE(review): this reads the `headers` STATE, so when called from the
  // parse handlers below (before setHeaders has committed) it searches the
  // previous headers, not the just-parsed ones — auto-mapping may silently
  // come back undefined on the first parse. Confirm whether it should take
  // the fresh header array as a parameter instead.
  function headerNameToId(name?: string): string | undefined {
    if (!name) return undefined;
    const idx = headers.findIndex((h) => h === name);
    return idx >= 0 ? `col-${idx}` : undefined;
  }

  // Parse an uploaded spreadsheet/CSV via SheetJS and seed headers, rows
  // and an auto-detected column mapping.
  function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
    const f = e.target.files?.[0];
    if (!f) return;
    const reader = new FileReader();
    const ext = f.name.split(".").pop()?.toLowerCase();

    reader.onload = () => {
      try {
        let wb: XLSX.WorkBook | null = null;
        if (ext === "xlsx" || ext === "xls") {
          const data = new Uint8Array(reader.result as ArrayBuffer);
          wb = XLSX.read(data, { type: "array" });
        } else if (ext === "csv" || ext === "tsv") {
          const text = reader.result as string;
          wb = XLSX.read(text, { type: "string" });
        } else {
          // Try naive string read
          const text = reader.result as string;
          wb = XLSX.read(text, { type: "string" });
        }
        if (!wb) throw new Error("Unable to parse file");
        // Only the first sheet is imported.
        const sheet = wb.Sheets[wb.SheetNames[0]];
        const rows: any[][] = XLSX.utils.sheet_to_json(sheet, { header: 1, raw: false, defval: "" });
        if (!rows.length) throw new Error("Empty file");
        const hdrs = (rows[0] as string[]).map((h) => String(h || "").trim());
        const body = rows.slice(1).map((r) => (r as any[]).map((v) => String(v ?? "").trim()));
        // Build mapping based on detected names -> ids
        const mappedNames = autoMapHeaderNames(hdrs);
        const mappedIds: HeaderMap = {
          // Product falls back to the first column when nothing matched.
          product: headerNameToId(mappedNames.product) ?? (hdrs.length > 0 ? `col-0` : undefined),
          sku: headerNameToId(mappedNames.sku),
          categoryHint: headerNameToId(mappedNames.categoryHint),
          moq: headerNameToId(mappedNames.moq),
          upc: headerNameToId(mappedNames.upc),
        };
        setHeaders(hdrs);
        setRawRows(body);
        setHeaderMap(mappedIds);
        setPasted("");
        setParsed(true);
        setShowMapping(true);
        toast.success("File parsed");
      } catch (err) {
        console.error(err);
        toast.error("Could not parse file");
      }
    };

    if (ext === "xlsx" || ext === "xls") {
      reader.readAsArrayBuffer(f);
    } else {
      reader.readAsText(f);
    }
  }

  // Parse the textarea content (tab/comma/semicolon separated) and seed
  // headers, rows and an auto-detected column mapping.
  function handlePasteParse() {
    try {
      const { headers: hdrs, rows } = parsePasted(pasted);
      if (!hdrs.length || !rows.length) {
        toast.error("No data detected");
        return;
      }
      const mappedNames = autoMapHeaderNames(hdrs);
      const mappedIds: HeaderMap = {
        product: headerNameToId(mappedNames.product) ?? (hdrs.length > 0 ? `col-0` : undefined),
        sku: headerNameToId(mappedNames.sku),
        categoryHint: headerNameToId(mappedNames.categoryHint),
        moq: headerNameToId(mappedNames.moq),
        upc: headerNameToId(mappedNames.upc),
      };
      setHeaders(hdrs);
      setRawRows(rows);
      setHeaderMap(mappedIds);
      setParsed(true);
      setShowMapping(true);
      toast.success("Pasted data parsed");
    } catch (e) {
      console.error(e);
      toast.error("Paste parse failed");
    }
  }

  // Project raw string rows through the current column mapping into
  // ParsedRow records. Rows without both a product name and a UPC are dropped.
  function buildParsedRows(): ParsedRow[] {
    if (!headers.length || !rawRows.length) return [];
    const idx = (id?: string) => (id ? idToIndex.get(id) ?? -1 : -1);
    const iProduct = idx(headerMap.product);
    const iSku = idx(headerMap.sku);
    const iCat = idx(headerMap.categoryHint);
    const iMoq = idx(headerMap.moq);
    const iUpc = idx(headerMap.upc);
    const out: ParsedRow[] = [];
    for (const r of rawRows) {
      const product = String(iProduct >= 0 ? r[iProduct] ?? "" : "").trim();
      const upc = iUpc >= 0 ? String(r[iUpc] ?? "") : undefined;
      if (!product && !(upc && upc.trim())) continue;
      const sku = iSku >= 0 ? String(r[iSku] ?? "") : undefined;
      const categoryHint = iCat >= 0 ? String(r[iCat] ?? "") : undefined;
      const moq = iMoq >= 0 ? toIntOrUndefined(r[iMoq]) : undefined;
      out.push({ product, sku, categoryHint, moq, upc });
    }
    return out;
  }

  // Pick the best-scoring category for a row; empty result when no
  // category scores above zero.
  // NOTE(review): this iterates the `categories` PROP, not
  // `effectiveCategories`, so after a draft restore with an empty prop the
  // snapshot categories are never matched against — confirm intended.
  function matchCategory(row: ParsedRow): { key?: string; name?: string } {
    if (!categories?.length) return {};
    let bestKey: string | undefined;
    let bestName: string | undefined;
    let bestScore = -1;
    for (const c of categories) {
      const key = c.categoryPath || c.category;
      const text = `${c.category} ${c.categoryPath || ""}`;
      const s = scoreCategoryMatch(text, row.product, row.categoryHint);
      if (s > bestScore) {
        bestScore = s;
        bestKey = key;
        bestName = c.category;
      }
    }
    return bestScore > 0 ? { key: bestKey, name: bestName } : {};
  }

  // Turn parsed rows into order rows: match a category, compute the scaled
  // base suggestion, and apply the MOQ rounding to get the final quantity.
  function buildOrderRows() {
    const parsed = buildParsedRows();
    if (!parsed.length) {
      toast.error("Nothing to process");
      return;
    }
    const next: OrderRow[] = parsed.map((r) => {
      const m = matchCategory(r);
      const cat = m.key ? categoryByKey.get(m.key) : undefined;
      const base = suggestFromCategory(cat?.avgTotalSold, scalePct);
      const finalQty = applyMOQ(base, r.moq);
      return {
        ...r,
        matchedCategoryPath: m.key,
        matchedCategoryName: m.name,
        baseSuggestion: base,
        finalQty,
      };
    });
    setOrderRows(next);
    setShowMapping(false);
  }

  // Re-apply scaling dynamically to suggested rows
  useEffect(() => {
    if (!orderRows.length) return;
    startTransition(() => {
      setOrderRows((rows) =>
        rows.map((row) => {
          const cat = row.matchedCategoryPath ? categoryByKey.get(row.matchedCategoryPath) : undefined;
          if (!cat) return row; // nothing to scale when no category
          const prevAuto = applyMOQ(row.baseSuggestion || 0, row.moq);
          const nextBase = suggestFromCategory(cat.avgTotalSold, scalePct);
          const nextAuto = applyMOQ(nextBase, row.moq);
          // Only rows still at their auto value follow the new scale;
          // manually edited quantities are preserved.
          const isAuto = row.finalQty === prevAuto;
          return {
            ...row,
            baseSuggestion: nextBase,
            finalQty: isAuto ? nextAuto : row.finalQty,
          };
        })
      );
    });
  }, [scalePct, categoryByKey]);

  // After categories load (e.g. after refresh), recompute base suggestions
  useEffect(() => {
    if (!orderRows.length) return;
    startTransition(() => {
      setOrderRows((rows) =>
        rows.map((row) => {
          const cat = row.matchedCategoryPath ? categoryByKey.get(row.matchedCategoryPath) : undefined;
          if (!cat) return row;
          const nextBase = suggestFromCategory(cat.avgTotalSold, scalePct);
          const nextAuto = applyMOQ(nextBase, row.moq);
          const prevAuto = applyMOQ(row.baseSuggestion || 0, row.moq);
          const isAuto = row.finalQty === prevAuto || !row.baseSuggestion; // treat empty base as auto
          return {
            ...row,
            baseSuggestion: nextBase,
            finalQty: isAuto ? nextAuto : row.finalQty,
          };
        })
      );
    });
  }, [categoryByKey]);

  // Re-assign a row's category and recompute its suggestion; clearing the
  // category falls back to the MOQ (or zero).
  const changeCategory = useCallback((idx: number, newKey?: string) => {
    setOrderRows((rows) => {
      const copy = [...rows];
      const row = { ...copy[idx] };
      row.matchedCategoryPath = newKey;
      if (newKey) {
        const cat = categoryByKey.get(newKey);
        row.matchedCategoryName = cat?.category;
        row.baseSuggestion = suggestFromCategory(cat?.avgTotalSold, scalePct);
        row.finalQty = applyMOQ(row.baseSuggestion || 0, row.moq);
      } else {
        row.matchedCategoryName = undefined;
        row.baseSuggestion = undefined;
        row.finalQty = row.moq ? row.moq : 0;
      }
      copy[idx] = row;
      return copy;
    });
  }, [categoryByKey, scalePct]);

  // Manual quantity edit; deliberately does not round up to the MOQ.
  const changeQty = useCallback((idx: number, value: string) => {
    const n = Number(value);
    startTransition(() => setOrderRows((rows) => {
      const copy = [...rows];
      const row = { ...copy[idx] };
      const raw = Number.isFinite(n) ? Math.round(n) : 0;
      row.finalQty = raw; // do not enforce MOQ on manual edits
      copy[idx] = row;
      return copy;
    }));
  }, []);

  const removeRow = useCallback((idx: number) => {
    setOrderRows((rows) => rows.filter((_, i) => i !== idx));
  }, []);

  // A row is "excluded" (from the JSON export) when it lacks a positive
  // quantity or a non-blank UPC; this optionally filters the table to those.
  const visibleRows = useMemo(() => (
    showExcludedOnly
      ? orderRows.filter((r) => !(r.finalQty > 0 && r.upc && r.upc.trim()))
      : orderRows
  ), [orderRows, showExcludedOnly]);

  // Memoized table component for the order rows.
  // NOTE(review): defining a component type inside useMemo means any change
  // to the deps creates a NEW component type, which remounts the whole
  // table (losing e.g. input focus) — confirm acceptable.
  const OrderRowsTable = useMemo(() => memo(function OrderRowsTableInner({
    rows,
  }: { rows: OrderRow[] }) {
    return (
      <div className="rounded-md border overflow-x-auto">
        <Table>
          <TableHeader>
            <TableRow>
              <TableHead>Product</TableHead>
              <TableHead>SKU</TableHead>
              <TableHead>UPC</TableHead>
              <TableHead>Category</TableHead>
              <TableHead className="text-right">Avg Sold</TableHead>
              <TableHead className="text-right">MOQ</TableHead>
              <TableHead className="text-right">Order Qty</TableHead>
              <TableHead className="text-right">Actions</TableHead>
            </TableRow>
          </TableHeader>
          <TableBody>
            {rows.map((r, idx) => {
              const cat = r.matchedCategoryPath ? categoryByKey.get(r.matchedCategoryPath) : undefined;
              // Excluded rows are tinted so the user sees what the export will drop.
              const isExcluded = !(r.finalQty > 0 && r.upc && r.upc.trim());
              return (
                <TableRow key={`${r.product || r.upc || 'row'}-${idx}`} className={isExcluded ? 'bg-destructive/10' : undefined}>
                  <TableCell>
                    <div className="font-medium">{r.product}</div>
                  </TableCell>
                  <TableCell className="whitespace-nowrap">{r.sku || ""}</TableCell>
                  <TableCell className="whitespace-nowrap">{r.upc || ""}</TableCell>
                  <TableCell className="min-w-[280px]">
                    <Select
                      value={r.matchedCategoryPath ?? "__none"}
                      onValueChange={(v) => changeCategory(idx, v === "__none" ? undefined : v)}
                    >
                      <SelectTrigger>
                        <SelectValue placeholder="Select category" />
                      </SelectTrigger>
                      <SelectContent className="max-h-[320px]">
                        <SelectItem value="__none">Unmatched</SelectItem>
                        {categoryOptions.map((c) => (
                          <SelectItem key={c.value} value={c.value}>{c.label}</SelectItem>
                        ))}
                      </SelectContent>
                    </Select>
                  </TableCell>
                  <TableCell className="text-right">{cat?.avgTotalSold?.toFixed?.(2) ?? "-"}</TableCell>
                  <TableCell className="text-right">{r.moq ?? "-"}</TableCell>
                  <TableCell className="text-right">
                    <Input
                      className="w-24 text-right"
                      value={Number.isFinite(r.finalQty) ? r.finalQty : 0}
                      onChange={(e) => changeQty(idx, e.target.value)}
                      inputMode="numeric"
                    />
                  </TableCell>
                  <TableCell className="text-right">
                    <Button variant="ghost" size="sm" onClick={() => removeRow(idx)} aria-label="Remove row">
                      <XIcon className="h-4 w-4" />
                    </Button>
                  </TableCell>
                </TableRow>
              );
            })}
          </TableBody>
        </Table>
      </div>
    );
  }), [categoryByKey, categoryOptions, changeCategory, changeQty, removeRow]);

  // Export payload: only rows with a positive quantity AND a non-blank UPC.
  const exportJson = useMemo(() => {
    const items = orderRows
      .filter((r) => (r.finalQty || 0) > 0 && !!(r.upc && r.upc.trim()))
      .map((r) => ({ upc: r.upc!, quantity: r.finalQty }));
    return {
      supplierId: selectedSupplierId ?? null,
      generatedAt: new Date().toISOString(),
      itemCount: items.length,
      items,
    };
  }, [orderRows, selectedSupplierId]);

  const canProcess = headers.length > 0 && rawRows.length > 0;

  return (
    <Card>
      <CardHeader>
        <CardTitle>Quick Order Builder</CardTitle>
      </CardHeader>
      <CardContent className="space-y-4">
        {/* Supplier + Clear */}
        <div className="flex flex-wrap items-end gap-3">
          <div className="max-w-sm">
            <div className="text-sm font-medium mb-1">Supplier</div>
            <Select
              value={selectedSupplierId ?? "__none"}
              onValueChange={(v) => setSelectedSupplierId(v === "__none" ? undefined : v)}
            >
              <SelectTrigger>
                <SelectValue placeholder="Select supplier" />
              </SelectTrigger>
              <SelectContent className="max-h-[320px]">
                <SelectItem value="__none">Select supplier…</SelectItem>
                {supplierOptions.map((s) => (
                  <SelectItem key={String(s.value)} value={String(s.value)}>
                    {s.label}
                  </SelectItem>
                ))}
              </SelectContent>
            </Select>
          </div>
          {/* Resets every piece of builder state and drops the stored draft. */}
          <Button
            variant="destructive"
            onClick={() => {
              setPasted("");
              setHeaders([]);
              setRawRows([]);
              setHeaderMap({});
              setOrderRows([]);
              setShowJson(false);
              setSelectedSupplierId(undefined);
              setScalePct(100);
              setScaleInput("100");
              setParsed(false);
              setShowMapping(false);
              try { localStorage.removeItem(DRAFT_KEY); } catch {}
              toast.message("Draft cleared");
            }}
          >
            Clear Draft
          </Button>
        </div>

        {/* Input stage: file upload or paste, hidden once parsed. */}
        {!parsed && (
          <>
            <div className="flex flex-wrap items-center gap-2">
              <Input
                ref={fileInputRef}
                type="file"
                accept=".xlsx,.xls,.csv,.tsv,.txt"
                onChange={handleFileChange}
                className="max-w-sm"
              />
              <span className="text-muted-foreground text-sm">or paste below</span>
            </div>

            <Textarea
              placeholder="Paste rows (with a header): Product, SKU, Category, MOQ..."
              value={pasted}
              onChange={(e) => setPasted(e.target.value)}
              className="min-h-[120px]"
            />
            <div className="flex gap-2">
              <Button variant="outline" onClick={handlePasteParse} disabled={!pasted.trim()}>
                Parse Pasted Data
              </Button>
            </div>
          </>
        )}

        {/* Column-mapping stage: one Select per role, each option showing up
            to three non-empty sample values from that column. */}
        {headers.length > 0 && showMapping && (
          <div className="space-y-3">
            <div className="text-sm font-medium">Map Columns</div>
            <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-2">
              <div>
                <div className="text-xs text-muted-foreground mb-1">Product (recommended)</div>
                <Select
                  value={headerMap.product}
                  onValueChange={(v) => setHeaderMap((m) => ({ ...m, product: v }))}
                >
                  <SelectTrigger><SelectValue placeholder="Select column" /></SelectTrigger>
                  <SelectContent>
                    {headerOptions.map((o) => {
                      const ci = idToIndex.get(o.id)!;
                      const samples = rawRows
                        .map((r) => String((r && r[ci]) ?? "").trim())
                        .filter((v) => v.length > 0)
                        .slice(0, 3);
                      return (
                        <SelectItem key={o.id} value={o.id}>
                          <div className="flex flex-col text-left">
                            <span>{o.label}</span>
                            {samples.length > 0 && (
                              <span className="text-xs text-muted-foreground truncate">{samples.join(" • ")}</span>
                            )}
                          </div>
                        </SelectItem>
                      );
                    })}
                  </SelectContent>
                </Select>
              </div>
              <div>
                <div className="text-xs text-muted-foreground mb-1">UPC / Barcode (recommended)</div>
                <Select
                  value={headerMap.upc ?? "__none"}
                  onValueChange={(v) => setHeaderMap((m) => ({ ...m, upc: v === "__none" ? undefined : v }))}
                >
                  <SelectTrigger><SelectValue placeholder="Select column" /></SelectTrigger>
                  <SelectContent>
                    <SelectItem value="__none">None</SelectItem>
                    {headerOptions.map((o) => {
                      const ci = idToIndex.get(o.id)!;
                      const samples = rawRows
                        .map((r) => String((r && r[ci]) ?? "").trim())
                        .filter((v) => v.length > 0)
                        .slice(0, 3);
                      return (
                        <SelectItem key={o.id} value={o.id}>
                          <div className="flex flex-col text-left">
                            <span>{o.label}</span>
                            {samples.length > 0 && (
                              <span className="text-xs text-muted-foreground truncate">{samples.join(" • ")}</span>
                            )}
                          </div>
                        </SelectItem>
                      );
                    })}
                  </SelectContent>
                </Select>
              </div>
              <div>
                <div className="text-xs text-muted-foreground mb-1">SKU (optional)</div>
                <Select
                  value={headerMap.sku ?? "__none"}
                  onValueChange={(v) => setHeaderMap((m) => ({ ...m, sku: v === "__none" ? undefined : v }))}
                >
                  <SelectTrigger><SelectValue placeholder="Select column" /></SelectTrigger>
                  <SelectContent>
                    <SelectItem value="__none">None</SelectItem>
                    {headerOptions.map((o) => {
                      const ci = idToIndex.get(o.id)!;
                      const samples = rawRows
                        .map((r) => String((r && r[ci]) ?? "").trim())
                        .filter((v) => v.length > 0)
                        .slice(0, 3);
                      return (
                        <SelectItem key={o.id} value={o.id}>
                          <div className="flex flex-col text-left">
                            <span>{o.label}</span>
                            {samples.length > 0 && (
                              <span className="text-xs text-muted-foreground truncate">{samples.join(" • ")}</span>
                            )}
                          </div>
                        </SelectItem>
                      );
                    })}
                  </SelectContent>
                </Select>
              </div>
              <div>
                <div className="text-xs text-muted-foreground mb-1">Category Hint (optional)</div>
                <Select
                  value={headerMap.categoryHint ?? "__none"}
                  onValueChange={(v) => setHeaderMap((m) => ({ ...m, categoryHint: v === "__none" ? undefined : v }))}
                >
                  <SelectTrigger><SelectValue placeholder="Select column" /></SelectTrigger>
                  <SelectContent>
                    <SelectItem value="__none">None</SelectItem>
                    {headerOptions.map((o) => {
                      const ci = idToIndex.get(o.id)!;
                      const samples = rawRows
                        .map((r) => String((r && r[ci]) ?? "").trim())
                        .filter((v) => v.length > 0)
                        .slice(0, 3);
                      return (
                        <SelectItem key={o.id} value={o.id}>
                          <div className="flex flex-col text-left">
                            <span>{o.label}</span>
                            {samples.length > 0 && (
                              <span className="text-xs text-muted-foreground truncate">{samples.join(" • ")}</span>
                            )}
                          </div>
                        </SelectItem>
                      );
                    })}
                  </SelectContent>
                </Select>
              </div>
              <div>
                <div className="text-xs text-muted-foreground mb-1">MOQ (optional)</div>
                <Select
                  value={headerMap.moq ?? "__none"}
                  onValueChange={(v) => setHeaderMap((m) => ({ ...m, moq: v === "__none" ? undefined : v }))}
                >
                  <SelectTrigger><SelectValue placeholder="Select column" /></SelectTrigger>
                  <SelectContent>
                    <SelectItem value="__none">None</SelectItem>
                    {headerOptions.map((o) => {
                      const ci = idToIndex.get(o.id)!;
                      const samples = rawRows
                        .map((r) => String((r && r[ci]) ?? "").trim())
                        .filter((v) => v.length > 0)
                        .slice(0, 3);
                      return (
                        <SelectItem key={o.id} value={o.id}>
                          <div className="flex flex-col text-left">
                            <span>{o.label}</span>
                            {samples.length > 0 && (
                              <span className="text-xs text-muted-foreground truncate">{samples.join(" • ")}</span>
                            )}
                          </div>
                        </SelectItem>
                      );
                    })}
                  </SelectContent>
                </Select>
              </div>
            </div>

            <div className="flex flex-wrap items-end gap-3">
              <Button onClick={buildOrderRows} disabled={!canProcess || (!headerMap.product && !headerMap.upc)}>
                Build Suggestions
              </Button>
            </div>
          </div>
        )}

        {/* Results stage: scaling controls, the row table, exclusion alert and JSON export. */}
        {orderRows.length > 0 && (
          <div className="space-y-3">
            {/* Controls for existing suggestions */}
            <div className="flex flex-wrap items-end justify-between gap-3">
              <div className="flex items-end gap-3">
                <div>
                  <div className="text-xs text-muted-foreground mb-1">Scale suggestions (%)</div>
                  <Input
                    type="number"
                    className="w-28"
                    value={scaleInput}
                    onChange={(e) => setScaleInput(e.target.value)}
                  />
                </div>
                <div className="flex items-center gap-2 pb-2">
                  <Checkbox id="excludedOnly" checked={showExcludedOnly} onCheckedChange={(v) => setShowExcludedOnly(!!v)} />
                  <label htmlFor="excludedOnly" className="text-sm">Show excluded only</label>
                </div>
              </div>
              <div className="pb-2">
                <Button variant="outline" size="sm" onClick={() => setShowMapping((v) => !v)}>
                  {showMapping ? 'Hide Mapping' : 'Edit Mapping'}
                </Button>
              </div>
            </div>
            <OrderRowsTable rows={visibleRows} />

            {/* Exclusion alert if some rows won't be exported */}
            {(() => {
              const excluded = orderRows.filter((r) => !(r.finalQty > 0 && r.upc && r.upc.trim()));
              if (excluded.length === 0) return null;
              const missingUpc = excluded.filter((r) => !r.upc || !r.upc.trim()).length;
              const zeroQty = excluded.filter((r) => !(r.finalQty > 0)).length;
              return (
                <Alert variant="destructive">
                  <AlertTitle>Some rows will not be included</AlertTitle>
                  <AlertDescription>
                    <div className="text-sm">
                      {excluded.length} row{excluded.length !== 1 ? "s" : ""} excluded from JSON
                      <ul className="list-disc ml-5">
                        {missingUpc > 0 && <li>{missingUpc} missing UPC</li>}
                        {zeroQty > 0 && <li>{zeroQty} with zero quantity</li>}
                      </ul>
                    </div>
                  </AlertDescription>
                </Alert>
              );
            })()}
            <div className="flex gap-2">
              <Button variant="outline" onClick={() => setShowJson((s) => !s)}>
                {showJson ? "Hide" : "Preview"} JSON
              </Button>
              {/* Copies the export JSON and clears the stored draft.
                  NOTE(review): if `navigator.clipboard` is undefined
                  (insecure context), the optional chain yields undefined
                  and the following `.then` throws — verify on http. */}
              <Button
                onClick={() => {
                  setShowJson(true);
                  navigator.clipboard?.writeText(JSON.stringify(exportJson, null, 2)).then(
                    () => toast.success("JSON copied"),
                    () => toast.message("JSON ready (copy failed)")
                  ).finally(() => {
                    try { localStorage.removeItem(DRAFT_KEY); } catch {}
                  });
                }}
              >
                Copy JSON
              </Button>
            </div>

            {showJson && (
              <Code className="p-4 w-full rounded-md border whitespace-pre-wrap">
                {JSON.stringify(exportJson, null, 2)}
              </Code>
            )}
          </div>
        )}
      </CardContent>
    </Card>
  );
}
|
||||
// One product row rendered in the expanded per-category detail table
// (see renderSubComponent below): identifiers plus stock, sales and
// forecast figures.
interface Product {
  pid: string;
  sku: string;
  title: string;
  stock_quantity: number;
  daily_sales_avg: number;
  forecast_units: number;
  forecast_revenue: number;
  // Rendered as a percentage in the detail table.
  confidence_level: number;
  total_sold: number;
}
|
||||
|
||||
// One aggregated forecast row per category, as consumed by the table's
// column definitions (avgDailySales, avgTotalSold, minSold, maxSold,
// totalSold, avgPrice are all sortable columns).
export interface ForecastItem {
  category: string;
  categoryPath: string;
  avgDailySales: number;
  totalSold: number;
  numProducts: number;
  avgPrice: number;
  avgTotalSold: number;
  minSold: number;
  maxSold: number;
  // Per-product breakdown, rendered in the expandable sub-row when present.
  products?: Product[];
}
|
||||
|
||||
@@ -57,7 +53,7 @@ export const columns: ColumnDef<ForecastItem>[] = [
|
||||
),
|
||||
},
|
||||
{
|
||||
accessorKey: "avgDailySales",
|
||||
accessorKey: "avgTotalSold",
|
||||
header: ({ column }) => {
|
||||
return (
|
||||
<Button
|
||||
@@ -65,16 +61,54 @@ export const columns: ColumnDef<ForecastItem>[] = [
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
|
||||
className="whitespace-nowrap"
|
||||
>
|
||||
Avg Daily Sales
|
||||
Avg Total Sold
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
},
|
||||
cell: ({ row }) => {
|
||||
const value = row.getValue("avgDailySales") as number;
|
||||
const value = row.getValue("avgTotalSold") as number;
|
||||
return value?.toFixed(2) || "0.00";
|
||||
},
|
||||
},
|
||||
{
|
||||
accessorKey: "minSold",
|
||||
header: ({ column }) => {
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
|
||||
className="whitespace-nowrap"
|
||||
>
|
||||
Min Sold
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
},
|
||||
cell: ({ row }) => {
|
||||
const value = row.getValue("minSold") as number;
|
||||
return value?.toLocaleString() || "0";
|
||||
},
|
||||
},
|
||||
{
|
||||
accessorKey: "maxSold",
|
||||
header: ({ column }) => {
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
|
||||
className="whitespace-nowrap"
|
||||
>
|
||||
Max Sold
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
},
|
||||
cell: ({ row }) => {
|
||||
const value = row.getValue("maxSold") as number;
|
||||
return value?.toLocaleString() || "0";
|
||||
},
|
||||
},
|
||||
{
|
||||
accessorKey: "totalSold",
|
||||
header: ({ column }) => {
|
||||
@@ -112,44 +146,6 @@ export const columns: ColumnDef<ForecastItem>[] = [
|
||||
return value?.toLocaleString() || "0";
|
||||
},
|
||||
},
|
||||
{
|
||||
accessorKey: "avgTotalSold",
|
||||
header: ({ column }) => {
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
|
||||
className="whitespace-nowrap"
|
||||
>
|
||||
Avg Total Sold
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
},
|
||||
cell: ({ row }) => {
|
||||
const value = row.getValue("avgTotalSold") as number;
|
||||
return value?.toFixed(2) || "0.00";
|
||||
},
|
||||
},
|
||||
{
|
||||
accessorKey: "avgPrice",
|
||||
header: ({ column }) => {
|
||||
return (
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => column.toggleSorting(column.getIsSorted() === "asc")}
|
||||
className="whitespace-nowrap"
|
||||
>
|
||||
Avg Price
|
||||
<ArrowUpDown className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
},
|
||||
cell: ({ row }) => {
|
||||
const value = row.getValue("avgPrice") as number;
|
||||
return `$${value?.toFixed(2) || "0.00"}`;
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
export const renderSubComponent = ({ row }: { row: any }) => {
|
||||
@@ -161,11 +157,7 @@ export const renderSubComponent = ({ row }: { row: any }) => {
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead>Product</TableHead>
|
||||
<TableHead className="text-right">Stock</TableHead>
|
||||
<TableHead className="text-right">Daily Sales</TableHead>
|
||||
<TableHead className="text-right">Forecast Units</TableHead>
|
||||
<TableHead className="text-right">Forecast Revenue</TableHead>
|
||||
<TableHead className="text-right">Confidence</TableHead>
|
||||
<TableHead className="text-right">Sold</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
@@ -182,15 +174,11 @@ export const renderSubComponent = ({ row }: { row: any }) => {
|
||||
</a>
|
||||
<div className="text-sm text-muted-foreground">{product.sku}</div>
|
||||
</TableCell>
|
||||
<TableCell className="text-right">{product.stock_quantity}</TableCell>
|
||||
<TableCell className="text-right">{product.daily_sales_avg.toFixed(1)}</TableCell>
|
||||
<TableCell className="text-right">{product.forecast_units.toFixed(1)}</TableCell>
|
||||
<TableCell className="text-right">{product.forecast_revenue.toFixed(2)}</TableCell>
|
||||
<TableCell className="text-right">{product.confidence_level.toFixed(1)}%</TableCell>
|
||||
<TableCell className="text-right">{product.total_sold?.toLocaleString?.() ?? product.total_sold}</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</ScrollArea>
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
@@ -29,6 +29,8 @@ import {
|
||||
} from "@/components/ui/sidebar";
|
||||
import { useLocation, useNavigate, Link } from "react-router-dom";
|
||||
import { Protected } from "@/components/auth/Protected";
|
||||
import { useContext } from "react";
|
||||
import { AuthContext } from "@/contexts/AuthContext";
|
||||
|
||||
const dashboardItems = [
|
||||
{
|
||||
@@ -112,6 +114,7 @@ export function AppSidebar() {
|
||||
const location = useLocation();
|
||||
const navigate = useNavigate();
|
||||
useSidebar();
|
||||
const { user } = useContext(AuthContext);
|
||||
|
||||
const handleLogout = () => {
|
||||
localStorage.removeItem('token');
|
||||
@@ -119,6 +122,12 @@ export function AppSidebar() {
|
||||
navigate('/login');
|
||||
};
|
||||
|
||||
// Check if user has access to any items in a section
|
||||
const hasAccessToSection = (items: typeof inventoryItems): boolean => {
|
||||
if (user?.is_admin) return true;
|
||||
return items.some(item => user?.permissions?.includes(item.permission));
|
||||
};
|
||||
|
||||
const renderMenuItems = (items: typeof inventoryItems) => {
|
||||
return items.map((item) => {
|
||||
const isActive =
|
||||
@@ -180,58 +189,58 @@ export function AppSidebar() {
|
||||
<SidebarSeparator />
|
||||
<SidebarContent>
|
||||
{/* Dashboard Section */}
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Dashboard</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(dashboardItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
|
||||
{hasAccessToSection(dashboardItems) && (
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Dashboard</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(dashboardItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
)}
|
||||
|
||||
{/* Inventory Section */}
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Inventory</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(inventoryItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
|
||||
{hasAccessToSection(inventoryItems) && (
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Inventory</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(inventoryItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
)}
|
||||
|
||||
{/* Product Setup Section */}
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Product Setup</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(productSetupItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
|
||||
{hasAccessToSection(productSetupItems) && (
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Product Setup</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(productSetupItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
)}
|
||||
|
||||
{/* Chat Section */}
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Chat</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(chatItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
<SidebarSeparator />
|
||||
{hasAccessToSection(chatItems) && (
|
||||
<SidebarGroup>
|
||||
<SidebarGroupLabel>Chat</SidebarGroupLabel>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
{renderMenuItems(chatItems)}
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
)}
|
||||
|
||||
{/* Settings Section */}
|
||||
<SidebarGroup>
|
||||
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
<Protected
|
||||
permission="access:settings"
|
||||
fallback={null}
|
||||
>
|
||||
<Protected permission="access:settings" fallback={null}>
|
||||
<SidebarGroup>
|
||||
<SidebarGroupContent>
|
||||
<SidebarMenu>
|
||||
<SidebarMenuItem>
|
||||
<SidebarMenuButton
|
||||
asChild
|
||||
@@ -246,10 +255,10 @@ export function AppSidebar() {
|
||||
</Link>
|
||||
</SidebarMenuButton>
|
||||
</SidebarMenuItem>
|
||||
</Protected>
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
</SidebarMenu>
|
||||
</SidebarGroupContent>
|
||||
</SidebarGroup>
|
||||
</Protected>
|
||||
</SidebarContent>
|
||||
<SidebarSeparator />
|
||||
<SidebarFooter>
|
||||
|
||||
@@ -312,7 +312,7 @@ const SupplierSelector = React.memo(({
|
||||
{suppliers?.map((supplier: any) => (
|
||||
<CommandItem
|
||||
key={supplier.value}
|
||||
value={supplier.label}
|
||||
value={`${supplier.label} ${supplier.value}`}
|
||||
onSelect={() => {
|
||||
onChange(supplier.value);
|
||||
setOpen(false); // Close popover after selection
|
||||
@@ -347,11 +347,25 @@ const CompanySelector = React.memo(({
|
||||
companies: any[]
|
||||
}) => {
|
||||
const [open, setOpen] = useState(false);
|
||||
const [query, setQuery] = useState("");
|
||||
const handleCommandListWheel = (e: React.WheelEvent) => {
|
||||
e.currentTarget.scrollTop += e.deltaY;
|
||||
e.stopPropagation();
|
||||
};
|
||||
|
||||
// Filtered and sliced list to prevent UI freezes with very large lists
|
||||
const filteredCompanies = React.useMemo(() => {
|
||||
if (!query.trim()) {
|
||||
// When no search, show a capped subset for performance
|
||||
return (companies || []).slice(0, 200);
|
||||
}
|
||||
const q = query.toLowerCase();
|
||||
return (companies || []).filter((c: any) => (
|
||||
String(c.label || '').toLowerCase().includes(q) ||
|
||||
String(c.value || '').toLowerCase().includes(q)
|
||||
));
|
||||
}, [companies, query]);
|
||||
|
||||
return (
|
||||
<Popover open={open} onOpenChange={setOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
@@ -369,14 +383,14 @@ const CompanySelector = React.memo(({
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-[var(--radix-popover-trigger-width)] p-0">
|
||||
<Command>
|
||||
<CommandInput placeholder="Search companies..." />
|
||||
<CommandInput placeholder="Search companies..." value={query} onValueChange={setQuery} />
|
||||
<CommandList className="max-h-[200px] overflow-y-auto" onWheel={handleCommandListWheel}>
|
||||
<CommandEmpty>No companies found.</CommandEmpty>
|
||||
<CommandGroup>
|
||||
{companies?.map((company: any) => (
|
||||
{filteredCompanies.map((company: any) => (
|
||||
<CommandItem
|
||||
key={company.value}
|
||||
value={company.label}
|
||||
value={`${company.label} ${company.value}`}
|
||||
onSelect={() => {
|
||||
onChange(company.value);
|
||||
setOpen(false); // Close popover after selection
|
||||
@@ -443,7 +457,7 @@ const LineSelector = React.memo(({
|
||||
{lines?.map((line: any) => (
|
||||
<CommandItem
|
||||
key={line.value}
|
||||
value={line.label}
|
||||
value={`${line.label} ${line.value}`}
|
||||
onSelect={() => {
|
||||
onChange(line.value);
|
||||
setOpen(false); // Close popover after selection
|
||||
@@ -510,7 +524,7 @@ const SubLineSelector = React.memo(({
|
||||
{sublines?.map((subline: any) => (
|
||||
<CommandItem
|
||||
key={subline.value}
|
||||
value={subline.label}
|
||||
value={`${subline.label} ${subline.value}`}
|
||||
onSelect={() => {
|
||||
onChange(subline.value);
|
||||
setOpen(false); // Close popover after selection
|
||||
|
||||
@@ -186,9 +186,11 @@ export const UploadFlow = ({ state, onNext, onBack }: Props) => {
|
||||
// Apply global selections to each row of data if they exist
|
||||
const dataWithGlobalSelections = globalSelections
|
||||
? dataWithMeta.map((row: Data<string> & { __index?: string }) => {
|
||||
const newRow = { ...row };
|
||||
const newRow = { ...row } as any;
|
||||
if (globalSelections.supplier) newRow.supplier = globalSelections.supplier;
|
||||
if (globalSelections.company) newRow.company = globalSelections.company;
|
||||
if (globalSelections.line) newRow.line = globalSelections.line;
|
||||
if (globalSelections.subline) newRow.subline = globalSelections.subline;
|
||||
return newRow;
|
||||
})
|
||||
: dataWithMeta;
|
||||
|
||||
@@ -542,7 +542,7 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
|
||||
<div className="text-xs font-semibold text-blue-700 mb-2">
|
||||
Company-Specific Instructions
|
||||
</div>
|
||||
<pre className="whitespace-pre-wrap">
|
||||
<pre className="whitespace-pre-wrap break-words break-all">
|
||||
{content.substring(
|
||||
companySpecificStartIndex,
|
||||
companySpecificEndIndex +
|
||||
@@ -566,7 +566,7 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
|
||||
<div className="text-xs font-semibold text-amber-700 mb-2">
|
||||
Taxonomy Data
|
||||
</div>
|
||||
<pre className="whitespace-pre-wrap">
|
||||
<pre className="whitespace-pre-wrap break-words break-all">
|
||||
{content.substring(
|
||||
actualTaxonomyStartIndex,
|
||||
taxEnd
|
||||
@@ -587,7 +587,7 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
|
||||
<div className="text-xs font-semibold text-pink-700 mb-2">
|
||||
Product Data
|
||||
</div>
|
||||
<pre className="whitespace-pre-wrap">
|
||||
<pre className="whitespace-pre-wrap break-words break-all">
|
||||
{content.substring(
|
||||
productDataStartIndex
|
||||
)}
|
||||
@@ -600,7 +600,7 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
|
||||
})()}
|
||||
</div>
|
||||
) : (
|
||||
<pre className="whitespace-pre-wrap">
|
||||
<pre className="whitespace-pre-wrap break-words break-all">
|
||||
{message.content}
|
||||
</pre>
|
||||
)}
|
||||
@@ -612,7 +612,7 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
|
||||
</div>
|
||||
) : (
|
||||
<ScrollArea className="h-full w-full">
|
||||
<Code className="whitespace-pre-wrap p-4 break-normal max-w-full">
|
||||
<Code className="whitespace-pre-wrap break-words break-all p-4 max-w-full overflow-x-hidden">
|
||||
{currentPrompt.prompt}
|
||||
</Code>
|
||||
</ScrollArea>
|
||||
|
||||
@@ -20,6 +20,8 @@ interface UpcValidationTableAdapterProps<T extends string> {
|
||||
copyDown: (rowIndex: number, fieldKey: string, endRowIndex?: number) => void
|
||||
validatingCells: Set<string>
|
||||
isLoadingTemplates: boolean
|
||||
editingCells: Set<string>
|
||||
setEditingCells: React.Dispatch<React.SetStateAction<Set<string>>>
|
||||
rowProductLines: Record<string, any[]>
|
||||
rowSublines: Record<string, any[]>
|
||||
isLoadingLines: Record<string, boolean>
|
||||
@@ -53,6 +55,8 @@ function UpcValidationTableAdapter<T extends string>({
|
||||
copyDown,
|
||||
validatingCells: externalValidatingCells,
|
||||
isLoadingTemplates,
|
||||
editingCells,
|
||||
setEditingCells,
|
||||
rowProductLines,
|
||||
rowSublines,
|
||||
isLoadingLines,
|
||||
@@ -86,11 +90,7 @@ function UpcValidationTableAdapter<T extends string>({
|
||||
|
||||
// First add from itemNumbers directly - this is the source of truth for template applications
|
||||
if (itemNumbers) {
|
||||
// Log all numbers for debugging
|
||||
console.log(`[ADAPTER-DEBUG] Received itemNumbers map with ${itemNumbers.size} entries`);
|
||||
|
||||
itemNumbers.forEach((itemNumber, rowIndex) => {
|
||||
console.log(`[ADAPTER-DEBUG] Adding item number for row ${rowIndex} from itemNumbers: ${itemNumber}`);
|
||||
result.set(rowIndex, itemNumber);
|
||||
});
|
||||
}
|
||||
@@ -100,14 +100,12 @@ function UpcValidationTableAdapter<T extends string>({
|
||||
// Check if upcValidation has an item number for this row
|
||||
const itemNumber = upcValidation.getItemNumber(index);
|
||||
if (itemNumber) {
|
||||
console.log(`[ADAPTER-DEBUG] Adding item number for row ${index} from upcValidation: ${itemNumber}`);
|
||||
result.set(index, itemNumber);
|
||||
}
|
||||
|
||||
// Also check if it's directly in the data
|
||||
const dataItemNumber = data[index].item_number;
|
||||
if (dataItemNumber && !result.has(index)) {
|
||||
console.log(`[ADAPTER-DEBUG] Adding item number for row ${index} from data: ${dataItemNumber}`);
|
||||
result.set(index, dataItemNumber);
|
||||
}
|
||||
});
|
||||
@@ -151,6 +149,8 @@ function UpcValidationTableAdapter<T extends string>({
|
||||
rowSublines={rowSublines}
|
||||
isLoadingLines={isLoadingLines}
|
||||
isLoadingSublines={isLoadingSublines}
|
||||
editingCells={editingCells}
|
||||
setEditingCells={setEditingCells}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ import InputCell from './cells/InputCell'
|
||||
import SelectCell from './cells/SelectCell'
|
||||
import MultiSelectCell from './cells/MultiSelectCell'
|
||||
import { TableCell } from '@/components/ui/table'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
|
||||
// Context for copy down selection mode
|
||||
export const CopyDownContext = React.createContext<{
|
||||
@@ -78,7 +77,10 @@ const BaseCellContent = React.memo(({
|
||||
hasErrors,
|
||||
options = [],
|
||||
className = '',
|
||||
fieldKey = ''
|
||||
fieldKey = '',
|
||||
onStartEdit,
|
||||
onEndEdit,
|
||||
isValidating
|
||||
}: {
|
||||
field: Field<string>;
|
||||
value: any;
|
||||
@@ -87,6 +89,9 @@ const BaseCellContent = React.memo(({
|
||||
options?: readonly any[];
|
||||
className?: string;
|
||||
fieldKey?: string;
|
||||
onStartEdit?: () => void;
|
||||
onEndEdit?: () => void;
|
||||
isValidating?: boolean;
|
||||
}) => {
|
||||
// Get field type information
|
||||
const fieldType = fieldKey === 'line' || fieldKey === 'subline'
|
||||
@@ -113,10 +118,13 @@ const BaseCellContent = React.memo(({
|
||||
field={{...field, fieldType: { type: 'select', options }}}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onStartEdit={onStartEdit}
|
||||
onEndEdit={onEndEdit}
|
||||
options={options}
|
||||
hasErrors={hasErrors}
|
||||
className={className}
|
||||
disabled={field.disabled}
|
||||
isValidating={isValidating}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -127,10 +135,13 @@ const BaseCellContent = React.memo(({
|
||||
field={field}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onStartEdit={onStartEdit}
|
||||
onEndEdit={onEndEdit}
|
||||
options={options}
|
||||
hasErrors={hasErrors}
|
||||
className={className}
|
||||
disabled={field.disabled}
|
||||
isValidating={isValidating}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -141,10 +152,13 @@ const BaseCellContent = React.memo(({
|
||||
field={field}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onStartEdit={onStartEdit}
|
||||
onEndEdit={onEndEdit}
|
||||
options={options}
|
||||
hasErrors={hasErrors}
|
||||
className={className}
|
||||
disabled={field.disabled}
|
||||
isValidating={isValidating}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -154,10 +168,13 @@ const BaseCellContent = React.memo(({
|
||||
field={field}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onStartEdit={onStartEdit}
|
||||
onEndEdit={onEndEdit}
|
||||
hasErrors={hasErrors}
|
||||
isMultiline={isMultiline}
|
||||
isPrice={isPrice}
|
||||
disabled={field.disabled}
|
||||
isValidating={isValidating}
|
||||
/>
|
||||
);
|
||||
}, (prev, next) => {
|
||||
@@ -191,6 +208,8 @@ export interface ValidationCellProps {
|
||||
rowIndex: number
|
||||
copyDown?: (endRowIndex?: number) => void
|
||||
totalRows?: number
|
||||
editingCells: Set<string>
|
||||
setEditingCells: React.Dispatch<React.SetStateAction<Set<string>>>
|
||||
}
|
||||
|
||||
// Add efficient error message extraction function
|
||||
@@ -288,7 +307,9 @@ const ValidationCell = React.memo(({
|
||||
width,
|
||||
copyDown,
|
||||
rowIndex,
|
||||
totalRows = 0
|
||||
totalRows = 0,
|
||||
// editingCells not used; keep setEditingCells for API compatibility
|
||||
setEditingCells
|
||||
}: ValidationCellProps) => {
|
||||
// Use the CopyDown context
|
||||
const copyDownContext = React.useContext(CopyDownContext);
|
||||
@@ -297,9 +318,6 @@ const ValidationCell = React.memo(({
|
||||
// This ensures that when the itemNumber changes, the display value changes
|
||||
let displayValue;
|
||||
if (fieldKey === 'item_number' && itemNumber) {
|
||||
// Always log when an item_number field is rendered to help debug
|
||||
console.log(`[VC-DEBUG] ValidationCell rendering item_number for row=${rowIndex} with itemNumber=${itemNumber}, value=${value}`);
|
||||
|
||||
// Prioritize itemNumber prop for item_number fields
|
||||
displayValue = itemNumber;
|
||||
} else {
|
||||
@@ -324,6 +342,22 @@ const ValidationCell = React.memo(({
|
||||
// Add state for hover on target row
|
||||
const [isTargetRowHovered, setIsTargetRowHovered] = React.useState(false);
|
||||
|
||||
// PERFORMANCE FIX: Create cell key for editing state management
|
||||
const cellKey = `${rowIndex}-${fieldKey}`;
|
||||
|
||||
// SINGLE-CLICK EDITING FIX: Create editing state management functions
|
||||
const handleStartEdit = React.useCallback(() => {
|
||||
setEditingCells(prev => new Set([...prev, cellKey]));
|
||||
}, [setEditingCells, cellKey]);
|
||||
|
||||
const handleEndEdit = React.useCallback(() => {
|
||||
setEditingCells(prev => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(cellKey);
|
||||
return newSet;
|
||||
});
|
||||
}, [setEditingCells, cellKey]);
|
||||
|
||||
// Force isValidating to be a boolean
|
||||
const isLoading = isValidating === true;
|
||||
|
||||
@@ -437,33 +471,33 @@ const ValidationCell = React.memo(({
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
)}
|
||||
{isLoading ? (
|
||||
<div className={`flex items-center justify-center gap-2 border ${hasError || isRequiredButEmpty ? 'border-red-500' : 'border-input'} rounded-md px-2 py-2`}>
|
||||
<Skeleton className="w-full h-4" />
|
||||
</div>
|
||||
) : (
|
||||
<div
|
||||
className={`truncate overflow-hidden ${isCopyDownHovered && !copyDownContext.isInCopyDownMode ? 'bg-blue-50/50' : ''}`}
|
||||
style={{
|
||||
backgroundColor: isSourceCell ? '#dbeafe' :
|
||||
isSelectedTarget ? '#bfdbfe' :
|
||||
isInTargetRow && isTargetRowHovered ? '#dbeafe' :
|
||||
undefined,
|
||||
borderRadius: (isSourceCell || isSelectedTarget || isInTargetRow) ? '0.375rem' : undefined,
|
||||
boxShadow: isSourceCell ? '0 0 0 2px #3b82f6' : undefined
|
||||
}}
|
||||
>
|
||||
<BaseCellContent
|
||||
field={field}
|
||||
value={displayValue}
|
||||
onChange={onChange}
|
||||
hasErrors={hasError || isRequiredButEmpty}
|
||||
options={options}
|
||||
className={cellClassName}
|
||||
fieldKey={fieldKey}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className={`relative truncate overflow-hidden ${isCopyDownHovered && !copyDownContext.isInCopyDownMode ? 'bg-blue-50/50' : ''}`}
|
||||
style={{
|
||||
backgroundColor: isSourceCell ? '#dbeafe' :
|
||||
isSelectedTarget ? '#bfdbfe' :
|
||||
isInTargetRow && isTargetRowHovered ? '#dbeafe' :
|
||||
undefined,
|
||||
borderRadius: (isSourceCell || isSelectedTarget || isInTargetRow) ? '0.375rem' : undefined,
|
||||
boxShadow: isSourceCell ? '0 0 0 2px #3b82f6' : undefined
|
||||
}}
|
||||
>
|
||||
<BaseCellContent
|
||||
field={field}
|
||||
value={displayValue}
|
||||
onChange={onChange}
|
||||
hasErrors={hasError || isRequiredButEmpty}
|
||||
options={options}
|
||||
className={cellClassName}
|
||||
fieldKey={fieldKey}
|
||||
onStartEdit={handleStartEdit}
|
||||
onEndEdit={handleEndEdit}
|
||||
isValidating={isLoading}
|
||||
/>
|
||||
{isLoading && (
|
||||
<span className="pointer-events-none absolute right-2 top-2 h-2 w-2 rounded-full bg-muted-foreground animate-pulse" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</TableCell>
|
||||
);
|
||||
@@ -514,4 +548,4 @@ const ValidationCell = React.memo(({
|
||||
|
||||
ValidationCell.displayName = 'ValidationCell';
|
||||
|
||||
export default ValidationCell;
|
||||
export default ValidationCell;
|
||||
|
||||
@@ -61,7 +61,9 @@ const ValidationContainer = <T extends string>({
|
||||
fields,
|
||||
isLoadingTemplates,
|
||||
validatingCells,
|
||||
setValidatingCells
|
||||
setValidatingCells,
|
||||
editingCells,
|
||||
setEditingCells
|
||||
} = validationState
|
||||
|
||||
// Use product lines fetching hook
|
||||
@@ -121,26 +123,40 @@ const ValidationContainer = <T extends string>({
|
||||
|
||||
// Function to mark a row for revalidation
|
||||
const markRowForRevalidation = useCallback((rowIndex: number, fieldKey?: string) => {
|
||||
// Map filtered rowIndex to original data index via __index
|
||||
const originalIndex = (() => {
|
||||
try {
|
||||
const row = filteredData[rowIndex];
|
||||
if (!row) return rowIndex;
|
||||
const id = row.__index;
|
||||
if (!id) return rowIndex;
|
||||
const idx = data.findIndex(r => r.__index === id);
|
||||
return idx >= 0 ? idx : rowIndex;
|
||||
} catch {
|
||||
return rowIndex;
|
||||
}
|
||||
})();
|
||||
|
||||
setFieldsToRevalidate(prev => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.add(rowIndex);
|
||||
newSet.add(originalIndex);
|
||||
return newSet;
|
||||
});
|
||||
|
||||
|
||||
// Also track which specific field needs to be revalidated
|
||||
if (fieldKey) {
|
||||
setFieldsToRevalidateMap(prev => {
|
||||
const newMap = { ...prev };
|
||||
if (!newMap[rowIndex]) {
|
||||
newMap[rowIndex] = [];
|
||||
if (!newMap[originalIndex]) {
|
||||
newMap[originalIndex] = [];
|
||||
}
|
||||
if (!newMap[rowIndex].includes(fieldKey)) {
|
||||
newMap[rowIndex] = [...newMap[rowIndex], fieldKey];
|
||||
if (!newMap[originalIndex].includes(fieldKey)) {
|
||||
newMap[originalIndex] = [...newMap[originalIndex], fieldKey];
|
||||
}
|
||||
return newMap;
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
}, [data, filteredData]);
|
||||
|
||||
// Add a ref to track the last validation time
|
||||
|
||||
@@ -160,8 +176,6 @@ const ValidationContainer = <T extends string>({
|
||||
// Clear the fields map
|
||||
setFieldsToRevalidateMap({});
|
||||
|
||||
console.log(`Validating ${rowsToRevalidate.length} rows with specific fields`);
|
||||
|
||||
// Revalidate each row with specific fields information
|
||||
validationState.revalidateRows(rowsToRevalidate, fieldsMap);
|
||||
}, [fieldsToRevalidate, validationState, fieldsToRevalidateMap]);
|
||||
@@ -488,81 +502,39 @@ const ValidationContainer = <T extends string>({
|
||||
// Detect if this is a direct item_number edit
|
||||
const isItemNumberEdit = key === 'item_number' as T;
|
||||
|
||||
// For item_number edits, we need special handling to ensure they persist
|
||||
// For item_number edits, use core updateRow to atomically update + validate
|
||||
if (isItemNumberEdit) {
|
||||
console.log(`Manual edit to item_number: ${value}`);
|
||||
|
||||
// First, update data immediately to ensure edit takes effect
|
||||
setData(prevData => {
|
||||
const newData = [...prevData];
|
||||
if (originalIndex >= 0 && originalIndex < newData.length) {
|
||||
newData[originalIndex] = {
|
||||
...newData[originalIndex],
|
||||
[key]: processedValue
|
||||
};
|
||||
}
|
||||
return newData;
|
||||
});
|
||||
|
||||
// Mark for revalidation after a delay to ensure data update completes first
|
||||
setTimeout(() => {
|
||||
markRowForRevalidation(rowIndex, key as string);
|
||||
}, 0);
|
||||
|
||||
// Return early to prevent double-updating
|
||||
const idx = originalIndex >= 0 ? originalIndex : rowIndex;
|
||||
validationState.updateRow(idx, key as unknown as any, processedValue);
|
||||
return;
|
||||
}
|
||||
|
||||
// For all other fields, use standard approach
|
||||
// Always use setData for updating - immediate update for better UX
|
||||
const updatedRow = { ...rowData, [key]: processedValue };
|
||||
// For all other fields, use core updateRow for atomic update + validation
|
||||
const idx = originalIndex >= 0 ? originalIndex : rowIndex;
|
||||
validationState.updateRow(idx, key as unknown as any, processedValue);
|
||||
|
||||
// Mark this row for revalidation to clear any existing errors
|
||||
markRowForRevalidation(rowIndex, key as string);
|
||||
|
||||
// Update the data immediately to show the change
|
||||
setData(prevData => {
|
||||
const newData = [...prevData];
|
||||
if (originalIndex >= 0 && originalIndex < newData.length) {
|
||||
// Create a new row object with the updated field
|
||||
newData[originalIndex] = {
|
||||
...newData[originalIndex],
|
||||
[key]: processedValue
|
||||
};
|
||||
}
|
||||
return newData;
|
||||
});
|
||||
|
||||
// Secondary effects - using a timeout to ensure UI updates first
|
||||
setTimeout(() => {
|
||||
// Secondary effects - using requestAnimationFrame for better performance
|
||||
requestAnimationFrame(() => {
|
||||
// Handle company change - clear line/subline and fetch product lines
|
||||
if (key === 'company' && value) {
|
||||
console.log(`Company changed to ${value} for row ${rowIndex}, updating lines and sublines`);
|
||||
|
||||
// Clear any existing line/subline values immediately
|
||||
setData(prevData => {
|
||||
const newData = [...prevData];
|
||||
const idx = newData.findIndex(item => item.__index === rowId);
|
||||
if (idx >= 0) {
|
||||
console.log(`Clearing line and subline values for row with ID ${rowId}`);
|
||||
newData[idx] = {
|
||||
...newData[idx],
|
||||
line: undefined,
|
||||
subline: undefined
|
||||
};
|
||||
} else {
|
||||
console.warn(`Could not find row with ID ${rowId} to clear line/subline values`);
|
||||
}
|
||||
return newData;
|
||||
});
|
||||
|
||||
// Fetch product lines for the new company
|
||||
// Fetch product lines for the new company with debouncing
|
||||
if (rowId && value !== undefined) {
|
||||
const companyId = value.toString();
|
||||
|
||||
// Force immediate fetch for better UX
|
||||
console.log(`Immediately fetching product lines for company ${companyId} for row ${rowId}`);
|
||||
|
||||
// Set loading state first
|
||||
setValidatingCells(prev => {
|
||||
const newSet = new Set(prev);
|
||||
@@ -570,29 +542,29 @@ const ValidationContainer = <T extends string>({
|
||||
return newSet;
|
||||
});
|
||||
|
||||
fetchProductLines(rowId, companyId)
|
||||
.then(lines => {
|
||||
console.log(`Successfully loaded ${lines.length} product lines for company ${companyId}`);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(`Error fetching product lines for company ${companyId}:`, err);
|
||||
toast.error("Failed to load product lines");
|
||||
})
|
||||
.finally(() => {
|
||||
// Clear loading indicator
|
||||
setValidatingCells(prev => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(`${rowIndex}-line`);
|
||||
return newSet;
|
||||
// Debounce the API call to prevent excessive requests
|
||||
setTimeout(() => {
|
||||
fetchProductLines(rowId, companyId)
|
||||
.catch(err => {
|
||||
console.error(`Error fetching product lines for company ${companyId}:`, err);
|
||||
toast.error("Failed to load product lines");
|
||||
})
|
||||
.finally(() => {
|
||||
// Clear loading indicator
|
||||
setValidatingCells(prev => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(`${rowIndex}-line`);
|
||||
return newSet;
|
||||
});
|
||||
});
|
||||
});
|
||||
}, 100); // 100ms debounce
|
||||
}
|
||||
}
|
||||
|
||||
// Handle supplier + UPC validation - using the most recent values
|
||||
if (key === 'supplier' && value) {
|
||||
// Get the latest UPC value from the updated row
|
||||
const upcValue = updatedRow.upc || updatedRow.barcode;
|
||||
const upcValue = (data[rowIndex] as any)?.upc || (data[rowIndex] as any)?.barcode;
|
||||
|
||||
if (upcValue) {
|
||||
console.log(`Validating UPC: rowIndex=${rowIndex}, supplier=${value}, upc=${upcValue}`);
|
||||
@@ -689,7 +661,7 @@ const ValidationContainer = <T extends string>({
|
||||
// Handle UPC/barcode + supplier validation
|
||||
if ((key === 'upc' || key === 'barcode') && value) {
|
||||
// Get latest supplier from the updated row
|
||||
const supplier = updatedRow.supplier;
|
||||
const supplier = (data[rowIndex] as any)?.supplier;
|
||||
|
||||
if (supplier) {
|
||||
console.log(`Validating UPC from UPC change: rowIndex=${rowIndex}, supplier=${supplier}, upc=${value}`);
|
||||
@@ -728,7 +700,7 @@ const ValidationContainer = <T extends string>({
|
||||
});
|
||||
}
|
||||
}
|
||||
}, 0); // Using 0ms timeout to defer execution until after the UI update
|
||||
}); // Using requestAnimationFrame to defer execution until after the UI update
|
||||
}, [data, filteredData, setData, fetchProductLines, fetchSublines, upcValidation, markRowForRevalidation]);
|
||||
|
||||
// Fix the missing loading indicator clear code
|
||||
@@ -800,15 +772,15 @@ const ValidationContainer = <T extends string>({
|
||||
markRowForRevalidation(targetRowIndex, fieldKey);
|
||||
});
|
||||
|
||||
// Clear the loading state for all cells after a short delay
|
||||
setTimeout(() => {
|
||||
// Clear the loading state for all cells efficiently
|
||||
requestAnimationFrame(() => {
|
||||
setValidatingCells(prev => {
|
||||
if (prev.size === 0) return prev;
|
||||
if (prev.size === 0 || updatingCells.size === 0) return prev;
|
||||
const newSet = new Set(prev);
|
||||
updatingCells.forEach(cell => newSet.delete(cell));
|
||||
return newSet;
|
||||
});
|
||||
}, 100);
|
||||
});
|
||||
|
||||
// If copying UPC or supplier fields, validate UPC for all rows
|
||||
if (fieldKey === 'upc' || fieldKey === 'barcode' || fieldKey === 'supplier') {
|
||||
@@ -949,6 +921,8 @@ const ValidationContainer = <T extends string>({
|
||||
filters={filters}
|
||||
templates={templates}
|
||||
applyTemplate={applyTemplateWrapper}
|
||||
editingCells={editingCells}
|
||||
setEditingCells={setEditingCells}
|
||||
getTemplateDisplayText={getTemplateDisplayText}
|
||||
isValidatingUpc={isRowValidatingUpc}
|
||||
validatingUpcRows={Array.from(upcValidation.validatingRows)}
|
||||
@@ -987,7 +961,18 @@ const ValidationContainer = <T extends string>({
|
||||
]);
|
||||
|
||||
return (
|
||||
<div className="flex flex-col h-[calc(100vh-10rem)] overflow-hidden">
|
||||
<div
|
||||
className="flex flex-col h-[calc(100vh-10rem)] overflow-hidden"
|
||||
onMouseUp={() => {
|
||||
// Prevent stray text selection when clicking away from cells
|
||||
try {
|
||||
const sel = window.getSelection?.();
|
||||
if (sel && sel.type === 'Range') {
|
||||
sel.removeAllRanges();
|
||||
}
|
||||
} catch {}
|
||||
}}
|
||||
>
|
||||
<div className="flex-1 overflow-hidden">
|
||||
<div className="flex h-[calc(100vh-9.5rem)] flex-col">
|
||||
<div className="flex-1 overflow-hidden">
|
||||
@@ -1229,4 +1214,4 @@ const ValidationContainer = <T extends string>({
|
||||
)
|
||||
}
|
||||
|
||||
export default ValidationContainer
|
||||
export default ValidationContainer
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useMemo, useCallback, useState } from 'react'
|
||||
import React, { useMemo, useCallback, useState, useEffect, useRef } from 'react'
|
||||
import {
|
||||
useReactTable,
|
||||
getCoreRowModel,
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
RowSelectionState,
|
||||
ColumnDef
|
||||
} from '@tanstack/react-table'
|
||||
import { useVirtualizer } from '@tanstack/react-virtual'
|
||||
import { Fields, Field } from '../../../types'
|
||||
import { RowData, Template } from '../hooks/validationTypes'
|
||||
import ValidationCell, { CopyDownContext } from './ValidationCell'
|
||||
@@ -24,6 +25,10 @@ type ErrorType = {
|
||||
source?: string;
|
||||
}
|
||||
|
||||
// Stable empty errors array to prevent unnecessary re-renders
|
||||
// Use a mutable empty array to satisfy the ErrorType[] type
|
||||
const EMPTY_ERRORS: ErrorType[] = [];
|
||||
|
||||
interface ValidationTableProps<T extends string> {
|
||||
data: RowData<T>[]
|
||||
fields: Fields<T>
|
||||
@@ -46,6 +51,8 @@ interface ValidationTableProps<T extends string> {
|
||||
itemNumbers: Map<number, string>
|
||||
isLoadingTemplates?: boolean
|
||||
copyDown: (rowIndex: number, key: string, endRowIndex?: number) => void
|
||||
editingCells: Set<string>
|
||||
setEditingCells: React.Dispatch<React.SetStateAction<Set<string>>>
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
@@ -106,7 +113,9 @@ const MemoizedCell = React.memo(({
|
||||
width,
|
||||
rowIndex,
|
||||
copyDown,
|
||||
totalRows
|
||||
totalRows,
|
||||
editingCells,
|
||||
setEditingCells
|
||||
}: {
|
||||
field: Field<string>,
|
||||
value: any,
|
||||
@@ -119,7 +128,9 @@ const MemoizedCell = React.memo(({
|
||||
width: number,
|
||||
rowIndex: number,
|
||||
copyDown?: (endRowIndex?: number) => void,
|
||||
totalRows: number
|
||||
totalRows: number,
|
||||
editingCells: Set<string>,
|
||||
setEditingCells: React.Dispatch<React.SetStateAction<Set<string>>>
|
||||
}) => {
|
||||
return (
|
||||
<ValidationCell
|
||||
@@ -135,37 +146,24 @@ const MemoizedCell = React.memo(({
|
||||
rowIndex={rowIndex}
|
||||
copyDown={copyDown}
|
||||
totalRows={totalRows}
|
||||
editingCells={editingCells}
|
||||
setEditingCells={setEditingCells}
|
||||
/>
|
||||
);
|
||||
}, (prev, next) => {
|
||||
// CRITICAL FIX: Never memoize item_number cells - always re-render them
|
||||
// For item_number cells, only re-render when itemNumber actually changes
|
||||
if (prev.fieldKey === 'item_number') {
|
||||
return false; // Never skip re-renders for item_number cells
|
||||
return prev.itemNumber === next.itemNumber &&
|
||||
prev.value === next.value &&
|
||||
prev.isValidating === next.isValidating;
|
||||
}
|
||||
|
||||
// Optimize the memo comparison function for better performance
|
||||
// Only re-render if these essential props change
|
||||
const valueEqual = prev.value === next.value;
|
||||
const isValidatingEqual = prev.isValidating === next.isValidating;
|
||||
|
||||
// Shallow equality check for errors array
|
||||
const errorsEqual = prev.errors === next.errors || (
|
||||
Array.isArray(prev.errors) &&
|
||||
Array.isArray(next.errors) &&
|
||||
prev.errors.length === next.errors.length &&
|
||||
prev.errors.every((err, idx) => err === next.errors[idx])
|
||||
);
|
||||
|
||||
// Shallow equality check for options array
|
||||
const optionsEqual = prev.options === next.options || (
|
||||
Array.isArray(prev.options) &&
|
||||
Array.isArray(next.options) &&
|
||||
prev.options.length === next.options.length &&
|
||||
prev.options.every((opt, idx) => opt === next.options?.[idx])
|
||||
);
|
||||
|
||||
// Skip checking for props that rarely change
|
||||
return valueEqual && isValidatingEqual && errorsEqual && optionsEqual;
|
||||
// Simplified memo comparison - most expensive checks removed
|
||||
// Note: editingCells changes are not checked here as they need immediate re-renders
|
||||
return prev.value === next.value &&
|
||||
prev.isValidating === next.isValidating &&
|
||||
prev.errors === next.errors &&
|
||||
prev.options === next.options;
|
||||
});
|
||||
|
||||
MemoizedCell.displayName = 'MemoizedCell';
|
||||
@@ -185,6 +183,8 @@ const ValidationTable = <T extends string>({
|
||||
itemNumbers,
|
||||
isLoadingTemplates = false,
|
||||
copyDown,
|
||||
editingCells,
|
||||
setEditingCells,
|
||||
rowProductLines = {},
|
||||
rowSublines = {},
|
||||
isLoadingLines = {},
|
||||
@@ -194,6 +194,14 @@ const ValidationTable = <T extends string>({
|
||||
upcValidationResults
|
||||
}: ValidationTableProps<T>) => {
|
||||
const { translations } = useRsi<T>();
|
||||
const tableRootRef = useRef<HTMLDivElement | null>(null);
|
||||
const getScrollElement = useCallback(() => tableRootRef.current?.parentElement ?? null, []);
|
||||
const [, forceRerender] = useState(0);
|
||||
|
||||
useEffect(() => {
|
||||
if (!tableRootRef.current) return;
|
||||
forceRerender((value) => value + 1);
|
||||
}, []);
|
||||
|
||||
// Add state for copy down selection mode
|
||||
const [isInCopyDownMode, setIsInCopyDownMode] = useState(false);
|
||||
@@ -393,29 +401,44 @@ const ValidationTable = <T extends string>({
|
||||
} else if (fieldKey === 'subline' && rowId && rowSublines[rowId]) {
|
||||
options = rowSublines[rowId];
|
||||
}
|
||||
|
||||
const validatingKey = `${row.index}-${fieldKey}`;
|
||||
const isCellValidating = validatingCells.has(validatingKey);
|
||||
|
||||
// Get the current cell value first
|
||||
const currentValue = fieldKey === 'item_number' && row.original[field.key]
|
||||
? row.original[field.key]
|
||||
: row.original[field.key as keyof typeof row.original];
|
||||
|
||||
// Determine if this cell is in loading state - use a clear consistent approach
|
||||
// Determine if this cell is in loading state - only show loading for empty fields
|
||||
let isLoading = false;
|
||||
|
||||
// Check the validatingCells Set first (for item_number and other fields)
|
||||
const cellLoadingKey = `${row.index}-${fieldKey}`;
|
||||
if (validatingCells.has(cellLoadingKey)) {
|
||||
isLoading = true;
|
||||
}
|
||||
// Check if UPC is validating for this row and field is item_number
|
||||
else if (fieldKey === 'item_number' && isRowValidatingUpc(row.index)) {
|
||||
isLoading = true;
|
||||
}
|
||||
// Add loading state for line/subline fields
|
||||
else if (fieldKey === 'line' && rowId && isLoadingLines[rowId]) {
|
||||
isLoading = true;
|
||||
}
|
||||
else if (fieldKey === 'subline' && rowId && isLoadingSublines[rowId]) {
|
||||
isLoading = true;
|
||||
// Only show loading if the field is currently empty
|
||||
const isEmpty = currentValue === undefined || currentValue === null || currentValue === '' ||
|
||||
(Array.isArray(currentValue) && currentValue.length === 0);
|
||||
|
||||
if (isEmpty) {
|
||||
// Check the validatingCells Set first (for item_number and other fields)
|
||||
const cellLoadingKey = `${row.index}-${fieldKey}`;
|
||||
if (validatingCells.has(cellLoadingKey)) {
|
||||
isLoading = true;
|
||||
}
|
||||
// Check if UPC is validating for this row and field is item_number
|
||||
else if (fieldKey === 'item_number' && isRowValidatingUpc(row.index)) {
|
||||
isLoading = true;
|
||||
}
|
||||
// Add loading state for line/subline fields
|
||||
else if (fieldKey === 'line' && rowId && isLoadingLines[rowId]) {
|
||||
isLoading = true;
|
||||
}
|
||||
else if (fieldKey === 'subline' && rowId && isLoadingSublines[rowId]) {
|
||||
isLoading = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Get validation errors for this cell
|
||||
const cellErrors = validationErrors.get(row.index)?.[fieldKey] || [];
|
||||
// Use stable EMPTY_ERRORS to avoid new array creation on every render
|
||||
const cellErrors = validationErrors.get(row.index)?.[fieldKey] || EMPTY_ERRORS;
|
||||
|
||||
// Create a copy of the field with guaranteed field type for line and subline fields
|
||||
let fieldWithType = field;
|
||||
@@ -448,22 +471,19 @@ const ValidationTable = <T extends string>({
|
||||
}
|
||||
}
|
||||
|
||||
// CRITICAL: For item_number fields, create a unique key that includes the itemNumber value
|
||||
// This forces a complete re-render when the itemNumber changes
|
||||
// Create stable keys that only change when actual content changes
|
||||
const cellKey = fieldKey === 'item_number'
|
||||
? `cell-${row.index}-${fieldKey}-${itemNumber || 'empty'}-${Date.now()}` // Force re-render on every render cycle for item_number
|
||||
? `cell-${row.index}-${fieldKey}-${itemNumber || 'empty'}` // Only change when itemNumber actually changes
|
||||
: `cell-${row.index}-${fieldKey}`;
|
||||
|
||||
return (
|
||||
<MemoizedCell
|
||||
key={cellKey} // CRITICAL: Add key to force re-render when itemNumber changes
|
||||
field={fieldWithType as Field<string>}
|
||||
value={fieldKey === 'item_number' && row.original[field.key]
|
||||
? row.original[field.key] // Use direct value from row data
|
||||
: row.original[field.key as keyof typeof row.original]}
|
||||
value={currentValue}
|
||||
onChange={(value) => handleFieldUpdate(row.index, field.key as T, value)}
|
||||
errors={cellErrors}
|
||||
isValidating={isLoading}
|
||||
isValidating={isLoading || isCellValidating}
|
||||
fieldKey={fieldKey}
|
||||
options={options}
|
||||
itemNumber={itemNumber}
|
||||
@@ -471,6 +491,8 @@ const ValidationTable = <T extends string>({
|
||||
rowIndex={row.index}
|
||||
copyDown={(endRowIndex?: number) => handleCopyDown(row.index, field.key as string, endRowIndex)}
|
||||
totalRows={data.length}
|
||||
editingCells={editingCells}
|
||||
setEditingCells={setEditingCells}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -495,6 +517,47 @@ const ValidationTable = <T extends string>({
|
||||
getRowId: useCallback((_row: RowData<T>, index: number) => String(index), []),
|
||||
});
|
||||
|
||||
const rowModel = table.getRowModel();
|
||||
const rows = rowModel.rows;
|
||||
const visibleColumnCount = table.getVisibleFlatColumns().length;
|
||||
|
||||
const rowVirtualizer = useVirtualizer({
|
||||
count: rows.length,
|
||||
getScrollElement,
|
||||
estimateSize: () => 66,
|
||||
overscan: 8,
|
||||
measureElement:
|
||||
typeof window !== 'undefined'
|
||||
? (el: Element | null) => el?.getBoundingClientRect().height || 0
|
||||
: undefined,
|
||||
});
|
||||
|
||||
const scrollElement = getScrollElement();
|
||||
|
||||
const virtualRows = scrollElement
|
||||
? rowVirtualizer.getVirtualItems()
|
||||
: rows.map((_, index) => ({
|
||||
index,
|
||||
key: `row-fallback-${index}`,
|
||||
start: 0,
|
||||
end: 0,
|
||||
size: 0,
|
||||
}));
|
||||
const paddingTop = scrollElement && virtualRows.length > 0 ? virtualRows[0].start : 0;
|
||||
const paddingBottom =
|
||||
scrollElement && virtualRows.length > 0
|
||||
? rowVirtualizer.getTotalSize() - virtualRows[virtualRows.length - 1].end
|
||||
: 0;
|
||||
|
||||
const measureVirtualRow = useCallback(
|
||||
(node: HTMLTableRowElement | null) => {
|
||||
const scrollEl = getScrollElement();
|
||||
if (!scrollEl || !node) return;
|
||||
rowVirtualizer.measureElement(node);
|
||||
},
|
||||
[getScrollElement, rowVirtualizer]
|
||||
);
|
||||
|
||||
// Calculate total table width for stable horizontal scrolling
|
||||
const totalWidth = useMemo(() => {
|
||||
return columns.reduce((total, col) => total + (col.size || 0), 0);
|
||||
@@ -515,7 +578,7 @@ const ValidationTable = <T extends string>({
|
||||
|
||||
return (
|
||||
<CopyDownContext.Provider value={copyDownContextValue}>
|
||||
<div className="min-w-max relative">
|
||||
<div ref={tableRootRef} className="min-w-max relative">
|
||||
{/* Add global styles for copy down mode */}
|
||||
{isInCopyDownMode && (
|
||||
<style>
|
||||
@@ -620,7 +683,17 @@ const ValidationTable = <T extends string>({
|
||||
transform: 'translateZ(0)' // Force GPU acceleration
|
||||
}}>
|
||||
<TableBody>
|
||||
{table.getRowModel().rows.map((row) => {
|
||||
{paddingTop > 0 && (
|
||||
<TableRow key="virtual-padding-top">
|
||||
<TableCell
|
||||
colSpan={visibleColumnCount}
|
||||
style={{ height: `${paddingTop}px`, padding: 0, border: 'none' }}
|
||||
/>
|
||||
</TableRow>
|
||||
)}
|
||||
{virtualRows.map((virtualRow) => {
|
||||
const row = rows[virtualRow.index];
|
||||
if (!row) return null;
|
||||
// Precompute validation error status for this row
|
||||
const hasErrors = validationErrors.has(parseInt(row.id)) &&
|
||||
Object.keys(validationErrors.get(parseInt(row.id)) || {}).length > 0;
|
||||
@@ -649,6 +722,7 @@ const ValidationTable = <T extends string>({
|
||||
isCopyDownTarget ? "cursor-pointer copy-down-target-row" : ""
|
||||
)}
|
||||
style={rowStyle}
|
||||
ref={scrollElement ? measureVirtualRow : undefined}
|
||||
onMouseEnter={() => handleRowMouseEnter(parseInt(row.id))}
|
||||
>
|
||||
{row.getVisibleCells().map((cell: any) => (
|
||||
@@ -659,6 +733,14 @@ const ValidationTable = <T extends string>({
|
||||
</TableRow>
|
||||
);
|
||||
})}
|
||||
{paddingBottom > 0 && (
|
||||
<TableRow key="virtual-padding-bottom">
|
||||
<TableCell
|
||||
colSpan={visibleColumnCount}
|
||||
style={{ height: `${paddingBottom}px`, padding: 0, border: 'none' }}
|
||||
/>
|
||||
</TableRow>
|
||||
)}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</div>
|
||||
@@ -678,6 +760,10 @@ const areEqual = (prev: ValidationTableProps<any>, next: ValidationTableProps<an
|
||||
// Fast path: data length change always means re-render
|
||||
if (prev.data.length !== next.data.length) return false;
|
||||
|
||||
// CRITICAL: Check if data content has actually changed
|
||||
// Simple reference equality check - if data array reference changed, re-render
|
||||
if (prev.data !== next.data) return false;
|
||||
|
||||
// Efficiently check row selection changes
|
||||
const prevSelectionKeys = Object.keys(prev.rowSelection);
|
||||
const nextSelectionKeys = Object.keys(next.rowSelection);
|
||||
@@ -693,4 +779,4 @@ const areEqual = (prev: ValidationTableProps<any>, next: ValidationTableProps<an
|
||||
return true;
|
||||
};
|
||||
|
||||
export default React.memo(ValidationTable, areEqual);
|
||||
export default React.memo(ValidationTable, areEqual);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useState, useCallback, useTransition, useRef, useEffect, useMemo } from 'react'
|
||||
import React, { useState, useCallback, useMemo, useEffect } from 'react'
|
||||
import { Field } from '../../../../types'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { cn } from '@/lib/utils'
|
||||
@@ -15,21 +15,11 @@ interface InputCellProps<T extends string> {
|
||||
isPrice?: boolean
|
||||
disabled?: boolean
|
||||
className?: string
|
||||
isValidating?: boolean
|
||||
}
|
||||
|
||||
// Add efficient price formatting utility
|
||||
const formatPrice = (value: string): string => {
|
||||
// Remove any non-numeric characters except decimal point
|
||||
const numericValue = value.replace(/[^\d.]/g, '');
|
||||
|
||||
// Parse as float and format to 2 decimal places
|
||||
const numValue = parseFloat(numericValue);
|
||||
if (!isNaN(numValue)) {
|
||||
return numValue.toFixed(2);
|
||||
}
|
||||
|
||||
return numericValue;
|
||||
};
|
||||
|
||||
// (removed unused formatPrice helper)
|
||||
|
||||
const InputCell = <T extends string>({
|
||||
field,
|
||||
@@ -41,20 +31,14 @@ const InputCell = <T extends string>({
|
||||
isMultiline = false,
|
||||
isPrice = false,
|
||||
disabled = false,
|
||||
className = ''
|
||||
className = '', isValidating: _isValidating = false
|
||||
}: InputCellProps<T>) => {
|
||||
const [isEditing, setIsEditing] = useState(false);
|
||||
const [editValue, setEditValue] = useState('');
|
||||
const [isPending, startTransition] = useTransition();
|
||||
|
||||
// Use a ref to track if we need to process the value
|
||||
const needsProcessingRef = useRef(false);
|
||||
|
||||
// Track local display value to avoid waiting for validation
|
||||
const [localDisplayValue, setLocalDisplayValue] = useState<string | null>(null);
|
||||
|
||||
// Add state for hover
|
||||
const [isHovered, setIsHovered] = useState(false);
|
||||
const [pendingDisplayValue, setPendingDisplayValue] = useState<string | null>(null);
|
||||
|
||||
// Remove optimistic updates and rely on parent state
|
||||
|
||||
// Helper function to check if a class is present in the className string
|
||||
const hasClass = (cls: string): boolean => {
|
||||
@@ -62,36 +46,16 @@ const InputCell = <T extends string>({
|
||||
return classNames.includes(cls);
|
||||
};
|
||||
|
||||
// Initialize localDisplayValue on mount and when value changes externally
|
||||
useEffect(() => {
|
||||
if (localDisplayValue === null ||
|
||||
(typeof value === 'string' && typeof localDisplayValue === 'string' &&
|
||||
value.trim() !== localDisplayValue.trim())) {
|
||||
setLocalDisplayValue(value);
|
||||
}
|
||||
}, [value, localDisplayValue]);
|
||||
// No complex initialization needed
|
||||
|
||||
// Efficiently handle price formatting without multiple rerenders
|
||||
useEffect(() => {
|
||||
if (isPrice && needsProcessingRef.current && !isEditing) {
|
||||
needsProcessingRef.current = false;
|
||||
|
||||
// Do price processing only when needed
|
||||
const formattedValue = formatPrice(value);
|
||||
if (formattedValue !== value) {
|
||||
onChange(formattedValue);
|
||||
}
|
||||
}
|
||||
}, [value, isPrice, isEditing, onChange]);
|
||||
|
||||
// Handle focus event - optimized to be synchronous
|
||||
// Handle focus event
|
||||
const handleFocus = useCallback(() => {
|
||||
setIsEditing(true);
|
||||
setPendingDisplayValue(null);
|
||||
|
||||
// For price fields, strip formatting when focusing
|
||||
if (value !== undefined && value !== null) {
|
||||
if (isPrice) {
|
||||
// Remove any non-numeric characters except decimal point
|
||||
// Remove any non-numeric characters except decimal point for editing
|
||||
const numericValue = String(value).replace(/[^\d.]/g, '');
|
||||
setEditValue(numericValue);
|
||||
} else {
|
||||
@@ -104,61 +68,48 @@ const InputCell = <T extends string>({
|
||||
onStartEdit?.();
|
||||
}, [value, onStartEdit, isPrice]);
|
||||
|
||||
// Handle blur event - use transition for non-critical updates
|
||||
// Handle blur event - save to parent only
|
||||
const handleBlur = useCallback(() => {
|
||||
// First - lock in the current edit value to prevent it from being lost
|
||||
const finalValue = editValue.trim();
|
||||
|
||||
setPendingDisplayValue(finalValue);
|
||||
|
||||
// Then transition to non-editing state
|
||||
startTransition(() => {
|
||||
setIsEditing(false);
|
||||
|
||||
// Format the value for storage (remove formatting like $ for price)
|
||||
let processedValue = finalValue;
|
||||
|
||||
if (isPrice && processedValue) {
|
||||
needsProcessingRef.current = true;
|
||||
}
|
||||
|
||||
// Update local display value immediately to prevent UI flicker
|
||||
setLocalDisplayValue(processedValue);
|
||||
|
||||
// Commit the change to parent component
|
||||
onChange(processedValue);
|
||||
onEndEdit?.();
|
||||
});
|
||||
}, [editValue, onChange, onEndEdit, isPrice]);
|
||||
// Save to parent - parent must update immediately for this to work
|
||||
onChange(finalValue);
|
||||
|
||||
// Exit editing mode
|
||||
setIsEditing(false);
|
||||
onEndEdit?.();
|
||||
}, [editValue, onChange, onEndEdit]);
|
||||
|
||||
// Handle direct input change - optimized to be synchronous for typing
|
||||
const handleChange = useCallback((e: React.ChangeEvent<HTMLInputElement | HTMLTextAreaElement>) => {
|
||||
const newValue = isPrice ? e.target.value.replace(/[$,]/g, '') : e.target.value;
|
||||
setEditValue(newValue);
|
||||
}, [isPrice]);
|
||||
|
||||
useEffect(() => {
|
||||
if (pendingDisplayValue === null) return;
|
||||
const currentValue = value ?? '';
|
||||
if (String(currentValue) === pendingDisplayValue) {
|
||||
setPendingDisplayValue(null);
|
||||
}
|
||||
}, [value, pendingDisplayValue]);
|
||||
|
||||
// Get the display value - prioritize local display value
|
||||
// Get the display value - prefer pending value when present for immediate feedback
|
||||
const displayValue = useMemo(() => {
|
||||
// First priority: local display value (for immediate updates)
|
||||
if (localDisplayValue !== null) {
|
||||
if (isPrice) {
|
||||
// Format price value
|
||||
const numValue = parseFloat(localDisplayValue);
|
||||
return !isNaN(numValue) ? numValue.toFixed(2) : localDisplayValue;
|
||||
}
|
||||
return localDisplayValue;
|
||||
}
|
||||
|
||||
// Second priority: handle price formatting for the actual value
|
||||
if (isPrice && value) {
|
||||
if (typeof value === 'number') {
|
||||
return value.toFixed(2);
|
||||
} else if (typeof value === 'string' && /^-?\d+(\.\d+)?$/.test(value)) {
|
||||
return parseFloat(value).toFixed(2);
|
||||
const rawValue = pendingDisplayValue !== null ? pendingDisplayValue : value ?? '';
|
||||
|
||||
if (isPrice && rawValue !== '' && rawValue !== undefined && rawValue !== null) {
|
||||
if (typeof rawValue === 'number') {
|
||||
return rawValue.toFixed(2);
|
||||
} else if (typeof rawValue === 'string' && /^-?\d+(\.\d+)?$/.test(rawValue)) {
|
||||
return parseFloat(rawValue).toFixed(2);
|
||||
}
|
||||
}
|
||||
|
||||
// Default: use the actual value or empty string
|
||||
return value ?? '';
|
||||
}, [isPrice, value, localDisplayValue]);
|
||||
|
||||
return String(rawValue);
|
||||
}, [isPrice, value, pendingDisplayValue]);
|
||||
|
||||
// Add outline even when not in focus
|
||||
const outlineClass = "border focus-visible:ring-0 focus-visible:ring-offset-0";
|
||||
@@ -221,7 +172,6 @@ const InputCell = <T extends string>({
|
||||
className={cn(
|
||||
outlineClass,
|
||||
hasErrors ? "border-destructive" : "",
|
||||
isPending ? "opacity-50" : "",
|
||||
className
|
||||
)}
|
||||
style={{
|
||||
@@ -267,33 +217,11 @@ const InputCell = <T extends string>({
|
||||
)
|
||||
}
|
||||
|
||||
// Optimize memo comparison to focus on essential props
|
||||
// Simplified memo comparison
|
||||
export default React.memo(InputCell, (prev, next) => {
|
||||
if (prev.hasErrors !== next.hasErrors) return false;
|
||||
if (prev.isMultiline !== next.isMultiline) return false;
|
||||
if (prev.isPrice !== next.isPrice) return false;
|
||||
if (prev.disabled !== next.disabled) return false;
|
||||
if (prev.field !== next.field) return false;
|
||||
|
||||
// Only check value if not editing (to avoid expensive rerender during editing)
|
||||
if (prev.value !== next.value) {
|
||||
// For price values, do a more intelligent comparison
|
||||
if (prev.isPrice) {
|
||||
// Convert both to numeric values for comparison
|
||||
const prevNum = typeof prev.value === 'number' ? prev.value :
|
||||
typeof prev.value === 'string' ? parseFloat(prev.value) : 0;
|
||||
const nextNum = typeof next.value === 'number' ? next.value :
|
||||
typeof next.value === 'string' ? parseFloat(next.value) : 0;
|
||||
|
||||
// Only update if the actual numeric values differ
|
||||
if (!isNaN(prevNum) && !isNaN(nextNum) &&
|
||||
Math.abs(prevNum - nextNum) > 0.001) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
// Only re-render if essential props change
|
||||
return prev.value === next.value &&
|
||||
prev.hasErrors === next.hasErrors &&
|
||||
prev.disabled === next.disabled &&
|
||||
prev.field === next.field;
|
||||
});
|
||||
|
||||
@@ -11,6 +11,7 @@ import { Badge } from '@/components/ui/badge'
|
||||
interface FieldOption {
|
||||
label: string;
|
||||
value: string;
|
||||
hex?: string; // optional hex color for colors field
|
||||
}
|
||||
|
||||
interface MultiSelectCellProps<T extends string> {
|
||||
@@ -23,8 +24,10 @@ interface MultiSelectCellProps<T extends string> {
|
||||
options?: readonly FieldOption[]
|
||||
disabled?: boolean
|
||||
className?: string
|
||||
isValidating?: boolean
|
||||
}
|
||||
|
||||
|
||||
// Memoized option item to prevent unnecessary renders for large option lists
|
||||
const OptionItem = React.memo(({
|
||||
option,
|
||||
@@ -157,7 +160,7 @@ const MultiSelectCell = <T extends string>({
|
||||
hasErrors,
|
||||
options: providedOptions,
|
||||
disabled = false,
|
||||
className = ''
|
||||
className = '', isValidating: _isValidating = false
|
||||
}: MultiSelectCellProps<T>) => {
|
||||
const [open, setOpen] = useState(false)
|
||||
const [searchQuery, setSearchQuery] = useState("")
|
||||
@@ -237,24 +240,43 @@ const MultiSelectCell = <T extends string>({
|
||||
if (providedOptions && providedOptions.length > 0) {
|
||||
// Check if options are already in the right format
|
||||
if (typeof providedOptions[0] === 'object' && 'label' in providedOptions[0] && 'value' in providedOptions[0]) {
|
||||
return providedOptions as FieldOption[];
|
||||
// Preserve optional hex if present (hex or hex_color without #)
|
||||
return (providedOptions as any[]).map(opt => ({
|
||||
label: opt.label,
|
||||
value: String(opt.value),
|
||||
hex: opt.hex
|
||||
|| (opt.hexColor ? `#${String(opt.hexColor).replace(/^#/, '')}` : undefined)
|
||||
|| (opt.hex_color ? `#${String(opt.hex_color).replace(/^#/, '')}` : undefined)
|
||||
})) as FieldOption[];
|
||||
}
|
||||
|
||||
return providedOptions.map(option => ({
|
||||
return (providedOptions as any[]).map(option => ({
|
||||
label: option.label || String(option.value),
|
||||
value: String(option.value)
|
||||
value: String(option.value),
|
||||
hex: option.hex
|
||||
|| (option.hexColor ? `#${String(option.hexColor).replace(/^#/, '')}` : undefined)
|
||||
|| (option.hex_color ? `#${String(option.hex_color).replace(/^#/, '')}` : undefined)
|
||||
}));
|
||||
}
|
||||
|
||||
// Check field options format
|
||||
if (fieldOptions.length > 0) {
|
||||
if (typeof fieldOptions[0] === 'object' && 'label' in fieldOptions[0] && 'value' in fieldOptions[0]) {
|
||||
return fieldOptions as FieldOption[];
|
||||
return (fieldOptions as any[]).map(opt => ({
|
||||
label: opt.label,
|
||||
value: String(opt.value),
|
||||
hex: opt.hex
|
||||
|| (opt.hexColor ? `#${String(opt.hexColor).replace(/^#/, '')}` : undefined)
|
||||
|| (opt.hex_color ? `#${String(opt.hex_color).replace(/^#/, '')}` : undefined)
|
||||
})) as FieldOption[];
|
||||
}
|
||||
|
||||
return fieldOptions.map(option => ({
|
||||
return (fieldOptions as any[]).map(option => ({
|
||||
label: option.label || String(option.value),
|
||||
value: String(option.value)
|
||||
value: String(option.value),
|
||||
hex: option.hex
|
||||
|| (option.hexColor ? `#${String(option.hexColor).replace(/^#/, '')}` : undefined)
|
||||
|| (option.hex_color ? `#${String(option.hex_color).replace(/^#/, '')}` : undefined)
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -491,7 +513,18 @@ const MultiSelectCell = <T extends string>({
|
||||
onSelect={() => handleSelect(option.value)}
|
||||
className="cursor-pointer"
|
||||
>
|
||||
{option.label}
|
||||
<div className="flex items-center gap-2">
|
||||
{field.key === 'colors' && option.hex && (
|
||||
<span
|
||||
className={`inline-block h-3.5 w-3.5 rounded-full ${option.hex.toLowerCase() === '#ffffff' || option.hex.toLowerCase() === '#fff' ? 'border' : ''}`}
|
||||
style={{
|
||||
backgroundColor: option.hex,
|
||||
...(option.hex.toLowerCase() === '#ffffff' || option.hex.toLowerCase() === '#fff' ? { borderColor: '#000' } : {})
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
<span>{option.label}</span>
|
||||
</div>
|
||||
{selectedValueSet.has(option.value) && (
|
||||
<Check className="ml-auto h-4 w-4" />
|
||||
)}
|
||||
@@ -542,4 +575,4 @@ export default React.memo(MultiSelectCell, (prev, next) => {
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,8 +22,10 @@ interface SelectCellProps<T extends string> {
|
||||
options: readonly any[]
|
||||
disabled?: boolean
|
||||
className?: string
|
||||
isValidating?: boolean
|
||||
}
|
||||
|
||||
|
||||
// Lightweight version of the select cell with minimal dependencies
|
||||
const SelectCell = <T extends string>({
|
||||
field,
|
||||
@@ -34,7 +36,7 @@ const SelectCell = <T extends string>({
|
||||
hasErrors,
|
||||
options = [],
|
||||
disabled = false,
|
||||
className = ''
|
||||
className = '', isValidating = false
|
||||
}: SelectCellProps<T>) => {
|
||||
// State for the open/closed state of the dropdown
|
||||
const [open, setOpen] = useState(false);
|
||||
@@ -47,6 +49,7 @@ const SelectCell = <T extends string>({
|
||||
|
||||
// State to track if the value is being processed/validated
|
||||
const [isProcessing, setIsProcessing] = useState(false);
|
||||
const showProcessing = isProcessing || isValidating;
|
||||
|
||||
// Add state for hover
|
||||
const [isHovered, setIsHovered] = useState(false);
|
||||
@@ -61,8 +64,10 @@ const SelectCell = <T extends string>({
|
||||
useEffect(() => {
|
||||
setInternalValue(value);
|
||||
// When the value prop changes, it means validation is complete
|
||||
setIsProcessing(false);
|
||||
}, [value]);
|
||||
if (!isValidating) {
|
||||
setIsProcessing(false);
|
||||
}
|
||||
}, [value, isValidating]);
|
||||
|
||||
// Memoize options processing to avoid recalculation on every render
|
||||
const selectOptions = useMemo(() => {
|
||||
@@ -142,10 +147,12 @@ const SelectCell = <T extends string>({
|
||||
// 5. Call onChange synchronously to avoid race conditions with other cells
|
||||
onChange(valueToCommit);
|
||||
|
||||
// 6. Clear processing state after a short delay
|
||||
// 6. Clear processing state after a short delay - reduced for responsiveness
|
||||
setTimeout(() => {
|
||||
if (!isValidating) {
|
||||
setIsProcessing(false);
|
||||
}, 200);
|
||||
}
|
||||
}, 50);
|
||||
}, [onChange, onEndEdit]);
|
||||
|
||||
// If disabled, render a static view
|
||||
@@ -200,7 +207,7 @@ const SelectCell = <T extends string>({
|
||||
"w-full justify-between font-normal",
|
||||
"border",
|
||||
!internalValue && "text-muted-foreground",
|
||||
isProcessing && "text-muted-foreground",
|
||||
showProcessing && "text-muted-foreground",
|
||||
hasErrors ? "border-destructive" : "",
|
||||
className
|
||||
)}
|
||||
@@ -233,7 +240,7 @@ const SelectCell = <T extends string>({
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
>
|
||||
<span className={isProcessing ? "opacity-70" : ""}>
|
||||
<span className={showProcessing ? "opacity-70" : ""}>
|
||||
{displayValue}
|
||||
</span>
|
||||
<ChevronsUpDown className="mr-1.5 h-4 w-4 shrink-0 opacity-50" />
|
||||
|
||||
@@ -296,10 +296,24 @@ export const useAiValidation = <T extends string>(
|
||||
lastProduct: data[data.length - 1]
|
||||
});
|
||||
|
||||
// Clean the data to ensure we only send what's needed
|
||||
// Build a complete row object including empty cells so API receives all fields
|
||||
const cleanedData = data.map(item => {
|
||||
const { __index, ...rest } = item;
|
||||
return rest;
|
||||
const { __index, ...rest } = item as any;
|
||||
// Ensure all known field keys are present, even if empty
|
||||
const withAllKeys: Record<string, any> = {};
|
||||
(fields as any[]).forEach((f) => {
|
||||
const k = String(f.key);
|
||||
// Preserve arrays (e.g., multi-select) as empty array if undefined
|
||||
if (Array.isArray(rest[k])) {
|
||||
withAllKeys[k] = rest[k];
|
||||
} else if (rest[k] === undefined) {
|
||||
// Use empty string to represent an empty cell
|
||||
withAllKeys[k] = "";
|
||||
} else {
|
||||
withAllKeys[k] = rest[k];
|
||||
}
|
||||
});
|
||||
return withAllKeys;
|
||||
});
|
||||
|
||||
console.log('Cleaned data sample:', {
|
||||
@@ -421,10 +435,21 @@ export const useAiValidation = <T extends string>(
|
||||
});
|
||||
}, 1000) as unknown as NodeJS.Timeout;
|
||||
|
||||
// Clean the data to ensure we only send what's needed
|
||||
// Build a complete row object including empty cells so API receives all fields
|
||||
const cleanedData = data.map(item => {
|
||||
const { __index, ...rest } = item;
|
||||
return rest;
|
||||
const { __index, ...rest } = item as any;
|
||||
const withAllKeys: Record<string, any> = {};
|
||||
(fields as any[]).forEach((f) => {
|
||||
const k = String(f.key);
|
||||
if (Array.isArray(rest[k])) {
|
||||
withAllKeys[k] = rest[k];
|
||||
} else if (rest[k] === undefined) {
|
||||
withAllKeys[k] = "";
|
||||
} else {
|
||||
withAllKeys[k] = rest[k];
|
||||
}
|
||||
});
|
||||
return withAllKeys;
|
||||
});
|
||||
|
||||
console.log('Cleaned data for validation:', cleanedData);
|
||||
@@ -728,4 +753,4 @@ export const useAiValidation = <T extends string>(
|
||||
revertAiChange,
|
||||
isChangeReverted
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
@@ -7,14 +7,24 @@ import { RowData, isEmpty } from './validationTypes';
|
||||
// Create a cache for validation results to avoid repeated validation of the same data
|
||||
const validationResultCache = new Map();
|
||||
|
||||
// Add a function to clear cache for a specific field value
|
||||
export const clearValidationCacheForField = (fieldKey: string) => {
|
||||
// Look for entries that match this field key
|
||||
validationResultCache.forEach((_, key) => {
|
||||
if (key.startsWith(`${fieldKey}-`)) {
|
||||
validationResultCache.delete(key);
|
||||
}
|
||||
});
|
||||
// Optimize cache clearing - only clear when necessary
|
||||
export const clearValidationCacheForField = (fieldKey: string, specificValue?: any) => {
|
||||
if (specificValue !== undefined) {
|
||||
// Only clear specific field-value combinations
|
||||
const specificKey = `${fieldKey}-${String(specificValue)}`;
|
||||
validationResultCache.forEach((_, key) => {
|
||||
if (key.startsWith(specificKey)) {
|
||||
validationResultCache.delete(key);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Clear all entries for the field
|
||||
validationResultCache.forEach((_, key) => {
|
||||
if (key.startsWith(`${fieldKey}-`)) {
|
||||
validationResultCache.delete(key);
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Add a special function to clear all uniqueness validation caches
|
||||
|
||||
@@ -18,6 +18,15 @@ export const useFilterManagement = <T extends string>(
|
||||
|
||||
// Filter data based on current filter state
|
||||
const filteredData = useMemo(() => {
|
||||
// Fast path: no filters active, return original data reference to avoid re-renders
|
||||
const noSearch = !filters.searchText || filters.searchText.trim() === '';
|
||||
const noErrorsOnly = !filters.showErrorsOnly;
|
||||
const noFieldFilter = !filters.filterField || !filters.filterValue || filters.filterValue.trim() === '';
|
||||
|
||||
if (noSearch && noErrorsOnly && noFieldFilter) {
|
||||
return data; // preserve reference; prevents full table rerender on error map changes
|
||||
}
|
||||
|
||||
return data.filter((row, index) => {
|
||||
// Filter by search text
|
||||
if (filters.searchText) {
|
||||
@@ -107,4 +116,4 @@ export const useFilterManagement = <T extends string>(
|
||||
updateFilters,
|
||||
resetFilters
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,332 +1,541 @@
|
||||
import { useCallback } from 'react';
|
||||
import { RowData } from './validationTypes';
|
||||
import { useCallback, useEffect, useMemo, useRef, startTransition } from 'react';
|
||||
import type { Dispatch, SetStateAction } from 'react';
|
||||
import { RowData, isEmpty as isValueEmpty } from './validationTypes';
|
||||
import type { Field, Fields } from '../../../types';
|
||||
import { ErrorType, ValidationError } from '../../../types';
|
||||
import { ErrorSources, ErrorType, ValidationError } from '../../../types';
|
||||
import { useUniqueValidation } from './useUniqueValidation';
|
||||
|
||||
export const useRowOperations = <T extends string>(
|
||||
data: RowData<T>[],
|
||||
fields: Fields<T>,
|
||||
setData: React.Dispatch<React.SetStateAction<RowData<T>[]>>,
|
||||
setValidationErrors: React.Dispatch<React.SetStateAction<Map<number, Record<string, ValidationError[]>>>>,
|
||||
validateFieldFromHook: (value: any, field: Field<T>) => ValidationError[]
|
||||
validateFieldFromHook: (value: any, field: Field<T>) => ValidationError[],
|
||||
setValidatingCells?: Dispatch<SetStateAction<Set<string>>>
|
||||
) => {
|
||||
// Helper function to validate a field value
|
||||
const { validateUniqueField } = useUniqueValidation<T>(fields);
|
||||
|
||||
const dataRef = useRef(data);
|
||||
useEffect(() => {
|
||||
dataRef.current = data;
|
||||
}, [data]);
|
||||
|
||||
type ValidationTask = { cancel: () => void };
|
||||
const pendingValidationTasksRef = useRef<Map<string, ValidationTask>>(new Map());
|
||||
|
||||
const scheduleIdleTask = useCallback((taskKey: string, runTask: () => void) => {
|
||||
const existingTask = pendingValidationTasksRef.current.get(taskKey);
|
||||
existingTask?.cancel();
|
||||
|
||||
const execute = () => {
|
||||
pendingValidationTasksRef.current.delete(taskKey);
|
||||
runTask();
|
||||
};
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
const win = window as Window & typeof globalThis & {
|
||||
requestIdleCallback?: (callback: IdleRequestCallback, options?: IdleRequestOptions) => number;
|
||||
cancelIdleCallback?: (handle: number) => void;
|
||||
};
|
||||
|
||||
if (win.requestIdleCallback) {
|
||||
const handle = win.requestIdleCallback(() => {
|
||||
execute();
|
||||
}, { timeout: 250 });
|
||||
|
||||
pendingValidationTasksRef.current.set(taskKey, {
|
||||
cancel: () => win.cancelIdleCallback?.(handle),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const timeoutId = window.setTimeout(execute, 0);
|
||||
pendingValidationTasksRef.current.set(taskKey, {
|
||||
cancel: () => window.clearTimeout(timeoutId),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
execute();
|
||||
}, []);
|
||||
|
||||
const updateValidatingCell = useCallback(
|
||||
(rowIndex: number, fieldKey: string, pending: boolean) => {
|
||||
if (!setValidatingCells) return;
|
||||
const cellKey = `${rowIndex}-${fieldKey}`;
|
||||
setValidatingCells((prev: Set<string>) => {
|
||||
const hasKey = prev.has(cellKey);
|
||||
if (pending && hasKey) return prev;
|
||||
if (!pending && !hasKey) return prev;
|
||||
const next = new Set(prev);
|
||||
if (pending) next.add(cellKey);
|
||||
else next.delete(cellKey);
|
||||
return next;
|
||||
});
|
||||
},
|
||||
[setValidatingCells]
|
||||
);
|
||||
|
||||
const scheduleFieldValidation = useCallback(
|
||||
(rowIndex: number, fieldKey: string, runValidation: () => void) => {
|
||||
updateValidatingCell(rowIndex, fieldKey, true);
|
||||
try {
|
||||
runValidation();
|
||||
} finally {
|
||||
updateValidatingCell(rowIndex, fieldKey, false);
|
||||
}
|
||||
},
|
||||
[updateValidatingCell]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
pendingValidationTasksRef.current.forEach((task) => task.cancel());
|
||||
pendingValidationTasksRef.current.clear();
|
||||
};
|
||||
}, []);
|
||||
|
||||
const uniquenessFieldKeys = useMemo(() => {
|
||||
const keys = new Set<string>([
|
||||
'item_number',
|
||||
'upc',
|
||||
'barcode',
|
||||
'supplier_no',
|
||||
'notions_no',
|
||||
'name'
|
||||
]);
|
||||
|
||||
fields.forEach((field) => {
|
||||
if (field.validations?.some((v) => v.rule === 'unique')) {
|
||||
keys.add(String(field.key));
|
||||
}
|
||||
});
|
||||
|
||||
return keys;
|
||||
}, [fields]);
|
||||
|
||||
const mergeUniqueErrorsForFields = useCallback(
|
||||
(
|
||||
baseErrors: Map<number, Record<string, ValidationError[]>>,
|
||||
dataForCalc: RowData<T>[],
|
||||
fieldKeysToCheck: string[]
|
||||
) => {
|
||||
if (!fieldKeysToCheck.length) return baseErrors;
|
||||
|
||||
const newErrors = new Map(baseErrors);
|
||||
|
||||
fieldKeysToCheck.forEach((fieldKey) => {
|
||||
if (!uniquenessFieldKeys.has(fieldKey)) return;
|
||||
|
||||
const uniqueMap = validateUniqueField(dataForCalc, fieldKey);
|
||||
const rowsWithUniqueErrors = new Set<number>();
|
||||
uniqueMap.forEach((_, rowIdx) => rowsWithUniqueErrors.add(rowIdx));
|
||||
|
||||
uniqueMap.forEach((errorsForRow, rowIdx) => {
|
||||
const existing = { ...(newErrors.get(rowIdx) || {}) };
|
||||
const info = errorsForRow[fieldKey];
|
||||
const currentValue = (dataForCalc[rowIdx] as any)?.[fieldKey];
|
||||
|
||||
if (info && !isValueEmpty(currentValue)) {
|
||||
existing[fieldKey] = [
|
||||
{
|
||||
message: info.message,
|
||||
level: info.level,
|
||||
source: info.source ?? ErrorSources.Table,
|
||||
type: info.type ?? ErrorType.Unique
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
if (Object.keys(existing).length > 0) newErrors.set(rowIdx, existing);
|
||||
else newErrors.delete(rowIdx);
|
||||
});
|
||||
|
||||
newErrors.forEach((rowErrs, rowIdx) => {
|
||||
if (rowsWithUniqueErrors.has(rowIdx)) return;
|
||||
|
||||
if ((rowErrs as any)[fieldKey]) {
|
||||
const filtered = (rowErrs as any)[fieldKey].filter((e: ValidationError) => e.type !== ErrorType.Unique);
|
||||
if (filtered.length > 0) (rowErrs as any)[fieldKey] = filtered;
|
||||
else delete (rowErrs as any)[fieldKey];
|
||||
|
||||
if (Object.keys(rowErrs).length > 0) newErrors.set(rowIdx, rowErrs);
|
||||
else newErrors.delete(rowIdx);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return newErrors;
|
||||
},
|
||||
[uniquenessFieldKeys, validateUniqueField]
|
||||
);
|
||||
|
||||
const pendingUniqueFieldsRef = useRef<Set<string>>(new Set());
|
||||
|
||||
const runUniqueValidation = useCallback(
|
||||
(fieldsToProcess: string[]) => {
|
||||
if (!fieldsToProcess.length) return;
|
||||
setValidationErrors((prev) =>
|
||||
mergeUniqueErrorsForFields(prev, dataRef.current, fieldsToProcess)
|
||||
);
|
||||
},
|
||||
[mergeUniqueErrorsForFields, setValidationErrors]
|
||||
);
|
||||
|
||||
const scheduleUniqueValidation = useCallback(
|
||||
(fieldKeys: string[]) => {
|
||||
if (!fieldKeys.length) return;
|
||||
const uniqueKeys = fieldKeys.filter((key) => uniquenessFieldKeys.has(key));
|
||||
if (!uniqueKeys.length) return;
|
||||
|
||||
if (pendingUniqueFieldsRef.current.size === 0 && uniqueKeys.length <= 2) {
|
||||
const immediateKeys = Array.from(new Set(uniqueKeys));
|
||||
runUniqueValidation(immediateKeys);
|
||||
return;
|
||||
}
|
||||
|
||||
uniqueKeys.forEach((fieldKey) => pendingUniqueFieldsRef.current.add(fieldKey));
|
||||
|
||||
scheduleIdleTask('unique:batch', () => {
|
||||
const fieldsToProcess = Array.from(pendingUniqueFieldsRef.current);
|
||||
pendingUniqueFieldsRef.current.clear();
|
||||
if (!fieldsToProcess.length) return;
|
||||
runUniqueValidation(fieldsToProcess);
|
||||
});
|
||||
},
|
||||
[runUniqueValidation, scheduleIdleTask, uniquenessFieldKeys]
|
||||
);
|
||||
|
||||
const fieldValidationHelper = useCallback(
|
||||
(rowIndex: number, specificField?: string) => {
|
||||
// Skip validation if row doesn't exist
|
||||
if (rowIndex < 0 || rowIndex >= data.length) return;
|
||||
const currentData = dataRef.current;
|
||||
if (rowIndex < 0 || rowIndex >= currentData.length) return;
|
||||
|
||||
// Get the row data
|
||||
const row = data[rowIndex];
|
||||
const row = currentData[rowIndex];
|
||||
|
||||
// If validating a specific field, only check that field
|
||||
if (specificField) {
|
||||
const field = fields.find((f) => String(f.key) === specificField);
|
||||
if (field) {
|
||||
const value = row[specificField as keyof typeof row];
|
||||
if (!field) return;
|
||||
|
||||
// Use state setter instead of direct mutation
|
||||
setValidationErrors((prev) => {
|
||||
const newErrors = new Map(prev);
|
||||
const existingErrors = { ...(newErrors.get(rowIndex) || {}) };
|
||||
const value = row[specificField as keyof typeof row];
|
||||
|
||||
// Quick check for required fields - this prevents flashing errors
|
||||
const isRequired = field.validations?.some(
|
||||
(v) => v.rule === "required"
|
||||
);
|
||||
const isEmpty =
|
||||
value === undefined ||
|
||||
value === null ||
|
||||
value === "" ||
|
||||
(Array.isArray(value) && value.length === 0) ||
|
||||
(typeof value === "object" &&
|
||||
value !== null &&
|
||||
Object.keys(value).length === 0);
|
||||
|
||||
// For non-empty values, remove required errors immediately
|
||||
if (isRequired && !isEmpty && existingErrors[specificField]) {
|
||||
const nonRequiredErrors = existingErrors[specificField].filter(
|
||||
(e) => e.type !== ErrorType.Required
|
||||
);
|
||||
if (nonRequiredErrors.length === 0) {
|
||||
// If no other errors, remove the field entirely from errors
|
||||
delete existingErrors[specificField];
|
||||
} else {
|
||||
existingErrors[specificField] = nonRequiredErrors;
|
||||
}
|
||||
}
|
||||
|
||||
// Run full validation for the field
|
||||
const errors = validateFieldFromHook(value, field as unknown as Field<T>);
|
||||
|
||||
// Update validation errors for this field
|
||||
if (errors.length > 0) {
|
||||
existingErrors[specificField] = errors;
|
||||
} else {
|
||||
delete existingErrors[specificField];
|
||||
}
|
||||
|
||||
// Update validation errors map
|
||||
if (Object.keys(existingErrors).length > 0) {
|
||||
newErrors.set(rowIndex, existingErrors);
|
||||
} else {
|
||||
newErrors.delete(rowIndex);
|
||||
}
|
||||
|
||||
return newErrors;
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Validate all fields in the row
|
||||
updateValidatingCell(rowIndex, specificField, true);
|
||||
setValidationErrors((prev) => {
|
||||
const newErrors = new Map(prev);
|
||||
const rowErrors: Record<string, ValidationError[]> = {};
|
||||
const existingErrors = prev.get(rowIndex) || {};
|
||||
const newRowErrors = { ...existingErrors };
|
||||
let rowChanged = false;
|
||||
|
||||
fields.forEach((field) => {
|
||||
const isRequired = field.validations?.some((v) => v.rule === 'required');
|
||||
const valueIsEmpty =
|
||||
value === undefined ||
|
||||
value === null ||
|
||||
value === '' ||
|
||||
(Array.isArray(value) && value.length === 0) ||
|
||||
(typeof value === 'object' && value !== null && Object.keys(value).length === 0);
|
||||
|
||||
if (isRequired && !valueIsEmpty && newRowErrors[specificField]) {
|
||||
const nonRequiredErrors = newRowErrors[specificField].filter((e) => e.type !== ErrorType.Required);
|
||||
|
||||
if (nonRequiredErrors.length === 0) {
|
||||
rowChanged = true;
|
||||
delete newRowErrors[specificField];
|
||||
} else if (nonRequiredErrors.length !== newRowErrors[specificField].length) {
|
||||
rowChanged = true;
|
||||
newRowErrors[specificField] = nonRequiredErrors;
|
||||
}
|
||||
}
|
||||
|
||||
const errors = validateFieldFromHook(value, field as unknown as Field<T>);
|
||||
|
||||
if (errors.length > 0) {
|
||||
const existing = newRowErrors[specificField] || [];
|
||||
const sameLength = existing.length === errors.length;
|
||||
const sameContent = sameLength && existing.every((err, idx) => err.message === errors[idx].message && err.type === errors[idx].type);
|
||||
if (!sameContent) {
|
||||
rowChanged = true;
|
||||
newRowErrors[specificField] = errors;
|
||||
}
|
||||
} else if (newRowErrors[specificField]) {
|
||||
rowChanged = true;
|
||||
delete newRowErrors[specificField];
|
||||
}
|
||||
|
||||
let resultMap = prev;
|
||||
if (rowChanged) {
|
||||
resultMap = new Map(prev);
|
||||
if (Object.keys(newRowErrors).length > 0) {
|
||||
resultMap.set(rowIndex, newRowErrors);
|
||||
} else {
|
||||
resultMap.delete(rowIndex);
|
||||
}
|
||||
}
|
||||
|
||||
if (uniquenessFieldKeys.has(specificField)) {
|
||||
scheduleUniqueValidation([specificField]);
|
||||
return rowChanged ? resultMap : prev;
|
||||
}
|
||||
|
||||
return rowChanged ? resultMap : prev;
|
||||
});
|
||||
updateValidatingCell(rowIndex, specificField, false);
|
||||
} else {
|
||||
setValidationErrors((prev) => {
|
||||
const rowErrors: Record<string, ValidationError[]> = {};
|
||||
|
||||
fields.forEach((field) => {
|
||||
const fieldKey = String(field.key);
|
||||
const value = row[fieldKey as keyof typeof row];
|
||||
const errors = validateFieldFromHook(value, field as unknown as Field<T>);
|
||||
|
||||
const valueForField = row[fieldKey as keyof typeof row];
|
||||
const errors = validateFieldFromHook(valueForField, field as unknown as Field<T>);
|
||||
if (errors.length > 0) {
|
||||
rowErrors[fieldKey] = errors;
|
||||
}
|
||||
});
|
||||
|
||||
// Update validation errors map
|
||||
if (Object.keys(rowErrors).length > 0) {
|
||||
newErrors.set(rowIndex, rowErrors);
|
||||
} else {
|
||||
newErrors.delete(rowIndex);
|
||||
if (Object.keys(rowErrors).length === 0) {
|
||||
if (!prev.has(rowIndex)) return prev;
|
||||
const result = new Map(prev);
|
||||
result.delete(rowIndex);
|
||||
return result;
|
||||
}
|
||||
|
||||
return newErrors;
|
||||
});
|
||||
const existing = prev.get(rowIndex);
|
||||
const sameEntries = existing && Object.keys(rowErrors).length === Object.keys(existing).length && Object.entries(rowErrors).every(([key, val]) => {
|
||||
const existingVal = existing[key];
|
||||
return (
|
||||
existingVal &&
|
||||
existingVal.length === val.length &&
|
||||
existingVal.every((err, idx) => err.message === val[idx].message && err.type === val[idx].type)
|
||||
);
|
||||
});
|
||||
|
||||
if (sameEntries) return prev;
|
||||
|
||||
const result = new Map(prev);
|
||||
result.set(rowIndex, rowErrors);
|
||||
return result;
|
||||
});
|
||||
const uniqueKeys = fields
|
||||
.map((field) => String(field.key))
|
||||
.filter((fieldKey) => uniquenessFieldKeys.has(fieldKey));
|
||||
if (uniqueKeys.length > 0) {
|
||||
scheduleUniqueValidation(uniqueKeys);
|
||||
}
|
||||
}
|
||||
},
|
||||
[data, fields, validateFieldFromHook, setValidationErrors]
|
||||
[fields, scheduleUniqueValidation, setValidationErrors, uniquenessFieldKeys, validateFieldFromHook]
|
||||
);
|
||||
|
||||
// Use validateRow as an alias for fieldValidationHelper for compatibility
|
||||
const validateRow = fieldValidationHelper;
|
||||
|
||||
// Modified updateRow function that properly handles field-specific validation
|
||||
const updateRow = useCallback(
|
||||
(rowIndex: number, key: T, value: any) => {
|
||||
// Process value before updating data
|
||||
let processedValue = value;
|
||||
|
||||
// Strip dollar signs from price fields
|
||||
if (
|
||||
(key === "msrp" || key === "cost_each") &&
|
||||
typeof value === "string"
|
||||
) {
|
||||
processedValue = value.replace(/[$,]/g, "");
|
||||
|
||||
// Also ensure it's a valid number
|
||||
if ((key === 'msrp' || key === 'cost_each') && typeof value === 'string') {
|
||||
processedValue = value.replace(/[$,]/g, '');
|
||||
const numValue = parseFloat(processedValue);
|
||||
if (!isNaN(numValue)) {
|
||||
if (!Number.isNaN(numValue)) {
|
||||
processedValue = numValue.toFixed(2);
|
||||
}
|
||||
}
|
||||
|
||||
// Find the row data first
|
||||
const rowData = data[rowIndex];
|
||||
const currentData = dataRef.current;
|
||||
const rowData = currentData[rowIndex];
|
||||
if (!rowData) {
|
||||
console.error(`No row data found for index ${rowIndex}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a copy of the row to avoid mutation
|
||||
const updatedRow = { ...rowData, [key]: processedValue };
|
||||
|
||||
// Update the data immediately - this sets the value
|
||||
setData((prevData) => {
|
||||
const newData = [...prevData];
|
||||
if (rowIndex >= 0 && rowIndex < newData.length) {
|
||||
newData[rowIndex] = updatedRow;
|
||||
}
|
||||
return newData;
|
||||
const nextData = [...currentData];
|
||||
if (rowIndex >= 0 && rowIndex < nextData.length) {
|
||||
nextData[rowIndex] = updatedRow;
|
||||
}
|
||||
dataRef.current = nextData;
|
||||
|
||||
startTransition(() => {
|
||||
setData(() => nextData);
|
||||
});
|
||||
|
||||
// Find the field definition
|
||||
const field = fields.find((f) => String(f.key) === key);
|
||||
if (!field) return;
|
||||
|
||||
// CRITICAL FIX: Combine both validation operations into a single state update
|
||||
// to prevent intermediate rendering that causes error icon flashing
|
||||
setValidationErrors((prev) => {
|
||||
const newMap = new Map(prev);
|
||||
const existingErrors = newMap.get(rowIndex) || {};
|
||||
const newRowErrors = { ...existingErrors };
|
||||
scheduleFieldValidation(rowIndex, String(key), () => {
|
||||
setValidationErrors((prev) => {
|
||||
const existingErrors = prev.get(rowIndex) || {};
|
||||
const newRowErrors = { ...existingErrors };
|
||||
let rowChanged = false;
|
||||
|
||||
// Check for required field first
|
||||
const isRequired = field.validations?.some(
|
||||
(v) => v.rule === "required"
|
||||
);
|
||||
const isEmpty =
|
||||
processedValue === undefined ||
|
||||
processedValue === null ||
|
||||
processedValue === "" ||
|
||||
(Array.isArray(processedValue) && processedValue.length === 0) ||
|
||||
(typeof processedValue === "object" &&
|
||||
processedValue !== null &&
|
||||
Object.keys(processedValue).length === 0);
|
||||
const latestRow = dataRef.current[rowIndex];
|
||||
const currentValue = latestRow ? (latestRow[String(key) as keyof typeof latestRow] as unknown) : processedValue;
|
||||
|
||||
// For required fields with values, remove required errors
|
||||
if (isRequired && !isEmpty && newRowErrors[key as string]) {
|
||||
const hasRequiredError = newRowErrors[key as string].some(
|
||||
(e) => e.type === ErrorType.Required
|
||||
);
|
||||
|
||||
if (hasRequiredError) {
|
||||
// Remove required errors but keep other types of errors
|
||||
const nonRequiredErrors = newRowErrors[key as string].filter(
|
||||
(e) => e.type !== ErrorType.Required
|
||||
);
|
||||
const isRequired = field.validations?.some((v) => v.rule === 'required');
|
||||
const valueIsEmpty =
|
||||
currentValue === undefined ||
|
||||
currentValue === null ||
|
||||
currentValue === '' ||
|
||||
(Array.isArray(currentValue) && currentValue.length === 0) ||
|
||||
(typeof currentValue === 'object' && currentValue !== null && Object.keys(currentValue).length === 0);
|
||||
|
||||
if (isRequired && !valueIsEmpty && newRowErrors[String(key)]) {
|
||||
const nonRequiredErrors = newRowErrors[String(key)].filter((e) => e.type !== ErrorType.Required);
|
||||
if (nonRequiredErrors.length === 0) {
|
||||
// If no other errors, delete the field's errors entirely
|
||||
delete newRowErrors[key as string];
|
||||
} else {
|
||||
// Otherwise keep non-required errors
|
||||
newRowErrors[key as string] = nonRequiredErrors;
|
||||
if (newRowErrors[String(key)]) {
|
||||
rowChanged = true;
|
||||
delete newRowErrors[String(key)];
|
||||
}
|
||||
} else if (nonRequiredErrors.length !== newRowErrors[String(key)].length) {
|
||||
rowChanged = true;
|
||||
newRowErrors[String(key)] = nonRequiredErrors;
|
||||
}
|
||||
}
|
||||
|
||||
const errors = validateFieldFromHook(
|
||||
currentValue,
|
||||
field as unknown as Field<T>
|
||||
).filter((e) => e.type !== ErrorType.Required || valueIsEmpty);
|
||||
|
||||
if (errors.length > 0) {
|
||||
const existing = newRowErrors[String(key)] || [];
|
||||
const sameLength = existing.length === errors.length;
|
||||
const sameContent = sameLength && existing.every((err, idx) => err.message === errors[idx].message && err.type === errors[idx].type);
|
||||
if (!sameContent) {
|
||||
rowChanged = true;
|
||||
newRowErrors[String(key)] = errors;
|
||||
}
|
||||
} else if (newRowErrors[String(key)]) {
|
||||
rowChanged = true;
|
||||
delete newRowErrors[String(key)];
|
||||
}
|
||||
|
||||
let resultMap = prev;
|
||||
if (rowChanged) {
|
||||
resultMap = new Map(prev);
|
||||
if (Object.keys(newRowErrors).length > 0) {
|
||||
resultMap.set(rowIndex, newRowErrors);
|
||||
} else {
|
||||
resultMap.delete(rowIndex);
|
||||
}
|
||||
}
|
||||
|
||||
const fieldsToCheck: string[] = [];
|
||||
if (uniquenessFieldKeys.has(String(key))) fieldsToCheck.push(String(key));
|
||||
if (key === ('upc' as T) || key === ('barcode' as T) || key === ('supplier' as T)) {
|
||||
if (uniquenessFieldKeys.has('item_number')) fieldsToCheck.push('item_number');
|
||||
}
|
||||
|
||||
if (fieldsToCheck.length > 0) {
|
||||
scheduleUniqueValidation(fieldsToCheck);
|
||||
}
|
||||
|
||||
return rowChanged ? resultMap : prev;
|
||||
});
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
const rowId = rowData.__index;
|
||||
|
||||
if (key === 'company' && processedValue) {
|
||||
const nextData = [...dataRef.current];
|
||||
const idx = nextData.findIndex((item) => item.__index === rowId);
|
||||
if (idx >= 0) {
|
||||
nextData[idx] = {
|
||||
...nextData[idx],
|
||||
line: undefined,
|
||||
subline: undefined,
|
||||
};
|
||||
dataRef.current = nextData;
|
||||
startTransition(() => {
|
||||
setData(() => nextData);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Now run full validation for the field (except for required which we already handled)
|
||||
const errors = validateFieldFromHook(
|
||||
processedValue,
|
||||
field as unknown as Field<T>
|
||||
).filter((e) => e.type !== ErrorType.Required || isEmpty);
|
||||
|
||||
// Update with new validation results
|
||||
if (errors.length > 0) {
|
||||
newRowErrors[key as string] = errors;
|
||||
} else if (!newRowErrors[key as string]) {
|
||||
// If no errors found and no existing errors, ensure field is removed from errors
|
||||
delete newRowErrors[key as string];
|
||||
if (key === 'line' && processedValue) {
|
||||
const nextData = [...dataRef.current];
|
||||
const idx = nextData.findIndex((item) => item.__index === rowId);
|
||||
if (idx >= 0) {
|
||||
nextData[idx] = {
|
||||
...nextData[idx],
|
||||
subline: undefined,
|
||||
};
|
||||
dataRef.current = nextData;
|
||||
startTransition(() => {
|
||||
setData(() => nextData);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Update the map
|
||||
if (Object.keys(newRowErrors).length > 0) {
|
||||
newMap.set(rowIndex, newRowErrors);
|
||||
} else {
|
||||
newMap.delete(rowIndex);
|
||||
}
|
||||
|
||||
return newMap;
|
||||
});
|
||||
|
||||
// Handle simple secondary effects here
|
||||
setTimeout(() => {
|
||||
// Use __index to find the actual row in the full data array
|
||||
const rowId = rowData.__index;
|
||||
|
||||
// Handle company change - clear line/subline
|
||||
if (key === "company" && processedValue) {
|
||||
// Clear any existing line/subline values
|
||||
setData((prevData) => {
|
||||
const newData = [...prevData];
|
||||
const idx = newData.findIndex((item) => item.__index === rowId);
|
||||
if (idx >= 0) {
|
||||
newData[idx] = {
|
||||
...newData[idx],
|
||||
line: undefined,
|
||||
subline: undefined,
|
||||
};
|
||||
}
|
||||
return newData;
|
||||
});
|
||||
}
|
||||
|
||||
// Handle line change - clear subline
|
||||
if (key === "line" && processedValue) {
|
||||
// Clear any existing subline value
|
||||
setData((prevData) => {
|
||||
const newData = [...prevData];
|
||||
const idx = newData.findIndex((item) => item.__index === rowId);
|
||||
if (idx >= 0) {
|
||||
newData[idx] = {
|
||||
...newData[idx],
|
||||
subline: undefined,
|
||||
};
|
||||
}
|
||||
return newData;
|
||||
});
|
||||
}
|
||||
}, 50);
|
||||
}, 5);
|
||||
},
|
||||
[data, fields, validateFieldFromHook, setData, setValidationErrors]
|
||||
[fields, scheduleFieldValidation, scheduleUniqueValidation, setData, setValidationErrors, uniquenessFieldKeys, validateFieldFromHook]
|
||||
);
|
||||
|
||||
// Improved revalidateRows function
|
||||
const revalidateRows = useCallback(
|
||||
async (
|
||||
rowIndexes: number[],
|
||||
updatedFields?: { [rowIndex: number]: string[] }
|
||||
) => {
|
||||
// Process all specified rows using a single state update to avoid race conditions
|
||||
const uniqueFieldsToCheck = new Set<string>();
|
||||
const fieldsMarked: Array<[number, string]> = [];
|
||||
|
||||
setValidationErrors((prev) => {
|
||||
const newErrors = new Map(prev);
|
||||
let newErrors = new Map(prev);
|
||||
const currentData = dataRef.current;
|
||||
|
||||
// Process each row
|
||||
for (const rowIndex of rowIndexes) {
|
||||
if (rowIndex < 0 || rowIndex >= data.length) continue;
|
||||
|
||||
const row = data[rowIndex];
|
||||
if (rowIndex < 0 || rowIndex >= currentData.length) continue;
|
||||
const row = currentData[rowIndex];
|
||||
if (!row) continue;
|
||||
|
||||
// If we have specific fields to update for this row
|
||||
const fieldsToValidate = updatedFields?.[rowIndex] || [];
|
||||
|
||||
if (fieldsToValidate.length > 0) {
|
||||
// Get existing errors for this row
|
||||
const existingRowErrors = { ...(newErrors.get(rowIndex) || {}) };
|
||||
|
||||
// Validate each specified field
|
||||
for (const fieldKey of fieldsToValidate) {
|
||||
const field = fields.find((f) => String(f.key) === fieldKey);
|
||||
if (!field) continue;
|
||||
|
||||
const value = row[fieldKey as keyof typeof row];
|
||||
updateValidatingCell(rowIndex, fieldKey, true);
|
||||
fieldsMarked.push([rowIndex, fieldKey]);
|
||||
|
||||
// Run validation for this field
|
||||
const value = row[fieldKey as keyof typeof row];
|
||||
const errors = validateFieldFromHook(value, field as unknown as Field<T>);
|
||||
|
||||
// Update errors for this field
|
||||
if (errors.length > 0) {
|
||||
existingRowErrors[fieldKey] = errors;
|
||||
} else {
|
||||
delete existingRowErrors[fieldKey];
|
||||
}
|
||||
|
||||
if (uniquenessFieldKeys.has(fieldKey)) {
|
||||
uniqueFieldsToCheck.add(fieldKey);
|
||||
}
|
||||
}
|
||||
|
||||
// Update the row's errors
|
||||
if (Object.keys(existingRowErrors).length > 0) {
|
||||
newErrors.set(rowIndex, existingRowErrors);
|
||||
} else {
|
||||
newErrors.delete(rowIndex);
|
||||
}
|
||||
} else {
|
||||
// No specific fields provided - validate the entire row
|
||||
const rowErrors: Record<string, ValidationError[]> = {};
|
||||
|
||||
// Validate all fields in the row
|
||||
for (const field of fields) {
|
||||
const fieldKey = String(field.key);
|
||||
const value = row[fieldKey as keyof typeof row];
|
||||
|
||||
// Run validation for this field
|
||||
const errors = validateFieldFromHook(value, field as unknown as Field<T>);
|
||||
|
||||
// Update errors for this field
|
||||
if (errors.length > 0) {
|
||||
rowErrors[fieldKey] = errors;
|
||||
}
|
||||
|
||||
if (uniquenessFieldKeys.has(fieldKey)) {
|
||||
uniqueFieldsToCheck.add(fieldKey);
|
||||
}
|
||||
}
|
||||
|
||||
// Update the row's errors
|
||||
if (Object.keys(rowErrors).length > 0) {
|
||||
newErrors.set(rowIndex, rowErrors);
|
||||
} else {
|
||||
@@ -337,24 +546,38 @@ export const useRowOperations = <T extends string>(
|
||||
|
||||
return newErrors;
|
||||
});
|
||||
|
||||
fieldsMarked.forEach(([rowIndex, fieldKey]) => {
|
||||
updateValidatingCell(rowIndex, fieldKey, false);
|
||||
});
|
||||
|
||||
if (uniqueFieldsToCheck.size > 0) {
|
||||
scheduleUniqueValidation(Array.from(uniqueFieldsToCheck));
|
||||
}
|
||||
},
|
||||
[data, fields, validateFieldFromHook]
|
||||
[
|
||||
fields,
|
||||
scheduleUniqueValidation,
|
||||
setValidationErrors,
|
||||
uniquenessFieldKeys,
|
||||
validateFieldFromHook,
|
||||
updateValidatingCell
|
||||
]
|
||||
);
|
||||
|
||||
// Copy a cell value to all cells below it in the same column
|
||||
const copyDown = useCallback(
|
||||
(rowIndex: number, key: T) => {
|
||||
// Get the source value to copy
|
||||
const sourceValue = data[rowIndex][key];
|
||||
const currentData = dataRef.current;
|
||||
const sourceRow = currentData[rowIndex];
|
||||
if (!sourceRow) return;
|
||||
|
||||
// Update all rows below with the same value using the existing updateRow function
|
||||
// This ensures all validation logic runs consistently
|
||||
for (let i = rowIndex + 1; i < data.length; i++) {
|
||||
// Just use updateRow which will handle validation with proper timing
|
||||
const sourceValue = sourceRow[key];
|
||||
|
||||
for (let i = rowIndex + 1; i < currentData.length; i++) {
|
||||
updateRow(i, key, sourceValue);
|
||||
}
|
||||
},
|
||||
[data, updateRow]
|
||||
[updateRow]
|
||||
);
|
||||
|
||||
return {
|
||||
@@ -363,4 +586,4 @@ export const useRowOperations = <T extends string>(
|
||||
revalidateRows,
|
||||
copyDown
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
@@ -10,8 +10,6 @@ export const useUniqueItemNumbersValidation = <T extends string>(
|
||||
) => {
|
||||
// Update validateUniqueItemNumbers to also check for uniqueness of UPC/barcode
|
||||
const validateUniqueItemNumbers = useCallback(async () => {
|
||||
console.log("Validating unique fields");
|
||||
|
||||
// Skip if no data
|
||||
if (!data.length) return;
|
||||
|
||||
@@ -23,11 +21,6 @@ export const useUniqueItemNumbersValidation = <T extends string>(
|
||||
.filter((field) => field.validations?.some((v) => v.rule === "unique"))
|
||||
.map((field) => String(field.key));
|
||||
|
||||
console.log(
|
||||
`Found ${uniqueFields.length} fields requiring uniqueness validation:`,
|
||||
uniqueFields
|
||||
);
|
||||
|
||||
// Always check item_number uniqueness even if not explicitly defined
|
||||
if (!uniqueFields.includes("item_number")) {
|
||||
uniqueFields.push("item_number");
|
||||
@@ -41,32 +34,44 @@ export const useUniqueItemNumbersValidation = <T extends string>(
|
||||
// Initialize batch updates
|
||||
const errors = new Map<number, Record<string, ValidationError[]>>();
|
||||
|
||||
// Single pass through data to identify all unique values
|
||||
data.forEach((row, index) => {
|
||||
uniqueFields.forEach((fieldKey) => {
|
||||
const value = row[fieldKey as keyof typeof row];
|
||||
// ASYNC: Single pass through data to identify all unique values in batches
|
||||
const BATCH_SIZE = 20;
|
||||
for (let batchStart = 0; batchStart < data.length; batchStart += BATCH_SIZE) {
|
||||
const batchEnd = Math.min(batchStart + BATCH_SIZE, data.length);
|
||||
|
||||
for (let index = batchStart; index < batchEnd; index++) {
|
||||
const row = data[index];
|
||||
uniqueFields.forEach((fieldKey) => {
|
||||
const value = row[fieldKey as keyof typeof row];
|
||||
|
||||
// Skip empty values
|
||||
if (value === undefined || value === null || value === "") {
|
||||
return;
|
||||
}
|
||||
// Skip empty values
|
||||
if (value === undefined || value === null || value === "") {
|
||||
return;
|
||||
}
|
||||
|
||||
const valueStr = String(value);
|
||||
const fieldMap = uniqueFieldsMap.get(fieldKey);
|
||||
const valueStr = String(value);
|
||||
const fieldMap = uniqueFieldsMap.get(fieldKey);
|
||||
|
||||
if (fieldMap) {
|
||||
// Get or initialize the array of indices for this value
|
||||
const indices = fieldMap.get(valueStr) || [];
|
||||
indices.push(index);
|
||||
fieldMap.set(valueStr, indices);
|
||||
}
|
||||
});
|
||||
});
|
||||
if (fieldMap) {
|
||||
// Get or initialize the array of indices for this value
|
||||
const indices = fieldMap.get(valueStr) || [];
|
||||
indices.push(index);
|
||||
fieldMap.set(valueStr, indices);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Yield control back to UI thread after each batch
|
||||
if (batchEnd < data.length) {
|
||||
await new Promise(resolve => setTimeout(resolve, 0));
|
||||
}
|
||||
}
|
||||
|
||||
// Process duplicates
|
||||
uniqueFields.forEach((fieldKey) => {
|
||||
// ASYNC: Process duplicates in batches to prevent UI blocking
|
||||
let processedFields = 0;
|
||||
for (const fieldKey of uniqueFields) {
|
||||
const fieldMap = uniqueFieldsMap.get(fieldKey);
|
||||
if (!fieldMap) return;
|
||||
if (!fieldMap) continue;
|
||||
|
||||
fieldMap.forEach((indices, value) => {
|
||||
// Only process if there are duplicates
|
||||
@@ -93,54 +98,56 @@ export const useUniqueItemNumbersValidation = <T extends string>(
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
processedFields++;
|
||||
// Yield control after every few fields to prevent UI blocking
|
||||
if (processedFields % 2 === 0) {
|
||||
await new Promise(resolve => setTimeout(resolve, 0));
|
||||
}
|
||||
}
|
||||
|
||||
// Apply batch updates only if we have errors to report
|
||||
if (errors.size > 0) {
|
||||
// OPTIMIZATION: Check if we actually have new errors before updating state
|
||||
let hasChanges = false;
|
||||
// Merge uniqueness errors with existing validation errors
|
||||
setValidationErrors((prev) => {
|
||||
const newMap = new Map(prev);
|
||||
|
||||
// We'll update errors with a single batch operation
|
||||
setValidationErrors((prev) => {
|
||||
const newMap = new Map(prev);
|
||||
// Add uniqueness errors
|
||||
errors.forEach((rowErrors, rowIndex) => {
|
||||
const existingErrors = newMap.get(rowIndex) || {};
|
||||
const updatedErrors = { ...existingErrors };
|
||||
|
||||
// Check each row for changes
|
||||
errors.forEach((rowErrors, rowIndex) => {
|
||||
const existingErrors = newMap.get(rowIndex) || {};
|
||||
const updatedErrors = { ...existingErrors };
|
||||
let rowHasChanges = false;
|
||||
|
||||
// Check each field for changes
|
||||
Object.entries(rowErrors).forEach(([fieldKey, fieldErrors]) => {
|
||||
// Compare with existing errors
|
||||
const existingFieldErrors = existingErrors[fieldKey];
|
||||
|
||||
if (
|
||||
!existingFieldErrors ||
|
||||
existingFieldErrors.length !== fieldErrors.length ||
|
||||
!existingFieldErrors.every(
|
||||
(err, idx) =>
|
||||
err.message === fieldErrors[idx].message &&
|
||||
err.type === fieldErrors[idx].type
|
||||
)
|
||||
) {
|
||||
// We have a change
|
||||
updatedErrors[fieldKey] = fieldErrors;
|
||||
rowHasChanges = true;
|
||||
hasChanges = true;
|
||||
}
|
||||
});
|
||||
|
||||
// Only update if we have changes
|
||||
if (rowHasChanges) {
|
||||
newMap.set(rowIndex, updatedErrors);
|
||||
}
|
||||
// Add uniqueness errors to existing errors
|
||||
Object.entries(rowErrors).forEach(([fieldKey, fieldErrors]) => {
|
||||
updatedErrors[fieldKey] = fieldErrors;
|
||||
});
|
||||
|
||||
// Only return a new map if we have changes
|
||||
return hasChanges ? newMap : prev;
|
||||
newMap.set(rowIndex, updatedErrors);
|
||||
});
|
||||
}
|
||||
|
||||
// Clean up rows that have no uniqueness errors anymore
|
||||
// by removing only uniqueness error types from rows not in the errors map
|
||||
newMap.forEach((rowErrors, rowIndex) => {
|
||||
if (!errors.has(rowIndex)) {
|
||||
// Remove uniqueness errors from this row
|
||||
const cleanedErrors: Record<string, ValidationError[]> = {};
|
||||
Object.entries(rowErrors).forEach(([fieldKey, fieldErrors]) => {
|
||||
// Keep non-uniqueness errors
|
||||
const nonUniqueErrors = fieldErrors.filter(error => error.type !== ErrorType.Unique);
|
||||
if (nonUniqueErrors.length > 0) {
|
||||
cleanedErrors[fieldKey] = nonUniqueErrors;
|
||||
}
|
||||
});
|
||||
|
||||
// Update the row or remove it if no errors remain
|
||||
if (Object.keys(cleanedErrors).length > 0) {
|
||||
newMap.set(rowIndex, cleanedErrors);
|
||||
} else {
|
||||
newMap.delete(rowIndex);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return newMap;
|
||||
});
|
||||
|
||||
console.log("Uniqueness validation complete");
|
||||
}, [data, fields, setValidationErrors]);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user