Compare commits
56 Commits
check-numb
...
a161f4533d
| Author | SHA1 | Date | |
|---|---|---|---|
| a161f4533d | |||
| 6e30ba60ff | |||
| 138251cf86 | |||
| 24aee1db90 | |||
| 2fe7fd5b2f | |||
| d8b39979cd | |||
| 4776a112b6 | |||
| 2ff325a132 | |||
| 5d46a2a7e5 | |||
| 512b351429 | |||
| 3991341376 | |||
| 5833779c10 | |||
| c61115f665 | |||
| 7da2b304b4 | |||
| 4ccda8ad49 | |||
| 88f703ec70 | |||
| ab998fb7c4 | |||
| faaa8cc47a | |||
| 459c5092d2 | |||
| 6c9fd062e9 | |||
| 5d7d7a8671 | |||
| 54f55b06a1 | |||
| 4935cfe3bb | |||
| 5e2ee73e2d | |||
| 4dfe85231a | |||
| 9e7aac836e | |||
| d35c7dd6cf | |||
| ad1ebeefe1 | |||
| a0c442d1af | |||
| 7938c50762 | |||
| 5dcd19e7f3 | |||
| 075e7253a0 | |||
| 763aa4f74b | |||
| 520ff5bd74 | |||
| 8496bbc4ee | |||
| 38f6688f10 | |||
| fcfe7e2fab | |||
| 2e3e81a02b | |||
| 8606a90e34 | |||
| a97819f4a6 | |||
| dd82c624d8 | |||
| 7999e1e64a | |||
| 12a0f540b3 | |||
| e793cb0cc5 | |||
| b2330dee22 | |||
| 00501704df | |||
| 4cb41a7e4c | |||
| d05d27494d | |||
| 4ed734e5c0 | |||
| 1e3be5d4cb | |||
| 8dd852dd6a | |||
| eeff5817ea | |||
| 1b19feb172 | |||
| 80ff8124ec | |||
| 8508bfac93 | |||
| ac14179bd2 |
6
.gitignore
vendored
6
.gitignore
vendored
@@ -68,3 +68,9 @@ inventory-server/scripts/.fuse_hidden00000fa20000000a
|
|||||||
.VSCodeCounter/
|
.VSCodeCounter/
|
||||||
.VSCodeCounter/*
|
.VSCodeCounter/*
|
||||||
.VSCodeCounter/**/*
|
.VSCodeCounter/**/*
|
||||||
|
|
||||||
|
*/chat/db-convert/db/*
|
||||||
|
*/chat/db-convert/mongo_converter_env/*
|
||||||
|
|
||||||
|
# Ignore compiled Vite config to avoid duplication
|
||||||
|
vite.config.js
|
||||||
@@ -7,12 +7,13 @@ This document outlines the permission system implemented in the Inventory Manage
|
|||||||
Permissions follow this naming convention:
|
Permissions follow this naming convention:
|
||||||
|
|
||||||
- Page access: `access:{page_name}`
|
- Page access: `access:{page_name}`
|
||||||
- Actions: `{action}:{resource}`
|
- Settings sections: `settings:{section_name}`
|
||||||
|
- Admin features: `admin:{feature}`
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
- `access:products` - Can access the Products page
|
- `access:products` - Can access the Products page
|
||||||
- `create:products` - Can create new products
|
- `settings:user_management` - Can access User Management settings
|
||||||
- `edit:users` - Can edit user accounts
|
- `admin:debug` - Can see debug information
|
||||||
|
|
||||||
## Permission Components
|
## Permission Components
|
||||||
|
|
||||||
@@ -22,10 +23,10 @@ The core component that conditionally renders content based on permissions.
|
|||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
<PermissionGuard
|
<PermissionGuard
|
||||||
permission="create:products"
|
permission="settings:user_management"
|
||||||
fallback={<p>No permission</p>}
|
fallback={<p>No permission</p>}
|
||||||
>
|
>
|
||||||
<button>Create Product</button>
|
<button>Manage Users</button>
|
||||||
</PermissionGuard>
|
</PermissionGuard>
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -81,7 +82,7 @@ Specific component for settings with built-in permission checks.
|
|||||||
<SettingsSection
|
<SettingsSection
|
||||||
title="System Settings"
|
title="System Settings"
|
||||||
description="Configure global settings"
|
description="Configure global settings"
|
||||||
permission="edit:system_settings"
|
permission="settings:global"
|
||||||
>
|
>
|
||||||
{/* Settings content */}
|
{/* Settings content */}
|
||||||
</SettingsSection>
|
</SettingsSection>
|
||||||
@@ -95,8 +96,8 @@ Core hook for checking any permission.
|
|||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
const { hasPermission, hasPageAccess, isAdmin } = usePermissions();
|
const { hasPermission, hasPageAccess, isAdmin } = usePermissions();
|
||||||
if (hasPermission('delete:products')) {
|
if (hasPermission('settings:user_management')) {
|
||||||
// Can delete products
|
// Can access user management
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -106,8 +107,8 @@ Specialized hook for page-level permissions.
|
|||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
const { canView, canCreate, canEdit, canDelete } = usePagePermission('products');
|
const { canView, canCreate, canEdit, canDelete } = usePagePermission('products');
|
||||||
if (canEdit()) {
|
if (canView()) {
|
||||||
// Can edit products
|
// Can view products
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -119,18 +120,43 @@ Permissions are stored in the database:
|
|||||||
|
|
||||||
Admin users automatically have all permissions.
|
Admin users automatically have all permissions.
|
||||||
|
|
||||||
## Common Permission Codes
|
## Implemented Permission Codes
|
||||||
|
|
||||||
|
### Page Access Permissions
|
||||||
| Code | Description |
|
| Code | Description |
|
||||||
|------|-------------|
|
|------|-------------|
|
||||||
| `access:dashboard` | Access to Dashboard page |
|
| `access:dashboard` | Access to Dashboard page |
|
||||||
|
| `access:overview` | Access to Overview page |
|
||||||
| `access:products` | Access to Products page |
|
| `access:products` | Access to Products page |
|
||||||
| `create:products` | Create new products |
|
| `access:categories` | Access to Categories page |
|
||||||
| `edit:products` | Edit existing products |
|
| `access:brands` | Access to Brands page |
|
||||||
| `delete:products` | Delete products |
|
| `access:vendors` | Access to Vendors page |
|
||||||
| `view:users` | View user accounts |
|
| `access:purchase_orders` | Access to Purchase Orders page |
|
||||||
| `edit:users` | Edit user accounts |
|
| `access:analytics` | Access to Analytics page |
|
||||||
| `manage:permissions` | Assign permissions to users |
|
| `access:forecasting` | Access to Forecasting page |
|
||||||
|
| `access:import` | Access to Import page |
|
||||||
|
| `access:settings` | Access to Settings page |
|
||||||
|
| `access:chat` | Access to Chat Archive page |
|
||||||
|
|
||||||
|
### Settings Permissions
|
||||||
|
| Code | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `settings:global` | Access to Global Settings section |
|
||||||
|
| `settings:products` | Access to Product Settings section |
|
||||||
|
| `settings:vendors` | Access to Vendor Settings section |
|
||||||
|
| `settings:data_management` | Access to Data Management settings |
|
||||||
|
| `settings:calculation_settings` | Access to Calculation Settings |
|
||||||
|
| `settings:library_management` | Access to Image Library Management |
|
||||||
|
| `settings:performance_metrics` | Access to Performance Metrics |
|
||||||
|
| `settings:prompt_management` | Access to AI Prompt Management |
|
||||||
|
| `settings:stock_management` | Access to Stock Management |
|
||||||
|
| `settings:templates` | Access to Template Management |
|
||||||
|
| `settings:user_management` | Access to User Management |
|
||||||
|
|
||||||
|
### Admin Permissions
|
||||||
|
| Code | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `admin:debug` | Can see debug information and features |
|
||||||
|
|
||||||
## Implementation Examples
|
## Implementation Examples
|
||||||
|
|
||||||
@@ -148,25 +174,31 @@ In `App.tsx`:
|
|||||||
### Component Level Protection
|
### Component Level Protection
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
const { canEdit } = usePagePermission('products');
|
const { hasPermission } = usePermissions();
|
||||||
|
|
||||||
function handleEdit() {
|
function handleAction() {
|
||||||
if (!canEdit()) {
|
if (!hasPermission('settings:user_management')) {
|
||||||
toast.error("You don't have permission");
|
toast.error("You don't have permission");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// Edit logic
|
// Action logic
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### UI Element Protection
|
### UI Element Protection
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
<PermissionButton
|
<PermissionGuard permission="settings:user_management">
|
||||||
page="products"
|
<button onClick={handleManageUsers}>
|
||||||
action="delete"
|
Manage Users
|
||||||
onClick={handleDelete}
|
</button>
|
||||||
>
|
</PermissionGuard>
|
||||||
Delete
|
|
||||||
</PermissionButton>
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- **Page Access**: These permissions control which pages a user can navigate to
|
||||||
|
- **Settings Access**: These permissions control access to different sections within the Settings page
|
||||||
|
- **Admin Features**: Special permissions for administrative functions
|
||||||
|
- **CRUD Operations**: The application currently focuses on viewing and managing data rather than creating/editing/deleting individual records
|
||||||
|
- **User Management**: User CRUD operations are handled through the settings interface rather than dedicated user management pages
|
||||||
23
docs/setup-chat.md
Normal file
23
docs/setup-chat.md
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
This portion of the application is going to be a read only chat archive. It will pull data from a rocketchat export converted to postgresql. This is a separate database than the rest of the inventory application uses, but it will still use users and permissions from the inventory database. Both databases are on the same postgres instance.
|
||||||
|
|
||||||
|
For now, let's add a select to the top of the page that allows me to "view as" any of the users in the rocketchat database. We'll connect this to the authorization in the main application later.
|
||||||
|
|
||||||
|
The db connection info is stored in the .env file in the inventory-server root. It contains these variables
|
||||||
|
DB_HOST=localhost
|
||||||
|
DB_USER=rocketchat_user
|
||||||
|
DB_PASSWORD=password
|
||||||
|
DB_NAME=rocketchat_converted
|
||||||
|
DB_PORT=5432
|
||||||
|
|
||||||
|
Not all of the information in this database is relevant as it's a direct export from another app with more features. You can use the query tool to examine the structure and data available.
|
||||||
|
|
||||||
|
Server-side files should use similar conventions and the same technologies as the inventory-server (inventor-server root) and auth-server (inventory-server/auth). I will provide my current pm2 ecosystem file upon request for you to add the configuration for the new "chat-server". I use Caddy on the server and can provide my caddyfile to assist with configuring the api routes. All configuration and routes for the chat-server should go in the inventory-server/chat folder or subfolders you create.
|
||||||
|
|
||||||
|
The folder you see as inventory-server is actually a direct mount of the /var/www/html/inventory folder on the server. You can read and write files from there like usual, but any terminal commands for the server I will have to run myself.
|
||||||
|
|
||||||
|
The "Chat" page should be added to the main application sidebar and a similar page to the others should be created in inventory/src/pages. All other frontend pages should go in inventory/src/components/chat.
|
||||||
|
|
||||||
|
The application uses shadcn components and those should be used for all ui elements where possible (located in inventory/src/components/ui). The UI should match existing pages and components.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
112
docs/split-up-pos.md
Normal file
112
docs/split-up-pos.md
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
Okay, I understand completely now. The core issue is that the previous approaches tried too hard to reconcile every receipt back to a specific PO line within the `purchase_orders` table structure, which doesn't reflect the reality where receipts can be independent events. Your downstream scripts, especially `daily_snapshots` and `product_metrics`, rely on having a complete picture of *all* receivings.
|
||||||
|
|
||||||
|
Let's pivot to a model that respects both distinct data streams: **Orders (Intent)** and **Receivings (Actuals)**.
|
||||||
|
|
||||||
|
**Proposed Solution: Separate `purchase_orders` and `receivings` Tables**
|
||||||
|
|
||||||
|
This is the cleanest way to model the reality you've described.
|
||||||
|
|
||||||
|
1. **`purchase_orders` Table:**
|
||||||
|
* **Purpose:** Tracks the status and details of purchase *orders* placed. Represents the *intent* to receive goods.
|
||||||
|
* **Key Columns:** `po_id`, `pid`, `ordered` (quantity ordered), `po_cost_price`, `date` (order/created date), `expected_date`, `status` (PO lifecycle: 'ordered', 'canceled', 'done'), `vendor`, `notes`, etc.
|
||||||
|
* **Crucially:** This table *does not* need a `received` column or a `receiving_history` column derived from complex allocations. It focuses solely on the PO itself.
|
||||||
|
|
||||||
|
2. **`receivings` Table (New or Refined):**
|
||||||
|
* **Purpose:** Tracks every single line item received, regardless of whether it was linked to a PO during the receiving process. Represents the *actual* goods that arrived.
|
||||||
|
* **Key Columns:**
|
||||||
|
* `receiving_id` (Identifier for the overall receiving document/batch)
|
||||||
|
* `pid` (Product ID received)
|
||||||
|
* `received_qty` (Quantity received for this specific line)
|
||||||
|
* `cost_each` (Actual cost paid for this item on this receiving)
|
||||||
|
* `received_date` (Actual date the item was received)
|
||||||
|
* `received_by` (Employee ID/Name)
|
||||||
|
* `source_po_id` (The `po_id` entered on the receiving screen, *nullable*. Stores the original link attempt, even if it was wrong or missing)
|
||||||
|
* `source_receiving_status` (The status from the source `receivings` table: 'partial_received', 'full_received', 'paid', 'canceled')
|
||||||
|
|
||||||
|
**How the Import Script Changes:**
|
||||||
|
|
||||||
|
1. **Fetch POs:** Fetch data from `po` and `po_products`.
|
||||||
|
2. **Populate `purchase_orders`:**
|
||||||
|
* Insert/Update rows into `purchase_orders` based directly on the fetched PO data.
|
||||||
|
* Set `po_id`, `pid`, `ordered`, `po_cost_price`, `date` (`COALESCE(date_ordered, date_created)`), `expected_date`.
|
||||||
|
* Set `status` by mapping the source `po.status` code directly ('ordered', 'canceled', 'done', etc.).
|
||||||
|
* **No complex allocation needed here.**
|
||||||
|
3. **Fetch Receivings:** Fetch data from `receivings` and `receivings_products`.
|
||||||
|
4. **Populate `receivings`:**
|
||||||
|
* For *every* line item fetched from `receivings_products`:
|
||||||
|
* Perform necessary data validation (dates, numbers).
|
||||||
|
* Insert a new row into `receivings` with all the relevant details (`receiving_id`, `pid`, `received_qty`, `cost_each`, `received_date`, `received_by`, `source_po_id`, `source_receiving_status`).
|
||||||
|
* Use `ON CONFLICT (receiving_id, pid)` (or similar unique key based on your source data) `DO UPDATE SET ...` for incremental updates if necessary, or simply delete/re-insert based on `receiving_id` for simplicity if performance allows.
|
||||||
|
|
||||||
|
**Impact on Downstream Scripts (and how to adapt):**
|
||||||
|
|
||||||
|
* **Initial Query (Active POs):**
|
||||||
|
* `SELECT ... FROM purchase_orders po WHERE po.status NOT IN ('canceled', 'done', 'paid_equivalent_status?') AND po.date >= ...`
|
||||||
|
* `active_pos`: `COUNT(DISTINCT po.po_id)` based on the filtered POs.
|
||||||
|
* `overdue_pos`: Add `AND po.expected_date < CURRENT_DATE`.
|
||||||
|
* `total_units`: `SUM(po.ordered)`. Represents total units *ordered* on active POs.
|
||||||
|
* `total_cost`: `SUM(po.ordered * po.po_cost_price)`. Cost of units *ordered*.
|
||||||
|
* `total_retail`: `SUM(po.ordered * pm.current_price)`. Retail value of units *ordered*.
|
||||||
|
* **Result:** This query now cleanly reports on the status of *orders* placed, which seems closer to its original intent. The filter `po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')` is replaced by `po.status NOT IN ('canceled', 'done', 'paid_equivalent?')`. The 90% received check is removed as `received` is not reliably tracked *on the PO* anymore.
|
||||||
|
|
||||||
|
* **`daily_product_snapshots`:**
|
||||||
|
* **`SalesData` CTE:** No change needed.
|
||||||
|
* **`ReceivingData` CTE:** **Must be changed.** Query the **`receivings`** table instead of `purchase_orders`.
|
||||||
|
```sql
|
||||||
|
ReceivingData AS (
|
||||||
|
SELECT
|
||||||
|
rl.pid,
|
||||||
|
COUNT(DISTINCT rl.receiving_id) as receiving_doc_count,
|
||||||
|
SUM(rl.received_qty) AS units_received,
|
||||||
|
SUM(rl.received_qty * rl.cost_each) AS cost_received
|
||||||
|
FROM public.receivings rl
|
||||||
|
WHERE rl.received_date::date = _date
|
||||||
|
-- Optional: Filter out canceled receivings if needed
|
||||||
|
-- AND rl.source_receiving_status <> 'canceled'
|
||||||
|
GROUP BY rl.pid
|
||||||
|
),
|
||||||
|
```
|
||||||
|
* **Result:** This now accurately reflects *all* units received on a given day from the definitive source.
|
||||||
|
|
||||||
|
* **`update_product_metrics`:**
|
||||||
|
* **`CurrentInfo` CTE:** No change needed (pulls from `products`).
|
||||||
|
* **`OnOrderInfo` CTE:** Needs re-evaluation. How do you want to define "On Order"?
|
||||||
|
* **Option A (Strict PO View):** `SUM(po.ordered)` from `purchase_orders po WHERE po.status NOT IN ('canceled', 'done', 'paid_equivalent?')`. This is quantity on *open orders*, ignoring fulfillment state. Simple, but might overestimate if items arrived unlinked.
|
||||||
|
* **Option B (Approximate Fulfillment):** `SUM(po.ordered)` from open POs MINUS `SUM(rl.received_qty)` from `receivings rl` where `rl.source_po_id = po.po_id` (summing only directly linked receivings). Better, but still misses fulfillment via unlinked receivings.
|
||||||
|
* **Option C (Heuristic):** `SUM(po.ordered)` from open POs MINUS `SUM(rl.received_qty)` from `receivings rl` where `rl.pid = po.pid` and `rl.received_date >= po.date`. This *tries* to account for unlinked receivings but is imprecise.
|
||||||
|
* **Recommendation:** Start with **Option A** for simplicity, clearly labeling it "Quantity on Open POs". You might need a separate process or metric for a more nuanced view of expected vs. actual pipeline.
|
||||||
|
```sql
|
||||||
|
-- Example for Option A
|
||||||
|
OnOrderInfo AS (
|
||||||
|
SELECT
|
||||||
|
pid,
|
||||||
|
SUM(ordered) AS on_order_qty, -- Total qty on open POs
|
||||||
|
SUM(ordered * po_cost_price) AS on_order_cost -- Cost of qty on open POs
|
||||||
|
FROM public.purchase_orders
|
||||||
|
WHERE status NOT IN ('canceled', 'done', 'paid_equivalent?') -- Define your open statuses
|
||||||
|
GROUP BY pid
|
||||||
|
),
|
||||||
|
```
|
||||||
|
* **`HistoricalDates` CTE:**
|
||||||
|
* `date_first_sold`, `max_order_date`: No change (queries `orders`).
|
||||||
|
* `date_first_received_calc`, `date_last_received_calc`: **Must be changed.** Query `MIN(rl.received_date)` and `MAX(rl.received_date)` from the **`receivings`** table grouped by `pid`.
|
||||||
|
* **`SnapshotAggregates` CTE:**
|
||||||
|
* `received_qty_30d`, `received_cost_30d`: These are calculated from `daily_product_snapshots`, which are now correctly sourced from `receivings`, so this part is fine.
|
||||||
|
* **Forecasting Calculations:** Will use the chosen definition of `on_order_qty`. Be aware of the implications of Option A (potentially inflated if unlinked receivings fulfill orders).
|
||||||
|
* **Result:** Metrics are calculated based on distinct order data and complete receiving data. The definition of "on order" needs careful consideration.
|
||||||
|
|
||||||
|
**Summary of this Approach:**
|
||||||
|
|
||||||
|
* **Pros:**
|
||||||
|
* Accurately models distinct order and receiving events.
|
||||||
|
* Provides a definitive source (`receivings`) for all received inventory.
|
||||||
|
* Simplifies the `purchase_orders` table and its import logic.
|
||||||
|
* Avoids complex/potentially inaccurate allocation logic for unlinked receivings within the main tables.
|
||||||
|
* Avoids synthetic records.
|
||||||
|
* Fixes downstream reporting (`daily_snapshots` receiving data).
|
||||||
|
* **Cons:**
|
||||||
|
* Requires creating/managing the `receivings` table.
|
||||||
|
* Requires modifying downstream queries (`ReceivingData`, `OnOrderInfo`, `HistoricalDates`).
|
||||||
|
* Calculating a precise "net quantity still expected to arrive" (true on-order minus all relevant fulfillment) becomes more complex and may require specific business rules or heuristics outside the basic table structure if Option A for `OnOrderInfo` isn't sufficient.
|
||||||
|
|
||||||
|
This two-table approach (`purchase_orders` + `receivings`) seems the most robust and accurate way to handle your requirement for complete receiving records independent of potentially flawed PO linking. It directly addresses the shortcomings of the previous attempts.
|
||||||
@@ -1,222 +0,0 @@
|
|||||||
// ecosystem.config.js
|
|
||||||
const path = require('path');
|
|
||||||
const dotenv = require('dotenv');
|
|
||||||
|
|
||||||
// Load environment variables safely with error handling
|
|
||||||
const loadEnvFile = (envPath) => {
|
|
||||||
try {
|
|
||||||
console.log('Loading env from:', envPath);
|
|
||||||
const result = dotenv.config({ path: envPath });
|
|
||||||
if (result.error) {
|
|
||||||
console.warn(`Warning: .env file not found or invalid at ${envPath}:`, result.error.message);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
console.log('Env variables loaded from', envPath, ':', Object.keys(result.parsed || {}));
|
|
||||||
return result.parsed || {};
|
|
||||||
} catch (error) {
|
|
||||||
console.warn(`Warning: Error loading .env file at ${envPath}:`, error.message);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Load environment variables for each server
|
|
||||||
const authEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/auth-server/.env'));
|
|
||||||
const aircallEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/aircall-server/.env'));
|
|
||||||
const klaviyoEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/klaviyo-server/.env'));
|
|
||||||
const metaEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/meta-server/.env'));
|
|
||||||
const googleAnalyticsEnv = require('dotenv').config({
|
|
||||||
path: path.resolve(__dirname, 'dashboard/google-server/.env')
|
|
||||||
}).parsed || {};
|
|
||||||
const typeformEnv = loadEnvFile(path.resolve(__dirname, 'dashboard/typeform-server/.env'));
|
|
||||||
const inventoryEnv = loadEnvFile(path.resolve(__dirname, 'inventory/.env'));
|
|
||||||
|
|
||||||
// Common log settings for all apps
|
|
||||||
const logSettings = {
|
|
||||||
log_rotate: true,
|
|
||||||
max_size: '10M',
|
|
||||||
retain: '10',
|
|
||||||
log_date_format: 'YYYY-MM-DD HH:mm:ss'
|
|
||||||
};
|
|
||||||
|
|
||||||
// Common app settings
|
|
||||||
const commonSettings = {
|
|
||||||
instances: 1,
|
|
||||||
exec_mode: 'fork',
|
|
||||||
autorestart: true,
|
|
||||||
watch: false,
|
|
||||||
max_memory_restart: '1G',
|
|
||||||
time: true,
|
|
||||||
...logSettings,
|
|
||||||
ignore_watch: [
|
|
||||||
'node_modules',
|
|
||||||
'logs',
|
|
||||||
'.git',
|
|
||||||
'*.log'
|
|
||||||
],
|
|
||||||
min_uptime: 5000,
|
|
||||||
max_restarts: 5,
|
|
||||||
restart_delay: 4000,
|
|
||||||
listen_timeout: 50000,
|
|
||||||
kill_timeout: 5000,
|
|
||||||
node_args: '--max-old-space-size=1536'
|
|
||||||
};
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
apps: [
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'auth-server',
|
|
||||||
script: './dashboard/auth-server/index.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3003,
|
|
||||||
...authEnv
|
|
||||||
},
|
|
||||||
error_file: 'dashboard/auth-server/logs/pm2/err.log',
|
|
||||||
out_file: 'dashboard/auth-server/logs/pm2/out.log',
|
|
||||||
log_file: 'dashboard/auth-server/logs/pm2/combined.log',
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3003
|
|
||||||
},
|
|
||||||
env_development: {
|
|
||||||
NODE_ENV: 'development',
|
|
||||||
PORT: 3003
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'aircall-server',
|
|
||||||
script: './dashboard/aircall-server/server.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
AIRCALL_PORT: 3002,
|
|
||||||
...aircallEnv
|
|
||||||
},
|
|
||||||
error_file: 'dashboard/aircall-server/logs/pm2/err.log',
|
|
||||||
out_file: 'dashboard/aircall-server/logs/pm2/out.log',
|
|
||||||
log_file: 'dashboard/aircall-server/logs/pm2/combined.log',
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
AIRCALL_PORT: 3002
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'klaviyo-server',
|
|
||||||
script: './dashboard/klaviyo-server/server.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
KLAVIYO_PORT: 3004,
|
|
||||||
...klaviyoEnv
|
|
||||||
},
|
|
||||||
error_file: 'dashboard/klaviyo-server/logs/pm2/err.log',
|
|
||||||
out_file: 'dashboard/klaviyo-server/logs/pm2/out.log',
|
|
||||||
log_file: 'dashboard/klaviyo-server/logs/pm2/combined.log',
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
KLAVIYO_PORT: 3004
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'meta-server',
|
|
||||||
script: './dashboard/meta-server/server.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3005,
|
|
||||||
...metaEnv
|
|
||||||
},
|
|
||||||
error_file: 'dashboard/meta-server/logs/pm2/err.log',
|
|
||||||
out_file: 'dashboard/meta-server/logs/pm2/out.log',
|
|
||||||
log_file: 'dashboard/meta-server/logs/pm2/combined.log',
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3005
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "gorgias-server",
|
|
||||||
script: "./dashboard/gorgias-server/server.js",
|
|
||||||
env: {
|
|
||||||
NODE_ENV: "development",
|
|
||||||
PORT: 3006
|
|
||||||
},
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: "production",
|
|
||||||
PORT: 3006
|
|
||||||
},
|
|
||||||
error_file: "dashboard/logs/gorgias-server-error.log",
|
|
||||||
out_file: "dashboard/logs/gorgias-server-out.log",
|
|
||||||
log_file: "dashboard/logs/gorgias-server-combined.log",
|
|
||||||
time: true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'google-server',
|
|
||||||
script: path.resolve(__dirname, 'dashboard/google-server/server.js'),
|
|
||||||
watch: false,
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
GOOGLE_ANALYTICS_PORT: 3007,
|
|
||||||
...googleAnalyticsEnv
|
|
||||||
},
|
|
||||||
error_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/err.log'),
|
|
||||||
out_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/out.log'),
|
|
||||||
log_file: path.resolve(__dirname, 'dashboard/google-server/logs/pm2/combined.log'),
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
GOOGLE_ANALYTICS_PORT: 3007
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'typeform-server',
|
|
||||||
script: './dashboard/typeform-server/server.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
TYPEFORM_PORT: 3008,
|
|
||||||
...typeformEnv
|
|
||||||
},
|
|
||||||
error_file: 'dashboard/typeform-server/logs/pm2/err.log',
|
|
||||||
out_file: 'dashboard/typeform-server/logs/pm2/out.log',
|
|
||||||
log_file: 'dashboard/typeform-server/logs/pm2/combined.log',
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
TYPEFORM_PORT: 3008
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'inventory-server',
|
|
||||||
script: './inventory/src/server.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3010,
|
|
||||||
...inventoryEnv
|
|
||||||
},
|
|
||||||
error_file: 'inventory/logs/pm2/err.log',
|
|
||||||
out_file: 'inventory/logs/pm2/out.log',
|
|
||||||
log_file: 'inventory/logs/pm2/combined.log',
|
|
||||||
env_production: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3010,
|
|
||||||
...inventoryEnv
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
...commonSettings,
|
|
||||||
name: 'new-auth-server',
|
|
||||||
script: './inventory-server/auth/server.js',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
AUTH_PORT: 3011,
|
|
||||||
...inventoryEnv,
|
|
||||||
JWT_SECRET: process.env.JWT_SECRET
|
|
||||||
},
|
|
||||||
error_file: 'inventory-server/auth/logs/pm2/err.log',
|
|
||||||
out_file: 'inventory-server/auth/logs/pm2/out.log',
|
|
||||||
log_file: 'inventory-server/auth/logs/pm2/combined.log'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
};
|
|
||||||
@@ -34,10 +34,12 @@ const authenticate = async (req, res, next) => {
|
|||||||
|
|
||||||
// Get user from database
|
// Get user from database
|
||||||
const result = await pool.query(
|
const result = await pool.query(
|
||||||
'SELECT id, username, is_admin FROM users WHERE id = $1',
|
'SELECT id, username, email, is_admin, rocket_chat_user_id FROM users WHERE id = $1',
|
||||||
[decoded.userId]
|
[decoded.userId]
|
||||||
);
|
);
|
||||||
|
|
||||||
|
console.log('Database query result for user', decoded.userId, ':', result.rows[0]);
|
||||||
|
|
||||||
if (result.rows.length === 0) {
|
if (result.rows.length === 0) {
|
||||||
return res.status(401).json({ error: 'User not found' });
|
return res.status(401).json({ error: 'User not found' });
|
||||||
}
|
}
|
||||||
@@ -58,7 +60,7 @@ router.post('/login', async (req, res) => {
|
|||||||
|
|
||||||
// Get user from database
|
// Get user from database
|
||||||
const result = await pool.query(
|
const result = await pool.query(
|
||||||
'SELECT id, username, password, is_admin, is_active FROM users WHERE username = $1',
|
'SELECT id, username, password, is_admin, is_active, rocket_chat_user_id FROM users WHERE username = $1',
|
||||||
[username]
|
[username]
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -101,6 +103,7 @@ router.post('/login', async (req, res) => {
|
|||||||
id: user.id,
|
id: user.id,
|
||||||
username: user.username,
|
username: user.username,
|
||||||
is_admin: user.is_admin,
|
is_admin: user.is_admin,
|
||||||
|
rocket_chat_user_id: user.rocket_chat_user_id,
|
||||||
permissions
|
permissions
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -119,8 +122,13 @@ router.get('/me', authenticate, async (req, res) => {
|
|||||||
res.json({
|
res.json({
|
||||||
id: req.user.id,
|
id: req.user.id,
|
||||||
username: req.user.username,
|
username: req.user.username,
|
||||||
|
email: req.user.email,
|
||||||
is_admin: req.user.is_admin,
|
is_admin: req.user.is_admin,
|
||||||
permissions
|
rocket_chat_user_id: req.user.rocket_chat_user_id,
|
||||||
|
permissions,
|
||||||
|
// Debug info
|
||||||
|
_debug_raw_user: req.user,
|
||||||
|
_server_identifier: "INVENTORY_AUTH_SERVER_MODIFIED"
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error getting current user:', error);
|
console.error('Error getting current user:', error);
|
||||||
@@ -132,7 +140,7 @@ router.get('/me', authenticate, async (req, res) => {
|
|||||||
router.get('/users', authenticate, requirePermission('view:users'), async (req, res) => {
|
router.get('/users', authenticate, requirePermission('view:users'), async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const result = await pool.query(`
|
const result = await pool.query(`
|
||||||
SELECT id, username, email, is_admin, is_active, created_at, last_login
|
SELECT id, username, email, is_admin, is_active, rocket_chat_user_id, created_at, last_login
|
||||||
FROM users
|
FROM users
|
||||||
ORDER BY username
|
ORDER BY username
|
||||||
`);
|
`);
|
||||||
@@ -151,7 +159,7 @@ router.get('/users/:id', authenticate, requirePermission('view:users'), async (r
|
|||||||
|
|
||||||
// Get user details
|
// Get user details
|
||||||
const userResult = await pool.query(`
|
const userResult = await pool.query(`
|
||||||
SELECT id, username, email, is_admin, is_active, created_at, last_login
|
SELECT id, username, email, is_admin, is_active, rocket_chat_user_id, created_at, last_login
|
||||||
FROM users
|
FROM users
|
||||||
WHERE id = $1
|
WHERE id = $1
|
||||||
`, [userId]);
|
`, [userId]);
|
||||||
@@ -187,13 +195,14 @@ router.post('/users', authenticate, requirePermission('create:users'), async (re
|
|||||||
const client = await pool.connect();
|
const client = await pool.connect();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { username, email, password, is_admin, is_active, permissions } = req.body;
|
const { username, email, password, is_admin, is_active, rocket_chat_user_id, permissions } = req.body;
|
||||||
|
|
||||||
console.log("Create user request:", {
|
console.log("Create user request:", {
|
||||||
username,
|
username,
|
||||||
email,
|
email,
|
||||||
is_admin,
|
is_admin,
|
||||||
is_active,
|
is_active,
|
||||||
|
rocket_chat_user_id,
|
||||||
permissions: permissions || []
|
permissions: permissions || []
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -221,10 +230,10 @@ router.post('/users', authenticate, requirePermission('create:users'), async (re
|
|||||||
|
|
||||||
// Insert new user
|
// Insert new user
|
||||||
const userResult = await client.query(`
|
const userResult = await client.query(`
|
||||||
INSERT INTO users (username, email, password, is_admin, is_active, created_at)
|
INSERT INTO users (username, email, password, is_admin, is_active, rocket_chat_user_id, created_at)
|
||||||
VALUES ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP)
|
VALUES ($1, $2, $3, $4, $5, $6, CURRENT_TIMESTAMP)
|
||||||
RETURNING id
|
RETURNING id
|
||||||
`, [username, email || null, hashedPassword, !!is_admin, is_active !== false]);
|
`, [username, email || null, hashedPassword, !!is_admin, is_active !== false, rocket_chat_user_id || null]);
|
||||||
|
|
||||||
const userId = userResult.rows[0].id;
|
const userId = userResult.rows[0].id;
|
||||||
|
|
||||||
@@ -299,7 +308,7 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const userId = req.params.id;
|
const userId = req.params.id;
|
||||||
const { username, email, password, is_admin, is_active, permissions } = req.body;
|
const { username, email, password, is_admin, is_active, rocket_chat_user_id, permissions } = req.body;
|
||||||
|
|
||||||
console.log("Update user request:", {
|
console.log("Update user request:", {
|
||||||
userId,
|
userId,
|
||||||
@@ -307,6 +316,7 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
|||||||
email,
|
email,
|
||||||
is_admin,
|
is_admin,
|
||||||
is_active,
|
is_active,
|
||||||
|
rocket_chat_user_id,
|
||||||
permissions: permissions || []
|
permissions: permissions || []
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -348,6 +358,11 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
|||||||
updateValues.push(!!is_active);
|
updateValues.push(!!is_active);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (rocket_chat_user_id !== undefined) {
|
||||||
|
updateFields.push(`rocket_chat_user_id = $${paramIndex++}`);
|
||||||
|
updateValues.push(rocket_chat_user_id || null);
|
||||||
|
}
|
||||||
|
|
||||||
// Update password if provided
|
// Update password if provided
|
||||||
if (password) {
|
if (password) {
|
||||||
const saltRounds = 10;
|
const saltRounds = 10;
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ global.pool = pool;
|
|||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
app.use(morgan('combined'));
|
app.use(morgan('combined'));
|
||||||
app.use(cors({
|
app.use(cors({
|
||||||
origin: ['http://localhost:5173', 'http://localhost:5174', 'https://inventory.kent.pw'],
|
origin: ['http://localhost:5175', 'http://localhost:5174', 'https://inventory.kent.pw'],
|
||||||
credentials: true
|
credentials: true
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -108,7 +108,7 @@ app.get('/me', async (req, res) => {
|
|||||||
|
|
||||||
// Get user details from database
|
// Get user details from database
|
||||||
const userResult = await pool.query(
|
const userResult = await pool.query(
|
||||||
'SELECT id, username, email, is_admin, is_active FROM users WHERE id = $1',
|
'SELECT id, username, email, is_admin, rocket_chat_user_id, is_active FROM users WHERE id = $1',
|
||||||
[decoded.userId]
|
[decoded.userId]
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -135,6 +135,7 @@ app.get('/me', async (req, res) => {
|
|||||||
id: user.id,
|
id: user.id,
|
||||||
username: user.username,
|
username: user.username,
|
||||||
email: user.email,
|
email: user.email,
|
||||||
|
rocket_chat_user_id: user.rocket_chat_user_id,
|
||||||
is_admin: user.is_admin,
|
is_admin: user.is_admin,
|
||||||
permissions: permissions
|
permissions: permissions
|
||||||
});
|
});
|
||||||
|
|||||||
881
inventory-server/chat/db-convert/mongo_to_postgres_converter.py
Normal file
881
inventory-server/chat/db-convert/mongo_to_postgres_converter.py
Normal file
@@ -0,0 +1,881 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
MongoDB to PostgreSQL Converter for Rocket.Chat
|
||||||
|
Converts MongoDB BSON export files to PostgreSQL database
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python3 mongo_to_postgres_converter.py \
|
||||||
|
--mongo-path db/database/62df06d44234d20001289144 \
|
||||||
|
--pg-database rocketchat_converted \
|
||||||
|
--pg-user rocketchat_user \
|
||||||
|
--pg-password your_password \
|
||||||
|
--debug
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import struct
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Any, List, Optional
|
||||||
|
import argparse
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
# Auto-install dependencies if needed
|
||||||
|
try:
|
||||||
|
import bson
|
||||||
|
import psycopg2
|
||||||
|
except ImportError:
|
||||||
|
print("Installing required packages...")
|
||||||
|
subprocess.check_call([sys.executable, "-m", "pip", "install", "pymongo", "psycopg2-binary"])
|
||||||
|
import bson
|
||||||
|
import psycopg2
|
||||||
|
|
||||||
|
class MongoToPostgresConverter:
|
||||||
|
def __init__(self, mongo_db_path: str, postgres_config: Dict[str, str], debug_mode: bool = False, debug_collections: List[str] = None):
|
||||||
|
self.mongo_db_path = Path(mongo_db_path)
|
||||||
|
self.postgres_config = postgres_config
|
||||||
|
self.debug_mode = debug_mode
|
||||||
|
self.debug_collections = debug_collections or []
|
||||||
|
self.collections = {}
|
||||||
|
self.schema_info = {}
|
||||||
|
self.error_log = {}
|
||||||
|
|
||||||
|
def log_debug(self, message: str, collection: str = None):
|
||||||
|
"""Log debug messages if debug mode is enabled and collection is in debug list"""
|
||||||
|
if self.debug_mode and (not self.debug_collections or collection in self.debug_collections):
|
||||||
|
print(f"DEBUG: {message}")
|
||||||
|
|
||||||
|
def log_error(self, collection: str, error_type: str, details: str):
|
||||||
|
"""Log detailed error information"""
|
||||||
|
if collection not in self.error_log:
|
||||||
|
self.error_log[collection] = []
|
||||||
|
self.error_log[collection].append({
|
||||||
|
'type': error_type,
|
||||||
|
'details': details,
|
||||||
|
'timestamp': datetime.now().isoformat()
|
||||||
|
})
|
||||||
|
|
||||||
|
def sample_documents(self, collection_name: str, max_samples: int = 3) -> List[Dict]:
|
||||||
|
"""Sample documents from a collection for debugging"""
|
||||||
|
if not self.debug_mode or (self.debug_collections and collection_name not in self.debug_collections):
|
||||||
|
return []
|
||||||
|
|
||||||
|
print(f"\n🔍 Sampling documents from {collection_name}:")
|
||||||
|
|
||||||
|
bson_file = self.collections[collection_name]['bson_file']
|
||||||
|
if bson_file.stat().st_size == 0:
|
||||||
|
print(" Collection is empty")
|
||||||
|
return []
|
||||||
|
|
||||||
|
samples = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(bson_file, 'rb') as f:
|
||||||
|
sample_count = 0
|
||||||
|
while sample_count < max_samples:
|
||||||
|
try:
|
||||||
|
doc_size = int.from_bytes(f.read(4), byteorder='little')
|
||||||
|
if doc_size <= 0:
|
||||||
|
break
|
||||||
|
f.seek(-4, 1)
|
||||||
|
doc_bytes = f.read(doc_size)
|
||||||
|
if len(doc_bytes) != doc_size:
|
||||||
|
break
|
||||||
|
|
||||||
|
doc = bson.decode(doc_bytes)
|
||||||
|
samples.append(doc)
|
||||||
|
sample_count += 1
|
||||||
|
|
||||||
|
print(f" Sample {sample_count} - Keys: {list(doc.keys())}")
|
||||||
|
# Show a few key fields with their types and truncated values
|
||||||
|
for key, value in list(doc.items())[:3]:
|
||||||
|
value_preview = str(value)[:50] + "..." if len(str(value)) > 50 else str(value)
|
||||||
|
print(f" {key}: {type(value).__name__} = {value_preview}")
|
||||||
|
if len(doc) > 3:
|
||||||
|
print(f" ... and {len(doc) - 3} more fields")
|
||||||
|
print()
|
||||||
|
|
||||||
|
except (bson.InvalidBSON, struct.error, OSError) as e:
|
||||||
|
self.log_error(collection_name, 'document_parsing', str(e))
|
||||||
|
break
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.log_error(collection_name, 'file_reading', str(e))
|
||||||
|
print(f" Error reading collection: {e}")
|
||||||
|
|
||||||
|
return samples
|
||||||
|
|
||||||
|
def discover_collections(self):
|
||||||
|
"""Discover all BSON files and their metadata"""
|
||||||
|
print("Discovering MongoDB collections...")
|
||||||
|
|
||||||
|
for bson_file in self.mongo_db_path.glob("*.bson"):
|
||||||
|
collection_name = bson_file.stem
|
||||||
|
metadata_file = bson_file.with_suffix(".metadata.json")
|
||||||
|
|
||||||
|
# Read metadata if available
|
||||||
|
metadata = {}
|
||||||
|
if metadata_file.exists():
|
||||||
|
try:
|
||||||
|
with open(metadata_file, 'r', encoding='utf-8') as f:
|
||||||
|
metadata = json.load(f)
|
||||||
|
except (UnicodeDecodeError, json.JSONDecodeError) as e:
|
||||||
|
print(f"Warning: Could not read metadata for {collection_name}: {e}")
|
||||||
|
metadata = {}
|
||||||
|
|
||||||
|
# Get file size and document count estimate
|
||||||
|
file_size = bson_file.stat().st_size
|
||||||
|
doc_count = self._estimate_document_count(bson_file)
|
||||||
|
|
||||||
|
self.collections[collection_name] = {
|
||||||
|
'bson_file': bson_file,
|
||||||
|
'metadata': metadata,
|
||||||
|
'file_size': file_size,
|
||||||
|
'estimated_docs': doc_count
|
||||||
|
}
|
||||||
|
|
||||||
|
print(f"Found {len(self.collections)} collections")
|
||||||
|
for name, info in self.collections.items():
|
||||||
|
print(f" - {name}: {info['file_size']/1024/1024:.1f}MB (~{info['estimated_docs']} docs)")
|
||||||
|
|
||||||
|
def _estimate_document_count(self, bson_file: Path) -> int:
|
||||||
|
"""Estimate document count by reading first few documents"""
|
||||||
|
if bson_file.stat().st_size == 0:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(bson_file, 'rb') as f:
|
||||||
|
docs_sampled = 0
|
||||||
|
bytes_sampled = 0
|
||||||
|
max_sample_size = min(1024 * 1024, bson_file.stat().st_size) # 1MB or file size
|
||||||
|
|
||||||
|
while bytes_sampled < max_sample_size:
|
||||||
|
try:
|
||||||
|
doc_size = int.from_bytes(f.read(4), byteorder='little')
|
||||||
|
if doc_size <= 0 or doc_size > 16 * 1024 * 1024: # MongoDB doc size limit
|
||||||
|
break
|
||||||
|
f.seek(-4, 1) # Go back
|
||||||
|
doc_bytes = f.read(doc_size)
|
||||||
|
if len(doc_bytes) != doc_size:
|
||||||
|
break
|
||||||
|
bson.decode(doc_bytes) # Validate it's a valid BSON document
|
||||||
|
docs_sampled += 1
|
||||||
|
bytes_sampled += doc_size
|
||||||
|
except (bson.InvalidBSON, struct.error, OSError):
|
||||||
|
break
|
||||||
|
|
||||||
|
if docs_sampled > 0 and bytes_sampled > 0:
|
||||||
|
avg_doc_size = bytes_sampled / docs_sampled
|
||||||
|
return int(bson_file.stat().st_size / avg_doc_size)
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def analyze_schema(self, collection_name: str, sample_size: int = 100) -> Dict[str, Any]:
|
||||||
|
"""Analyze collection schema by sampling documents"""
|
||||||
|
print(f"Analyzing schema for {collection_name}...")
|
||||||
|
|
||||||
|
bson_file = self.collections[collection_name]['bson_file']
|
||||||
|
if bson_file.stat().st_size == 0:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
schema = {}
|
||||||
|
docs_analyzed = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(bson_file, 'rb') as f:
|
||||||
|
while docs_analyzed < sample_size:
|
||||||
|
try:
|
||||||
|
doc_size = int.from_bytes(f.read(4), byteorder='little')
|
||||||
|
if doc_size <= 0:
|
||||||
|
break
|
||||||
|
f.seek(-4, 1)
|
||||||
|
doc_bytes = f.read(doc_size)
|
||||||
|
if len(doc_bytes) != doc_size:
|
||||||
|
break
|
||||||
|
|
||||||
|
doc = bson.decode(doc_bytes)
|
||||||
|
self._analyze_document_schema(doc, schema)
|
||||||
|
docs_analyzed += 1
|
||||||
|
|
||||||
|
except (bson.InvalidBSON, struct.error, OSError):
|
||||||
|
break
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error analyzing {collection_name}: {e}")
|
||||||
|
|
||||||
|
self.schema_info[collection_name] = schema
|
||||||
|
return schema
|
||||||
|
|
||||||
|
def _analyze_document_schema(self, doc: Dict[str, Any], schema: Dict[str, Any], prefix: str = ""):
|
||||||
|
"""Recursively analyze document structure"""
|
||||||
|
for key, value in doc.items():
|
||||||
|
full_key = f"{prefix}.{key}" if prefix else key
|
||||||
|
|
||||||
|
if full_key not in schema:
|
||||||
|
schema[full_key] = {
|
||||||
|
'types': set(),
|
||||||
|
'null_count': 0,
|
||||||
|
'total_count': 0,
|
||||||
|
'is_array': False,
|
||||||
|
'nested_schema': {}
|
||||||
|
}
|
||||||
|
|
||||||
|
schema[full_key]['total_count'] += 1
|
||||||
|
|
||||||
|
if value is None:
|
||||||
|
schema[full_key]['null_count'] += 1
|
||||||
|
schema[full_key]['types'].add('null')
|
||||||
|
elif isinstance(value, dict):
|
||||||
|
schema[full_key]['types'].add('object')
|
||||||
|
if 'nested_schema' not in schema[full_key]:
|
||||||
|
schema[full_key]['nested_schema'] = {}
|
||||||
|
self._analyze_document_schema(value, schema[full_key]['nested_schema'])
|
||||||
|
elif isinstance(value, list):
|
||||||
|
schema[full_key]['types'].add('array')
|
||||||
|
schema[full_key]['is_array'] = True
|
||||||
|
if value and isinstance(value[0], dict):
|
||||||
|
if 'array_item_schema' not in schema[full_key]:
|
||||||
|
schema[full_key]['array_item_schema'] = {}
|
||||||
|
for item in value[:5]: # Sample first 5 items
|
||||||
|
if isinstance(item, dict):
|
||||||
|
self._analyze_document_schema(item, schema[full_key]['array_item_schema'])
|
||||||
|
else:
|
||||||
|
schema[full_key]['types'].add(type(value).__name__)
|
||||||
|
|
||||||
|
def generate_postgres_schema(self) -> Dict[str, str]:
|
||||||
|
"""Generate PostgreSQL CREATE TABLE statements"""
|
||||||
|
print("Generating PostgreSQL schema...")
|
||||||
|
|
||||||
|
table_definitions = {}
|
||||||
|
|
||||||
|
for collection_name, schema in self.schema_info.items():
|
||||||
|
if not schema: # Empty collection
|
||||||
|
continue
|
||||||
|
|
||||||
|
table_name = self._sanitize_table_name(collection_name)
|
||||||
|
columns = []
|
||||||
|
|
||||||
|
# Always add an id column (PostgreSQL doesn't use _id like MongoDB)
|
||||||
|
columns.append("id SERIAL PRIMARY KEY")
|
||||||
|
|
||||||
|
for field_name, field_info in schema.items():
|
||||||
|
if field_name == '_id':
|
||||||
|
columns.append("mongo_id TEXT") # Always allow NULL for mongo_id
|
||||||
|
continue
|
||||||
|
|
||||||
|
col_name = self._sanitize_column_name(field_name)
|
||||||
|
|
||||||
|
# Handle conflicts with PostgreSQL auto-generated columns
|
||||||
|
if col_name in ['id', 'mongo_id', 'created_at', 'updated_at']:
|
||||||
|
col_name = f"field_{col_name}"
|
||||||
|
|
||||||
|
col_type = self._determine_postgres_type(field_info)
|
||||||
|
|
||||||
|
# Make all fields nullable by default to avoid constraint violations
|
||||||
|
columns.append(f"{col_name} {col_type}")
|
||||||
|
|
||||||
|
# Add metadata columns
|
||||||
|
columns.extend([
|
||||||
|
"created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
"updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP"
|
||||||
|
])
|
||||||
|
|
||||||
|
column_definitions = ',\n '.join(columns)
|
||||||
|
table_sql = f"""
|
||||||
|
CREATE TABLE IF NOT EXISTS {table_name} (
|
||||||
|
{column_definitions}
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Create indexes based on MongoDB indexes
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Get list of actual columns that will exist in the table
|
||||||
|
existing_columns = set(['id', 'mongo_id', 'created_at', 'updated_at'])
|
||||||
|
for field_name in schema.keys():
|
||||||
|
if field_name != '_id':
|
||||||
|
col_name = self._sanitize_column_name(field_name)
|
||||||
|
# Handle conflicts with PostgreSQL auto-generated columns
|
||||||
|
if col_name in ['id', 'mongo_id', 'created_at', 'updated_at']:
|
||||||
|
col_name = f"field_{col_name}"
|
||||||
|
existing_columns.add(col_name)
|
||||||
|
|
||||||
|
# Add indexes from MongoDB metadata
|
||||||
|
metadata = self.collections[collection_name].get('metadata', {})
|
||||||
|
indexes = metadata.get('indexes', [])
|
||||||
|
|
||||||
|
for index in indexes:
|
||||||
|
if index['name'] != '_id_': # Skip the default _id index
|
||||||
|
# Sanitize index name - remove special characters
|
||||||
|
sanitized_index_name = re.sub(r'[^a-zA-Z0-9_]', '_', index['name'])
|
||||||
|
index_name = f"idx_{table_name}_{sanitized_index_name}"
|
||||||
|
index_keys = list(index['key'].keys())
|
||||||
|
if index_keys:
|
||||||
|
sanitized_keys = []
|
||||||
|
for key in index_keys:
|
||||||
|
if key != '_id':
|
||||||
|
sanitized_key = self._sanitize_column_name(key)
|
||||||
|
# Handle conflicts with PostgreSQL auto-generated columns
|
||||||
|
if sanitized_key in ['id', 'mongo_id', 'created_at', 'updated_at']:
|
||||||
|
sanitized_key = f"field_{sanitized_key}"
|
||||||
|
# Only add if the column actually exists in our table
|
||||||
|
if sanitized_key in existing_columns:
|
||||||
|
sanitized_keys.append(sanitized_key)
|
||||||
|
|
||||||
|
if sanitized_keys:
|
||||||
|
table_sql += f"CREATE INDEX IF NOT EXISTS {index_name} ON {table_name} ({', '.join(sanitized_keys)});\n"
|
||||||
|
|
||||||
|
table_definitions[collection_name] = table_sql
|
||||||
|
|
||||||
|
return table_definitions
|
||||||
|
|
||||||
|
def _sanitize_table_name(self, name: str) -> str:
|
||||||
|
"""Convert MongoDB collection name to PostgreSQL table name"""
|
||||||
|
# Remove rocketchat_ prefix if present
|
||||||
|
if name.startswith('rocketchat_'):
|
||||||
|
name = name[11:]
|
||||||
|
|
||||||
|
# Replace special characters with underscores
|
||||||
|
name = re.sub(r'[^a-zA-Z0-9_]', '_', name)
|
||||||
|
|
||||||
|
# Ensure it starts with a letter
|
||||||
|
if name and name[0].isdigit():
|
||||||
|
name = 'table_' + name
|
||||||
|
|
||||||
|
return name.lower()
|
||||||
|
|
||||||
|
def _sanitize_column_name(self, name: str) -> str:
|
||||||
|
"""Convert MongoDB field name to PostgreSQL column name"""
|
||||||
|
# Handle nested field names (convert dots to underscores)
|
||||||
|
name = name.replace('.', '_')
|
||||||
|
|
||||||
|
# Replace special characters with underscores
|
||||||
|
name = re.sub(r'[^a-zA-Z0-9_]', '_', name)
|
||||||
|
|
||||||
|
# Ensure it starts with a letter or underscore
|
||||||
|
if name and name[0].isdigit():
|
||||||
|
name = 'col_' + name
|
||||||
|
|
||||||
|
# Handle PostgreSQL reserved words
|
||||||
|
reserved = {
|
||||||
|
'user', 'order', 'group', 'table', 'index', 'key', 'value', 'date', 'time', 'timestamp',
|
||||||
|
'default', 'select', 'from', 'where', 'insert', 'update', 'delete', 'create', 'drop',
|
||||||
|
'alter', 'grant', 'revoke', 'commit', 'rollback', 'begin', 'end', 'case', 'when',
|
||||||
|
'then', 'else', 'if', 'null', 'not', 'and', 'or', 'in', 'exists', 'between',
|
||||||
|
'like', 'limit', 'offset', 'union', 'join', 'inner', 'outer', 'left', 'right',
|
||||||
|
'full', 'cross', 'natural', 'on', 'using', 'distinct', 'all', 'any', 'some',
|
||||||
|
'desc', 'asc', 'primary', 'foreign', 'references', 'constraint', 'unique',
|
||||||
|
'check', 'cascade', 'restrict', 'action', 'match', 'partial', 'full'
|
||||||
|
}
|
||||||
|
if name.lower() in reserved:
|
||||||
|
name = name + '_col'
|
||||||
|
|
||||||
|
return name.lower()
|
||||||
|
|
||||||
|
def _determine_postgres_type(self, field_info: Dict[str, Any]) -> str:
|
||||||
|
"""Determine PostgreSQL column type from MongoDB field analysis with improved logic"""
|
||||||
|
types = field_info['types']
|
||||||
|
|
||||||
|
# Convert set to list for easier checking
|
||||||
|
type_list = list(types)
|
||||||
|
|
||||||
|
# If there's only one type (excluding null), use specific typing
|
||||||
|
non_null_types = [t for t in type_list if t != 'null']
|
||||||
|
|
||||||
|
if len(non_null_types) == 1:
|
||||||
|
single_type = non_null_types[0]
|
||||||
|
if single_type == 'bool':
|
||||||
|
return 'BOOLEAN'
|
||||||
|
elif single_type == 'int':
|
||||||
|
return 'INTEGER'
|
||||||
|
elif single_type == 'float':
|
||||||
|
return 'NUMERIC'
|
||||||
|
elif single_type == 'str':
|
||||||
|
return 'TEXT'
|
||||||
|
elif single_type == 'datetime':
|
||||||
|
return 'TIMESTAMP'
|
||||||
|
elif single_type == 'ObjectId':
|
||||||
|
return 'TEXT'
|
||||||
|
|
||||||
|
# Handle mixed types more conservatively
|
||||||
|
if 'array' in types or field_info.get('is_array', False):
|
||||||
|
return 'JSONB' # Arrays always go to JSONB
|
||||||
|
elif 'object' in types:
|
||||||
|
return 'JSONB' # Objects always go to JSONB
|
||||||
|
elif len(non_null_types) > 1:
|
||||||
|
# Multiple non-null types - check for common combinations
|
||||||
|
if set(non_null_types) <= {'int', 'float'}:
|
||||||
|
return 'NUMERIC' # Can handle both int and float
|
||||||
|
elif set(non_null_types) <= {'bool', 'str'}:
|
||||||
|
return 'TEXT' # Convert everything to text
|
||||||
|
elif set(non_null_types) <= {'str', 'ObjectId'}:
|
||||||
|
return 'TEXT' # Both are string-like
|
||||||
|
else:
|
||||||
|
return 'JSONB' # Complex mixed types go to JSONB
|
||||||
|
elif 'ObjectId' in types:
|
||||||
|
return 'TEXT'
|
||||||
|
elif 'datetime' in types:
|
||||||
|
return 'TIMESTAMP'
|
||||||
|
elif 'bool' in types:
|
||||||
|
return 'BOOLEAN'
|
||||||
|
elif 'int' in types:
|
||||||
|
return 'INTEGER'
|
||||||
|
elif 'float' in types:
|
||||||
|
return 'NUMERIC'
|
||||||
|
elif 'str' in types:
|
||||||
|
return 'TEXT'
|
||||||
|
else:
|
||||||
|
return 'TEXT' # Default fallback
|
||||||
|
|
||||||
|
def create_postgres_database(self, table_definitions: Dict[str, str]):
|
||||||
|
"""Create PostgreSQL database and tables"""
|
||||||
|
print("Creating PostgreSQL database schema...")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Connect to PostgreSQL
|
||||||
|
conn = psycopg2.connect(**self.postgres_config)
|
||||||
|
conn.autocommit = True
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Create tables
|
||||||
|
for collection_name, table_sql in table_definitions.items():
|
||||||
|
print(f"Creating table for {collection_name}...")
|
||||||
|
cursor.execute(table_sql)
|
||||||
|
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
print("Database schema created successfully!")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error creating database schema: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def convert_and_insert_data(self, batch_size: int = 1000):
|
||||||
|
"""Convert BSON data and insert into PostgreSQL"""
|
||||||
|
print("Converting and inserting data...")
|
||||||
|
|
||||||
|
try:
|
||||||
|
conn = psycopg2.connect(**self.postgres_config)
|
||||||
|
conn.autocommit = False
|
||||||
|
|
||||||
|
for collection_name in self.collections:
|
||||||
|
print(f"Processing {collection_name}...")
|
||||||
|
self._convert_collection(conn, collection_name, batch_size)
|
||||||
|
|
||||||
|
conn.close()
|
||||||
|
print("Data conversion completed successfully!")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error converting data: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def _convert_collection(self, conn, collection_name: str, batch_size: int):
|
||||||
|
"""Convert a single collection"""
|
||||||
|
bson_file = self.collections[collection_name]['bson_file']
|
||||||
|
|
||||||
|
if bson_file.stat().st_size == 0:
|
||||||
|
print(f" Skipping empty collection {collection_name}")
|
||||||
|
return
|
||||||
|
|
||||||
|
table_name = self._sanitize_table_name(collection_name)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
batch = []
|
||||||
|
total_inserted = 0
|
||||||
|
errors = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(bson_file, 'rb') as f:
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
doc_size = int.from_bytes(f.read(4), byteorder='little')
|
||||||
|
if doc_size <= 0:
|
||||||
|
break
|
||||||
|
f.seek(-4, 1)
|
||||||
|
doc_bytes = f.read(doc_size)
|
||||||
|
if len(doc_bytes) != doc_size:
|
||||||
|
break
|
||||||
|
|
||||||
|
doc = bson.decode(doc_bytes)
|
||||||
|
batch.append(doc)
|
||||||
|
|
||||||
|
if len(batch) >= batch_size:
|
||||||
|
inserted, batch_errors = self._insert_batch(cursor, table_name, batch, collection_name)
|
||||||
|
total_inserted += inserted
|
||||||
|
errors += batch_errors
|
||||||
|
batch = []
|
||||||
|
conn.commit()
|
||||||
|
if total_inserted % 5000 == 0: # Less frequent progress updates
|
||||||
|
print(f" Inserted {total_inserted} documents...")
|
||||||
|
|
||||||
|
except (bson.InvalidBSON, struct.error, OSError):
|
||||||
|
break
|
||||||
|
|
||||||
|
# Insert remaining documents
|
||||||
|
if batch:
|
||||||
|
inserted, batch_errors = self._insert_batch(cursor, table_name, batch, collection_name)
|
||||||
|
total_inserted += inserted
|
||||||
|
errors += batch_errors
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
if errors > 0:
|
||||||
|
print(f" Completed {collection_name}: {total_inserted} documents inserted ({errors} errors)")
|
||||||
|
else:
|
||||||
|
print(f" Completed {collection_name}: {total_inserted} documents inserted")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f" Error processing {collection_name}: {e}")
|
||||||
|
conn.rollback()
|
||||||
|
finally:
|
||||||
|
cursor.close()
|
||||||
|
|
||||||
|
def _insert_batch(self, cursor, table_name: str, documents: List[Dict], collection_name: str):
|
||||||
|
"""Insert a batch of documents with proper transaction handling"""
|
||||||
|
if not documents:
|
||||||
|
return 0, 0
|
||||||
|
|
||||||
|
# Get schema info for this collection
|
||||||
|
schema = self.schema_info.get(collection_name, {})
|
||||||
|
|
||||||
|
# Build column list
|
||||||
|
columns = ['mongo_id']
|
||||||
|
for field_name in schema.keys():
|
||||||
|
if field_name != '_id':
|
||||||
|
col_name = self._sanitize_column_name(field_name)
|
||||||
|
# Handle conflicts with PostgreSQL auto-generated columns
|
||||||
|
if col_name in ['id', 'mongo_id', 'created_at', 'updated_at']:
|
||||||
|
col_name = f"field_{col_name}"
|
||||||
|
columns.append(col_name)
|
||||||
|
|
||||||
|
# Build INSERT statement
|
||||||
|
placeholders = ', '.join(['%s'] * len(columns))
|
||||||
|
sql = f"INSERT INTO {table_name} ({', '.join(columns)}) VALUES ({placeholders})"
|
||||||
|
|
||||||
|
self.log_debug(f"SQL: {sql}", collection_name)
|
||||||
|
|
||||||
|
# Convert documents to tuples
|
||||||
|
rows = []
|
||||||
|
errors = 0
|
||||||
|
|
||||||
|
for doc_idx, doc in enumerate(documents):
|
||||||
|
try:
|
||||||
|
row = []
|
||||||
|
|
||||||
|
# Add mongo_id
|
||||||
|
row.append(str(doc.get('_id', '')))
|
||||||
|
|
||||||
|
# Add other fields
|
||||||
|
for field_name in schema.keys():
|
||||||
|
if field_name != '_id':
|
||||||
|
try:
|
||||||
|
value = self._get_nested_value(doc, field_name)
|
||||||
|
converted_value = self._convert_value_for_postgres(value, field_name, schema)
|
||||||
|
row.append(converted_value)
|
||||||
|
except Exception as e:
|
||||||
|
self.log_error(collection_name, 'field_conversion',
|
||||||
|
f"Field '{field_name}' in doc {doc_idx}: {str(e)}")
|
||||||
|
# Only show debug for collections we're focusing on
|
||||||
|
if collection_name in self.debug_collections:
|
||||||
|
print(f" ⚠️ Error converting field '{field_name}': {e}")
|
||||||
|
row.append(None) # Use NULL for problematic fields
|
||||||
|
|
||||||
|
rows.append(tuple(row))
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.log_error(collection_name, 'document_conversion', f"Document {doc_idx}: {str(e)}")
|
||||||
|
errors += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Execute batch insert
|
||||||
|
if rows:
|
||||||
|
try:
|
||||||
|
cursor.executemany(sql, rows)
|
||||||
|
return len(rows), errors
|
||||||
|
except Exception as batch_error:
|
||||||
|
self.log_error(collection_name, 'batch_insert', str(batch_error))
|
||||||
|
|
||||||
|
# Only show detailed debugging for targeted collections
|
||||||
|
if collection_name in self.debug_collections:
|
||||||
|
print(f" 🔴 Batch insert failed for {collection_name}: {batch_error}")
|
||||||
|
print(" Trying individual inserts with rollback handling...")
|
||||||
|
|
||||||
|
# Rollback the failed transaction
|
||||||
|
cursor.connection.rollback()
|
||||||
|
|
||||||
|
# Try inserting one by one in individual transactions
|
||||||
|
success_count = 0
|
||||||
|
for row_idx, row in enumerate(rows):
|
||||||
|
try:
|
||||||
|
cursor.execute(sql, row)
|
||||||
|
cursor.connection.commit() # Commit each successful insert
|
||||||
|
success_count += 1
|
||||||
|
except Exception as row_error:
|
||||||
|
cursor.connection.rollback() # Rollback failed insert
|
||||||
|
self.log_error(collection_name, 'row_insert', f"Row {row_idx}: {str(row_error)}")
|
||||||
|
|
||||||
|
# Show detailed error only for the first few failures and only for targeted collections
|
||||||
|
if collection_name in self.debug_collections and errors < 3:
|
||||||
|
print(f" Row {row_idx} failed: {row_error}")
|
||||||
|
print(f" Row data: {len(row)} values, expected {len(columns)} columns")
|
||||||
|
|
||||||
|
errors += 1
|
||||||
|
continue
|
||||||
|
return success_count, errors
|
||||||
|
|
||||||
|
return 0, errors
|
||||||
|
|
||||||
|
def _get_nested_value(self, doc: Dict, field_path: str):
|
||||||
|
"""Get value from nested document using dot notation"""
|
||||||
|
keys = field_path.split('.')
|
||||||
|
value = doc
|
||||||
|
|
||||||
|
for key in keys:
|
||||||
|
if isinstance(value, dict) and key in value:
|
||||||
|
value = value[key]
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _convert_value_for_postgres(self, value, field_name: str = None, schema: Dict = None):
|
||||||
|
"""Convert MongoDB value to PostgreSQL compatible value with schema-aware conversion"""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Get the expected PostgreSQL type for this field if available
|
||||||
|
expected_type = None
|
||||||
|
if schema and field_name and field_name in schema:
|
||||||
|
field_info = schema[field_name]
|
||||||
|
expected_type = self._determine_postgres_type(field_info)
|
||||||
|
|
||||||
|
# Handle conversion based on expected type
|
||||||
|
if expected_type == 'BOOLEAN':
|
||||||
|
if isinstance(value, bool):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, str):
|
||||||
|
return value.lower() in ('true', '1', 'yes', 'on')
|
||||||
|
elif isinstance(value, (int, float)):
|
||||||
|
return bool(value)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
elif expected_type == 'INTEGER':
|
||||||
|
if isinstance(value, int):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, float):
|
||||||
|
return int(value)
|
||||||
|
elif isinstance(value, str) and value.isdigit():
|
||||||
|
return int(value)
|
||||||
|
elif isinstance(value, bool):
|
||||||
|
return int(value)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
elif expected_type == 'NUMERIC':
|
||||||
|
if isinstance(value, (int, float)):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, str):
|
||||||
|
try:
|
||||||
|
return float(value)
|
||||||
|
except ValueError:
|
||||||
|
return None
|
||||||
|
elif isinstance(value, bool):
|
||||||
|
return float(value)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
elif expected_type == 'TEXT':
|
||||||
|
if isinstance(value, str):
|
||||||
|
return value
|
||||||
|
elif value is not None:
|
||||||
|
str_value = str(value)
|
||||||
|
# Handle very long strings
|
||||||
|
if len(str_value) > 65535:
|
||||||
|
return str_value[:65535]
|
||||||
|
return str_value
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
elif expected_type == 'TIMESTAMP':
|
||||||
|
if hasattr(value, 'isoformat'):
|
||||||
|
return value.isoformat()
|
||||||
|
elif isinstance(value, str):
|
||||||
|
return value
|
||||||
|
else:
|
||||||
|
return str(value) if value is not None else None
|
||||||
|
elif expected_type == 'JSONB':
|
||||||
|
if isinstance(value, (dict, list)):
|
||||||
|
return json.dumps(value, default=self._json_serializer)
|
||||||
|
elif isinstance(value, str):
|
||||||
|
# Check if it's already valid JSON
|
||||||
|
try:
|
||||||
|
json.loads(value)
|
||||||
|
return value
|
||||||
|
except (json.JSONDecodeError, TypeError):
|
||||||
|
# Not valid JSON, wrap it
|
||||||
|
return json.dumps(value)
|
||||||
|
else:
|
||||||
|
return json.dumps(value, default=self._json_serializer)
|
||||||
|
|
||||||
|
# Fallback to original logic if no expected type or type not recognized
|
||||||
|
if isinstance(value, bool):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, (int, float)):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, str):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, (dict, list)):
|
||||||
|
return json.dumps(value, default=self._json_serializer)
|
||||||
|
elif hasattr(value, 'isoformat'): # datetime
|
||||||
|
return value.isoformat()
|
||||||
|
elif hasattr(value, '__str__'):
|
||||||
|
str_value = str(value)
|
||||||
|
if len(str_value) > 65535:
|
||||||
|
return str_value[:65535]
|
||||||
|
return str_value
|
||||||
|
else:
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
def _json_serializer(self, obj):
|
||||||
|
"""Custom JSON serializer for complex objects with better error handling"""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, 'isoformat'): # datetime
|
||||||
|
return obj.isoformat()
|
||||||
|
elif hasattr(obj, '__str__'):
|
||||||
|
return str(obj)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
except Exception as e:
|
||||||
|
self.log_debug(f"JSON serialization error: {e}")
|
||||||
|
return str(obj)
|
||||||
|
|
||||||
|
def run_conversion(self, sample_size: int = 100, batch_size: int = 1000):
    """Drive the end-to-end MongoDB -> PostgreSQL conversion.

    Steps: discover collections, analyze schemas (sampling documents for
    collections under debug), generate and create the PostgreSQL schema,
    load the data, then print an error summary.
    """
    print("Starting MongoDB to PostgreSQL conversion...")
    print("This will convert your Rocket.Chat database from MongoDB to PostgreSQL")
    if self.debug_mode:
        if self.debug_collections:
            print(f"🐛 DEBUG MODE: Focusing on collections: {', '.join(self.debug_collections)}")
        else:
            print("🐛 DEBUG MODE: All collections")
    print("=" * 70)

    # Step 1: find every collection in the export.
    self.discover_collections()

    # Step 2: infer a schema for each collection from sampled documents.
    print("\nAnalyzing collection schemas...")
    for collection_name in self.collections:
        self.analyze_schema(collection_name, sample_size)

    # Dump a couple of example documents for collections being debugged.
    if self.debug_mode and self.debug_collections:
        for coll in self.debug_collections:
            if coll in self.collections:
                self.sample_documents(coll, 2)

    # Steps 3-5: build the PostgreSQL schema and load the data.
    table_definitions = self.generate_postgres_schema()
    self.create_postgres_database(table_definitions)
    self.convert_and_insert_data(batch_size)

    # Step 6: report anything that went wrong.
    self._print_error_summary()

    print("=" * 70)
    print("✅ Conversion completed!")
    print(f" Database: {self.postgres_config['database']}")
    print(f" Tables created: {len(table_definitions)}")
|
||||||
|
|
||||||
|
def _print_error_summary(self):
|
||||||
|
"""Print a focused summary of errors"""
|
||||||
|
if not self.error_log:
|
||||||
|
print("\n✅ No errors encountered during conversion!")
|
||||||
|
return
|
||||||
|
|
||||||
|
print("\n⚠️ ERROR SUMMARY:")
|
||||||
|
print("=" * 50)
|
||||||
|
|
||||||
|
# Sort by error count descending
|
||||||
|
sorted_collections = sorted(self.error_log.items(),
|
||||||
|
key=lambda x: len(x[1]), reverse=True)
|
||||||
|
|
||||||
|
for collection, errors in sorted_collections:
|
||||||
|
error_types = {}
|
||||||
|
for error in errors:
|
||||||
|
error_type = error['type']
|
||||||
|
if error_type not in error_types:
|
||||||
|
error_types[error_type] = []
|
||||||
|
error_types[error_type].append(error['details'])
|
||||||
|
|
||||||
|
print(f"\n🔴 {collection} ({len(errors)} total errors):")
|
||||||
|
for error_type, details_list in error_types.items():
|
||||||
|
print(f" {error_type}: {len(details_list)} errors")
|
||||||
|
|
||||||
|
# Show sample errors for critical collections
|
||||||
|
if collection in ['rocketchat_settings', 'rocketchat_room'] and len(details_list) > 0:
|
||||||
|
print(f" Sample: {details_list[0][:100]}...")
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point: parse arguments, build the converter, run it."""
    parser = argparse.ArgumentParser(
        description='Convert MongoDB BSON export to PostgreSQL',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Basic usage
  python3 mongo_to_postgres_converter.py \\
    --mongo-path db/database/62df06d44234d20001289144 \\
    --pg-database rocketchat_converted \\
    --pg-user rocketchat_user \\
    --pg-password mypassword

  # Debug specific failing collections
  python3 mongo_to_postgres_converter.py \\
    --mongo-path db/database/62df06d44234d20001289144 \\
    --pg-database rocketchat_converted \\
    --pg-user rocketchat_user \\
    --pg-password mypassword \\
    --debug-collections rocketchat_settings rocketchat_room

Before running this script:
  1. Run: sudo -u postgres psql -f reset_database.sql
  2. Update the password in reset_database.sql
        """
    )

    parser.add_argument('--mongo-path', required=True, help='Path to MongoDB export directory')
    parser.add_argument('--pg-host', default='localhost', help='PostgreSQL host (default: localhost)')
    parser.add_argument('--pg-port', default='5432', help='PostgreSQL port (default: 5432)')
    parser.add_argument('--pg-database', required=True, help='PostgreSQL database name')
    parser.add_argument('--pg-user', required=True, help='PostgreSQL username')
    parser.add_argument('--pg-password', required=True, help='PostgreSQL password')
    parser.add_argument('--sample-size', type=int, default=100, help='Number of documents to sample for schema analysis (default: 100)')
    parser.add_argument('--batch-size', type=int, default=1000, help='Batch size for data insertion (default: 1000)')
    parser.add_argument('--debug', action='store_true', help='Enable debug mode with detailed error logging')
    parser.add_argument('--debug-collections', nargs='*', help='Specific collections to debug (e.g., rocketchat_settings rocketchat_room)')

    args = parser.parse_args()

    postgres_config = {
        'host': args.pg_host,
        'port': args.pg_port,
        'database': args.pg_database,
        'user': args.pg_user,
        'password': args.pg_password,
    }

    # Naming debug collections implies debug mode even without --debug.
    debug_mode = args.debug or (args.debug_collections is not None)

    converter = MongoToPostgresConverter(args.mongo_path, postgres_config, debug_mode, args.debug_collections)
    converter.run_conversion(args.sample_size, args.batch_size)


if __name__ == '__main__':
    main()
|
||||||
41
inventory-server/chat/db-convert/reset_database.sql
Normal file
41
inventory-server/chat/db-convert/reset_database.sql
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
-- PostgreSQL Database Reset Script for Rocket.Chat Import
-- Run as: sudo -u postgres psql -f reset_database.sql

-- Terminate all connections to the database (force disconnect users)
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = 'rocketchat_converted' AND pid <> pg_backend_pid();

-- Drop the database if it exists
DROP DATABASE IF EXISTS rocketchat_converted;

-- Create fresh database
CREATE DATABASE rocketchat_converted;

-- Create the application role if it does not already exist.
-- pg_roles is used instead of the legacy pg_user view: pg_user only lists
-- login roles, so CREATE USER would fail if a non-login role with this
-- name already existed.
-- SECURITY: the password below is a committed placeholder -- change it
-- before use and avoid keeping real credentials in version control.
DO $$
BEGIN
    IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'rocketchat_user') THEN
        CREATE USER rocketchat_user WITH PASSWORD 'HKjLgt23gWuPXzEAn3rW';
    END IF;
END $$;

-- Grant database privileges
GRANT CONNECT ON DATABASE rocketchat_converted TO rocketchat_user;
GRANT CREATE ON DATABASE rocketchat_converted TO rocketchat_user;

-- Connect to the new database (psql meta-command: no trailing semicolon,
-- which would otherwise be parsed as part of the database name)
\c rocketchat_converted

-- Grant schema privileges
GRANT CREATE ON SCHEMA public TO rocketchat_user;
GRANT USAGE ON SCHEMA public TO rocketchat_user;

-- Grant privileges on all future tables and sequences
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO rocketchat_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE, SELECT ON SEQUENCES TO rocketchat_user;

-- Display success message
\echo 'Database reset completed successfully!'
\echo 'You can now run the converter with:'
\echo 'python3 mongo_to_postgres_converter.py --mongo-path db/database/62df06d44234d20001289144 --pg-database rocketchat_converted --pg-user rocketchat_user --pg-password your_password'
|
||||||
54
inventory-server/chat/db-convert/test_converter.py
Normal file
54
inventory-server/chat/db-convert/test_converter.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
#!/usr/bin/env python3
"""
Quick test script to verify the converter fixes work for problematic collections
"""

from mongo_to_postgres_converter import MongoToPostgresConverter


def test_problematic_collections():
    """Smoke-test collection discovery and schema analysis for the
    collections whose type mappings previously caused import failures."""
    print("🧪 Testing converter fixes for problematic collections...")

    pg_config = {
        'host': 'localhost',
        'port': '5432',
        'database': 'rocketchat_test',
        'user': 'rocketchat_user',
        'password': 'password123',
    }

    converter = MongoToPostgresConverter(
        'db/database/62df06d44234d20001289144',
        pg_config,
        debug_mode=True,
        debug_collections=['rocketchat_settings', 'rocketchat_room'],
    )

    # Exercise only the read-only analysis steps (no database writes).
    print("\n1. Testing collection discovery...")
    converter.discover_collections()

    print("\n2. Testing schema analysis...")
    if 'rocketchat_settings' in converter.collections:
        settings_schema = converter.analyze_schema('rocketchat_settings', 10)
        print(f"Settings schema fields: {len(settings_schema)}")

        # packageValue is one of the known-problematic fields.
        if 'packageValue' in settings_schema:
            field_info = settings_schema['packageValue']
            pg_type = converter._determine_postgres_type(field_info)
            print(f"packageValue types: {field_info['types']} -> PostgreSQL: {pg_type}")

    if 'rocketchat_room' in converter.collections:
        room_schema = converter.analyze_schema('rocketchat_room', 10)
        print(f"Room schema fields: {len(room_schema)}")

        # sysMes is one of the known-problematic fields.
        if 'sysMes' in room_schema:
            field_info = room_schema['sysMes']
            pg_type = converter._determine_postgres_type(field_info)
            print(f"sysMes types: {field_info['types']} -> PostgreSQL: {pg_type}")

    print("\n✅ Test completed - check the type mappings above!")


if __name__ == '__main__':
    test_problematic_collections()
|
||||||
1447
inventory-server/chat/package-lock.json
generated
Normal file
1447
inventory-server/chat/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
20
inventory-server/chat/package.json
Normal file
20
inventory-server/chat/package.json
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"name": "chat-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Chat archive server for Rocket.Chat data",
|
||||||
|
"main": "server.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"pg": "^8.11.0",
|
||||||
|
"dotenv": "^16.0.3",
|
||||||
|
"morgan": "^1.10.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^2.0.22"
|
||||||
|
}
|
||||||
|
}
|
||||||
649
inventory-server/chat/routes.js
Normal file
649
inventory-server/chat/routes.js
Normal file
@@ -0,0 +1,649 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const path = require('path');
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// GET /files/uploads/* — map a Rocket.Chat upload URL path (e.g.
// "ufs/AmazonS3:Uploads/<mongoId>/name.jpg") to the file stored on disk
// under its mongo_id.
router.get('/files/uploads/*', async (req, res) => {
  try {
    // Everything after /files/uploads/
    const requestPath = req.params[0];

    // The upload's mongo_id is the path segment that follows the
    // "AmazonS3:Uploads" component. (The previous strict-equality check was
    // dead code: .includes() already matches the exact-name case.)
    const pathParts = requestPath.split('/');
    let mongoId = null;
    for (let i = 0; i + 1 < pathParts.length; i++) {
      if (pathParts[i].includes('AmazonS3:Uploads')) {
        mongoId = pathParts[i + 1];
        break;
      }
    }

    if (!mongoId) {
      // Fall back to resolving the mongo_id from the database by full path/url.
      // FIX: the query previously used $1 twice while binding TWO parameters;
      // node-postgres rejects that with a parameter-count mismatch, so this
      // fallback always threw. $1 binds "path", $2 binds "url".
      const result = await global.pool.query(`
        SELECT mongo_id, name, type
        FROM uploads
        WHERE path = $1 OR url = $2
        LIMIT 1
      `, [`/ufs/AmazonS3:Uploads/${requestPath}`, `/ufs/AmazonS3:Uploads/${requestPath}`]);

      if (result.rows.length > 0) {
        mongoId = result.rows[0].mongo_id;
      }
    }

    if (!mongoId) {
      return res.status(404).json({ error: 'File not found' });
    }

    // Files are stored on disk under their mongo_id, not their original name.
    const filePath = path.join(__dirname, 'db-convert/db/files/uploads', mongoId);

    // Fetch metadata for the Content-Type / Content-Disposition headers.
    const fileInfo = await global.pool.query(`
      SELECT name, type
      FROM uploads
      WHERE mongo_id = $1
      LIMIT 1
    `, [mongoId]);

    if (fileInfo.rows.length === 0) {
      return res.status(404).json({ error: 'File metadata not found' });
    }

    const { name, type } = fileInfo.rows[0];

    if (type) {
      res.set('Content-Type', type);
    }
    if (name) {
      res.set('Content-Disposition', `inline; filename="${name}"`);
    }

    res.sendFile(filePath, (err) => {
      if (err) {
        console.error('Error serving file:', err);
        if (!res.headersSent) {
          res.status(404).json({ error: 'File not found on disk' });
        }
      }
    });

  } catch (error) {
    console.error('Error serving upload:', error);
    res.status(500).json({ error: 'Server error' });
  }
});
|
||||||
|
|
||||||
|
// GET /files/by-id/:mongoId — serve an upload directly by its mongo_id.
router.get('/files/by-id/:mongoId', async (req, res) => {
  try {
    const { mongoId } = req.params;

    // Look up metadata so the response carries the right headers.
    const fileInfo = await global.pool.query(`
      SELECT name, type
      FROM uploads
      WHERE mongo_id = $1
      LIMIT 1
    `, [mongoId]);

    if (fileInfo.rows.length === 0) {
      return res.status(404).json({ error: 'File not found' });
    }

    const { name, type } = fileInfo.rows[0];
    const filePath = path.join(__dirname, 'db-convert/db/files/uploads', mongoId);

    if (type) {
      res.set('Content-Type', type);
    }
    if (name) {
      res.set('Content-Disposition', `inline; filename="${name}"`);
    }

    res.sendFile(filePath, (err) => {
      if (!err) return;
      console.error('Error serving file:', err);
      if (!res.headersSent) {
        res.status(404).json({ error: 'File not found on disk' });
      }
    });

  } catch (error) {
    console.error('Error serving upload by ID:', error);
    res.status(500).json({ error: 'Server error' });
  }
});
|
||||||
|
|
||||||
|
// GET /avatar/:mongoId — serve a user's avatar. Resolution order:
//   1. users.avataretag -> avatars.etag
//   2. avatars.userid (users without an avataretag)
//   3. direct file lookup by the user's own mongo_id
router.get('/avatar/:mongoId', async (req, res) => {
  // Given a stored avatar path like ".../AmazonS3:Avatars/<id>/...", return
  // the on-disk path for <id>, or null when the pattern is absent.
  // (This logic was previously duplicated verbatim for both lookup methods.)
  const resolveDiskPath = (dbPath) => {
    if (!dbPath) return null;
    const pathParts = dbPath.split('/');
    for (let i = 0; i + 1 < pathParts.length; i++) {
      if (pathParts[i].includes('AmazonS3:Avatars')) {
        const avatarMongoId = pathParts[i + 1];
        const fullPath = path.join(__dirname, 'db-convert/db/files/avatars', avatarMongoId);
        console.log(`[Avatar Debug] Extracted avatar mongo_id: ${avatarMongoId}, full path: ${fullPath}`);
        return fullPath;
      }
    }
    return null;
  };

  try {
    const { mongoId } = req.params;

    console.log(`[Avatar Debug] Looking up avatar for user mongo_id: ${mongoId}`);

    const userResult = await global.pool.query(`
      SELECT avataretag, username FROM users WHERE mongo_id = $1
    `, [mongoId]);

    let avatarPath = null;

    if (userResult.rows.length > 0) {
      const { username, avataretag } = userResult.rows[0];

      // Method 1: resolve via the user's avataretag -> avatars.etag.
      if (avataretag) {
        console.log(`[Avatar Debug] Found user ${username} with avataretag: ${avataretag}`);

        const avatarResult = await global.pool.query(`
          SELECT url, path FROM avatars WHERE etag = $1
        `, [avataretag]);

        if (avatarResult.rows.length > 0) {
          const dbPath = avatarResult.rows[0].path || avatarResult.rows[0].url;
          console.log(`[Avatar Debug] Found avatar record with path: ${dbPath}`);
          avatarPath = resolveDiskPath(dbPath);
        } else {
          console.log(`[Avatar Debug] No avatar record found for etag: ${avataretag}`);
        }
      }

      // Method 2: resolve via avatars.userid (users without an avataretag).
      if (!avatarPath) {
        console.log(`[Avatar Debug] Trying direct userid lookup for user ${username} (${mongoId})`);

        const avatarResult = await global.pool.query(`
          SELECT url, path FROM avatars WHERE userid = $1
        `, [mongoId]);

        if (avatarResult.rows.length > 0) {
          const dbPath = avatarResult.rows[0].path || avatarResult.rows[0].url;
          console.log(`[Avatar Debug] Found avatar record by userid with path: ${dbPath}`);
          avatarPath = resolveDiskPath(dbPath);
        } else {
          console.log(`[Avatar Debug] No avatar record found for userid: ${mongoId}`);
        }
      }
    } else {
      console.log(`[Avatar Debug] No user found for mongo_id: ${mongoId}`);
    }

    // Last resort: assume the file is keyed by the user's own mongo_id.
    if (!avatarPath) {
      avatarPath = path.join(__dirname, 'db-convert/db/files/avatars', mongoId);
      console.log(`[Avatar Debug] Using fallback path: ${avatarPath}`);
    }

    // NOTE(review): content type is hard-coded; stored avatars could be
    // PNG/GIF — confirm against the actual files.
    res.set('Content-Type', 'image/jpeg'); // Most avatars are likely JPEG

    res.sendFile(avatarPath, (err) => {
      if (err) {
        console.log(`[Avatar Debug] Avatar file not found at path: ${avatarPath}, error:`, err.message);
        if (!res.headersSent) {
          res.status(404).json({ error: 'Avatar not found' });
        }
      } else {
        console.log(`[Avatar Debug] Successfully served avatar from: ${avatarPath}`);
      }
    });

  } catch (error) {
    console.error('Error serving avatar:', error);
    res.status(500).json({ error: 'Server error' });
  }
});
|
||||||
|
|
||||||
|
// Serve avatars statically as fallback
|
||||||
|
router.use('/files/avatars', express.static(path.join(__dirname, 'db-convert/db/files/avatars')));
|
||||||
|
|
||||||
|
// GET /users — list all human users (active and inactive) for the
// "view as" dropdown, online users first, then alphabetical.
router.get('/users', async (req, res) => {
  try {
    const result = await global.pool.query(`
      SELECT id, username, name, type, active, status, lastlogin,
             statustext, utcoffset, statusconnection, mongo_id, avataretag
      FROM users
      WHERE type = 'user'
      ORDER BY
        active DESC, -- Active users first
        CASE
          WHEN status = 'online' THEN 1
          WHEN status = 'away' THEN 2
          WHEN status = 'busy' THEN 3
          ELSE 4
        END,
        name ASC
    `);

    res.json({ status: 'success', users: result.rows });
  } catch (error) {
    console.error('Error fetching users:', error);
    res.status(500).json({
      status: 'error',
      error: 'Failed to fetch users',
      details: error.message
    });
  }
});
|
||||||
|
|
||||||
|
// GET /users/:userId/rooms — rooms the user belongs to, with display names
// resolved from the subscription table (DMs show the peer's name).
// Archived/closed rooms are included but sorted to the bottom.
router.get('/users/:userId/rooms', async (req, res) => {
  const { userId } = req.params;

  try {
    // Subscriptions are keyed by the user's mongo_id, so resolve it first.
    const userResult = await global.pool.query(`
      SELECT mongo_id, username FROM users WHERE id = $1
    `, [userId]);

    if (userResult.rows.length === 0) {
      return res.status(404).json({ status: 'error', error: 'User not found' });
    }

    const currentUserMongoId = userResult.rows[0].mongo_id;
    const currentUsername = userResult.rows[0].username;

    const result = await global.pool.query(`
      SELECT DISTINCT
        r.id,
        r.mongo_id as room_mongo_id,
        r.name,
        r.fname,
        r.t as type,
        r.msgs,
        r.lm as last_message_date,
        r.usernames,
        r.uids,
        r.userscount,
        r.description,
        r.teamid,
        r.archived,
        s.open,
        -- Use the subscription's name for direct messages (excludes current user)
        -- For channels/groups, use room's fname or name
        CASE
          WHEN r.t = 'd' THEN COALESCE(s.fname, s.name, 'Unknown User')
          ELSE COALESCE(r.fname, r.name, 'Unnamed Room')
        END as display_name
      FROM room r
      JOIN subscription s ON s.rid = r.mongo_id
      WHERE s.u->>'_id' = $1
      ORDER BY
        s.open DESC NULLS LAST, -- Open rooms first
        r.archived NULLS FIRST, -- Non-archived first (nulls treated as false)
        r.lm DESC NULLS LAST
      LIMIT 50
    `, [currentUserMongoId]);

    // For direct messages, attach the other participants' details.
    const enhancedRooms = await Promise.all(result.rows.map(async (room) => {
      if (room.type !== 'd' || !room.uids) {
        return room;
      }

      const participantResult = await global.pool.query(`
        SELECT u.username, u.name, u.mongo_id, u.avataretag
        FROM users u
        WHERE u.mongo_id = ANY($1::text[])
          AND u.mongo_id != $2
      `, [room.uids, currentUserMongoId]);

      room.participants = participantResult.rows;
      return room;
    }));

    res.json({ status: 'success', rooms: enhancedRooms });
  } catch (error) {
    console.error('Error fetching user rooms:', error);
    res.status(500).json({
      status: 'error',
      error: 'Failed to fetch user rooms',
      details: error.message
    });
  }
});
|
||||||
|
|
||||||
|
// GET /rooms/:roomId — room details plus participants; for direct messages
// the display name is resolved from the viewing user's subscription row
// (current user passed as ?userId=).
router.get('/rooms/:roomId', async (req, res) => {
  const { roomId } = req.params;
  const { userId } = req.query;

  try {
    const result = await global.pool.query(`
      SELECT r.id, r.name, r.fname, r.t as type, r.msgs, r.description,
             r.lm as last_message_date, r.usernames, r.uids, r.userscount, r.teamid
      FROM room r
      WHERE r.id = $1
    `, [roomId]);

    if (result.rows.length === 0) {
      return res.status(404).json({ status: 'error', error: 'Room not found' });
    }

    const room = result.rows[0];

    if (room.type === 'd' && room.uids && userId) {
      // Resolve the viewer's mongo_id (subscriptions are keyed by it).
      const userResult = await global.pool.query(`
        SELECT mongo_id FROM users WHERE id = $1
      `, [userId]);

      if (userResult.rows.length > 0) {
        const currentUserMongoId = userResult.rows[0].mongo_id;

        // subscription.rid references the room's mongo_id, not its id.
        const roomMongoResult = await global.pool.query(`
          SELECT mongo_id FROM room WHERE id = $1
        `, [roomId]);

        if (roomMongoResult.rows.length > 0) {
          const roomMongoId = roomMongoResult.rows[0].mongo_id;

          const subscriptionResult = await global.pool.query(`
            SELECT fname, name FROM subscription
            WHERE rid = $1 AND u->>'_id' = $2
          `, [roomMongoId, currentUserMongoId]);

          if (subscriptionResult.rows.length > 0) {
            const sub = subscriptionResult.rows[0];
            room.display_name = sub.fname || sub.name || 'Unknown User';
          }
        }
      }

      // Attach every participant's basic info.
      const participantResult = await global.pool.query(`
        SELECT username, name
        FROM users
        WHERE mongo_id = ANY($1::text[])
      `, [room.uids]);

      room.participants = participantResult.rows;
    } else {
      // For channels/groups, use room's fname or name
      room.display_name = room.fname || room.name || 'Unnamed Room';
    }

    res.json({ status: 'success', room: room });
  } catch (error) {
    console.error('Error fetching room details:', error);
    res.status(500).json({
      status: 'error',
      error: 'Failed to fetch room details',
      details: error.message
    });
  }
});
|
||||||
|
|
||||||
|
// Get messages for a specific room (fast, without attachments)
|
||||||
|
router.get('/rooms/:roomId/messages', async (req, res) => {
|
||||||
|
const { roomId } = req.params;
|
||||||
|
const { limit = 50, offset = 0, before } = req.query;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Fast query - just get messages without expensive attachment joins
|
||||||
|
let query = `
|
||||||
|
SELECT m.id, m.msg, m.ts, m.u, m._updatedat, m.urls, m.mentions, m.md
|
||||||
|
FROM message m
|
||||||
|
JOIN room r ON m.rid = r.mongo_id
|
||||||
|
WHERE r.id = $1
|
||||||
|
`;
|
||||||
|
|
||||||
|
const params = [roomId];
|
||||||
|
|
||||||
|
if (before) {
|
||||||
|
query += ` AND m.ts < $${params.length + 1}`;
|
||||||
|
params.push(before);
|
||||||
|
}
|
||||||
|
|
||||||
|
query += ` ORDER BY m.ts DESC LIMIT $${params.length + 1} OFFSET $${params.length + 2}`;
|
||||||
|
params.push(limit, offset);
|
||||||
|
|
||||||
|
const result = await global.pool.query(query, params);
|
||||||
|
|
||||||
|
// Add empty attachments array for now - attachments will be loaded separately if needed
|
||||||
|
const messages = result.rows.map(msg => ({
|
||||||
|
...msg,
|
||||||
|
attachments: []
|
||||||
|
}));
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
status: 'success',
|
||||||
|
messages: messages.reverse() // Reverse to show oldest first
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching messages:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
status: 'error',
|
||||||
|
error: 'Failed to fetch messages',
|
||||||
|
details: error.message
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get attachments for specific messages (called separately for performance)
|
||||||
|
router.post('/messages/attachments', async (req, res) => {
|
||||||
|
const { messageIds } = req.body;
|
||||||
|
|
||||||
|
if (!messageIds || !Array.isArray(messageIds) || messageIds.length === 0) {
|
||||||
|
return res.json({ status: 'success', attachments: {} });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get room mongo_id from first message to limit search scope
|
||||||
|
const roomQuery = await global.pool.query(`
|
||||||
|
SELECT r.mongo_id as room_mongo_id
|
||||||
|
FROM message m
|
||||||
|
JOIN room r ON m.rid = r.mongo_id
|
||||||
|
WHERE m.id = $1
|
||||||
|
LIMIT 1
|
||||||
|
`, [messageIds[0]]);
|
||||||
|
|
||||||
|
if (roomQuery.rows.length === 0) {
|
||||||
|
return res.json({ status: 'success', attachments: {} });
|
||||||
|
}
|
||||||
|
|
||||||
|
const roomMongoId = roomQuery.rows[0].room_mongo_id;
|
||||||
|
|
||||||
|
// Get messages and their upload timestamps
|
||||||
|
const messagesQuery = await global.pool.query(`
|
||||||
|
SELECT m.id, m.ts, m.u->>'_id' as user_id
|
||||||
|
FROM message m
|
||||||
|
WHERE m.id = ANY($1::int[])
|
||||||
|
`, [messageIds]);
|
||||||
|
|
||||||
|
if (messagesQuery.rows.length === 0) {
|
||||||
|
return res.json({ status: 'success', attachments: {} });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a map of user_id -> array of message timestamps for efficient lookup
|
||||||
|
const userTimeMap = {};
|
||||||
|
const messageMap = {};
|
||||||
|
messagesQuery.rows.forEach(msg => {
|
||||||
|
if (!userTimeMap[msg.user_id]) {
|
||||||
|
userTimeMap[msg.user_id] = [];
|
||||||
|
}
|
||||||
|
userTimeMap[msg.user_id].push(msg.ts);
|
||||||
|
messageMap[msg.id] = { ts: msg.ts, user_id: msg.user_id };
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get attachments for this room and these users
|
||||||
|
const uploadsQuery = await global.pool.query(`
|
||||||
|
SELECT mongo_id, name, size, type, url, path, typegroup, identify,
|
||||||
|
userid, uploadedat
|
||||||
|
FROM uploads
|
||||||
|
WHERE rid = $1
|
||||||
|
AND userid = ANY($2::text[])
|
||||||
|
ORDER BY uploadedat
|
||||||
|
`, [roomMongoId, Object.keys(userTimeMap)]);
|
||||||
|
|
||||||
|
// Match attachments to messages based on timestamp proximity (within 5 minutes)
|
||||||
|
const attachmentsByMessage = {};
|
||||||
|
|
||||||
|
uploadsQuery.rows.forEach(upload => {
|
||||||
|
const uploadTime = new Date(upload.uploadedat).getTime();
|
||||||
|
|
||||||
|
// Find the closest message from this user within 5 minutes
|
||||||
|
let closestMessageId = null;
|
||||||
|
let closestTimeDiff = Infinity;
|
||||||
|
|
||||||
|
Object.entries(messageMap).forEach(([msgId, msgData]) => {
|
||||||
|
if (msgData.user_id === upload.userid) {
|
||||||
|
const msgTime = new Date(msgData.ts).getTime();
|
||||||
|
const timeDiff = Math.abs(uploadTime - msgTime);
|
||||||
|
|
||||||
|
if (timeDiff < 300000 && timeDiff < closestTimeDiff) { // 5 minutes = 300000ms
|
||||||
|
closestMessageId = msgId;
|
||||||
|
closestTimeDiff = timeDiff;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (closestMessageId) {
|
||||||
|
if (!attachmentsByMessage[closestMessageId]) {
|
||||||
|
attachmentsByMessage[closestMessageId] = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
attachmentsByMessage[closestMessageId].push({
|
||||||
|
id: upload.id,
|
||||||
|
mongo_id: upload.mongo_id,
|
||||||
|
name: upload.name,
|
||||||
|
size: upload.size,
|
||||||
|
type: upload.type,
|
||||||
|
url: upload.url,
|
||||||
|
path: upload.path,
|
||||||
|
typegroup: upload.typegroup,
|
||||||
|
identify: upload.identify
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
status: 'success',
|
||||||
|
attachments: attachmentsByMessage
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching message attachments:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
status: 'error',
|
||||||
|
error: 'Failed to fetch attachments',
|
||||||
|
details: error.message
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Search messages in accessible rooms for a user
|
||||||
|
router.get('/users/:userId/search', async (req, res) => {
|
||||||
|
const { userId } = req.params;
|
||||||
|
const { q, limit = 20 } = req.query;
|
||||||
|
|
||||||
|
if (!q || q.length < 2) {
|
||||||
|
return res.status(400).json({
|
||||||
|
status: 'error',
|
||||||
|
error: 'Search query must be at least 2 characters'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const userResult = await global.pool.query(`
|
||||||
|
SELECT mongo_id FROM users WHERE id = $1
|
||||||
|
`, [userId]);
|
||||||
|
|
||||||
|
if (userResult.rows.length === 0) {
|
||||||
|
return res.status(404).json({
|
||||||
|
status: 'error',
|
||||||
|
error: 'User not found'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentUserMongoId = userResult.rows[0].mongo_id;
|
||||||
|
|
||||||
|
const result = await global.pool.query(`
|
||||||
|
SELECT m.id, m.msg, m.ts, m.u, r.id as room_id, r.name as room_name, r.fname as room_fname, r.t as room_type
|
||||||
|
FROM message m
|
||||||
|
JOIN room r ON m.rid = r.mongo_id
|
||||||
|
JOIN subscription s ON s.rid = r.mongo_id AND s.u->>'_id' = $1
|
||||||
|
WHERE m.msg ILIKE $2
|
||||||
|
AND r.archived IS NOT TRUE
|
||||||
|
ORDER BY m.ts DESC
|
||||||
|
LIMIT $3
|
||||||
|
`, [currentUserMongoId, `%${q}%`, limit]);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
status: 'success',
|
||||||
|
results: result.rows
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error searching messages:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
status: 'error',
|
||||||
|
error: 'Failed to search messages',
|
||||||
|
details: error.message
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
83
inventory-server/chat/server.js
Normal file
83
inventory-server/chat/server.js
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
require('dotenv').config({ path: '../.env' });
|
||||||
|
const express = require('express');
|
||||||
|
const cors = require('cors');
|
||||||
|
const { Pool } = require('pg');
|
||||||
|
const morgan = require('morgan');
|
||||||
|
const chatRoutes = require('./routes');
|
||||||
|
|
||||||
|
// Log startup configuration
|
||||||
|
console.log('Starting chat server with config:', {
|
||||||
|
host: process.env.CHAT_DB_HOST,
|
||||||
|
user: process.env.CHAT_DB_USER,
|
||||||
|
database: process.env.CHAT_DB_NAME || 'rocketchat_converted',
|
||||||
|
port: process.env.CHAT_DB_PORT,
|
||||||
|
chat_port: process.env.CHAT_PORT || 3014
|
||||||
|
});
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const port = process.env.CHAT_PORT || 3014;
|
||||||
|
|
||||||
|
// Database configuration for rocketchat_converted database
|
||||||
|
const pool = new Pool({
|
||||||
|
host: process.env.CHAT_DB_HOST,
|
||||||
|
user: process.env.CHAT_DB_USER,
|
||||||
|
password: process.env.CHAT_DB_PASSWORD,
|
||||||
|
database: process.env.CHAT_DB_NAME || 'rocketchat_converted',
|
||||||
|
port: process.env.CHAT_DB_PORT,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Make pool available globally
|
||||||
|
global.pool = pool;
|
||||||
|
|
||||||
|
// Middleware
|
||||||
|
app.use(express.json());
|
||||||
|
app.use(morgan('combined'));
|
||||||
|
app.use(cors({
|
||||||
|
origin: ['http://localhost:5175', 'http://localhost:5174', 'https://inventory.kent.pw'],
|
||||||
|
credentials: true
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Test database connection endpoint
|
||||||
|
app.get('/test-db', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const result = await pool.query('SELECT COUNT(*) as user_count FROM users WHERE active = true');
|
||||||
|
const messageResult = await pool.query('SELECT COUNT(*) as message_count FROM message');
|
||||||
|
const roomResult = await pool.query('SELECT COUNT(*) as room_count FROM room');
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
status: 'success',
|
||||||
|
database: 'rocketchat_converted',
|
||||||
|
stats: {
|
||||||
|
active_users: parseInt(result.rows[0].user_count),
|
||||||
|
total_messages: parseInt(messageResult.rows[0].message_count),
|
||||||
|
total_rooms: parseInt(roomResult.rows[0].room_count)
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Database test error:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
status: 'error',
|
||||||
|
error: 'Database connection failed',
|
||||||
|
details: error.message
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Mount all routes from routes.js
|
||||||
|
app.use('/', chatRoutes);
|
||||||
|
|
||||||
|
// Health check endpoint
|
||||||
|
app.get('/health', (req, res) => {
|
||||||
|
res.json({ status: 'healthy' });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Error handling middleware
|
||||||
|
app.use((err, req, res, next) => {
|
||||||
|
console.error(err.stack);
|
||||||
|
res.status(500).json({ error: 'Something broke!' });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
app.listen(port, () => {
|
||||||
|
console.log(`Chat server running on port ${port}`);
|
||||||
|
});
|
||||||
20
inventory-server/dashboard/.env-future
Normal file
20
inventory-server/dashboard/.env-future
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Caching Server Configuration
|
||||||
|
PORT=3010
|
||||||
|
NODE_ENV=production
|
||||||
|
|
||||||
|
# Database Configuration
|
||||||
|
MONGODB_URI=mongodb://dashboard_user:WDRFWiGXEeaC6aAyUKuT@localhost:27017/dashboard?authSource=dashboard
|
||||||
|
REDIS_URL=redis://:Wgj32YXxxVLtPZoVzUnP@localhost:6379
|
||||||
|
|
||||||
|
# Gorgias
|
||||||
|
GORGIAS_API_USERNAME=matt@acherryontop.com
|
||||||
|
GORGIAS_API_PASSWORD=d2ed0d23d2a7bf11a633a12fb260769f4e4a970d440693e7d64b8d2223fa6503
|
||||||
|
|
||||||
|
# GA4 credentials
|
||||||
|
GA_PROPERTY_ID=281045851
|
||||||
|
GOOGLE_APPLICATION_CREDENTIALS_JSON={"type": "service_account","project_id": "acot-stats","private_key_id": "259d1fd9864efbfa38b8ba02fdd74dc008ace3c5","private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC5Y6foai8WF98k\nIA0yLn94Y3lmDYlyvI9xL2YqSZSyvgK35wdWRTIaEvHKdiUWuYi3ZPdkYmz1OYiV\njVfR2g+mFpA7MI/JMwyGWwjnV4WW2q6INfgi/PvHlbP3LyyQo0B8CvAY0CHqrpDs\nlJQhAkqmteU24dqcdZoV3vM8JMsDiXm44DqwXsEfWibKv4i0mWNkwiEQr0yImHwb\nbjgclwVLLi5kdM2+49PXr47LCODdL+xmX0uSdgSG6XYqEIVsEOXIUJKzqUe036b/\nEFQ0BxWdJBWs/MYOapn/NNv+Mts+am2ipUuIcgPbOut4xa2Fkky93WnJf0tB+VJP\njFnyZJhdAgMBAAECggEAC980Cp/4zvSNZMNWr6l8ST8u2thavnRmcoGYtx7ffQjK\nT3Dl2TefgJLzqpr2lLt3OVint7p5LsUAmE8lBLpu+RxbH9HkIKbPvQTfD5gyZQQx\nBruqCGzkn2st9fzZNj6gwQYe9P/TGYkUnR8wqI0nLwDZTQful3QNKixiWC4lAAoK\nqdd6H++pqjVUiTqgFwFD3zBAhO0Lp8m/c5vTRT5kxi0wCTK66FaaGLr2OwZHcohp\nE8rEcTZ5kaJzBwqEz522R6ufQqN1Swoq4K6Ul3aAc59539VdrLNs++/eRH38MMVq\n5UTwBrH+zIkXIYv4mtGpR1NWGO2bZ652GzGXNEXcQQKBgQD9WsMmioIeWR9P9I0r\nIY+yyxz1EyscutUtnOtROT36OxokrzQaAKDz/OC3jVnhZSkzG6RcmmK/AJrcU+2m\n1L4mZGfF3DdeTqtK/KkNzGs9yRPDkbb/MF0wgtcvfE8tJH/suiDJKQNsjeaQIQW3\n4NvDxs0w60m9r9tk1CQau94ovQKBgQC7UzeA0mDSxIB5agGbvnzaJJTvAFvnCvhz\nu3ZakTlNecAHu4eOMc0+OCHFPLJlLL4b0oraOxZIszX9BTlgcstBmTUk03TibNsS\nsDiImHFC4hE5x6EPdifnkVFUXPMZ/eF0mHUPBEn41ipw1hoLfl6W+aYW9QUxBMWA\nzdMH4rg4IQKBgQCFcMaUiCNchKhfXnj0HKspCp3n3v64FReu/JVcpH+mSnbMl5Mj\nlu0vVSOuyb5rXvLCPm7lb1NPMqxeG75yPl8grYWSyxhGjbzetBD+eYqKclv8h8UQ\nx5JtuJxKIHk7V5whPS+DhByPknW7uAjg/ogBp7XvbB3c0MEHbEzP3991KQKBgC+a\n610Kmd6WX4v7e6Mn2rTZXRwL/E8QA6nttxs3Etf0m++bIczqLR2lyDdGwJNjtoB9\nlhn1sCkTmiHOBRHUuoDWPaI5NtggD+CE9ikIjKgRqY0EhZLXVTbNQFzvLjypv3UR\nFZaWYXIigzCfyIipOcKmeSYWaJZXfxXHuNylKmnhAoGAFa84AuOOGUr+pEvtUzIr\nvBKu1mnQbbsLEhgf3Tw88K3sO5OlguAwBEvD4eitj/aU5u2vJJhFa67cuERLsZru\n0sjtQwP6CJbWF4uaH0Hso4KQvnwl4BfdKwUncqoKtHrQiuGMvr5P5G941+Ax8brE\nJlC2e/RPUQKxScpK3nNK9mc=\n-----END PRIVATE KEY-----\n","client_email": "matt-dashboard@acot-stats.iam.gserviceaccount.com","client_id": 
"106112731322970982546","auth_uri": "https://accounts.google.com/o/oauth2/auth","token_uri": "https://oauth2.googleapis.com/token","auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs","client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/matt-dashboard%40acot-stats.iam.gserviceaccount.com","universe_domain": "googleapis.com"}
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
LOG_LEVEL=info
|
||||||
|
LOG_MAX_SIZE=10m
|
||||||
|
LOG_MAX_FILES=5
|
||||||
205
inventory-server/dashboard/acot-server/README.md
Normal file
205
inventory-server/dashboard/acot-server/README.md
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
# ACOT Server
|
||||||
|
|
||||||
|
This server replaces the Klaviyo integration with direct database queries to the production MySQL database via SSH tunnel. It provides seamless API compatibility for all frontend components without requiring any frontend changes.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
1. **Environment Variables**: Copy `.env.example` to `.env` and configure:
|
||||||
|
```
|
||||||
|
DB_HOST=localhost
|
||||||
|
DB_PORT=3306
|
||||||
|
DB_USER=your_db_user
|
||||||
|
DB_PASSWORD=your_db_password
|
||||||
|
DB_NAME=your_db_name
|
||||||
|
PORT=3007
|
||||||
|
NODE_ENV=development
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **SSH Tunnel**: Ensure your SSH tunnel to the production database is running on localhost:3306.
|
||||||
|
|
||||||
|
3. **Install Dependencies**:
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Start Server**:
|
||||||
|
```bash
|
||||||
|
npm start
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Endpoints
|
||||||
|
|
||||||
|
All endpoints provide exact API compatibility with the previous Klaviyo implementation:
|
||||||
|
|
||||||
|
### Main Statistics
|
||||||
|
- `GET /api/acot/events/stats` - Complete statistics dashboard data
|
||||||
|
- Query params: `timeRange` (today, yesterday, thisWeek, lastWeek, thisMonth, lastMonth, last7days, last30days, last90days) or `startDate`/`endDate` for custom ranges
|
||||||
|
- Returns: Revenue, orders, AOV, shipping data, order types, brands/categories, refunds, cancellations, best day, peak hour, order ranges, period progress, projections
|
||||||
|
|
||||||
|
### Daily Details
|
||||||
|
- `GET /api/acot/events/stats/details` - Daily breakdown with previous period comparisons
|
||||||
|
- Query params: `timeRange`, `metric` (revenue, orders, average_order, etc.), `daily=true`
|
||||||
|
- Returns: Array of daily data points with trend comparisons
|
||||||
|
|
||||||
|
### Products
|
||||||
|
- `GET /api/acot/events/products` - Top products with sales data
|
||||||
|
- Query params: `timeRange`
|
||||||
|
- Returns: Product list with images, sales quantities, revenue, and order counts
|
||||||
|
|
||||||
|
### Projections
|
||||||
|
- `GET /api/acot/events/projection` - Smart revenue projections for incomplete periods
|
||||||
|
- Query params: `timeRange`
|
||||||
|
- Returns: Projected revenue with confidence levels based on historical patterns
|
||||||
|
|
||||||
|
### Health Check
|
||||||
|
- `GET /api/acot/test` - Server health and database connectivity test
|
||||||
|
|
||||||
|
## Database Schema
|
||||||
|
|
||||||
|
The server queries the following main tables:
|
||||||
|
|
||||||
|
### Orders (`_order`)
|
||||||
|
- **Key fields**: `order_id`, `date_placed`, `summary_total`, `order_status`, `ship_method_selected`, `stats_waiting_preorder`
|
||||||
|
- **Valid orders**: `order_status > 15`
|
||||||
|
- **Cancelled orders**: `order_status = 15`
|
||||||
|
- **Shipped orders**: `order_status IN (100, 92)`
|
||||||
|
- **Pre-orders**: `stats_waiting_preorder > 0`
|
||||||
|
- **Local pickup**: `ship_method_selected = 'localpickup'`
|
||||||
|
- **On-hold orders**: `ship_method_selected = 'holdit'`
|
||||||
|
|
||||||
|
### Order Items (`order_items`)
|
||||||
|
- **Fields**: `order_id`, `prod_pid`, `qty_ordered`, `prod_price`
|
||||||
|
- **Purpose**: Links orders to products for detailed analysis
|
||||||
|
|
||||||
|
### Products (`products`)
|
||||||
|
- **Fields**: `pid`, `description` (product name), `company`
|
||||||
|
- **Purpose**: Product information and brand data
|
||||||
|
|
||||||
|
### Product Images (`product_images`)
|
||||||
|
- **Fields**: `pid`, `iid`, `order` (priority)
|
||||||
|
- **Primary image**: `order = 255` (highest priority)
|
||||||
|
- **Image URL generation**: `https://sbing.com/i/products/0000/{prefix}/{pid}-{type}-{iid}.jpg`
|
||||||
|
|
||||||
|
### Payments (`order_payment`)
|
||||||
|
- **Refunds**: `payment_amount < 0`
|
||||||
|
- **Purpose**: Track refund amounts and counts
|
||||||
|
|
||||||
|
## Business Logic
|
||||||
|
|
||||||
|
### Time Handling
|
||||||
|
- **Timezone**: All calculations in UTC-5 (Eastern Time)
|
||||||
|
- **Business Day**: 1 AM - 12:59 AM Eastern (25-hour business day)
|
||||||
|
- **Format**: MySQL DATETIME format (YYYY-MM-DD HH:MM:SS)
|
||||||
|
- **Period Boundaries**: Calculated using `timeUtils.js` for consistent time range handling
|
||||||
|
|
||||||
|
### Order Processing
|
||||||
|
- **Revenue Calculation**: Only includes orders with `order_status > 15`
|
||||||
|
- **Order Types**:
|
||||||
|
- Pre-orders: `stats_waiting_preorder > 0`
|
||||||
|
- Local pickup: `ship_method_selected = 'localpickup'`
|
||||||
|
- On-hold: `ship_method_selected = 'holdit'`
|
||||||
|
- **Shipping Methods**: Mapped to friendly names (e.g., `usps_ground_advantage` → "USPS Ground Advantage")
|
||||||
|
|
||||||
|
### Projections
|
||||||
|
- **Period Progress**: Calculated based on current time within the selected period
|
||||||
|
- **Simple Projection**: Linear extrapolation based on current progress
|
||||||
|
- **Smart Projection**: Uses historical data patterns for more accurate forecasting
|
||||||
|
- **Confidence Levels**: Based on data consistency and historical accuracy
|
||||||
|
|
||||||
|
### Image URL Generation
|
||||||
|
- **Pattern**: `https://sbing.com/i/products/0000/{prefix}/{pid}-{type}-{iid}.jpg`
|
||||||
|
- **Prefix**: First 2 digits of product ID
|
||||||
|
- **Type**: "main" for primary images
|
||||||
|
- **Fallback**: Uses primary image (order=255) when available
|
||||||
|
|
||||||
|
## Frontend Integration
|
||||||
|
|
||||||
|
### Service Layer (`services/acotService.js`)
|
||||||
|
- **Purpose**: Replaces direct Klaviyo API calls with acot-server calls
|
||||||
|
- **Methods**: `getStats()`, `getStatsDetails()`, `getProducts()`, `getProjection()`
|
||||||
|
- **Logging**: Axios interceptors for request/response logging
|
||||||
|
- **Environment**: Automatic URL handling (proxy in dev, direct in production)
|
||||||
|
|
||||||
|
### Component Updates
|
||||||
|
All 5 main components updated to use `acotService`:
|
||||||
|
- **StatCards.jsx**: Main dashboard statistics
|
||||||
|
- **MiniStatCards.jsx**: Compact statistics view
|
||||||
|
- **SalesChart.jsx**: Revenue and order trends
|
||||||
|
- **MiniSalesChart.jsx**: Compact chart view
|
||||||
|
- **ProductGrid.jsx**: Top products table
|
||||||
|
|
||||||
|
### Proxy Configuration (`vite.config.js`)
|
||||||
|
```javascript
|
||||||
|
'/api/acot': {
|
||||||
|
target: 'http://localhost:3007',
|
||||||
|
changeOrigin: true,
|
||||||
|
secure: false
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Features
|
||||||
|
|
||||||
|
### Complete Business Intelligence
|
||||||
|
- **Revenue Analytics**: Total revenue, trends, projections
|
||||||
|
- **Order Analysis**: Counts, types, status tracking
|
||||||
|
- **Product Performance**: Top sellers, revenue contribution
|
||||||
|
- **Shipping Intelligence**: Methods, locations, distribution
|
||||||
|
- **Customer Insights**: Order value ranges, patterns
|
||||||
|
- **Operational Metrics**: Refunds, cancellations, peak hours
|
||||||
|
|
||||||
|
### Performance Optimizations
|
||||||
|
- **Connection Pooling**: Efficient database connection management
|
||||||
|
- **Query Optimization**: Indexed queries with proper WHERE clauses
|
||||||
|
- **Caching Strategy**: Frontend caching for detail views
|
||||||
|
- **Batch Processing**: Efficient data aggregation
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
- **Database Connectivity**: Graceful handling of connection issues
|
||||||
|
- **Query Failures**: Detailed error logging and user-friendly messages
|
||||||
|
- **Data Validation**: Input sanitization and validation
|
||||||
|
- **Fallback Mechanisms**: Default values for missing data
|
||||||
|
|
||||||
|
## Simplified Elements
|
||||||
|
|
||||||
|
Due to database complexity, some features are simplified:
|
||||||
|
- **Brands**: Shows "Various Brands" (companies table structure complex)
|
||||||
|
- **Categories**: Shows "General" (category relationships complex)
|
||||||
|
|
||||||
|
These can be enhanced in future iterations with proper category mapping.
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
Test the server functionality:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Health check
|
||||||
|
curl http://localhost:3007/api/acot/test
|
||||||
|
|
||||||
|
# Today's stats
|
||||||
|
curl http://localhost:3007/api/acot/events/stats?timeRange=today
|
||||||
|
|
||||||
|
# Last 30 days with details
|
||||||
|
curl http://localhost:3007/api/acot/events/stats/details?timeRange=last30days&daily=true
|
||||||
|
|
||||||
|
# Top products
|
||||||
|
curl http://localhost:3007/api/acot/events/products?timeRange=thisWeek
|
||||||
|
|
||||||
|
# Revenue projection
|
||||||
|
curl http://localhost:3007/api/acot/events/projection?timeRange=today
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Notes
|
||||||
|
|
||||||
|
- **No Frontend Changes**: Complete drop-in replacement for Klaviyo
|
||||||
|
- **API Compatibility**: Maintains exact response structure
|
||||||
|
- **Business Logic**: Implements all complex e-commerce calculations
|
||||||
|
- **Scalability**: Designed for production workloads
|
||||||
|
- **Maintainability**: Well-documented code with clear separation of concerns
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
- Enhanced category and brand mapping
|
||||||
|
- Real-time notifications for significant events
|
||||||
|
- Advanced analytics and forecasting
|
||||||
|
- Customer segmentation analysis
|
||||||
|
- Inventory integration
|
||||||
297
inventory-server/dashboard/acot-server/db/connection.js
Normal file
297
inventory-server/dashboard/acot-server/db/connection.js
Normal file
@@ -0,0 +1,297 @@
|
|||||||
|
const { Client } = require('ssh2');
|
||||||
|
const mysql = require('mysql2/promise');
|
||||||
|
const fs = require('fs');
|
||||||
|
|
||||||
|
// Connection pool configuration
|
||||||
|
const connectionPool = {
|
||||||
|
connections: [],
|
||||||
|
maxConnections: 20,
|
||||||
|
currentConnections: 0,
|
||||||
|
pendingRequests: [],
|
||||||
|
// Cache for query results (key: query string, value: {data, timestamp})
|
||||||
|
queryCache: new Map(),
|
||||||
|
// Cache duration for different query types in milliseconds
|
||||||
|
cacheDuration: {
|
||||||
|
'stats': 60 * 1000, // 1 minute for stats
|
||||||
|
'products': 5 * 60 * 1000, // 5 minutes for products
|
||||||
|
'orders': 60 * 1000, // 1 minute for orders
|
||||||
|
'default': 60 * 1000 // 1 minute default
|
||||||
|
},
|
||||||
|
// Circuit breaker state
|
||||||
|
circuitBreaker: {
|
||||||
|
failures: 0,
|
||||||
|
lastFailure: 0,
|
||||||
|
isOpen: false,
|
||||||
|
threshold: 5,
|
||||||
|
timeout: 30000 // 30 seconds
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a database connection from the pool
|
||||||
|
* @returns {Promise<{connection: object, release: function}>} The database connection and release function
|
||||||
|
*/
|
||||||
|
async function getDbConnection() {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
// Check circuit breaker
|
||||||
|
const now = Date.now();
|
||||||
|
if (connectionPool.circuitBreaker.isOpen) {
|
||||||
|
if (now - connectionPool.circuitBreaker.lastFailure > connectionPool.circuitBreaker.timeout) {
|
||||||
|
// Reset circuit breaker
|
||||||
|
connectionPool.circuitBreaker.isOpen = false;
|
||||||
|
connectionPool.circuitBreaker.failures = 0;
|
||||||
|
console.log('Circuit breaker reset');
|
||||||
|
} else {
|
||||||
|
reject(new Error('Circuit breaker is open - too many connection failures'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if there's an available connection in the pool
|
||||||
|
if (connectionPool.connections.length > 0) {
|
||||||
|
const conn = connectionPool.connections.pop();
|
||||||
|
console.log(`Using pooled connection. Pool size: ${connectionPool.connections.length}`);
|
||||||
|
resolve({
|
||||||
|
connection: conn.connection,
|
||||||
|
release: () => releaseConnection(conn)
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we haven't reached max connections, create a new one
|
||||||
|
if (connectionPool.currentConnections < connectionPool.maxConnections) {
|
||||||
|
try {
|
||||||
|
console.log(`Creating new connection. Current: ${connectionPool.currentConnections}/${connectionPool.maxConnections}`);
|
||||||
|
connectionPool.currentConnections++;
|
||||||
|
|
||||||
|
const tunnel = await setupSshTunnel();
|
||||||
|
const { ssh, stream, dbConfig } = tunnel;
|
||||||
|
|
||||||
|
const connection = await mysql.createConnection({
|
||||||
|
...dbConfig,
|
||||||
|
stream
|
||||||
|
});
|
||||||
|
|
||||||
|
const conn = { ssh, connection, inUse: true, created: Date.now() };
|
||||||
|
|
||||||
|
console.log('Database connection established');
|
||||||
|
|
||||||
|
// Reset circuit breaker on successful connection
|
||||||
|
if (connectionPool.circuitBreaker.failures > 0) {
|
||||||
|
connectionPool.circuitBreaker.failures = 0;
|
||||||
|
connectionPool.circuitBreaker.isOpen = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
resolve({
|
||||||
|
connection: conn.connection,
|
||||||
|
release: () => releaseConnection(conn)
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
connectionPool.currentConnections--;
|
||||||
|
|
||||||
|
// Track circuit breaker failures
|
||||||
|
connectionPool.circuitBreaker.failures++;
|
||||||
|
connectionPool.circuitBreaker.lastFailure = Date.now();
|
||||||
|
|
||||||
|
if (connectionPool.circuitBreaker.failures >= connectionPool.circuitBreaker.threshold) {
|
||||||
|
connectionPool.circuitBreaker.isOpen = true;
|
||||||
|
console.log(`Circuit breaker opened after ${connectionPool.circuitBreaker.failures} failures`);
|
||||||
|
}
|
||||||
|
|
||||||
|
reject(error);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pool is full, queue the request with timeout
|
||||||
|
console.log('Connection pool full, queuing request...');
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
// Remove from queue if still there
|
||||||
|
const index = connectionPool.pendingRequests.findIndex(req => req.resolve === resolve);
|
||||||
|
if (index !== -1) {
|
||||||
|
connectionPool.pendingRequests.splice(index, 1);
|
||||||
|
reject(new Error('Connection pool queue timeout after 15 seconds'));
|
||||||
|
}
|
||||||
|
}, 15000);
|
||||||
|
|
||||||
|
connectionPool.pendingRequests.push({
|
||||||
|
resolve,
|
||||||
|
reject,
|
||||||
|
timeoutId,
|
||||||
|
timestamp: Date.now()
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Release a connection back to the pool
|
||||||
|
*/
|
||||||
|
function releaseConnection(conn) {
|
||||||
|
conn.inUse = false;
|
||||||
|
|
||||||
|
// Check if there are pending requests
|
||||||
|
if (connectionPool.pendingRequests.length > 0) {
|
||||||
|
const { resolve, timeoutId } = connectionPool.pendingRequests.shift();
|
||||||
|
|
||||||
|
// Clear the timeout since we're serving the request
|
||||||
|
if (timeoutId) {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
}
|
||||||
|
|
||||||
|
conn.inUse = true;
|
||||||
|
console.log(`Serving queued request. Queue length: ${connectionPool.pendingRequests.length}`);
|
||||||
|
resolve({
|
||||||
|
connection: conn.connection,
|
||||||
|
release: () => releaseConnection(conn)
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Return to pool
|
||||||
|
connectionPool.connections.push(conn);
|
||||||
|
console.log(`Connection returned to pool. Pool size: ${connectionPool.connections.length}, Active: ${connectionPool.currentConnections}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Return a cached result for `cacheKey` if still fresh, otherwise run
 * `queryFn`, cache its result, and return it.
 * @param {string} cacheKey - Unique key identifying the query
 * @param {string} queryType - Query category used to pick a cache TTL
 * @param {Function} queryFn - Async function executed on a cache miss
 * @returns {Promise<any>} Cached or freshly computed query result
 */
async function getCachedQuery(cacheKey, queryType, queryFn) {
  // TTL is per query type, with a pool-wide default fallback.
  const ttl = connectionPool.cacheDuration[queryType] || connectionPool.cacheDuration.default;

  const entry = connectionPool.queryCache.get(cacheKey);
  const now = Date.now();

  const isFresh = entry && now - entry.timestamp < ttl;
  if (isFresh) {
    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
    return entry.data;
  }

  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
  const data = await queryFn();

  // The stored timestamp is taken before queryFn runs (lookup time,
  // not completion time), matching the original behavior.
  connectionPool.queryCache.set(cacheKey, { data, timestamp: now });

  return data;
}
|
||||||
|
|
||||||
|
/**
 * Setup SSH tunnel to production database.
 * @private - Should only be used by getDbConnection
 * @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
 *   The live SSH client, the forwarded stream to the DB port, and the
 *   MySQL connection settings to use over that stream.
 */
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };

  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z'
  };

  return new Promise((resolve, reject) => {
    const ssh = new Client();

    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });

    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          // BUG FIX: the original fell through after reject(err) and also
          // called resolve() with an undefined stream. Return early and
          // tear the tunnel down instead.
          if (err) {
            ssh.end();
            reject(err);
            return;
          }
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
|
||||||
|
|
||||||
|
/**
 * Drop cached query results.
 * @param {string} [cacheKey] - When given, only that entry is removed;
 *   otherwise the whole cache is emptied.
 */
function clearQueryCache(cacheKey) {
  if (!cacheKey) {
    connectionPool.queryCache.clear();
    console.log('Cleared all query cache');
    return;
  }
  connectionPool.queryCache.delete(cacheKey);
  console.log(`Cleared cache for key: ${cacheKey}`);
}
|
||||||
|
|
||||||
|
/**
 * Force-close every pooled connection and reset the pool state.
 * Useful for server shutdown or a manual connection reset.
 * NOTE(review): queued pendingRequests are discarded without being
 * settled here (their stored shape exposes no reject handle); their
 * callers only settle via their own timeouts — confirm intended.
 */
async function closeAllConnections() {
  // Tear down each pooled MySQL connection and its SSH tunnel.
  for (const pooled of connectionPool.connections) {
    try {
      await pooled.connection.end();
      pooled.ssh.end();
      console.log('Closed pooled connection');
    } catch (error) {
      console.error('Error closing pooled connection:', error);
    }
  }

  // Start from a clean slate: no connections, no queue, no cache.
  connectionPool.connections = [];
  connectionPool.currentConnections = 0;
  connectionPool.pendingRequests = [];
  connectionPool.queryCache.clear();

  console.log('All connections closed and pool reset');
}
|
||||||
|
|
||||||
|
/**
 * Snapshot of connection-pool state for debugging/monitoring.
 * @returns {object} Pool counters plus per-queued-request wait info.
 */
function getPoolStatus() {
  const nowMs = Date.now();
  const queuedRequests = connectionPool.pendingRequests.map((pending) => ({
    waitTime: nowMs - pending.timestamp,
    hasTimeout: Boolean(pending.timeoutId)
  }));

  return {
    poolSize: connectionPool.connections.length,
    activeConnections: connectionPool.currentConnections,
    maxConnections: connectionPool.maxConnections,
    pendingRequests: connectionPool.pendingRequests.length,
    cacheSize: connectionPool.queryCache.size,
    queuedRequests
  };
}
|
||||||
|
|
||||||
|
// Public API of the connection module: pooled DB access, query caching,
// and pool lifecycle/diagnostics helpers.
module.exports = {
  getDbConnection,
  getCachedQuery,
  clearQueryCache,
  closeAllConnections,
  getPoolStatus
};
|
||||||
1553
inventory-server/dashboard/acot-server/package-lock.json
generated
Normal file
1553
inventory-server/dashboard/acot-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
23
inventory-server/dashboard/acot-server/package.json
Normal file
23
inventory-server/dashboard/acot-server/package.json
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"name": "acot-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "A Cherry On Top production database server",
|
||||||
|
"main": "server.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.3.1",
|
||||||
|
"morgan": "^1.10.0",
|
||||||
|
"ssh2": "^1.14.0",
|
||||||
|
"mysql2": "^3.6.5",
|
||||||
|
"compression": "^1.7.4",
|
||||||
|
"luxon": "^3.5.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^3.0.1"
|
||||||
|
}
|
||||||
|
}
|
||||||
495
inventory-server/dashboard/acot-server/routes/discounts.js
Normal file
495
inventory-server/dashboard/acot-server/routes/discounts.js
Normal file
@@ -0,0 +1,495 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const { DateTime } = require('luxon');
|
||||||
|
const { getDbConnection } = require('../db/connection');
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Upper bounds (in dollars) of the order-subtotal buckets used to segment
// orders; each bucket covers (previous bound, bound].
const RANGE_BOUNDS = [
  10, 20, 30, 40, 50, 60, 70, 80, 90,
  100, 110, 120, 130, 140, 150, 160, 170, 180, 190, 200,
  300, 400, 500, 1000, 1500, 2000
];

// Fallback bucket key used for result rows that carry no bucket_key
// (historically the open-ended "2000+" bucket).
const FINAL_BUCKET_KEY = 'PLUS';
|
||||||
|
|
||||||
|
/**
 * Build bucket definitions from RANGE_BOUNDS. Each bucket carries a
 * zero-padded key (matches the SQL CASE labels), a display label, and
 * its numeric bounds. There is deliberately no open-ended "2000+"
 * bucket — orders above the last bound fall into the final bucket.
 */
function buildRangeDefinitions() {
  return RANGE_BOUNDS.map((upper, index) => {
    const lower = index === 0 ? 0 : RANGE_BOUNDS[index - 1];
    return {
      min: lower,
      max: upper,
      label: `$${lower.toLocaleString()} - $${upper.toLocaleString()}`,
      key: upper.toString().padStart(5, '0'),
      sort: upper
    };
  });
}

const RANGE_DEFINITIONS = buildRangeDefinitions();
|
||||||
|
|
||||||
|
// SQL CASE expression mapping o.summary_subtotal to a zero-padded bucket
// key. Every bound gets a WHEN clause except the last, which becomes the
// ELSE so all larger orders land in the final bucket.
const BUCKET_CASE = (() => {
  const clauses = RANGE_BOUNDS.map((bound, index) => {
    const bucketKey = bound.toString().padStart(5, '0');
    const isLast = index === RANGE_BOUNDS.length - 1;
    return isLast
      ? `ELSE '${bucketKey}'`
      : `WHEN o.summary_subtotal <= ${bound} THEN '${bucketKey}'`;
  });
  return `CASE\n      ${clauses.join('\n      ')}\n    END`;
})();
|
||||||
|
|
||||||
|
// Dollar value of a single loyalty point.
const DEFAULT_POINT_DOLLAR_VALUE = 0.005; // 1000 points = $5

// Fallback simulation parameters used when the request omits them.
const DEFAULTS = {
  merchantFeePercent: 2.9,  // applied as a % of total revenue
  fixedCostPerOrder: 1.5,   // flat per-order cost, in dollars
  pointsPerDollar: 0,
  pointsRedemptionRate: 0,
  pointDollarValue: DEFAULT_POINT_DOLLAR_VALUE,
};
|
||||||
|
|
||||||
|
/**
 * Parse an ISO date string into a Luxon DateTime, returning `fallback`
 * for missing or unparseable input.
 * @param {string|undefined} value - ISO-8601 date string (may be falsy)
 * @param {DateTime} fallback - Value returned when parsing fails
 * @returns {DateTime}
 */
function parseDate(value, fallback) {
  if (!value) {
    return fallback;
  }
  const parsed = DateTime.fromISO(value);
  return parsed.isValid ? parsed : fallback;
}
|
||||||
|
|
||||||
|
// Render a Luxon DateTime as a MySQL DATETIME literal
// (yyyy-MM-dd HH:mm:ss, in whatever zone the DateTime carries).
function formatDateForSql(dt) {
  const SQL_DATETIME_FORMAT = 'yyyy-LL-dd HH:mm:ss';
  return dt.toFormat(SQL_DATETIME_FORMAT);
}
|
||||||
|
|
||||||
|
/**
 * Representative order value for a bucket: the midpoint of a bounded
 * range, or min + 200 as a rough stand-in for an open-ended bucket.
 * @param {{min: number, max: ?number}} range
 * @returns {number}
 */
function getMidpoint(range) {
  return range.max == null
    ? range.min + 200 // Rough estimate for 2000+
    : (range.min + range.max) / 2;
}
|
||||||
|
|
||||||
|
// GET /promos — list store promos whose active window overlaps the
// requested date range (defaults to the last 3 years), each with a
// count of distinct orders that used it.
router.get('/promos', async (req, res) => {
  let connection;
  let release;
  try {
    ({ connection, release } = await getDbConnection());

    const { startDate, endDate } = req.query || {};
    const now = DateTime.now().endOf('day');
    const defaultStart = now.minus({ years: 3 }).startOf('day');

    const parsedStart = startDate ? parseDate(startDate, defaultStart).startOf('day') : defaultStart;
    const parsedEnd = endDate ? parseDate(endDate, now).endOf('day') : now;

    // Normalize so start <= end even if the caller swapped the params.
    const rangeStart = parsedStart <= parsedEnd ? parsedStart : parsedEnd;
    const rangeEnd = parsedEnd >= parsedStart ? parsedEnd : parsedStart;

    const rangeStartSql = formatDateForSql(rangeStart);
    const rangeEndSql = formatDateForSql(rangeEnd);

    const sql = `
      SELECT
        p.promo_id AS id,
        p.promo_code AS code,
        p.promo_description_online AS description_online,
        p.promo_description_private AS description_private,
        p.date_start,
        p.date_end,
        COALESCE(u.usage_count, 0) AS usage_count
      FROM promos p
      LEFT JOIN (
        SELECT
          discount_code,
          COUNT(DISTINCT order_id) AS usage_count
        FROM order_discounts
        WHERE discount_type = 10 AND discount_active = 1
        GROUP BY discount_code
      ) u ON u.discount_code = p.promo_id
      WHERE p.date_start IS NOT NULL
        AND p.date_end IS NOT NULL
        AND NOT (p.date_end < ? OR p.date_start > ?)
        AND p.store = 1
        AND p.date_start >= '2010-01-01'
      ORDER BY p.promo_id DESC
      LIMIT 200
    `;

    const [rows] = await connection.execute(sql, [rangeStartSql, rangeEndSql]);

    // Hand the connection back before doing CPU-only shaping work, and
    // null the handles so the catch block cannot touch a connection that
    // has already been returned to the pool.
    release();
    release = null;
    connection = null;

    const promos = rows.map(row => ({
      id: Number(row.id),
      code: row.code,
      description: row.description_online || row.description_private || '',
      privateDescription: row.description_private || '',
      promo_description_online: row.description_online || '',
      promo_description_private: row.description_private || '',
      dateStart: row.date_start,
      dateEnd: row.date_end,
      usageCount: Number(row.usage_count || 0)
    }));

    res.json({ promos });
  } catch (error) {
    // BUG FIX: the original destroyed the raw connection without ever
    // calling the pool's release, leaking the pool slot (and could
    // destroy an already-released connection). Prefer release; destroy
    // only if we never obtained a release function — same pattern as
    // the /simulate handler.
    if (release) {
      try {
        release();
      } catch (releaseError) {
        console.error('Failed to release connection after error:', releaseError);
      }
    } else if (connection) {
      try {
        connection.destroy();
      } catch (destroyError) {
        console.error('Failed to destroy connection after error:', destroyError);
      }
    }
    console.error('Error fetching promos:', error);
    res.status(500).json({ error: 'Failed to fetch promos' });
  }
});
|
||||||
|
|
||||||
|
// POST /simulate — estimate per-bucket and blended profitability of a
// hypothetical promo configuration against historical order data.
// Orders are grouped into subtotal buckets (see RANGE_DEFINITIONS); the
// simulated promos/tiers are applied to each bucket's average order.
router.post('/simulate', async (req, res) => {
  const {
    dateRange = {},
    filters = {},
    productPromo = {},
    shippingPromo = {},
    shippingTiers = [],
    merchantFeePercent,
    fixedCostPerOrder,
    pointsConfig = {}
  } = req.body || {};

  const endDefault = DateTime.now();
  const startDefault = endDefault.minus({ months: 6 });
  const startDt = parseDate(dateRange.start, startDefault).startOf('day');
  const endDt = parseDate(dateRange.end, endDefault).endOf('day');

  const shipCountry = filters.shipCountry || 'US';
  const promoIds = Array.isArray(filters.promoIds) ? filters.promoIds.filter(Boolean) : [];

  // Normalize the promo configuration, falling back to DEFAULTS for any
  // missing or non-numeric values.
  const config = {
    merchantFeePercent: typeof merchantFeePercent === 'number' ? merchantFeePercent : DEFAULTS.merchantFeePercent,
    fixedCostPerOrder: typeof fixedCostPerOrder === 'number' ? fixedCostPerOrder : DEFAULTS.fixedCostPerOrder,
    productPromo: {
      type: productPromo.type || 'none',
      value: Number(productPromo.value || 0),
      minSubtotal: Number(productPromo.minSubtotal || 0)
    },
    shippingPromo: {
      type: shippingPromo.type || 'none',
      value: Number(shippingPromo.value || 0),
      minSubtotal: Number(shippingPromo.minSubtotal || 0),
      maxDiscount: Number(shippingPromo.maxDiscount || 0)
    },
    // Tiers are sorted ascending by threshold so the highest qualifying
    // tier wins when they are applied in order below.
    shippingTiers: Array.isArray(shippingTiers)
      ? shippingTiers
          .map(tier => ({
            threshold: Number(tier.threshold || 0),
            mode: tier.mode === 'percentage' || tier.mode === 'flat' ? tier.mode : 'percentage',
            value: Number(tier.value || 0)
          }))
          .filter(tier => tier.threshold >= 0 && tier.value >= 0)
          .sort((a, b) => a.threshold - b.threshold)
      : [],
    points: {
      pointsPerDollar: typeof pointsConfig.pointsPerDollar === 'number' ? pointsConfig.pointsPerDollar : null,
      redemptionRate: typeof pointsConfig.redemptionRate === 'number' ? pointsConfig.redemptionRate : null,
      pointDollarValue: typeof pointsConfig.pointDollarValue === 'number'
        ? pointsConfig.pointDollarValue
        : DEFAULT_POINT_DOLLAR_VALUE
    }
  };

  let connection;
  let release;

  try {
    const dbConn = await getDbConnection();
    connection = dbConn.connection;
    release = dbConn.release;

    const params = [
      shipCountry,
      formatDateForSql(startDt),
      formatDateForSql(endDt)
    ];
    const promoJoin = promoIds.length > 0
      ? 'JOIN order_discounts od ON od.order_id = o.order_id AND od.discount_active = 1 AND od.discount_type = 10'
      : '';

    // BUG FIX: mysql2's execute() uses prepared statements and does NOT
    // expand an array bound to a single `IN (?)` placeholder (that only
    // works with query()). Generate one placeholder per promo id and
    // bind the ids individually.
    const promoPlaceholders = promoIds.map(() => '?').join(', ');
    if (promoIds.length > 0) {
      params.push(...promoIds);
    }
    // Date range for the COGS subquery in the outer statement.
    params.push(formatDateForSql(startDt), formatDateForSql(endDt));

    // Inner query: one row per qualifying order, tagged with its bucket.
    const filteredOrdersQuery = `
      SELECT
        o.order_id,
        o.summary_subtotal,
        o.summary_discount_subtotal,
        o.summary_shipping,
        o.ship_method_rate,
        o.ship_method_cost,
        o.summary_points,
        ${BUCKET_CASE} AS bucket_key
      FROM _order o
      ${promoJoin}
      WHERE o.summary_total > 0
        AND o.summary_subtotal > 0
        AND o.order_status NOT IN (15)
        AND o.ship_method_selected <> 'holdit'
        AND o.ship_country = ?
        AND o.date_placed BETWEEN ? AND ?
        ${promoIds.length > 0 ? `AND od.discount_code IN (${promoPlaceholders})` : ''}
    `;

    // Outer query: aggregate per bucket, joining in COGS and redeemed
    // points per order.
    const bucketQuery = `
      SELECT
        f.bucket_key,
        COUNT(*) AS order_count,
        SUM(f.summary_subtotal) AS subtotal_sum,
        SUM(f.summary_discount_subtotal) AS product_discount_sum,
        SUM(f.summary_subtotal + f.summary_discount_subtotal) AS regular_subtotal_sum,
        SUM(f.ship_method_rate) AS ship_rate_sum,
        SUM(f.ship_method_cost) AS ship_cost_sum,
        SUM(f.summary_points) AS points_awarded_sum,
        SUM(COALESCE(p.points_redeemed, 0)) AS points_redeemed_sum,
        SUM(COALESCE(c.total_cogs, 0)) AS cogs_sum,
        AVG(f.summary_subtotal) AS avg_subtotal,
        AVG(f.summary_discount_subtotal) AS avg_product_discount,
        AVG(f.ship_method_rate) AS avg_ship_rate,
        AVG(f.ship_method_cost) AS avg_ship_cost,
        AVG(COALESCE(c.total_cogs, 0)) AS avg_cogs
      FROM (
        ${filteredOrdersQuery}
      ) AS f
      LEFT JOIN (
        SELECT order_id, SUM(cogs_amount) AS total_cogs
        FROM report_sales_data
        WHERE action IN (1,2,3)
          AND date_change BETWEEN ? AND ?
        GROUP BY order_id
      ) AS c ON c.order_id = f.order_id
      LEFT JOIN (
        SELECT order_id, SUM(discount_amount_points) AS points_redeemed
        FROM order_discounts
        WHERE discount_type = 20 AND discount_active = 1
        GROUP BY order_id
      ) AS p ON p.order_id = f.order_id
      GROUP BY f.bucket_key
    `;

    const [rows] = await connection.execute(bucketQuery, params);

    const totals = {
      orders: 0,
      subtotal: 0,
      productDiscount: 0,
      regularSubtotal: 0,
      shipRate: 0,
      shipCost: 0,
      cogs: 0,
      pointsAwarded: 0,
      pointsRedeemed: 0
    };

    // Index rows by bucket key and accumulate grand totals.
    const rowMap = new Map();
    for (const row of rows) {
      const key = row.bucket_key || FINAL_BUCKET_KEY;
      const parsed = {
        orderCount: Number(row.order_count || 0),
        subtotalSum: Number(row.subtotal_sum || 0),
        productDiscountSum: Number(row.product_discount_sum || 0),
        regularSubtotalSum: Number(row.regular_subtotal_sum || 0),
        shipRateSum: Number(row.ship_rate_sum || 0),
        shipCostSum: Number(row.ship_cost_sum || 0),
        pointsAwardedSum: Number(row.points_awarded_sum || 0),
        pointsRedeemedSum: Number(row.points_redeemed_sum || 0),
        cogsSum: Number(row.cogs_sum || 0),
        avgSubtotal: Number(row.avg_subtotal || 0),
        avgProductDiscount: Number(row.avg_product_discount || 0),
        avgShipRate: Number(row.avg_ship_rate || 0),
        avgShipCost: Number(row.avg_ship_cost || 0),
        avgCogs: Number(row.avg_cogs || 0)
      };
      rowMap.set(key, parsed);

      totals.orders += parsed.orderCount;
      totals.subtotal += parsed.subtotalSum;
      totals.productDiscount += parsed.productDiscountSum;
      totals.regularSubtotal += parsed.regularSubtotalSum;
      totals.shipRate += parsed.shipRateSum;
      totals.shipCost += parsed.shipCostSum;
      totals.cogs += parsed.cogsSum;
      totals.pointsAwarded += parsed.pointsAwardedSum;
      totals.pointsRedeemed += parsed.pointsRedeemedSum;
    }

    // Historical ratio of product discounts to regular-price subtotal.
    const productDiscountRate = totals.regularSubtotal > 0
      ? totals.productDiscount / totals.regularSubtotal
      : 0;

    // Points earn/redeem rates: explicit config wins, otherwise derive
    // from historical data.
    const pointsPerDollar = config.points.pointsPerDollar != null
      ? config.points.pointsPerDollar
      : totals.subtotal > 0
        ? totals.pointsAwarded / totals.subtotal
        : 0;

    const redemptionRate = config.points.redemptionRate != null
      ? config.points.redemptionRate
      : totals.pointsAwarded > 0
        ? Math.min(1, totals.pointsRedeemed / totals.pointsAwarded)
        : 0;

    const pointDollarValue = config.points.pointDollarValue || DEFAULT_POINT_DOLLAR_VALUE;

    const bucketResults = [];
    let weightedProfitAmount = 0;
    let weightedProfitPercent = 0;

    for (const range of RANGE_DEFINITIONS) {
      const data = rowMap.get(range.key) || {
        orderCount: 0,
        avgSubtotal: 0,
        avgShipRate: 0,
        avgShipCost: 0,
        avgCogs: 0
      };

      // Fall back to the bucket midpoint when there is no historical data.
      const orderValue = data.avgSubtotal > 0 ? data.avgSubtotal : getMidpoint(range);
      const shippingChargeBase = data.avgShipRate > 0 ? data.avgShipRate : 0;
      const actualShippingCost = data.avgShipCost > 0 ? data.avgShipCost : 0;
      const productCogs = data.avgCogs > 0 ? data.avgCogs : 0;
      const productDiscountAmount = orderValue * productDiscountRate;
      // Back out the pre-discount ("regular") price; guard near 100%.
      const effectiveRegularPrice = productDiscountRate < 0.99
        ? orderValue / (1 - productDiscountRate)
        : orderValue;

      // Simulated product promo discount for this bucket's average order.
      let promoProductDiscount = 0;
      if (config.productPromo.type === 'percentage_subtotal' && orderValue >= config.productPromo.minSubtotal) {
        promoProductDiscount = Math.min(orderValue, (config.productPromo.value / 100) * orderValue);
      } else if (config.productPromo.type === 'percentage_regular' && orderValue >= config.productPromo.minSubtotal) {
        // Top up the existing discount rate to the target off-regular rate.
        const targetRate = config.productPromo.value / 100;
        const additionalRate = Math.max(0, targetRate - productDiscountRate);
        promoProductDiscount = Math.min(orderValue, additionalRate * effectiveRegularPrice);
      } else if (config.productPromo.type === 'fixed_amount' && orderValue >= config.productPromo.minSubtotal) {
        promoProductDiscount = Math.min(orderValue, config.productPromo.value);
      }

      // Automatic shipping tiers: last qualifying tier (highest threshold)
      // wins because tiers are sorted ascending.
      let shippingAfterAuto = shippingChargeBase;
      for (const tier of config.shippingTiers) {
        if (orderValue >= tier.threshold) {
          if (tier.mode === 'percentage') {
            shippingAfterAuto = shippingChargeBase * Math.max(0, 1 - tier.value / 100);
          } else if (tier.mode === 'flat') {
            shippingAfterAuto = tier.value;
          }
        }
      }

      // Simulated shipping promo, capped by maxDiscount and by what is
      // left to discount.
      let shipPromoDiscount = 0;
      if (config.shippingPromo.type !== 'none' && orderValue >= config.shippingPromo.minSubtotal) {
        if (config.shippingPromo.type === 'percentage') {
          shipPromoDiscount = shippingAfterAuto * (config.shippingPromo.value / 100);
        } else if (config.shippingPromo.type === 'fixed') {
          shipPromoDiscount = config.shippingPromo.value;
        }
        if (config.shippingPromo.maxDiscount > 0) {
          shipPromoDiscount = Math.min(shipPromoDiscount, config.shippingPromo.maxDiscount);
        }
        shipPromoDiscount = Math.min(shipPromoDiscount, shippingAfterAuto);
      }

      const customerShipCost = Math.max(0, shippingAfterAuto - shipPromoDiscount);
      const customerItemCost = Math.max(0, orderValue - promoProductDiscount);
      const totalRevenue = customerItemCost + customerShipCost;

      const merchantFees = totalRevenue * (config.merchantFeePercent / 100);
      const pointsCost = customerItemCost * pointsPerDollar * redemptionRate * pointDollarValue;
      const fixedCosts = config.fixedCostPerOrder;
      const totalCosts = productCogs + actualShippingCost + merchantFees + pointsCost + fixedCosts;
      const profit = totalRevenue - totalCosts;
      const profitPercent = totalRevenue > 0 ? (profit / totalRevenue) : 0;
      const weight = totals.orders > 0 ? (data.orderCount || 0) / totals.orders : 0;

      weightedProfitAmount += profit * weight;
      weightedProfitPercent += profitPercent * weight;

      bucketResults.push({
        key: range.key,
        label: range.label,
        min: range.min,
        max: range.max,
        orderCount: data.orderCount || 0,
        weight,
        orderValue,
        productDiscountAmount,
        promoProductDiscount,
        customerItemCost,
        shippingChargeBase,
        shippingAfterAuto,
        shipPromoDiscount,
        customerShipCost,
        actualShippingCost,
        totalRevenue,
        productCogs,
        merchantFees,
        pointsCost,
        fixedCosts,
        totalCosts,
        profit,
        profitPercent
      });
    }

    if (release) {
      release();
    }

    res.json({
      dateRange: {
        start: startDt.toISO(),
        end: endDt.toISO()
      },
      totals: {
        orders: totals.orders,
        subtotal: totals.subtotal,
        productDiscountRate,
        pointsPerDollar,
        redemptionRate,
        pointDollarValue,
        weightedProfitAmount,
        weightedProfitPercent
      },
      buckets: bucketResults
    });
  } catch (error) {
    // Prefer returning the connection to the pool; destroy only if we
    // never obtained a release function.
    if (release) {
      try {
        release();
      } catch (releaseError) {
        console.error('Failed to release connection after error:', releaseError);
      }
    } else if (connection) {
      try {
        connection.destroy();
      } catch (destroyError) {
        console.error('Failed to destroy connection after error:', destroyError);
      }
    }

    console.error('Error running discount simulation:', error);
    res.status(500).json({ error: 'Failed to run discount simulation' });
  }
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
1068
inventory-server/dashboard/acot-server/routes/events.js
Normal file
1068
inventory-server/dashboard/acot-server/routes/events.js
Normal file
File diff suppressed because it is too large
Load Diff
57
inventory-server/dashboard/acot-server/routes/test.js
Normal file
57
inventory-server/dashboard/acot-server/routes/test.js
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const router = express.Router();
|
||||||
|
const { getDbConnection, getCachedQuery } = require('../db/connection');
|
||||||
|
|
||||||
|
// Test endpoint to count orders
router.get('/order-count', async (req, res) => {
  let release;
  try {
    // BUG FIX: the original destructured only `connection` and never
    // called `release`, permanently leaking a pool slot per request.
    const pooled = await getDbConnection();
    const connection = pooled.connection;
    release = pooled.release;

    // Simple query to count orders from _order table
    const queryFn = async () => {
      const [rows] = await connection.execute('SELECT COUNT(*) as count FROM _order');
      return rows[0].count;
    };

    const cacheKey = 'order-count';
    const count = await getCachedQuery(cacheKey, 'default', queryFn);

    res.json({
      success: true,
      data: {
        orderCount: count,
        timestamp: new Date().toISOString()
      }
    });
  } catch (error) {
    console.error('Error fetching order count:', error);
    res.status(500).json({
      success: false,
      error: error.message
    });
  } finally {
    // Always return the connection, even on cache hits or errors.
    if (release) {
      release();
    }
  }
});
|
||||||
|
|
||||||
|
// Test connection endpoint
router.get('/test-connection', async (req, res) => {
  let release;
  try {
    // BUG FIX: the original destructured only `connection` and never
    // called `release`, permanently leaking a pool slot per request.
    const pooled = await getDbConnection();
    const connection = pooled.connection;
    release = pooled.release;

    // Test the connection with a simple query
    const [rows] = await connection.execute('SELECT 1 as test');

    res.json({
      success: true,
      message: 'Database connection successful',
      data: rows[0]
    });
  } catch (error) {
    console.error('Error testing connection:', error);
    res.status(500).json({
      success: false,
      error: error.message
    });
  } finally {
    // Always return the connection to the pool.
    if (release) {
      release();
    }
  }
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
99
inventory-server/dashboard/acot-server/server.js
Normal file
99
inventory-server/dashboard/acot-server/server.js
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
// ACOT production-data API server: Express app exposing test, events,
// and discounts routes over a pooled/tunneled production DB connection.
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const morgan = require('morgan');
const compression = require('compression');
const fs = require('fs');
const path = require('path');
const { closeAllConnections } = require('./db/connection');

const app = express();
// Port comes from ACOT_PORT, defaulting to 3012.
const PORT = process.env.ACOT_PORT || 3012;

// Create logs directory if it doesn't exist
const logDir = path.join(__dirname, 'logs/app');
if (!fs.existsSync(logDir)) {
  fs.mkdirSync(logDir, { recursive: true });
}

// Create a write stream for access logs (append mode)
const accessLogStream = fs.createWriteStream(
  path.join(logDir, 'access.log'),
  { flags: 'a' }
);

// Middleware
app.use(compression());
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// Logging middleware: file-based 'combined' logs in production,
// terse console output otherwise.
if (process.env.NODE_ENV === 'production') {
  app.use(morgan('combined', { stream: accessLogStream }));
} else {
  app.use(morgan('dev'));
}

// Health check endpoint
app.get('/health', (req, res) => {
  res.json({
    status: 'healthy',
    service: 'acot-server',
    timestamp: new Date().toISOString(),
    uptime: process.uptime()
  });
});

// Routes
app.use('/api/acot/test', require('./routes/test'));
app.use('/api/acot/events', require('./routes/events'));
app.use('/api/acot/discounts', require('./routes/discounts'));

// Error handling middleware: hides internal details in production.
app.use((err, req, res, next) => {
  console.error('Unhandled error:', err);
  res.status(500).json({
    success: false,
    error: process.env.NODE_ENV === 'production'
      ? 'Internal server error'
      : err.message
  });
});

// 404 handler (registered after all routes, so it catches everything else)
app.use((req, res) => {
  res.status(404).json({
    success: false,
    error: 'Route not found'
  });
});

// Start server
const server = app.listen(PORT, () => {
  console.log(`ACOT Server running on port ${PORT}`);
  console.log(`Environment: ${process.env.NODE_ENV}`);
});

// Graceful shutdown: stop accepting requests, then close DB connections.
// NOTE(review): the log message mentions only SIGTERM, but this handler
// is also registered for SIGINT below.
const gracefulShutdown = async () => {
  console.log('SIGTERM signal received: closing HTTP server');
  server.close(async () => {
    console.log('HTTP server closed');

    // Close database connections
    try {
      await closeAllConnections();
      console.log('Database connections closed');
    } catch (error) {
      console.error('Error closing database connections:', error);
    }

    process.exit(0);
  });
};

process.on('SIGTERM', gracefulShutdown);
process.on('SIGINT', gracefulShutdown);

module.exports = app;
|
||||||
312
inventory-server/dashboard/acot-server/utils/timeUtils.js
Normal file
312
inventory-server/dashboard/acot-server/utils/timeUtils.js
Normal file
@@ -0,0 +1,312 @@
|
|||||||
|
const { DateTime } = require('luxon');
|
||||||
|
|
||||||
|
// Zone that all business-time calculations are expressed in.
const TIMEZONE = 'America/New_York';
// Fixed UTC offset — presumably the offset the database stores datetimes
// in (no DST shift); TODO confirm against the DB session settings.
const DB_TIMEZONE = 'UTC-05:00';
const BUSINESS_DAY_START_HOUR = 1; // 1 AM Eastern
const WEEK_START_DAY = 7; // Sunday (Luxon uses 1 = Monday, 7 = Sunday)
// Luxon format-token string matching MySQL DATETIME literals.
const DB_DATETIME_FORMAT = 'yyyy-LL-dd HH:mm:ss';

// True when `value` is already a Luxon DateTime instance.
const isDateTime = (value) => DateTime.isDateTime(value);
||||||
|
/**
 * Coerce assorted inputs (Luxon DateTime, JS Date, epoch millis, ISO or
 * SQL string) into a Luxon DateTime in the given zone.
 * Returns null for falsy or unparseable input.
 */
const ensureDateTime = (value, { zone = TIMEZONE } = {}) => {
  if (!value) {
    return null;
  }

  if (isDateTime(value)) {
    return value.setZone(zone);
  }

  if (value instanceof Date) {
    return DateTime.fromJSDate(value, { zone });
  }

  if (typeof value === 'number') {
    return DateTime.fromMillis(value, { zone });
  }

  if (typeof value !== 'string') {
    return null;
  }

  // Strings: try ISO first (honoring any embedded offset), then the
  // SQL datetime format.
  const fromIso = DateTime.fromISO(value, { zone, setZone: true });
  const parsed = fromIso.isValid ? fromIso : DateTime.fromSQL(value, { zone });
  return parsed.isValid ? parsed : null;
};
|
||||||
|
|
||||||
|
// Current instant in the business timezone.
const getNow = () => DateTime.now().setZone(TIMEZONE);

// Start of the business day containing `input`. Business days begin at
// BUSINESS_DAY_START_HOUR (1 AM Eastern); instants before that hour
// belong to the previous calendar day's business day. Invalid input
// falls back to the current business day's start.
const getDayStart = (input = getNow()) => {
  const dt = ensureDateTime(input);
  const base = dt && dt.isValid ? dt : getNow();
  const startToday = base.set({
    hour: BUSINESS_DAY_START_HOUR,
    minute: 0,
    second: 0,
    millisecond: 0
  });
  if (!dt || !dt.isValid) {
    return startToday;
  }
  // Before 1 AM the business day started on the previous calendar day.
  return base.hour < BUSINESS_DAY_START_HOUR
    ? startToday.minus({ days: 1 })
    : startToday;
};

// Inclusive end of the business day: one millisecond before the next
// business day's start.
const getDayEnd = (input = getNow()) =>
  getDayStart(input).plus({ days: 1 }).minus({ milliseconds: 1 });
|
||||||
|
|
||||||
|
// Start of the business week containing `input`: the preceding Sunday at
// 1 AM Eastern. Invalid input falls back to today's business-day start.
const getWeekStart = (input = getNow()) => {
  const dt = ensureDateTime(input);
  if (!dt || !dt.isValid) {
    return getDayStart();
  }
  // Luxon weeks run Monday(1)..Sunday(7), so setting weekday 7 can land on
  // the Sunday *after* `dt`; step back one week in that case.
  let sunday = dt.set({ weekday: WEEK_START_DAY }).startOf('day');
  if (sunday > dt) {
    sunday = sunday.minus({ weeks: 1 });
  }
  return sunday.set({
    hour: BUSINESS_DAY_START_HOUR,
    minute: 0,
    second: 0,
    millisecond: 0
  });
};
|
||||||
|
|
||||||
|
// Resolve a named time range into business-day-aligned { start, end }
// Luxon DateTimes. `now` anchors relative ranges. Throws on an invalid
// reference time or an unrecognized range name.
const getRangeForTimeRange = (timeRange = 'today', now = getNow()) => {
  const current = ensureDateTime(now);
  if (!current || !current.isValid) {
    throw new Error('Invalid reference time for range calculation');
  }

  // One full business day, `daysAgo` days before the reference time.
  const singleDay = (daysAgo) => {
    const target = current.minus({ days: daysAgo });
    return { start: getDayStart(target), end: getDayEnd(target) };
  };

  // Rolling window of `length` business days ending today.
  const rolling = (length) => ({
    start: getDayStart(current).minus({ days: length - 1 }),
    end: getDayEnd(current)
  });

  // The `length`-day window immediately preceding the current rolling
  // window of the same length (for period-over-period comparisons).
  const previousWindow = (length) => {
    const currentStart = getDayStart(current).minus({ days: length - 1 });
    const prevEnd = currentStart.minus({ days: 1 });
    return {
      start: getDayStart(prevEnd.minus({ days: length - 1 })),
      end: getDayEnd(prevEnd)
    };
  };

  switch (timeRange) {
    case 'today':
      return singleDay(0);
    case 'yesterday':
      return singleDay(1);
    case 'twoDaysAgo':
      return singleDay(2);
    case 'thisWeek':
      return { start: getWeekStart(current), end: getDayEnd(current) };
    case 'lastWeek': {
      const weekStart = getWeekStart(current.minus({ weeks: 1 }));
      return { start: weekStart, end: getDayEnd(weekStart.plus({ days: 6 })) };
    }
    case 'thisMonth': {
      const monthStart = getDayStart(current)
        .startOf('month')
        .set({ hour: BUSINESS_DAY_START_HOUR });
      return { start: monthStart, end: getDayEnd(current) };
    }
    case 'lastMonth': {
      const monthStart = current
        .minus({ months: 1 })
        .startOf('month')
        .set({ hour: BUSINESS_DAY_START_HOUR, minute: 0, second: 0, millisecond: 0 });
      const monthEnd = monthStart.plus({ months: 1 }).minus({ days: 1 });
      return { start: monthStart, end: getDayEnd(monthEnd) };
    }
    case 'last7days':
      return rolling(7);
    case 'last30days':
      return rolling(30);
    case 'last90days':
      return rolling(90);
    case 'previous7days':
      return previousWindow(7);
    case 'previous30days':
      return previousWindow(30);
    case 'previous90days':
      return previousWindow(90);
    default:
      throw new Error(`Unknown time range: ${timeRange}`);
  }
};
|
||||||
|
|
||||||
|
// Format a datetime for interpolation as a SQL DATETIME parameter.
// Throws when `ensureDateTime` cannot parse the input.
//
// NOTE(review): `keepLocalTime: true` keeps the Eastern wall-clock time and
// merely relabels it as UTC-05:00, rather than converting the instant.
// During daylight saving (Eastern = UTC-04:00) this differs by one hour
// from a true conversion, and it is inconsistent with `formatMySQLDate` /
// `parseBusinessDate` below, which convert instants. Presumably intentional
// if the DB stores Eastern wall times — confirm before changing.
const toDatabaseSqlString = (dt) => {
  const normalized = ensureDateTime(dt);
  if (!normalized || !normalized.isValid) {
    throw new Error('Invalid datetime provided for SQL conversion');
  }
  const dbTime = normalized.setZone(DB_TIMEZONE, { keepLocalTime: true });
  return dbTime.toFormat(DB_DATETIME_FORMAT);
};
|
||||||
|
|
||||||
|
// Render a datetime as a short human-readable Eastern date, e.g.
// "Jan 5, 2025". Unparseable or empty input yields an empty string.
const formatBusinessDate = (input) => {
  const dt = ensureDateTime(input);
  if (!dt || !dt.isValid) {
    return '';
  }
  return dt.setZone(TIMEZONE).toFormat('LLL d, yyyy');
};

// Display label for a named time range. Unknown keys pass through
// unchanged so ad-hoc range names still render something.
const getTimeRangeLabel = (timeRange) => {
  const labels = {
    today: 'Today',
    yesterday: 'Yesterday',
    twoDaysAgo: 'Two Days Ago',
    thisWeek: 'This Week',
    lastWeek: 'Last Week',
    thisMonth: 'This Month',
    lastMonth: 'Last Month',
    last7days: 'Last 7 Days',
    last30days: 'Last 30 Days',
    last90days: 'Last 90 Days',
    previous7days: 'Previous 7 Days',
    previous30days: 'Previous 30 Days',
    previous90days: 'Previous 90 Days'
  };
  return labels[timeRange] || timeRange;
};
|
||||||
|
|
||||||
|
// Build a parameterized SQL predicate over `date_placed` plus display
// metadata for the requested range. 'custom' uses the explicit start/end;
// every other (or missing) range name is resolved via getRangeForTimeRange.
// Throws on an unparseable custom range or unknown range name.
const getTimeRangeConditions = (timeRange, startDate, endDate) => {
  // Shared shape for both branches: placeholder-based WHERE clause,
  // bound parameters, and UTC ISO bounds for display/debugging.
  const buildResult = (start, end, label) => ({
    whereClause: 'date_placed >= ? AND date_placed <= ?',
    params: [toDatabaseSqlString(start), toDatabaseSqlString(end)],
    dateRange: {
      start: start.toUTC().toISO(),
      end: end.toUTC().toISO(),
      label
    }
  });

  if (timeRange === 'custom' && startDate && endDate) {
    const start = ensureDateTime(startDate);
    const end = ensureDateTime(endDate);
    if (!start || !start.isValid || !end || !end.isValid) {
      throw new Error('Invalid custom date range provided');
    }
    const label = `${formatBusinessDate(start)} - ${formatBusinessDate(end)}`;
    return buildResult(start, end, label);
  }

  const normalizedRange = timeRange || 'today';
  const range = getRangeForTimeRange(normalizedRange);
  return buildResult(range.start, range.end, getTimeRangeLabel(normalizedRange));
};
|
||||||
|
|
||||||
|
// JS-Date bounds of a named business time range (e.g. for MongoDB queries).
// Throws for unknown range names (propagated from getRangeForTimeRange).
const getBusinessDayBounds = (timeRange) => {
  const { start, end } = getRangeForTimeRange(timeRange);
  return { start: start.toJSDate(), end: end.toJSDate() };
};

// Parse a MySQL DATETIME string (stored at the fixed UTC-05:00 offset)
// into a UTC JS Date. MySQL's zero-date sentinel, empty input, and
// unparseable strings all yield null (the parse failure is logged).
const parseBusinessDate = (mysqlDatetime) => {
  if (!mysqlDatetime || mysqlDatetime === '0000-00-00 00:00:00') {
    return null;
  }
  const dt = DateTime.fromSQL(mysqlDatetime, { zone: DB_TIMEZONE });
  if (!dt.isValid) {
    console.error('[timeUtils] Failed to parse MySQL datetime:', mysqlDatetime, dt.invalidExplanation);
    return null;
  }
  return dt.toUTC().toJSDate();
};

// Convert any supported datetime input (interpreted as UTC when it carries
// no zone) into a MySQL DATETIME string at the database's fixed offset.
// Returns null for empty or unparseable input.
const formatMySQLDate = (input) => {
  if (!input) {
    return null;
  }
  const dt = ensureDateTime(input, { zone: 'utc' });
  if (!dt || !dt.isValid) {
    return null;
  }
  return dt.setZone(DB_TIMEZONE).toFormat(DB_DATETIME_FORMAT);
};
|
||||||
|
|
||||||
|
// Public API of the time utilities module. The `_internal` namespace is
// intentionally exported for unit tests and advanced consumers; treat it
// as unstable.
module.exports = {
  getBusinessDayBounds,
  getTimeRangeConditions,
  formatBusinessDate,
  getTimeRangeLabel,
  parseBusinessDate,
  formatMySQLDate,
  // Expose helpers for tests or advanced consumers
  _internal: {
    getDayStart,
    getDayEnd,
    getWeekStart,
    getRangeForTimeRange,
    BUSINESS_DAY_START_HOUR
  }
};
|
||||||
21
inventory-server/dashboard/aircall-server/.env.example
Normal file
21
inventory-server/dashboard/aircall-server/.env.example
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Server Configuration
|
||||||
|
NODE_ENV=development
|
||||||
|
AIRCALL_PORT=3002
|
||||||
|
LOG_LEVEL=info
|
||||||
|
|
||||||
|
# Aircall API Credentials
|
||||||
|
AIRCALL_API_ID=your_aircall_api_id
|
||||||
|
AIRCALL_API_TOKEN=your_aircall_api_token
|
||||||
|
|
||||||
|
# Database Configuration
|
||||||
|
MONGODB_URI=mongodb://localhost:27017/dashboard
|
||||||
|
MONGODB_DB=dashboard
|
||||||
|
REDIS_URL=redis://localhost:6379
|
||||||
|
|
||||||
|
# Service Configuration
|
||||||
|
TIMEZONE=America/New_York
|
||||||
|
DAY_STARTS_AT=1 # Business day starts at 1 AM ET
|
||||||
|
|
||||||
|
# Optional Settings
|
||||||
|
REDIS_TTL=300 # Cache TTL in seconds (5 minutes)
|
||||||
|
COLLECTION_NAME=aircall_daily_data
|
||||||
55
inventory-server/dashboard/aircall-server/README.md
Normal file
55
inventory-server/dashboard/aircall-server/README.md
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
# Aircall Server
|
||||||
|
|
||||||
|
A standalone server for handling Aircall metrics and data processing.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
1. Install dependencies:
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Set up environment variables:
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
Then edit `.env` with your configuration.
|
||||||
|
|
||||||
|
Required environment variables:
|
||||||
|
- `AIRCALL_API_ID`: Your Aircall API ID
|
||||||
|
- `AIRCALL_API_TOKEN`: Your Aircall API Token
|
||||||
|
- `MONGODB_URI`: MongoDB connection string
|
||||||
|
- `REDIS_URL`: Redis connection string
|
||||||
|
- `AIRCALL_PORT`: Server port (default: 3002)
|
||||||
|
|
||||||
|
## Running the Server
|
||||||
|
|
||||||
|
### Development
|
||||||
|
```bash
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
### Production
|
||||||
|
Using PM2:
|
||||||
|
```bash
|
||||||
|
pm2 start ecosystem.config.js --env production
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Endpoints
|
||||||
|
|
||||||
|
### GET /api/aircall/metrics/:timeRange
|
||||||
|
Get Aircall metrics for a specific time range.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
- `timeRange`: One of ['today', 'yesterday', 'last7days', 'last30days', 'last90days']
|
||||||
|
|
||||||
|
### GET /api/aircall/health
|
||||||
|
Get server health status.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
The server uses:
|
||||||
|
- Express.js for the API
|
||||||
|
- MongoDB for data storage
|
||||||
|
- Redis for caching
|
||||||
|
- Winston for logging
|
||||||
1882
inventory-server/dashboard/aircall-server/package-lock.json
generated
Normal file
1882
inventory-server/dashboard/aircall-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
23
inventory-server/dashboard/aircall-server/package.json
Normal file
23
inventory-server/dashboard/aircall-server/package.json
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"name": "aircall-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Aircall metrics server",
|
||||||
|
"type": "module",
|
||||||
|
"main": "server.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^1.6.2",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.3.1",
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"mongodb": "^6.3.0",
|
||||||
|
"redis": "^4.6.11",
|
||||||
|
"winston": "^3.11.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^3.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
83
inventory-server/dashboard/aircall-server/server.js
Normal file
83
inventory-server/dashboard/aircall-server/server.js
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import cors from 'cors';
|
||||||
|
import dotenv from 'dotenv';
|
||||||
|
import path from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
import { createRoutes } from './src/routes/index.js';
|
||||||
|
import { aircallConfig } from './src/config/aircall.config.js';
|
||||||
|
import { connectMongoDB } from './src/utils/db.js';
|
||||||
|
import { createRedisClient } from './src/utils/redis.js';
|
||||||
|
import { createLogger } from './src/utils/logger.js';
|
||||||
|
|
||||||
|
// Get directory name in ES modules
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = path.dirname(__filename);
|
||||||
|
|
||||||
|
// Load environment variables from the correct path
|
||||||
|
dotenv.config({ path: path.resolve(__dirname, '.env') });
|
||||||
|
|
||||||
|
// Validate required environment variables
|
||||||
|
const requiredEnvVars = ['AIRCALL_API_ID', 'AIRCALL_API_TOKEN', 'MONGODB_URI', 'REDIS_URL'];
|
||||||
|
const missingEnvVars = requiredEnvVars.filter(envVar => !process.env[envVar]);
|
||||||
|
|
||||||
|
if (missingEnvVars.length > 0) {
|
||||||
|
console.error('Missing required environment variables:', missingEnvVars);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const port = process.env.AIRCALL_PORT || 3002;
|
||||||
|
const logger = createLogger('aircall-server');
|
||||||
|
|
||||||
|
// Middleware
|
||||||
|
app.use(cors());
|
||||||
|
app.use(express.json());
|
||||||
|
|
||||||
|
// Connect to databases
|
||||||
|
let mongodb;
|
||||||
|
let redis;
|
||||||
|
|
||||||
|
async function initializeServer() {
|
||||||
|
try {
|
||||||
|
// Connect to MongoDB
|
||||||
|
mongodb = await connectMongoDB();
|
||||||
|
logger.info('Connected to MongoDB');
|
||||||
|
|
||||||
|
// Connect to Redis
|
||||||
|
redis = await createRedisClient();
|
||||||
|
logger.info('Connected to Redis');
|
||||||
|
|
||||||
|
// Initialize configs with database connections
|
||||||
|
const configs = {
|
||||||
|
aircall: {
|
||||||
|
...aircallConfig,
|
||||||
|
mongodb,
|
||||||
|
redis,
|
||||||
|
logger
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initialize routes
|
||||||
|
const routes = createRoutes(configs, logger);
|
||||||
|
app.use('/api', routes);
|
||||||
|
|
||||||
|
// Error handling middleware
|
||||||
|
app.use((err, req, res, next) => {
|
||||||
|
logger.error('Server error:', err);
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Internal server error',
|
||||||
|
message: err.message
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
app.listen(port, () => {
|
||||||
|
logger.info(`Aircall server listening on port ${port}`);
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to initialize server:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
initializeServer();
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
// Static configuration for the Aircall service.
//
// `apiId`/`apiToken` are getters so the environment is read at access time
// rather than captured at module evaluation. ES module imports are
// evaluated before the importing file's body runs, so server.js's
// dotenv.config() had not executed yet when this object was previously
// built with plain properties — the credentials would come back undefined
// unless the env was set externally. Spreading this object (as server.js
// does after initialization) resolves the getters into plain values.
export const aircallConfig = {
  serviceName: 'aircall',
  get apiId() {
    return process.env.AIRCALL_API_ID;
  },
  get apiToken() {
    return process.env.AIRCALL_API_TOKEN;
  },
  timezone: 'America/New_York',
  dayStartsAt: 1, // business day starts at 1 AM Eastern
  storeHistory: true,
  collection: 'aircall_daily_data',
  redisTTL: 300, // 5 minutes cache for current day
  endpoints: {
    metrics: {
      ttl: 300
    }
  }
};
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import { AircallService } from '../services/aircall/AircallService.js';
|
||||||
|
|
||||||
|
// Router exposing Aircall metrics plus a service-level health probe.
// `config` must carry the Aircall credentials and datastore handles the
// service needs; `logger` receives request-level errors.
export const createAircallRoutes = (config, logger) => {
  const router = express.Router();
  const aircallService = new AircallService(config);

  // GET /metrics/:timeRange? — aggregated call metrics for a named range
  // (defaults to 'today'). Unknown range names get a 400 with the list of
  // accepted values.
  router.get('/metrics/:timeRange?', async (req, res) => {
    const { timeRange = 'today' } = req.params;
    const allowedRanges = ['today', 'yesterday', 'last7days', 'last30days', 'last90days'];

    if (!allowedRanges.includes(timeRange)) {
      return res.status(400).json({
        error: 'Invalid time range',
        allowedRanges
      });
    }

    try {
      const metrics = await aircallService.getMetrics(timeRange);
      res.json({
        ...metrics,
        _meta: {
          timeRange,
          generatedAt: new Date().toISOString(),
          dataPoints: metrics.daily_data?.length || 0
        }
      });
    } catch (error) {
      logger.error('Error fetching Aircall metrics:', error);
      res.status(500).json({
        error: 'Failed to fetch Aircall metrics',
        message: error.message
      });
    }
  });

  // GET /health — reports MongoDB/Redis connectivity for this service;
  // 'degraded' when either backing store is down.
  router.get('/health', (req, res) => {
    const mongoConnected = Boolean(aircallService.mongodb?.db);
    const redisConnected = Boolean(aircallService.redis?.isOpen);

    res.json({
      status: mongoConnected && redisConnected ? 'ok' : 'degraded',
      service: 'aircall',
      timestamp: new Date().toISOString(),
      connections: {
        mongodb: mongoConnected,
        redis: redisConnected
      }
    });
  });

  return router;
};
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import { createAircallRoutes } from './aircall.routes.js';
|
||||||
|
|
||||||
|
// Top-level API router: mounts per-service routers plus shared endpoints.
export const createRoutes = (configs, logger) => {
  const router = express.Router();

  // Mount the Aircall service under /aircall.
  router.use('/aircall', createAircallRoutes(configs.aircall, logger));

  // GET /health — coarse process-level health summary based on whatever
  // connection handles middleware attached to req.services.
  router.get('/health', (req, res) => {
    const services = req.services || {};
    const redisReady = services.redis?.isReady || false;
    const mongoReady = services.mongo?.readyState === 1 || false;

    res.status(200).json({
      status: 'ok',
      timestamp: new Date(),
      services: {
        redis: redisReady,
        mongodb: mongoReady
      }
    });
  });

  // Any unmatched route below this router gets a JSON 404.
  router.use('*', (req, res) => {
    res.status(404).json({
      error: 'Not Found',
      message: `Route ${req.originalUrl} not found`
    });
  });

  return router;
};
|
||||||
@@ -0,0 +1,298 @@
|
|||||||
|
import { DataManager } from "../base/DataManager.js";
|
||||||
|
|
||||||
|
// Persists and aggregates Aircall call data on top of the generic
// DataManager (MongoDB for history, Redis for short-lived caching).
export class AircallDataManager extends DataManager {
  // Wires the base DataManager to the aircall collection with a 5-minute
  // Redis TTL. The options object is also kept on the instance because
  // storeHistoricalDay reads this.options.collection directly.
  constructor(mongodb, redis, timeManager) {
    const options = {
      collection: "aircall_daily_data",
      redisTTL: 300 // 5 minutes cache
    };
    super(mongodb, redis, timeManager, options);
    this.options = options;
  }

  // Best-effort coercion to a JS Date. Strings and epoch-millisecond
  // numbers are parsed; a Date passes through UNCHANGED (same object —
  // see the mutation note in storeHistoricalPeriod). Anything else is
  // logged and replaced with "now".
  ensureDate(d) {
    if (d instanceof Date) return d;
    if (typeof d === 'string') return new Date(d);
    if (typeof d === 'number') return new Date(d);
    console.error('Invalid date value:', d);
    return new Date(); // fallback to current date
  }

  // Re-aggregate and upsert one historical document per business day in
  // [start, end). Days with no calls are still written (zeroed metrics).
  // No-op when MongoDB is unavailable; per-call and per-day errors are
  // logged and skipped, outer failures are re-thrown.
  //
  // NOTE(review): `currentDate.setUTCDate(...)` mutates the Date in place,
  // and ensureDate returns the caller's own Date object when one is passed
  // — so a Date passed as `start` is advanced past `end` as a side effect.
  // Pass a copy or a string/number if the caller reuses `start`.
  async storeHistoricalPeriod(start, end, calls) {
    if (!this.mongodb) return;

    try {
      if (!Array.isArray(calls)) {
        console.error("Invalid calls data:", calls);
        return;
      }

      // Group calls by true day boundaries using TimeManager; keys are the
      // full ISO string of each day's start instant.
      const dailyCallsMap = new Map();

      calls.forEach((call) => {
        try {
          const timestamp = call.started_at * 1000; // Convert to milliseconds
          const callDate = this.ensureDate(timestamp);
          const dayBounds = this.timeManager.getDayBounds(callDate);
          const dayKey = dayBounds.start.toISOString();

          if (!dailyCallsMap.has(dayKey)) {
            dailyCallsMap.set(dayKey, {
              date: dayBounds.start,
              calls: [],
            });
          }
          dailyCallsMap.get(dayKey).calls.push(call);
        } catch (err) {
          console.error('Error processing call:', err, call);
        }
      });

      // Iterate over each day in the period using day boundaries.
      // The range is half-open: the day containing `end` is excluded when
      // currentDate reaches it.
      const dates = [];
      let currentDate = this.ensureDate(start);
      const endDate = this.ensureDate(end);

      while (currentDate < endDate) {
        const dayBounds = this.timeManager.getDayBounds(currentDate);
        dates.push(dayBounds.start);
        currentDate.setUTCDate(currentDate.getUTCDate() + 1);
      }

      for (const date of dates) {
        try {
          const dateKey = date.toISOString();
          const dayData = dailyCallsMap.get(dateKey);
          const dayCalls = dayData ? dayData.calls : [];

          // Process calls for this day using the same processing logic
          const metrics = this.processCallData(dayCalls);

          // Insert a daily_data record for this day — a single-day document
          // overrides whatever daily_data processCallData derived.
          metrics.daily_data = [
            {
              date: date.toISOString().split("T")[0],
              inbound: metrics.by_direction.inbound,
              outbound: metrics.by_direction.outbound,
            },
          ];

          // Store this day's processed data as historical
          await this.storeHistoricalDay(date, metrics);
        } catch (err) {
          console.error('Error processing date:', err, date);
        }
      }

    } catch (error) {
      console.error("Error storing historical period:", error, error.stack);
      throw error;
    }
  }

  // Aggregate raw Aircall call records into the metrics document shape
  // (totals, direction/status/hour splits, per-user stats, duration
  // histogram, per-day counts). If the input already looks processed
  // (has `.total`), a clean copy of it is returned instead.
  //
  // NOTE(review): `metrics.by_direction[call.direction]++` silently
  // produces NaN for any direction other than 'inbound'/'outbound';
  // likewise `callDate.getHours()` buckets hours in the *server's local*
  // timezone while day bucketing goes through timeManager — presumably
  // intended to be the business timezone, verify.
  processCallData(calls) {
    // If calls is already processed (has total, by_direction, etc.), return it
    if (calls && calls.total !== undefined) {
      console.log('Data already processed:', {
        total: calls.total,
        by_direction: calls.by_direction
      });
      // Return a clean copy of the processed data
      return {
        total: calls.total,
        by_direction: calls.by_direction,
        by_status: calls.by_status,
        by_missed_reason: calls.by_missed_reason,
        by_hour: calls.by_hour,
        by_users: calls.by_users,
        daily_data: calls.daily_data,
        duration_distribution: calls.duration_distribution,
        average_duration: calls.average_duration
      };
    }

    console.log('Processing raw calls:', {
      count: calls.length,
      sample: calls.length > 0 ? {
        id: calls[0].id,
        direction: calls[0].direction,
        status: calls[0].status
      } : null
    });

    // Process raw calls. total_duration is an internal accumulator removed
    // before returning; durations are in seconds (Aircall epoch-second
    // timestamps subtracted).
    const metrics = {
      total: calls.length,
      by_direction: { inbound: 0, outbound: 0 },
      by_status: { answered: 0, missed: 0 },
      by_missed_reason: {},
      by_hour: Array(24).fill(0),
      by_users: {},
      daily_data: [],
      duration_distribution: [
        { range: "0-1m", count: 0 },
        { range: "1-5m", count: 0 },
        { range: "5-15m", count: 0 },
        { range: "15-30m", count: 0 },
        { range: "30m+", count: 0 },
      ],
      average_duration: 0,
      total_duration: 0,
    };

    // Group calls by date for daily data
    const dailyCallsMap = new Map();

    calls.forEach((call) => {
      try {
        // Direction metrics
        metrics.by_direction[call.direction]++;

        // Get call date and hour using TimeManager
        const timestamp = call.started_at * 1000; // Convert to milliseconds
        const callDate = this.ensureDate(timestamp);
        const dayBounds = this.timeManager.getDayBounds(callDate);
        const dayKey = dayBounds.start.toISOString().split("T")[0];
        const hour = callDate.getHours();
        metrics.by_hour[hour]++;

        // Status and duration metrics — a call is "answered" iff it has an
        // answered_at timestamp.
        if (call.answered_at) {
          metrics.by_status.answered++;
          const duration = call.ended_at - call.answered_at;
          metrics.total_duration += duration;

          // Duration distribution (seconds)
          if (duration <= 60) {
            metrics.duration_distribution[0].count++;
          } else if (duration <= 300) {
            metrics.duration_distribution[1].count++;
          } else if (duration <= 900) {
            metrics.duration_distribution[2].count++;
          } else if (duration <= 1800) {
            metrics.duration_distribution[3].count++;
          } else {
            metrics.duration_distribution[4].count++;
          }

          // Track user performance
          if (call.user) {
            const userId = call.user.id;
            if (!metrics.by_users[userId]) {
              metrics.by_users[userId] = {
                id: userId,
                name: call.user.name,
                total: 0,
                answered: 0,
                missed: 0,
                total_duration: 0,
                average_duration: 0,
              };
            }
            metrics.by_users[userId].total++;
            metrics.by_users[userId].answered++;
            metrics.by_users[userId].total_duration += duration;
          }
        } else {
          metrics.by_status.missed++;
          if (call.missed_call_reason) {
            metrics.by_missed_reason[call.missed_call_reason] =
              (metrics.by_missed_reason[call.missed_call_reason] || 0) + 1;
          }

          // Track missed calls by user
          if (call.user) {
            const userId = call.user.id;
            if (!metrics.by_users[userId]) {
              metrics.by_users[userId] = {
                id: userId,
                name: call.user.name,
                total: 0,
                answered: 0,
                missed: 0,
                total_duration: 0,
                average_duration: 0,
              };
            }
            metrics.by_users[userId].total++;
            metrics.by_users[userId].missed++;
          }
        }

        // Group by date for daily data
        if (!dailyCallsMap.has(dayKey)) {
          dailyCallsMap.set(dayKey, { date: dayKey, inbound: 0, outbound: 0 });
        }
        dailyCallsMap.get(dayKey)[call.direction]++;
      } catch (err) {
        console.error('Error processing call:', err, call);
      }
    });

    // Calculate average durations for users
    Object.values(metrics.by_users).forEach((user) => {
      if (user.answered > 0) {
        user.average_duration = Math.round(user.total_duration / user.answered);
      }
    });

    // Calculate global average duration
    if (metrics.by_status.answered > 0) {
      metrics.average_duration = Math.round(
        metrics.total_duration / metrics.by_status.answered
      );
    }

    // Convert daily data map to sorted array (ISO date strings sort
    // chronologically with localeCompare)
    metrics.daily_data = Array.from(dailyCallsMap.values()).sort((a, b) =>
      a.date.localeCompare(b.date)
    );

    // Drop the internal accumulator from the returned document
    delete metrics.total_duration;

    console.log('Processed metrics:', {
      total: metrics.total,
      by_direction: metrics.by_direction,
      by_status: metrics.by_status,
      daily_data_count: metrics.daily_data.length
    });

    return metrics;
  }

  // Upsert one day's processed metrics, keyed by the day's start instant.
  // No-op when MongoDB is unavailable; storage errors are logged and
  // re-thrown so callers can decide whether to continue.
  async storeHistoricalDay(date, data) {
    if (!this.mongodb) return;

    try {
      const collection = this.mongodb.collection(this.options.collection);
      const dayBounds = this.timeManager.getDayBounds(this.ensureDate(date));

      // Ensure consistent data structure with metrics nested in data field
      const document = {
        date: dayBounds.start,
        data: {
          total: data.total,
          by_direction: data.by_direction,
          by_status: data.by_status,
          by_missed_reason: data.by_missed_reason,
          by_hour: data.by_hour,
          by_users: data.by_users,
          daily_data: data.daily_data,
          duration_distribution: data.duration_distribution,
          average_duration: data.average_duration
        },
        updatedAt: new Date()
      };

      await collection.updateOne(
        { date: dayBounds.start },
        { $set: document },
        { upsert: true }
      );
    } catch (error) {
      console.error("Error storing historical day:", error);
      throw error;
    }
  }
}
|
||||||
@@ -0,0 +1,138 @@
|
|||||||
|
import axios from "axios";
|
||||||
|
import { Buffer } from "buffer";
|
||||||
|
import { BaseService } from "../base/BaseService.js";
|
||||||
|
import { AircallDataManager } from "./AircallDataManager.js";
|
||||||
|
|
||||||
|
export class AircallService extends BaseService {
|
||||||
|
constructor(config) {
|
||||||
|
super(config);
|
||||||
|
this.baseUrl = "https://api.aircall.io/v1";
|
||||||
|
console.log('Initializing Aircall service with credentials:', {
|
||||||
|
apiId: config.apiId ? 'present' : 'missing',
|
||||||
|
apiToken: config.apiToken ? 'present' : 'missing'
|
||||||
|
});
|
||||||
|
this.auth = Buffer.from(`${config.apiId}:${config.apiToken}`).toString(
|
||||||
|
"base64"
|
||||||
|
);
|
||||||
|
this.dataManager = new AircallDataManager(
|
||||||
|
this.mongodb,
|
||||||
|
this.redis,
|
||||||
|
this.timeManager
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!config.apiId || !config.apiToken) {
|
||||||
|
throw new Error("Aircall API credentials are required");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getMetrics(timeRange) {
|
||||||
|
const dateRange = await this.timeManager.getDateRange(timeRange);
|
||||||
|
console.log('Fetching metrics for date range:', {
|
||||||
|
start: dateRange.start.toISOString(),
|
||||||
|
end: dateRange.end.toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
return this.dataManager.getData(dateRange, async (range) => {
|
||||||
|
const calls = await this.fetchAllCalls(range.start, range.end);
|
||||||
|
console.log('Fetched calls:', {
|
||||||
|
count: calls.length,
|
||||||
|
sample: calls.length > 0 ? calls[0] : null
|
||||||
|
});
|
||||||
|
return calls;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async fetchAllCalls(start, end) {
|
||||||
|
try {
|
||||||
|
let allCalls = [];
|
||||||
|
let currentPage = 1;
|
||||||
|
let hasMore = true;
|
||||||
|
let totalPages = null;
|
||||||
|
|
||||||
|
while (hasMore) {
|
||||||
|
const response = await this.makeRequest("/calls", {
|
||||||
|
from: Math.floor(start.getTime() / 1000),
|
||||||
|
to: Math.floor(end.getTime() / 1000),
|
||||||
|
order: "asc",
|
||||||
|
page: currentPage,
|
||||||
|
per_page: 50,
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log('API Response:', {
|
||||||
|
page: currentPage,
|
||||||
|
totalPages: response.meta.total_pages,
|
||||||
|
callsCount: response.calls?.length,
|
||||||
|
params: {
|
||||||
|
from: Math.floor(start.getTime() / 1000),
|
||||||
|
to: Math.floor(end.getTime() / 1000)
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.calls) {
|
||||||
|
throw new Error("Invalid API response format");
|
||||||
|
}
|
||||||
|
|
||||||
|
allCalls = [...allCalls, ...response.calls];
|
||||||
|
hasMore = response.meta.next_page_link !== null;
|
||||||
|
totalPages = response.meta.total_pages;
|
||||||
|
currentPage++;
|
||||||
|
|
||||||
|
if (hasMore) {
|
||||||
|
// Rate limiting pause
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return allCalls;
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching all calls:", error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async makeRequest(endpoint, params = {}) {
|
||||||
|
try {
|
||||||
|
console.log('Making API request:', {
|
||||||
|
endpoint,
|
||||||
|
params
|
||||||
|
});
|
||||||
|
const response = await axios.get(`${this.baseUrl}${endpoint}`, {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Basic ${this.auth}`,
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
params,
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
if (error.response?.status === 429) {
|
||||||
|
console.log("Rate limit reached, waiting before retry...");
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 5000));
|
||||||
|
return this.makeRequest(endpoint, params);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.handleApiError(error, `Error making request to ${endpoint}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
validateApiResponse(response, context = "") {
|
||||||
|
if (!response || typeof response !== "object") {
|
||||||
|
throw new Error(`${context}: Invalid API response format`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.error) {
|
||||||
|
throw new Error(`${context}: ${response.error}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
getPaginationInfo(meta) {
|
||||||
|
return {
|
||||||
|
currentPage: meta.current_page,
|
||||||
|
totalPages: meta.total_pages,
|
||||||
|
hasNextPage: meta.next_page_link !== null,
|
||||||
|
totalRecords: meta.total,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
import { createTimeManager } from '../../utils/timeUtils.js';
|
||||||
|
|
||||||
|
export class BaseService {
|
||||||
|
constructor(config) {
|
||||||
|
this.config = config;
|
||||||
|
this.mongodb = config.mongodb;
|
||||||
|
this.redis = config.redis;
|
||||||
|
this.logger = config.logger;
|
||||||
|
this.timeManager = createTimeManager(config.timezone, config.dayStartsAt);
|
||||||
|
}
|
||||||
|
|
||||||
|
handleApiError(error, context = '') {
|
||||||
|
this.logger.error(`API Error ${context}:`, {
|
||||||
|
message: error.message,
|
||||||
|
status: error.response?.status,
|
||||||
|
data: error.response?.data,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (error.response) {
|
||||||
|
const status = error.response.status;
|
||||||
|
const message = error.response.data?.message || error.response.statusText;
|
||||||
|
|
||||||
|
if (status === 429) {
|
||||||
|
throw new Error('API rate limit exceeded. Please try again later.');
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`API error (${status}): ${message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,320 @@
|
|||||||
|
export class DataManager {
|
||||||
|
constructor(mongodb, redis, timeManager, options) {
|
||||||
|
this.mongodb = mongodb;
|
||||||
|
this.redis = redis;
|
||||||
|
this.timeManager = timeManager;
|
||||||
|
this.options = options || {};
|
||||||
|
}
|
||||||
|
|
||||||
|
ensureDate(d) {
|
||||||
|
if (d instanceof Date) return d;
|
||||||
|
if (typeof d === 'string') return new Date(d);
|
||||||
|
if (typeof d === 'number') return new Date(d);
|
||||||
|
if (d && d.date) return new Date(d.date); // Handle MongoDB records
|
||||||
|
console.error('Invalid date value:', d);
|
||||||
|
return new Date(); // fallback to current date
|
||||||
|
}
|
||||||
|
|
||||||
|
async getData(dateRange, fetchFn) {
|
||||||
|
try {
|
||||||
|
// Get historical data from MongoDB
|
||||||
|
const historicalData = await this.getHistoricalDays(dateRange.start, dateRange.end);
|
||||||
|
|
||||||
|
// Find any missing date ranges
|
||||||
|
const missingRanges = this.findMissingDateRanges(dateRange.start, dateRange.end, historicalData);
|
||||||
|
|
||||||
|
// Fetch missing data
|
||||||
|
for (const range of missingRanges) {
|
||||||
|
const data = await fetchFn(range);
|
||||||
|
await this.storeHistoricalPeriod(range.start, range.end, data);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get updated historical data
|
||||||
|
const updatedData = await this.getHistoricalDays(dateRange.start, dateRange.end);
|
||||||
|
|
||||||
|
// Handle both nested and flat data structures
|
||||||
|
if (updatedData && updatedData.length > 0) {
|
||||||
|
// Process each record and combine them
|
||||||
|
const processedData = updatedData.map(record => {
|
||||||
|
if (record.data) {
|
||||||
|
return record.data;
|
||||||
|
}
|
||||||
|
if (record.total !== undefined) {
|
||||||
|
return {
|
||||||
|
total: record.total,
|
||||||
|
by_direction: record.by_direction,
|
||||||
|
by_status: record.by_status,
|
||||||
|
by_missed_reason: record.by_missed_reason,
|
||||||
|
by_hour: record.by_hour,
|
||||||
|
by_users: record.by_users,
|
||||||
|
daily_data: record.daily_data,
|
||||||
|
duration_distribution: record.duration_distribution,
|
||||||
|
average_duration: record.average_duration
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
// Combine the data
|
||||||
|
if (processedData.length > 0) {
|
||||||
|
return this.combineMetrics(processedData);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise process as raw call data
|
||||||
|
return this.processCallData(updatedData);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getData:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
findMissingDateRanges(start, end, existingDates) {
|
||||||
|
const missingRanges = [];
|
||||||
|
const existingDatesSet = new Set(
|
||||||
|
existingDates.map((d) => {
|
||||||
|
// Handle both nested and flat data structures
|
||||||
|
const date = d.date ? d.date : d;
|
||||||
|
return this.ensureDate(date).toISOString().split("T")[0];
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
let current = new Date(start);
|
||||||
|
const endDate = new Date(end);
|
||||||
|
|
||||||
|
while (current < endDate) {
|
||||||
|
const dayBounds = this.timeManager.getDayBounds(current);
|
||||||
|
const dayKey = dayBounds.start.toISOString().split("T")[0];
|
||||||
|
|
||||||
|
if (!existingDatesSet.has(dayKey)) {
|
||||||
|
// Found a missing day
|
||||||
|
const missingStart = new Date(dayBounds.start);
|
||||||
|
const missingEnd = new Date(dayBounds.end);
|
||||||
|
|
||||||
|
missingRanges.push({
|
||||||
|
start: missingStart,
|
||||||
|
end: missingEnd,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move to the next day using timeManager to ensure proper business day boundaries
|
||||||
|
current = new Date(dayBounds.end.getTime() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return missingRanges;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCurrentDay(fetchFn) {
|
||||||
|
const now = new Date();
|
||||||
|
const todayBounds = this.timeManager.getDayBounds(now);
|
||||||
|
const todayKey = this.timeManager.formatDate(todayBounds.start);
|
||||||
|
const cacheKey = `${this.options.collection}:current_day:${todayKey}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Check cache first
|
||||||
|
if (this.redis?.isOpen) {
|
||||||
|
const cached = await this.redis.get(cacheKey);
|
||||||
|
if (cached) {
|
||||||
|
const parsedCache = JSON.parse(cached);
|
||||||
|
if (parsedCache.total !== undefined) {
|
||||||
|
// Use timeManager to check if the cached data is for today
|
||||||
|
const cachedDate = new Date(parsedCache.daily_data[0].date);
|
||||||
|
const isToday = this.timeManager.isToday(cachedDate);
|
||||||
|
|
||||||
|
if (isToday) {
|
||||||
|
return parsedCache;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get safe end time that's never in the future
|
||||||
|
const safeEnd = this.timeManager.getCurrentBusinessDayEnd();
|
||||||
|
|
||||||
|
// Fetch and process current day data with safe end time
|
||||||
|
const data = await fetchFn({
|
||||||
|
start: todayBounds.start,
|
||||||
|
end: safeEnd
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!data) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cache the data with a shorter TTL for today's data
|
||||||
|
if (this.redis?.isOpen) {
|
||||||
|
const ttl = Math.min(
|
||||||
|
this.options.redisTTL,
|
||||||
|
60 * 5 // 5 minutes max for today's data
|
||||||
|
);
|
||||||
|
await this.redis.set(cacheKey, JSON.stringify(data), {
|
||||||
|
EX: ttl,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getCurrentDay:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getDayCount(start, end) {
|
||||||
|
// Calculate full days between dates using timeManager
|
||||||
|
const startDay = this.timeManager.getDayBounds(start);
|
||||||
|
const endDay = this.timeManager.getDayBounds(end);
|
||||||
|
return Math.ceil((endDay.end - startDay.start) / (24 * 60 * 60 * 1000));
|
||||||
|
}
|
||||||
|
|
||||||
|
async fetchMissingDays(start, end, existingData, fetchFn) {
|
||||||
|
const existingDates = new Set(
|
||||||
|
existingData.map((d) => this.timeManager.formatDate(d.date))
|
||||||
|
);
|
||||||
|
const missingData = [];
|
||||||
|
|
||||||
|
let currentDate = new Date(start);
|
||||||
|
while (currentDate < end) {
|
||||||
|
const dayBounds = this.timeManager.getDayBounds(currentDate);
|
||||||
|
const dateString = this.timeManager.formatDate(dayBounds.start);
|
||||||
|
|
||||||
|
if (!existingDates.has(dateString)) {
|
||||||
|
const data = await fetchFn({
|
||||||
|
start: dayBounds.start,
|
||||||
|
end: dayBounds.end,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.storeHistoricalDay(dayBounds.start, data);
|
||||||
|
missingData.push(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move to next day using timeManager to ensure proper business day boundaries
|
||||||
|
currentDate = new Date(dayBounds.end.getTime() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return missingData;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getHistoricalDays(start, end) {
|
||||||
|
try {
|
||||||
|
if (!this.mongodb) return [];
|
||||||
|
|
||||||
|
const collection = this.mongodb.collection(this.options.collection);
|
||||||
|
const startDay = this.timeManager.getDayBounds(start);
|
||||||
|
const endDay = this.timeManager.getDayBounds(end);
|
||||||
|
|
||||||
|
const records = await collection
|
||||||
|
.find({
|
||||||
|
date: {
|
||||||
|
$gte: startDay.start,
|
||||||
|
$lt: endDay.start,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.sort({ date: 1 })
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
return records;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error getting historical days:', error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
combineMetrics(metricsArray) {
|
||||||
|
if (!metricsArray || metricsArray.length === 0) return null;
|
||||||
|
if (metricsArray.length === 1) return metricsArray[0];
|
||||||
|
|
||||||
|
const combined = {
|
||||||
|
total: 0,
|
||||||
|
by_direction: { inbound: 0, outbound: 0 },
|
||||||
|
by_status: { answered: 0, missed: 0 },
|
||||||
|
by_missed_reason: {},
|
||||||
|
by_hour: Array(24).fill(0),
|
||||||
|
by_users: {},
|
||||||
|
daily_data: [],
|
||||||
|
duration_distribution: [
|
||||||
|
{ range: '0-1m', count: 0 },
|
||||||
|
{ range: '1-5m', count: 0 },
|
||||||
|
{ range: '5-15m', count: 0 },
|
||||||
|
{ range: '15-30m', count: 0 },
|
||||||
|
{ range: '30m+', count: 0 }
|
||||||
|
],
|
||||||
|
average_duration: 0
|
||||||
|
};
|
||||||
|
|
||||||
|
let totalAnswered = 0;
|
||||||
|
let totalDuration = 0;
|
||||||
|
|
||||||
|
metricsArray.forEach(metrics => {
|
||||||
|
// Sum basic metrics
|
||||||
|
combined.total += metrics.total;
|
||||||
|
combined.by_direction.inbound += metrics.by_direction.inbound;
|
||||||
|
combined.by_direction.outbound += metrics.by_direction.outbound;
|
||||||
|
combined.by_status.answered += metrics.by_status.answered;
|
||||||
|
combined.by_status.missed += metrics.by_status.missed;
|
||||||
|
|
||||||
|
// Combine missed reasons
|
||||||
|
Object.entries(metrics.by_missed_reason).forEach(([reason, count]) => {
|
||||||
|
combined.by_missed_reason[reason] = (combined.by_missed_reason[reason] || 0) + count;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Sum hourly data
|
||||||
|
metrics.by_hour.forEach((count, hour) => {
|
||||||
|
combined.by_hour[hour] += count;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Combine user data
|
||||||
|
Object.entries(metrics.by_users).forEach(([userId, userData]) => {
|
||||||
|
if (!combined.by_users[userId]) {
|
||||||
|
combined.by_users[userId] = {
|
||||||
|
id: userData.id,
|
||||||
|
name: userData.name,
|
||||||
|
total: 0,
|
||||||
|
answered: 0,
|
||||||
|
missed: 0,
|
||||||
|
total_duration: 0,
|
||||||
|
average_duration: 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
combined.by_users[userId].total += userData.total;
|
||||||
|
combined.by_users[userId].answered += userData.answered;
|
||||||
|
combined.by_users[userId].missed += userData.missed;
|
||||||
|
combined.by_users[userId].total_duration += userData.total_duration || 0;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Combine duration distribution
|
||||||
|
metrics.duration_distribution.forEach((dist, index) => {
|
||||||
|
combined.duration_distribution[index].count += dist.count;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Accumulate for average duration calculation
|
||||||
|
if (metrics.average_duration && metrics.by_status.answered) {
|
||||||
|
totalDuration += metrics.average_duration * metrics.by_status.answered;
|
||||||
|
totalAnswered += metrics.by_status.answered;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge daily data
|
||||||
|
if (metrics.daily_data) {
|
||||||
|
combined.daily_data.push(...metrics.daily_data);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Calculate final average duration
|
||||||
|
if (totalAnswered > 0) {
|
||||||
|
combined.average_duration = Math.round(totalDuration / totalAnswered);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate user averages
|
||||||
|
Object.values(combined.by_users).forEach(user => {
|
||||||
|
if (user.answered > 0) {
|
||||||
|
user.average_duration = Math.round(user.total_duration / user.answered);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Sort and deduplicate daily data
|
||||||
|
combined.daily_data = Array.from(
|
||||||
|
new Map(combined.daily_data.map(item => [item.date, item])).values()
|
||||||
|
).sort((a, b) => a.date.localeCompare(b.date));
|
||||||
|
|
||||||
|
return combined;
|
||||||
|
}
|
||||||
|
}
|
||||||
15
inventory-server/dashboard/aircall-server/src/utils/db.js
Normal file
15
inventory-server/dashboard/aircall-server/src/utils/db.js
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { MongoClient } from 'mongodb';
|
||||||
|
|
||||||
|
const MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017/dashboard';
|
||||||
|
const DB_NAME = process.env.MONGODB_DB || 'dashboard';
|
||||||
|
|
||||||
|
export async function connectMongoDB() {
|
||||||
|
try {
|
||||||
|
const client = await MongoClient.connect(MONGODB_URI);
|
||||||
|
console.log('Connected to MongoDB');
|
||||||
|
return client.db(DB_NAME);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('MongoDB connection error:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
import winston from 'winston';
|
||||||
|
import path from 'path';
|
||||||
|
|
||||||
|
export function createLogger(service) {
|
||||||
|
return winston.createLogger({
|
||||||
|
level: process.env.LOG_LEVEL || 'info',
|
||||||
|
format: winston.format.combine(
|
||||||
|
winston.format.timestamp(),
|
||||||
|
winston.format.json()
|
||||||
|
),
|
||||||
|
defaultMeta: { service },
|
||||||
|
transports: [
|
||||||
|
// Write all logs to console
|
||||||
|
new winston.transports.Console({
|
||||||
|
format: winston.format.combine(
|
||||||
|
winston.format.colorize(),
|
||||||
|
winston.format.simple()
|
||||||
|
)
|
||||||
|
}),
|
||||||
|
// Write all logs to service-specific files
|
||||||
|
new winston.transports.File({
|
||||||
|
filename: path.join('logs', `${service}-error.log`),
|
||||||
|
level: 'error'
|
||||||
|
}),
|
||||||
|
new winston.transports.File({
|
||||||
|
filename: path.join('logs', `${service}-combined.log`)
|
||||||
|
})
|
||||||
|
]
|
||||||
|
});
|
||||||
|
}
|
||||||
23
inventory-server/dashboard/aircall-server/src/utils/redis.js
Normal file
23
inventory-server/dashboard/aircall-server/src/utils/redis.js
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import { createClient } from 'redis';
|
||||||
|
|
||||||
|
const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
|
||||||
|
|
||||||
|
export async function createRedisClient() {
|
||||||
|
try {
|
||||||
|
const client = createClient({
|
||||||
|
url: REDIS_URL
|
||||||
|
});
|
||||||
|
|
||||||
|
await client.connect();
|
||||||
|
console.log('Connected to Redis');
|
||||||
|
|
||||||
|
client.on('error', (err) => {
|
||||||
|
console.error('Redis error:', err);
|
||||||
|
});
|
||||||
|
|
||||||
|
return client;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Redis connection error:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
262
inventory-server/dashboard/aircall-server/src/utils/timeUtils.js
Normal file
262
inventory-server/dashboard/aircall-server/src/utils/timeUtils.js
Normal file
@@ -0,0 +1,262 @@
|
|||||||
|
class TimeManager {
|
||||||
|
static ALLOWED_RANGES = ['today', 'yesterday', 'last2days', 'last7days', 'last30days', 'last90days',
|
||||||
|
'previous7days', 'previous30days', 'previous90days'];
|
||||||
|
|
||||||
|
constructor(timezone = 'America/New_York', dayStartsAt = 1) {
|
||||||
|
this.timezone = timezone;
|
||||||
|
this.dayStartsAt = dayStartsAt;
|
||||||
|
}
|
||||||
|
|
||||||
|
getDayBounds(date) {
|
||||||
|
try {
|
||||||
|
const now = new Date();
|
||||||
|
const targetDate = new Date(date);
|
||||||
|
|
||||||
|
// For today
|
||||||
|
if (
|
||||||
|
targetDate.getUTCFullYear() === now.getUTCFullYear() &&
|
||||||
|
targetDate.getUTCMonth() === now.getUTCMonth() &&
|
||||||
|
targetDate.getUTCDate() === now.getUTCDate()
|
||||||
|
) {
|
||||||
|
// If current time is before day start (1 AM ET / 6 AM UTC),
|
||||||
|
// use previous day's start until now
|
||||||
|
const todayStart = new Date(Date.UTC(
|
||||||
|
now.getUTCFullYear(),
|
||||||
|
now.getUTCMonth(),
|
||||||
|
now.getUTCDate(),
|
||||||
|
this.dayStartsAt + 5,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
0
|
||||||
|
));
|
||||||
|
|
||||||
|
if (now < todayStart) {
|
||||||
|
const yesterdayStart = new Date(todayStart);
|
||||||
|
yesterdayStart.setUTCDate(yesterdayStart.getUTCDate() - 1);
|
||||||
|
return { start: yesterdayStart, end: now };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { start: todayStart, end: now };
|
||||||
|
}
|
||||||
|
|
||||||
|
// For past days, use full 24-hour period
|
||||||
|
const normalizedDate = new Date(Date.UTC(
|
||||||
|
targetDate.getUTCFullYear(),
|
||||||
|
targetDate.getUTCMonth(),
|
||||||
|
targetDate.getUTCDate()
|
||||||
|
));
|
||||||
|
|
||||||
|
const dayStart = new Date(normalizedDate);
|
||||||
|
dayStart.setUTCHours(this.dayStartsAt + 5, 0, 0, 0);
|
||||||
|
|
||||||
|
const dayEnd = new Date(dayStart);
|
||||||
|
dayEnd.setUTCDate(dayEnd.getUTCDate() + 1);
|
||||||
|
|
||||||
|
return { start: dayStart, end: dayEnd };
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getDayBounds:', error);
|
||||||
|
throw new Error(`Failed to calculate day bounds: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getDateRange(period) {
|
||||||
|
try {
|
||||||
|
const now = new Date();
|
||||||
|
const todayBounds = this.getDayBounds(now);
|
||||||
|
const end = new Date();
|
||||||
|
|
||||||
|
switch (period) {
|
||||||
|
case 'today':
|
||||||
|
return {
|
||||||
|
start: todayBounds.start,
|
||||||
|
end
|
||||||
|
};
|
||||||
|
case 'yesterday': {
|
||||||
|
const yesterday = new Date(now);
|
||||||
|
yesterday.setDate(yesterday.getDate() - 1);
|
||||||
|
return this.getDayBounds(yesterday);
|
||||||
|
}
|
||||||
|
case 'last2days': {
|
||||||
|
const twoDaysAgo = new Date(now);
|
||||||
|
twoDaysAgo.setDate(twoDaysAgo.getDate() - 2);
|
||||||
|
return this.getDayBounds(twoDaysAgo);
|
||||||
|
}
|
||||||
|
case 'last7days': {
|
||||||
|
const start = new Date(now);
|
||||||
|
start.setDate(start.getDate() - 6);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'previous7days': {
|
||||||
|
const end = new Date(now);
|
||||||
|
end.setDate(end.getDate() - 7);
|
||||||
|
const start = new Date(end);
|
||||||
|
start.setDate(start.getDate() - 6);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end: this.getDayBounds(end).end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'last30days': {
|
||||||
|
const start = new Date(now);
|
||||||
|
start.setDate(start.getDate() - 29);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'previous30days': {
|
||||||
|
const end = new Date(now);
|
||||||
|
end.setDate(end.getDate() - 30);
|
||||||
|
const start = new Date(end);
|
||||||
|
start.setDate(start.getDate() - 29);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end: this.getDayBounds(end).end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'last90days': {
|
||||||
|
const start = new Date(now);
|
||||||
|
start.setDate(start.getDate() - 89);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'previous90days': {
|
||||||
|
const end = new Date(now);
|
||||||
|
end.setDate(end.getDate() - 90);
|
||||||
|
const start = new Date(end);
|
||||||
|
start.setDate(start.getDate() - 89);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end: this.getDayBounds(end).end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported time period: ${period}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getDateRange:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getPreviousPeriod(period) {
|
||||||
|
try {
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
switch (period) {
|
||||||
|
case 'today':
|
||||||
|
return 'yesterday';
|
||||||
|
case 'yesterday': {
|
||||||
|
// Return bounds for 2 days ago
|
||||||
|
const twoDaysAgo = new Date(now);
|
||||||
|
twoDaysAgo.setDate(twoDaysAgo.getDate() - 2);
|
||||||
|
return this.getDayBounds(twoDaysAgo);
|
||||||
|
}
|
||||||
|
case 'last7days': {
|
||||||
|
// Return bounds for previous 7 days
|
||||||
|
const end = new Date(now);
|
||||||
|
end.setDate(end.getDate() - 7);
|
||||||
|
const start = new Date(end);
|
||||||
|
start.setDate(start.getDate() - 7);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end: this.getDayBounds(end).end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'last30days': {
|
||||||
|
const end = new Date(now);
|
||||||
|
end.setDate(end.getDate() - 30);
|
||||||
|
const start = new Date(end);
|
||||||
|
start.setDate(start.getDate() - 30);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end: this.getDayBounds(end).end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case 'last90days': {
|
||||||
|
const end = new Date(now);
|
||||||
|
end.setDate(end.getDate() - 90);
|
||||||
|
const start = new Date(end);
|
||||||
|
start.setDate(start.getDate() - 90);
|
||||||
|
return {
|
||||||
|
start: this.getDayBounds(start).start,
|
||||||
|
end: this.getDayBounds(end).end
|
||||||
|
};
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported time period: ${period}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getPreviousPeriod:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getCurrentBusinessDayEnd() {
|
||||||
|
try {
|
||||||
|
const now = new Date();
|
||||||
|
const todayBounds = this.getDayBounds(now);
|
||||||
|
|
||||||
|
// If current time is before day start (1 AM ET / 6 AM UTC),
|
||||||
|
// then we're still in yesterday's business day
|
||||||
|
const todayStart = new Date(Date.UTC(
|
||||||
|
now.getUTCFullYear(),
|
||||||
|
now.getUTCMonth(),
|
||||||
|
now.getUTCDate(),
|
||||||
|
this.dayStartsAt + 5,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
0
|
||||||
|
));
|
||||||
|
|
||||||
|
if (now < todayStart) {
|
||||||
|
const yesterdayBounds = this.getDayBounds(new Date(now.getTime() - 24 * 60 * 60 * 1000));
|
||||||
|
return yesterdayBounds.end;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the earlier of current time or today's end
|
||||||
|
return now < todayBounds.end ? now : todayBounds.end;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getCurrentBusinessDayEnd:', error);
|
||||||
|
return new Date();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
isValidTimeRange(timeRange) {
|
||||||
|
return TimeManager.ALLOWED_RANGES.includes(timeRange);
|
||||||
|
}
|
||||||
|
|
||||||
|
isToday(date) {
|
||||||
|
const now = new Date();
|
||||||
|
const targetDate = new Date(date);
|
||||||
|
return (
|
||||||
|
targetDate.getUTCFullYear() === now.getUTCFullYear() &&
|
||||||
|
targetDate.getUTCMonth() === now.getUTCMonth() &&
|
||||||
|
targetDate.getUTCDate() === now.getUTCDate()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
formatDate(date) {
|
||||||
|
try {
|
||||||
|
return date.toLocaleString('en-US', {
|
||||||
|
timeZone: this.timezone,
|
||||||
|
year: 'numeric',
|
||||||
|
month: '2-digit',
|
||||||
|
day: '2-digit',
|
||||||
|
hour: '2-digit',
|
||||||
|
minute: '2-digit',
|
||||||
|
second: '2-digit'
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error formatting date:', error);
|
||||||
|
return date.toISOString();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const createTimeManager = (timezone, dayStartsAt) => new TimeManager(timezone, dayStartsAt);
|
||||||
10
inventory-server/dashboard/auth-server/.env.example
Normal file
10
inventory-server/dashboard/auth-server/.env.example
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
# Server Configuration
|
||||||
|
NODE_ENV=development
|
||||||
|
PORT=3003
|
||||||
|
|
||||||
|
# Authentication
|
||||||
|
JWT_SECRET=your-secret-key-here
|
||||||
|
DASHBOARD_PASSWORD=your-dashboard-password-here
|
||||||
|
|
||||||
|
# Cookie Settings
|
||||||
|
COOKIE_DOMAIN=localhost # In production: .kent.pw
|
||||||
203
inventory-server/dashboard/auth-server/index.js
Normal file
203
inventory-server/dashboard/auth-server/index.js
Normal file
@@ -0,0 +1,203 @@
|
|||||||
|
// auth-server/index.js
|
||||||
|
const path = require('path');
|
||||||
|
require('dotenv').config({ path: path.join(__dirname, '.env') });
|
||||||
|
const express = require('express');
|
||||||
|
const cors = require('cors');
|
||||||
|
const cookieParser = require('cookie-parser');
|
||||||
|
const jwt = require('jsonwebtoken');
|
||||||
|
|
||||||
|
// Debug environment variables
|
||||||
|
console.log('Environment variables loaded from:', path.join(__dirname, '.env'));
|
||||||
|
console.log('Current directory:', __dirname);
|
||||||
|
console.log('Available env vars:', Object.keys(process.env));
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const PORT = process.env.PORT || 3003;
|
||||||
|
const JWT_SECRET = process.env.JWT_SECRET;
|
||||||
|
const DASHBOARD_PASSWORD = process.env.DASHBOARD_PASSWORD;
|
||||||
|
|
||||||
|
// Validate required environment variables
|
||||||
|
if (!JWT_SECRET || !DASHBOARD_PASSWORD) {
|
||||||
|
console.error('Missing required environment variables:');
|
||||||
|
if (!JWT_SECRET) console.error('- JWT_SECRET');
|
||||||
|
if (!DASHBOARD_PASSWORD) console.error('- DASHBOARD_PASSWORD');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Middleware
|
||||||
|
app.use(express.json());
|
||||||
|
app.use(cookieParser());
|
||||||
|
|
||||||
|
// Configure CORS
|
||||||
|
const corsOptions = {
|
||||||
|
origin: function(origin, callback) {
|
||||||
|
const allowedOrigins = [
|
||||||
|
'http://localhost:3000',
|
||||||
|
'https://dashboard.kent.pw'
|
||||||
|
];
|
||||||
|
|
||||||
|
console.log('CORS check for origin:', origin);
|
||||||
|
|
||||||
|
// Allow local network IPs (192.168.1.xxx)
|
||||||
|
if (origin && origin.match(/^http:\/\/192\.168\.1\.\d{1,3}(:\d+)?$/)) {
|
||||||
|
callback(null, true);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if origin is in allowed list
|
||||||
|
if (!origin || allowedOrigins.indexOf(origin) !== -1) {
|
||||||
|
callback(null, true);
|
||||||
|
} else {
|
||||||
|
callback(new Error('Not allowed by CORS'));
|
||||||
|
}
|
||||||
|
},
|
||||||
|
credentials: true,
|
||||||
|
methods: ['GET', 'POST', 'OPTIONS'],
|
||||||
|
allowedHeaders: ['Content-Type', 'Authorization', 'Cookie', 'Accept'],
|
||||||
|
exposedHeaders: ['Set-Cookie']
|
||||||
|
};
|
||||||
|
|
||||||
|
app.use(cors(corsOptions));
|
||||||
|
app.options('*', cors(corsOptions));
|
||||||
|
|
||||||
|
// Debug logging
|
||||||
|
app.use((req, res, next) => {
|
||||||
|
console.log(`${new Date().toISOString()} ${req.method} ${req.url}`);
|
||||||
|
console.log('Headers:', req.headers);
|
||||||
|
console.log('Cookies:', req.cookies);
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Health check endpoint
|
||||||
|
app.get('/health', (req, res) => {
|
||||||
|
res.json({
|
||||||
|
status: 'ok',
|
||||||
|
timestamp: new Date().toISOString()
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Auth endpoints
// POST /login — verifies the shared dashboard password and, on success,
// issues a 24h JWT in an httpOnly "token" cookie. Cookie flags adapt to
// whether the caller is on the local network (lax / not secure) or the
// public dashboard domain (none / secure, scoped to .kent.pw).
//
// SECURITY FIX: the previous version logged the raw request body (which
// contains the plaintext password) and the provided/expected password
// lengths, and compared passwords with `===` (timing side-channel).
// Credential material is no longer logged and the comparison is
// constant-time via crypto.timingSafeEqual.
app.post('/login', (req, res) => {
  console.log('Login attempt received');
  console.log('Origin:', req.headers.origin);

  const { password } = req.body;

  if (!password) {
    console.log('No password provided');
    return res.status(400).json({
      success: false,
      message: 'Password is required'
    });
  }

  // Constant-time comparison. timingSafeEqual requires equal-length
  // buffers, so a length check gates it; a mismatched length is simply
  // a failed login.
  const crypto = require('crypto');
  const provided = Buffer.from(String(password));
  const expected = Buffer.from(String(DASHBOARD_PASSWORD));
  const matches =
    provided.length === expected.length &&
    crypto.timingSafeEqual(provided, expected);

  if (matches) {
    console.log('Password matched');
    const token = jwt.sign({ authorized: true }, JWT_SECRET, {
      expiresIn: '24h'
    });

    // Determine if the request is from the local network
    const isLocalNetwork = req.headers.origin?.includes('192.168.1.') || req.headers.origin?.includes('localhost');

    const cookieOptions = {
      httpOnly: true,
      secure: !isLocalNetwork, // only require HTTPS for non-local requests
      sameSite: isLocalNetwork ? 'lax' : 'none',
      path: '/',
      maxAge: 24 * 60 * 60 * 1000 // 24 hours
    };

    // Only set a cookie domain for production so LAN logins still work.
    if (!isLocalNetwork) {
      cookieOptions.domain = '.kent.pw';
    }

    console.log('Setting cookie with options:', cookieOptions);
    res.cookie('token', token, cookieOptions);

    res.json({
      success: true,
      debug: {
        origin: req.headers.origin,
        cookieOptions
      }
    });
  } else {
    console.log('Password mismatch');
    res.status(401).json({
      success: false,
      message: 'Invalid password'
    });
  }
});
|
||||||
|
|
||||||
|
// GET /check — reports whether the request carries a valid auth JWT in
// the "token" cookie. Responds 401 with a machine-readable error code
// when the cookie is absent or fails verification.
app.get('/check', function (req, res) {
  console.log('Auth check received');
  console.log('All cookies:', req.cookies);
  console.log('Headers:', req.headers);

  const { token } = req.cookies;

  // No cookie at all → caller is not logged in.
  if (!token) {
    console.log('No token found in cookies');
    res.status(401).json({ authenticated: false, error: 'no_token' });
    return;
  }

  // Cookie present: valid only if the signature verifies.
  try {
    const decoded = jwt.verify(token, JWT_SECRET);
    console.log('Token verified successfully:', decoded);
    res.json({ authenticated: true });
  } catch (err) {
    console.log('Token verification failed:', err.message);
    res.status(401).json({
      authenticated: false,
      error: 'invalid_token',
      message: err.message
    });
  }
});
|
||||||
|
|
||||||
|
// POST /logout — clears the auth cookie. The clear options must mirror
// the options used when the cookie was set, or browsers will refuse to
// remove it.
app.post('/logout', function (req, res) {
  const origin = req.headers.origin;
  const onLan = Boolean(
    origin && (origin.includes('192.168.1.') || origin.includes('localhost'))
  );

  const clearOpts = {
    httpOnly: true,
    secure: !onLan,
    sameSite: onLan ? 'lax' : 'none',
    path: '/',
    domain: onLan ? undefined : '.kent.pw'
  };

  console.log('Clearing cookie with options:', clearOpts);
  res.clearCookie('token', clearOpts);
  res.json({ success: true });
});
|
||||||
|
|
||||||
|
// Error handling middleware (must be registered after all routes).
// Logs the full error server-side; clients always get a generic message.
// SECURITY FIX: the previous version echoed err.message to clients in
// every environment, leaking internal details; the raw message is now
// included only outside production (matching the google-server handler).
app.use((err, req, res, next) => {
  console.error('Server error:', err);
  const body = {
    success: false,
    message: 'Internal server error'
  };
  if (process.env.NODE_ENV !== 'production') {
    body.error = err.message; // debugging aid only — never sent in prod
  }
  res.status(500).json(body);
});
|
||||||
|
|
||||||
|
// Boot the HTTP listener and emit a startup summary so a misconfigured
// environment (missing secret/password) is visible immediately in logs.
// Only lengths of secrets are printed, never their values.
app.listen(PORT, function onListening() {
  console.log(`Auth server running on port ${PORT}`);
  const summary = [
    ['Environment:', process.env.NODE_ENV],
    ['CORS origins:', corsOptions.origin],
    ['JWT_SECRET length:', JWT_SECRET?.length],
    ['DASHBOARD_PASSWORD length:', DASHBOARD_PASSWORD?.length]
  ];
  summary.forEach(function (pair) {
    console.log(pair[0], pair[1]);
  });
});
|
||||||
1044
inventory-server/dashboard/auth-server/package-lock.json
generated
Normal file
1044
inventory-server/dashboard/auth-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
22
inventory-server/dashboard/auth-server/package.json
Normal file
22
inventory-server/dashboard/auth-server/package.json
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"name": "auth-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"keywords": [],
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"description": "",
|
||||||
|
"dependencies": {
|
||||||
|
"cookie-parser": "^1.4.7",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"date-fns": "^4.1.0",
|
||||||
|
"date-fns-tz": "^3.2.0",
|
||||||
|
"dotenv": "^16.4.7",
|
||||||
|
"express": "^4.21.1",
|
||||||
|
"express-session": "^1.18.1",
|
||||||
|
"jsonwebtoken": "^9.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
1
inventory-server/dashboard/dashboard.conf
Symbolic link
1
inventory-server/dashboard/dashboard.conf
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
/etc/nginx/sites-enabled/dashboard.conf
|
||||||
2506
inventory-server/dashboard/google-server/package-lock.json
generated
Normal file
2506
inventory-server/dashboard/google-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
21
inventory-server/dashboard/google-server/package.json
Normal file
21
inventory-server/dashboard/google-server/package.json
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"name": "google-analytics-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Google Analytics server for dashboard",
|
||||||
|
"main": "server.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@google-analytics/data": "^4.0.0",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.3.1",
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"redis": "^4.6.11",
|
||||||
|
"winston": "^3.11.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^3.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
254
inventory-server/dashboard/google-server/routes/analytics.js
Normal file
254
inventory-server/dashboard/google-server/routes/analytics.js
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
const express = require('express');
const { BetaAnalyticsDataClient } = require('@google-analytics/data');
const router = express.Router();
const logger = require('../utils/logger');

// Initialize GA4 client.
// ROBUSTNESS FIX: GOOGLE_APPLICATION_CREDENTIALS_JSON may arrive as a
// JSON string (typical for env vars) or already parsed (when injected by
// tooling). The previous unconditional JSON.parse crashed module load in
// the latter case; handle both, matching services/analytics.service.js.
const rawCredentials = process.env.GOOGLE_APPLICATION_CREDENTIALS_JSON;
const analyticsClient = new BetaAnalyticsDataClient({
  credentials: typeof rawCredentials === 'string' ? JSON.parse(rawCredentials) : rawCredentials
});

// GA4 property all reports in this router query against.
const propertyId = process.env.GA_PROPERTY_ID;

// Cache durations (seconds) for the Redis response cache.
const CACHE_DURATIONS = {
  REALTIME_BASIC: 60, // 1 minute
  REALTIME_DETAILED: 300, // 5 minutes
  BASIC_METRICS: 3600, // 1 hour
  USER_BEHAVIOR: 3600 // 1 hour
};
|
||||||
|
|
||||||
|
// Basic metrics endpoint.
// GET /metrics?startDate=7daysAgo — daily GA4 metrics from `startDate`
// through today, served from Redis when a cached copy exists.
router.get('/metrics', async (req, res) => {
  try {
    const { startDate = '7daysAgo' } = req.query;
    const cacheKey = `analytics:basic_metrics:${startDate}`;

    // Serve from Redis when a fresh copy exists.
    const cached = await req.redisClient.get(cacheKey);
    if (cached) {
      logger.info('Returning cached basic metrics data');
      return res.json({ success: true, data: JSON.parse(cached) });
    }

    // Cache miss: query GA4 directly, one row per date.
    const [report] = await analyticsClient.runReport({
      property: `properties/${propertyId}`,
      dateRanges: [{ startDate, endDate: 'today' }],
      dimensions: [{ name: 'date' }],
      metrics: [
        { name: 'activeUsers' },
        { name: 'newUsers' },
        { name: 'averageSessionDuration' },
        { name: 'screenPageViews' },
        { name: 'bounceRate' },
        { name: 'conversions' }
      ],
      returnPropertyQuota: true
    });

    // Store the raw report for subsequent requests.
    await req.redisClient.set(cacheKey, JSON.stringify(report), {
      EX: CACHE_DURATIONS.BASIC_METRICS
    });

    res.json({ success: true, data: report });
  } catch (error) {
    logger.error('Error fetching basic metrics:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||||
|
|
||||||
|
// Realtime basic data endpoint.
// GET /realtime/basic — active users right now, active users over the
// last 5 minutes, a per-minute time series, and GA4 quota usage.
// Cached in Redis for CACHE_DURATIONS.REALTIME_BASIC seconds.
//
// FIXES: the three realtime reports are independent, so they now run
// concurrently instead of serially; and propertyQuota is guarded, since
// dereferencing it unconditionally threw a TypeError (surfacing as an
// unhelpful 500) whenever GA4 omitted quota info from the response.
router.get('/realtime/basic', async (req, res) => {
  try {
    const cacheKey = 'analytics:realtime:basic';

    // Check Redis cache
    const cachedData = await req.redisClient.get(cacheKey);
    if (cachedData) {
      logger.info('Returning cached realtime basic data');
      return res.json({ success: true, data: JSON.parse(cachedData) });
    }

    // Independent reports → issue concurrently to cut request latency.
    const [[userResponse], [fiveMinResponse], [timeSeriesResponse]] = await Promise.all([
      // Active users right now (also returns quota info)
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        metrics: [{ name: 'activeUsers' }],
        returnPropertyQuota: true
      }),
      // Active users over the last 5 minutes
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        metrics: [{ name: 'activeUsers' }],
        minuteRanges: [{ startMinutesAgo: 5, endMinutesAgo: 0 }]
      }),
      // Per-minute active-user time series
      analyticsClient.runRealtimeReport({
        property: `properties/${propertyId}`,
        dimensions: [{ name: 'minutesAgo' }],
        metrics: [{ name: 'activeUsers' }]
      })
    ]);

    // Guard: propertyQuota may be absent; fall back to undefined fields
    // rather than crashing the request.
    const quota = userResponse.propertyQuota || {};
    const response = {
      userResponse,
      fiveMinResponse,
      timeSeriesResponse,
      quotaInfo: {
        projectHourly: quota.tokensPerProjectPerHour,
        daily: quota.tokensPerDay,
        serverErrors: quota.serverErrorsPerProjectPerHour,
        thresholdedRequests: quota.potentiallyThresholdedRequestsPerHour
      }
    };

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(response), {
      EX: CACHE_DURATIONS.REALTIME_BASIC
    });

    res.json({ success: true, data: response });
  } catch (error) {
    logger.error('Error fetching realtime basic data:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||||
|
|
||||||
|
// Realtime detailed data endpoint
// GET /realtime/detailed — top pages, top events, and a device-category
// breakdown from GA4 realtime reports, cached in Redis for
// CACHE_DURATIONS.REALTIME_DETAILED seconds.
router.get('/realtime/detailed', async (req, res) => {
  try {
    const cacheKey = 'analytics:realtime:detailed';

    // Check Redis cache
    const cachedData = await req.redisClient.get(cacheKey);
    if (cachedData) {
      logger.info('Returning cached realtime detailed data');
      return res.json({ success: true, data: JSON.parse(cachedData) });
    }

    // Fetch current pages (top 25 by realtime page views)
    const [pageResponse] = await analyticsClient.runRealtimeReport({
      property: `properties/${propertyId}`,
      dimensions: [{ name: 'unifiedScreenName' }],
      metrics: [{ name: 'screenPageViews' }],
      orderBy: [{ metric: { metricName: 'screenPageViews' }, desc: true }],
      limit: 25
    });

    // Fetch events (top 25 by event count)
    const [eventResponse] = await analyticsClient.runRealtimeReport({
      property: `properties/${propertyId}`,
      dimensions: [{ name: 'eventName' }],
      metrics: [{ name: 'eventCount' }],
      orderBy: [{ metric: { metricName: 'eventCount' }, desc: true }],
      limit: 25
    });

    // Fetch device categories (top 10 by active users; also carries quota)
    const [deviceResponse] = await analyticsClient.runRealtimeReport({
      property: `properties/${propertyId}`,
      dimensions: [{ name: 'deviceCategory' }],
      metrics: [{ name: 'activeUsers' }],
      orderBy: [{ metric: { metricName: 'activeUsers' }, desc: true }],
      limit: 10,
      returnPropertyQuota: true
    });

    const response = {
      pageResponse,
      eventResponse,
      // NOTE(review): the device breakdown is exposed under the key
      // "sourceResponse" — presumably a historical name the dashboard
      // client depends on; confirm with the consumer before renaming.
      sourceResponse: deviceResponse
    };

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(response), {
      EX: CACHE_DURATIONS.REALTIME_DETAILED
    });

    res.json({ success: true, data: response });
  } catch (error) {
    logger.error('Error fetching realtime detailed data:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||||
|
|
||||||
|
// User behavior endpoint.
// GET /user-behavior?timeRange=30 — page, device, and traffic-source
// breakdowns over the last `timeRange` days, cached in Redis for
// CACHE_DURATIONS.USER_BEHAVIOR seconds.
//
// FIX: the three GA4 reports are independent; they now run concurrently
// (Promise.all) instead of serially, and the shared date range is hoisted
// so all three are guaranteed to query the same window.
router.get('/user-behavior', async (req, res) => {
  try {
    const { timeRange = '30' } = req.query;
    const cacheKey = `analytics:user_behavior:${timeRange}`;

    // Check Redis cache
    const cachedData = await req.redisClient.get(cacheKey);
    if (cachedData) {
      logger.info('Returning cached user behavior data');
      return res.json({ success: true, data: JSON.parse(cachedData) });
    }

    // All three reports share the same date range.
    const dateRanges = [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }];

    const [[pageResponse], [deviceResponse], [sourceResponse]] = await Promise.all([
      // Top 25 pages by views, with engagement metrics
      analyticsClient.runReport({
        property: `properties/${propertyId}`,
        dateRanges,
        dimensions: [{ name: 'pagePath' }],
        metrics: [
          { name: 'screenPageViews' },
          { name: 'averageSessionDuration' },
          { name: 'bounceRate' },
          { name: 'sessions' }
        ],
        orderBy: [{
          metric: { metricName: 'screenPageViews' },
          desc: true
        }],
        limit: 25
      }),
      // Views and sessions by device category
      analyticsClient.runReport({
        property: `properties/${propertyId}`,
        dateRanges,
        dimensions: [{ name: 'deviceCategory' }],
        metrics: [
          { name: 'screenPageViews' },
          { name: 'sessions' }
        ]
      }),
      // Top 25 traffic sources by sessions (also returns quota info)
      analyticsClient.runReport({
        property: `properties/${propertyId}`,
        dateRanges,
        dimensions: [{ name: 'sessionSource' }],
        metrics: [
          { name: 'sessions' },
          { name: 'conversions' }
        ],
        orderBy: [{
          metric: { metricName: 'sessions' },
          desc: true
        }],
        limit: 25,
        returnPropertyQuota: true
      })
    ]);

    const response = {
      pageResponse,
      deviceResponse,
      sourceResponse
    };

    // Cache the response
    await req.redisClient.set(cacheKey, JSON.stringify(response), {
      EX: CACHE_DURATIONS.USER_BEHAVIOR
    });

    res.json({ success: true, data: response });
  } catch (error) {
    logger.error('Error fetching user behavior data:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
@@ -0,0 +1,91 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const router = express.Router();
|
||||||
|
const analyticsService = require('../services/analytics.service');
|
||||||
|
|
||||||
|
// Basic metrics endpoint — thin HTTP wrapper around
// analyticsService.getBasicMetrics; caching happens in the service layer.
router.get('/metrics', (req, res) => {
  const { startDate = '7daysAgo' } = req.query;
  console.log(`Fetching metrics with startDate: ${startDate}`);
  analyticsService
    .getBasicMetrics(startDate)
    .then((data) => {
      res.json({ success: true, data });
    })
    .catch((error) => {
      console.error('Metrics error:', {
        startDate: req.query.startDate,
        error: error.message,
        stack: error.stack
      });
      res.status(500).json({
        success: false,
        error: 'Failed to fetch metrics',
        details: error.message
      });
    });
});
|
||||||
|
|
||||||
|
// Realtime basic data endpoint — delegates entirely to the analytics
// service, which owns caching and GA4 access.
router.get('/realtime/basic', (req, res) => {
  console.log('Fetching realtime basic data');
  analyticsService
    .getRealTimeBasicData()
    .then((data) => {
      res.json({ success: true, data });
    })
    .catch((error) => {
      console.error('Realtime basic error:', {
        error: error.message,
        stack: error.stack
      });
      res.status(500).json({
        success: false,
        error: 'Failed to fetch realtime basic data',
        details: error.message
      });
    });
});
|
||||||
|
|
||||||
|
// Realtime detailed data endpoint — delegates entirely to the analytics
// service, which owns caching and GA4 access.
router.get('/realtime/detailed', (req, res) => {
  console.log('Fetching realtime detailed data');
  analyticsService
    .getRealTimeDetailedData()
    .then((data) => {
      res.json({ success: true, data });
    })
    .catch((error) => {
      console.error('Realtime detailed error:', {
        error: error.message,
        stack: error.stack
      });
      res.status(500).json({
        success: false,
        error: 'Failed to fetch realtime detailed data',
        details: error.message
      });
    });
});
|
||||||
|
|
||||||
|
// User behavior endpoint — delegates entirely to the analytics service,
// which owns caching and GA4 access.
router.get('/user-behavior', (req, res) => {
  const { timeRange = '30' } = req.query;
  console.log(`Fetching user behavior with timeRange: ${timeRange}`);
  analyticsService
    .getUserBehavior(timeRange)
    .then((data) => {
      res.json({ success: true, data });
    })
    .catch((error) => {
      console.error('User behavior error:', {
        timeRange: req.query.timeRange,
        error: error.message,
        stack: error.stack
      });
      res.status(500).json({
        success: false,
        error: 'Failed to fetch user behavior data',
        details: error.message
      });
    });
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
65
inventory-server/dashboard/google-server/server.js
Normal file
65
inventory-server/dashboard/google-server/server.js
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
const express = require('express');
const cors = require('cors');
const { createClient } = require('redis');
const analyticsRoutes = require('./routes/analytics.routes');

const app = express();
const port = process.env.GOOGLE_ANALYTICS_PORT || 3007;

// Redis client setup
const redisClient = createClient({
  url: process.env.REDIS_URL || 'redis://localhost:6379'
});

redisClient.on('error', (err) => console.error('Redis Client Error:', err));
redisClient.on('connect', () => console.log('Redis Client Connected'));

// Connect to Redis. A failure is logged but does not abort startup —
// TODO confirm route handlers tolerate an unavailable Redis client.
(async () => {
  try {
    await redisClient.connect();
  } catch (err) {
    console.error('Redis connection error:', err);
  }
})();

// Middleware — order matters: CORS and body parsing must precede routes.
app.use(cors());
app.use(express.json());

// Make Redis client available in requests
app.use((req, res, next) => {
  req.redisClient = redisClient;
  next();
});

// Routes
app.use('/api/analytics', analyticsRoutes);

// Error handling middleware (registered last).
// BUGFIX: the NODE_ENV check was inverted — it sent the full error object
// to clients IN production and hid it in development. Error details are
// now exposed only outside production.
app.use((err, req, res, next) => {
  console.error('Server error:', err);
  res.status(err.status || 500).json({
    success: false,
    message: err.message || 'Internal server error',
    error: process.env.NODE_ENV === 'production' ? {} : err
  });
});

// Start server
app.listen(port, () => {
  console.log(`Google Analytics server running on port ${port}`);
});

// Graceful shutdown: close the Redis connection before exiting.
const shutdown = async (signal) => {
  console.log(`${signal} received. Shutting down gracefully...`);
  await redisClient.quit();
  process.exit(0);
};

process.on('SIGTERM', () => shutdown('SIGTERM'));
process.on('SIGINT', () => shutdown('SIGINT'));
|
||||||
@@ -0,0 +1,283 @@
|
|||||||
|
const { BetaAnalyticsDataClient } = require('@google-analytics/data');
|
||||||
|
const { createClient } = require('redis');
|
||||||
|
|
||||||
|
// AnalyticsService — wraps the GA4 Data API with a Redis read-through
// cache. Exported (below, outside this class) as a singleton instance, so
// the Redis and GA4 clients are created once per process.
class AnalyticsService {
  constructor() {
    // Initialize Redis client
    this.redis = createClient({
      url: process.env.REDIS_URL || 'redis://localhost:6379'
    });

    this.redis.on('error', err => console.error('Redis Client Error:', err));
    // Connection failures are logged, not rethrown: the service still
    // constructs, and cache operations will fail later if Redis is down.
    this.redis.connect().catch(err => console.error('Redis connection error:', err));

    try {
      // Initialize GA4 client.
      // Credentials may be a JSON string (typical env var) or an
      // already-parsed object; accept both.
      const credentials = process.env.GOOGLE_APPLICATION_CREDENTIALS_JSON;
      this.analyticsClient = new BetaAnalyticsDataClient({
        credentials: typeof credentials === 'string' ? JSON.parse(credentials) : credentials
      });

      this.propertyId = process.env.GA_PROPERTY_ID;
    } catch (error) {
      // A malformed credential blob is fatal for this service.
      console.error('Failed to initialize GA4 client:', error);
      throw error;
    }
  }

  // Cache durations (seconds) per query family.
  CACHE_DURATIONS = {
    REALTIME_BASIC: 60, // 1 minute
    REALTIME_DETAILED: 300, // 5 minutes
    BASIC_METRICS: 3600, // 1 hour
    USER_BEHAVIOR: 3600 // 1 hour
  };

  // Daily GA4 metrics from `startDate` through today (one row per date),
  // cached under analytics:basic_metrics:<startDate>.
  // Returns the raw GA4 report object; rethrows GA4/Redis errors.
  async getBasicMetrics(startDate = '7daysAgo') {
    const cacheKey = `analytics:basic_metrics:${startDate}`;

    try {
      // Try Redis first
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('Analytics metrics found in Redis cache');
        return JSON.parse(cachedData);
      }

      // Fetch from GA4
      console.log('Fetching fresh metrics data from GA4');
      const [response] = await this.analyticsClient.runReport({
        property: `properties/${this.propertyId}`,
        dateRanges: [{ startDate, endDate: 'today' }],
        dimensions: [{ name: 'date' }],
        metrics: [
          { name: 'activeUsers' },
          { name: 'newUsers' },
          { name: 'averageSessionDuration' },
          { name: 'screenPageViews' },
          { name: 'bounceRate' },
          { name: 'conversions' }
        ],
        returnPropertyQuota: true
      });

      // Cache the response
      await this.redis.set(cacheKey, JSON.stringify(response), {
        EX: this.CACHE_DURATIONS.BASIC_METRICS
      });

      return response;
    } catch (error) {
      console.error('Error fetching analytics metrics:', {
        error: error.message,
        stack: error.stack
      });
      throw error;
    }
  }

  // Realtime snapshot: active users now, last-5-minute active users, and
  // a per-minute time series, plus quota usage. Cached for 60s.
  async getRealTimeBasicData() {
    const cacheKey = 'analytics:realtime:basic';

    try {
      // Try Redis first
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('Realtime basic data found in Redis cache');
        return JSON.parse(cachedData);
      }

      console.log('Fetching fresh realtime data from GA4');

      // Fetch active users (this call also requests quota info)
      const [userResponse] = await this.analyticsClient.runRealtimeReport({
        property: `properties/${this.propertyId}`,
        metrics: [{ name: 'activeUsers' }],
        returnPropertyQuota: true
      });

      // Fetch last 5 minutes
      const [fiveMinResponse] = await this.analyticsClient.runRealtimeReport({
        property: `properties/${this.propertyId}`,
        metrics: [{ name: 'activeUsers' }],
        minuteRanges: [{ startMinutesAgo: 5, endMinutesAgo: 0 }]
      });

      // Fetch time series data (per-minute active users)
      const [timeSeriesResponse] = await this.analyticsClient.runRealtimeReport({
        property: `properties/${this.propertyId}`,
        dimensions: [{ name: 'minutesAgo' }],
        metrics: [{ name: 'activeUsers' }]
      });

      // NOTE(review): this assumes propertyQuota is always present on
      // userResponse; if GA4 ever omits it, the dereference below throws.
      const response = {
        userResponse,
        fiveMinResponse,
        timeSeriesResponse,
        quotaInfo: {
          projectHourly: userResponse.propertyQuota.tokensPerProjectPerHour,
          daily: userResponse.propertyQuota.tokensPerDay,
          serverErrors: userResponse.propertyQuota.serverErrorsPerProjectPerHour,
          thresholdedRequests: userResponse.propertyQuota.potentiallyThresholdedRequestsPerHour
        }
      };

      // Cache the response
      await this.redis.set(cacheKey, JSON.stringify(response), {
        EX: this.CACHE_DURATIONS.REALTIME_BASIC
      });

      return response;
    } catch (error) {
      console.error('Error fetching realtime basic data:', {
        error: error.message,
        stack: error.stack
      });
      throw error;
    }
  }

  // Realtime detail: top pages, top events, and device-category
  // breakdown. Cached for 300s.
  async getRealTimeDetailedData() {
    const cacheKey = 'analytics:realtime:detailed';

    try {
      // Try Redis first
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('Realtime detailed data found in Redis cache');
        return JSON.parse(cachedData);
      }

      console.log('Fetching fresh realtime detailed data from GA4');

      // Fetch current pages (top 25 by realtime page views)
      const [pageResponse] = await this.analyticsClient.runRealtimeReport({
        property: `properties/${this.propertyId}`,
        dimensions: [{ name: 'unifiedScreenName' }],
        metrics: [{ name: 'screenPageViews' }],
        orderBy: [{ metric: { metricName: 'screenPageViews' }, desc: true }],
        limit: 25
      });

      // Fetch events (top 25 by event count)
      const [eventResponse] = await this.analyticsClient.runRealtimeReport({
        property: `properties/${this.propertyId}`,
        dimensions: [{ name: 'eventName' }],
        metrics: [{ name: 'eventCount' }],
        orderBy: [{ metric: { metricName: 'eventCount' }, desc: true }],
        limit: 25
      });

      // Fetch device categories (top 10 by active users)
      const [deviceResponse] = await this.analyticsClient.runRealtimeReport({
        property: `properties/${this.propertyId}`,
        dimensions: [{ name: 'deviceCategory' }],
        metrics: [{ name: 'activeUsers' }],
        orderBy: [{ metric: { metricName: 'activeUsers' }, desc: true }],
        limit: 10,
        returnPropertyQuota: true
      });

      // NOTE(review): the device breakdown is exposed under the key
      // "sourceResponse" — presumably a historical name consumers rely
      // on; confirm before renaming.
      const response = {
        pageResponse,
        eventResponse,
        sourceResponse: deviceResponse
      };

      // Cache the response
      await this.redis.set(cacheKey, JSON.stringify(response), {
        EX: this.CACHE_DURATIONS.REALTIME_DETAILED
      });

      return response;
    } catch (error) {
      console.error('Error fetching realtime detailed data:', {
        error: error.message,
        stack: error.stack
      });
      throw error;
    }
  }

  // Page, device, and traffic-source breakdowns over the last `timeRange`
  // days (string, e.g. '30'). Cached per timeRange for 1 hour.
  async getUserBehavior(timeRange = '30') {
    const cacheKey = `analytics:user_behavior:${timeRange}`;

    try {
      // Try Redis first
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('User behavior data found in Redis cache');
        return JSON.parse(cachedData);
      }

      console.log('Fetching fresh user behavior data from GA4');

      // Fetch page data (top 25 by views, with engagement metrics)
      const [pageResponse] = await this.analyticsClient.runReport({
        property: `properties/${this.propertyId}`,
        dateRanges: [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }],
        dimensions: [{ name: 'pagePath' }],
        metrics: [
          { name: 'screenPageViews' },
          { name: 'averageSessionDuration' },
          { name: 'bounceRate' },
          { name: 'sessions' }
        ],
        orderBy: [{
          metric: { metricName: 'screenPageViews' },
          desc: true
        }],
        limit: 25
      });

      // Fetch device data (views/sessions by device category)
      const [deviceResponse] = await this.analyticsClient.runReport({
        property: `properties/${this.propertyId}`,
        dateRanges: [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }],
        dimensions: [{ name: 'deviceCategory' }],
        metrics: [
          { name: 'screenPageViews' },
          { name: 'sessions' }
        ]
      });

      // Fetch source data (top 25 traffic sources by sessions)
      const [sourceResponse] = await this.analyticsClient.runReport({
        property: `properties/${this.propertyId}`,
        dateRanges: [{ startDate: `${timeRange}daysAgo`, endDate: 'today' }],
        dimensions: [{ name: 'sessionSource' }],
        metrics: [
          { name: 'sessions' },
          { name: 'conversions' }
        ],
        orderBy: [{
          metric: { metricName: 'sessions' },
          desc: true
        }],
        limit: 25,
        returnPropertyQuota: true
      });

      const response = {
        pageResponse,
        deviceResponse,
        sourceResponse
      };

      // Cache the response
      await this.redis.set(cacheKey, JSON.stringify(response), {
        EX: this.CACHE_DURATIONS.USER_BEHAVIOR
      });

      return response;
    } catch (error) {
      console.error('Error fetching user behavior data:', {
        error: error.message,
        stack: error.stack
      });
      throw error;
    }
  }
}
|
||||||
|
|
||||||
|
module.exports = new AnalyticsService();
|
||||||
35
inventory-server/dashboard/google-server/utils/logger.js
Normal file
35
inventory-server/dashboard/google-server/utils/logger.js
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
// Winston-based application logger: JSON-formatted, size-rotated file logs,
// mirrored to a colorized console outside production.
const winston = require('winston');
const path = require('path');

const MAX_LOG_BYTES = 10485760; // 10MB per file before rotation
const MAX_LOG_FILES = 5;        // rotated files kept per transport

// Build a rotating file transport under ../logs/pm2/.
const fileTransport = (filename, options = {}) =>
  new winston.transports.File({
    filename: path.join(__dirname, '../logs/pm2', filename),
    maxsize: MAX_LOG_BYTES,
    maxFiles: MAX_LOG_FILES,
    ...options
  });

const logger = winston.createLogger({
  // Default level is overridable via the LOG_LEVEL environment variable.
  level: process.env.LOG_LEVEL || 'info',
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.json()
  ),
  transports: [
    // error.log receives only 'error'-level entries; combined.log gets all.
    fileTransport('error.log', { level: 'error' }),
    fileTransport('combined.log')
  ]
});

// Outside production, also log human-readable colorized output to stdout.
if (process.env.NODE_ENV !== 'production') {
  logger.add(new winston.transports.Console({
    format: winston.format.combine(
      winston.format.colorize(),
      winston.format.simple()
    )
  }));
}

module.exports = logger;
|
||||||
1036
inventory-server/dashboard/gorgias-server/package-lock.json
generated
Normal file
1036
inventory-server/dashboard/gorgias-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
19
inventory-server/dashboard/gorgias-server/package.json
Normal file
19
inventory-server/dashboard/gorgias-server/package.json
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
{
|
||||||
|
"name": "gorgias-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"keywords": [],
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"description": "",
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^1.7.9",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.4.7",
|
||||||
|
"express": "^4.21.2",
|
||||||
|
"redis": "^4.7.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,119 @@
|
|||||||
|
const express = require('express');
const router = express.Router();
const gorgiasService = require('../services/gorgias.service');

/**
 * Map a Gorgias API error onto an HTTP response.
 *
 * Extracted in review: the 401/404/400/500 branches were copy-pasted in each
 * route's catch block. Keeps the per-status payload shapes of the original.
 *
 * @param {object} res - Express response
 * @param {Error} error - error thrown by the service (may carry an axios response)
 * @param {string} fallbackMessage - `error` field for the generic 500 case
 * @param {string} [notFoundDetails] - when set, a 404 from the API is surfaced
 *   as a 404 with these details; when omitted, 404 falls through to the 500 case
 *   (matches the original tickets route, which had no 404 branch)
 */
function sendGorgiasError(res, error, fallbackMessage, notFoundDetails) {
  const status = error.response?.status;

  if (status === 401) {
    return res.status(401).json({
      error: 'Authentication failed',
      details: 'Invalid Gorgias API credentials'
    });
  }

  if (status === 404 && notFoundDetails) {
    return res.status(404).json({
      error: 'Not found',
      details: notFoundDetails
    });
  }

  if (status === 400) {
    return res.status(400).json({
      error: 'Invalid request',
      details: error.response?.data?.message || 'The request was invalid',
      data: error.response?.data
    });
  }

  return res.status(500).json({
    error: fallbackMessage,
    details: error.response?.data?.message || error.message,
    data: error.response?.data
  });
}

// Get statistics for a named report, applying the filters in the body.
router.post('/stats/:name', async (req, res) => {
  try {
    const { name } = req.params;
    const filters = req.body;

    console.log(`Fetching ${name} statistics with filters:`, filters);

    if (!name) {
      return res.status(400).json({
        error: 'Missing statistic name',
        details: 'The name parameter is required'
      });
    }

    const data = await gorgiasService.getStatistics(name, filters);

    if (!data) {
      return res.status(404).json({
        error: 'No data found',
        details: `No statistics found for ${name}`
      });
    }

    res.json({ data });
  } catch (error) {
    console.error('Statistics error:', {
      name: req.params.name,
      filters: req.body,
      error: error.message,
      stack: error.stack,
      response: error.response?.data
    });
    sendGorgiasError(res, error, 'Failed to fetch statistics',
      `Statistics type '${req.params.name}' not found`);
  }
});

// List tickets matching the query-string filters.
router.get('/tickets', async (req, res) => {
  try {
    const data = await gorgiasService.getTickets(req.query);
    res.json(data);
  } catch (error) {
    console.error('Tickets error:', {
      params: req.query,
      error: error.message,
      response: error.response?.data
    });
    sendGorgiasError(res, error, 'Failed to fetch tickets');
  }
});

// Get customer satisfaction survey results.
router.get('/satisfaction', async (req, res) => {
  try {
    const data = await gorgiasService.getCustomerSatisfaction(req.query);
    res.json(data);
  } catch (error) {
    console.error('Satisfaction error:', error);
    // Now routed through the shared handler for consistency with the other
    // routes (the original emitted a slightly different 500 payload here).
    sendGorgiasError(res, error, 'Failed to fetch customer satisfaction');
  }
});

module.exports = router;
|
||||||
31
inventory-server/dashboard/gorgias-server/server.js
Normal file
31
inventory-server/dashboard/gorgias-server/server.js
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
// Gorgias dashboard API server: a thin Express app that mounts the Gorgias
// routes under /api/gorgias.
const express = require('express');
const cors = require('cors');
const path = require('path');

// Load environment variables from the .env next to this file.
require('dotenv').config({
  path: path.resolve(__dirname, '.env')
});

const app = express();
const port = process.env.PORT || 3006;

// Global middleware: CORS for the dashboard client, JSON body parsing.
app.use(cors());
app.use(express.json());

// Mount the Gorgias API routes.
const gorgiasRoutes = require('./routes/gorgias.routes');
app.use('/api/gorgias', gorgiasRoutes);

// Last-resort error handler: log the stack, reply with a generic 500.
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({ error: 'Something went wrong!' });
});

// Start listening.
app.listen(port, () => {
  console.log(`Gorgias API server running on port ${port}`);
});

module.exports = app;
|
||||||
@@ -0,0 +1,119 @@
|
|||||||
|
const axios = require('axios');
const { createClient } = require('redis');

// Cache TTL (seconds) applied to all Gorgias responses.
const CACHE_TTL_SECONDS = 300; // 5 minutes

/**
 * Thin Gorgias REST client with a Redis read-through cache.
 * Exported as a singleton; every successful API response is cached briefly.
 */
class GorgiasService {
  constructor() {
    // Redis failures are logged but non-fatal: the service can still serve
    // uncached responses straight from the Gorgias API.
    this.redis = createClient({
      url: process.env.REDIS_URL
    });

    this.redis.on('error', err => console.error('Redis Client Error:', err));
    this.redis.connect().catch(err => console.error('Redis connection error:', err));

    // Gorgias uses HTTP Basic auth: base64("username:api_key").
    const auth = Buffer.from(`${process.env.GORGIAS_API_USERNAME}:${process.env.GORGIAS_API_KEY}`).toString('base64');

    this.apiClient = axios.create({
      baseURL: `https://${process.env.GORGIAS_DOMAIN}.gorgias.com/api`,
      headers: {
        'Authorization': `Basic ${auth}`,
        'Content-Type': 'application/json'
      }
    });
  }

  /**
   * Fetch a named statistics report, caching the result.
   *
   * Date filters are expanded to explicit full-day UTC datetimes. Fix over
   * the original: normalization now only runs when the caller supplied a
   * usable date — previously `new Date(undefined).toISOString()` threw a
   * RangeError whenever no date filters were given at all.
   *
   * @param {string} name - report name (becomes the /stats/{name} path segment)
   * @param {object} [filters] - report filters; may carry start_date/end_date
   *   or start_datetime/end_datetime
   * @returns {Promise<object>} parsed API response body
   * @throws re-throws any Redis/axios error after logging it
   */
  async getStatistics(name, filters = {}) {
    const cacheKey = `gorgias:stats:${name}:${JSON.stringify(filters)}`;

    try {
      // Try Redis first.
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log(`Statistics ${name} found in Redis cache`);
        return JSON.parse(cachedData);
      }

      console.log(`Fetching ${name} statistics with filters:`, filters);

      // Expand date filters to explicit UTC day boundaries when needed.
      if (!filters.start_datetime || !filters.end_datetime) {
        const startInput = filters.start_datetime || filters.start_date;
        const endInput = filters.end_datetime || filters.end_date;

        // Only normalize when both endpoints exist (see RangeError fix above).
        if (startInput && endInput) {
          const start = new Date(startInput);
          start.setUTCHours(0, 0, 0, 0);
          const end = new Date(endInput);
          end.setUTCHours(23, 59, 59, 999);

          filters = {
            ...filters,
            start_datetime: start.toISOString(),
            end_datetime: end.toISOString()
          };
        }
      }

      // Fetch from the API.
      const response = await this.apiClient.post(`/stats/${name}`, filters);
      const data = response.data;

      // Save to Redis with a 5 minute expiry.
      await this.redis.set(cacheKey, JSON.stringify(data), {
        EX: CACHE_TTL_SECONDS
      });

      return data;
    } catch (error) {
      console.error(`Error in getStatistics for ${name}:`, {
        error: error.message,
        filters,
        response: error.response?.data
      });
      throw error;
    }
  }

  /**
   * List tickets, caching the result.
   *
   * Plain start_date/end_date params are replaced by full-day UTC
   * start_datetime/end_datetime equivalents before hitting the API.
   *
   * @param {object} [params] - query params forwarded to GET /tickets
   * @returns {Promise<object>} parsed API response body
   * @throws re-throws any Redis/axios error after logging it
   */
  async getTickets(params = {}) {
    const cacheKey = `gorgias:tickets:${JSON.stringify(params)}`;

    try {
      // Try Redis first.
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('Tickets found in Redis cache');
        return JSON.parse(cachedData);
      }

      // Convert plain dates to UTC day-boundary datetimes; the *_date keys
      // are removed in favor of their *_datetime equivalents.
      const formattedParams = { ...params };
      if (params.start_date) {
        const start = new Date(params.start_date);
        start.setUTCHours(0, 0, 0, 0);
        formattedParams.start_datetime = start.toISOString();
        delete formattedParams.start_date;
      }
      if (params.end_date) {
        const end = new Date(params.end_date);
        end.setUTCHours(23, 59, 59, 999);
        formattedParams.end_datetime = end.toISOString();
        delete formattedParams.end_date;
      }

      // Fetch from the API.
      const response = await this.apiClient.get('/tickets', { params: formattedParams });
      const data = response.data;

      // Save to Redis with a 5 minute expiry.
      await this.redis.set(cacheKey, JSON.stringify(data), {
        EX: CACHE_TTL_SECONDS
      });

      return data;
    } catch (error) {
      console.error('Error fetching tickets:', {
        error: error.message,
        params,
        response: error.response?.data
      });
      throw error;
    }
  }

  /**
   * Fetch customer satisfaction survey results, caching the result.
   *
   * Added in review: routes/gorgias.routes.js calls this method but the
   * original service never defined it, so GET /satisfaction always failed
   * with a TypeError. NOTE(review): endpoint chosen per the Gorgias REST API
   * (`/satisfaction-surveys`) — confirm against the account's API version.
   *
   * @param {object} [params] - query params forwarded to the API
   * @returns {Promise<object>} parsed API response body
   * @throws re-throws any Redis/axios error after logging it
   */
  async getCustomerSatisfaction(params = {}) {
    const cacheKey = `gorgias:satisfaction:${JSON.stringify(params)}`;

    try {
      const cachedData = await this.redis.get(cacheKey);
      if (cachedData) {
        console.log('Satisfaction data found in Redis cache');
        return JSON.parse(cachedData);
      }

      const response = await this.apiClient.get('/satisfaction-surveys', { params });
      const data = response.data;

      await this.redis.set(cacheKey, JSON.stringify(data), {
        EX: CACHE_TTL_SECONDS
      });

      return data;
    } catch (error) {
      console.error('Error fetching customer satisfaction:', {
        error: error.message,
        params,
        response: error.response?.data
      });
      throw error;
    }
  }
}

module.exports = new GorgiasService();
|
||||||
1966
inventory-server/dashboard/klaviyo-server/package-lock.json
generated
Normal file
1966
inventory-server/dashboard/klaviyo-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
25
inventory-server/dashboard/klaviyo-server/package.json
Normal file
25
inventory-server/dashboard/klaviyo-server/package.json
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"name": "klaviyo-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Klaviyo API integration server",
|
||||||
|
"main": "server.js",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.4.7",
|
||||||
|
"esm": "^3.2.25",
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"express-rate-limit": "^7.5.0",
|
||||||
|
"ioredis": "^5.4.1",
|
||||||
|
"luxon": "^3.5.0",
|
||||||
|
"node-fetch": "^3.3.2",
|
||||||
|
"recharts": "^2.15.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^3.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
import express from 'express';
import { CampaignsService } from '../services/campaigns.service.js';
import { TimeManager } from '../utils/time.utils.js';

/**
 * Build the campaigns router.
 *
 * Review fixes: removed the unused `timeManager` local the original
 * constructed and never used, and added an explicit radix to parseInt.
 *
 * @param {string} apiKey - Klaviyo private API key
 * @param {string} apiRevision - Klaviyo API revision header value
 * @returns {express.Router} router exposing GET / and GET /:timeRange
 */
export function createCampaignsRouter(apiKey, apiRevision) {
  const router = express.Router();
  const campaignsService = new CampaignsService(apiKey, apiRevision);

  // Get campaigns with optional filtering (page size, sort, status, dates).
  router.get('/', async (req, res) => {
    try {
      const params = {
        pageSize: parseInt(req.query.pageSize, 10) || 50,
        sort: req.query.sort || '-send_time',
        status: req.query.status,
        startDate: req.query.startDate,
        endDate: req.query.endDate,
        pageCursor: req.query.pageCursor
      };

      console.log('[Campaigns Route] Fetching campaigns with params:', params);
      const data = await campaignsService.getCampaigns(params);
      console.log('[Campaigns Route] Success:', {
        count: data.data?.length || 0
      });
      res.json(data);
    } catch (error) {
      console.error('[Campaigns Route] Error:', error);
      res.status(500).json({
        status: 'error',
        message: error.message,
        details: error.response?.data || null
      });
    }
  });

  // Get campaigns by named time range; 'custom' requires startDate/endDate
  // query parameters.
  router.get('/:timeRange', async (req, res) => {
    try {
      const { timeRange } = req.params;
      const { status } = req.query;

      let result;
      if (timeRange === 'custom') {
        const { startDate, endDate } = req.query;
        if (!startDate || !endDate) {
          return res.status(400).json({ error: 'Custom range requires startDate and endDate' });
        }

        result = await campaignsService.getCampaigns({
          startDate,
          endDate,
          status
        });
      } else {
        result = await campaignsService.getCampaignsByTimeRange(
          timeRange,
          { status }
        );
      }

      res.json(result);
    } catch (error) {
      console.error("[Campaigns Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  return router;
}
|
||||||
@@ -0,0 +1,480 @@
|
|||||||
|
import express from 'express';
import { EventsService } from '../services/events.service.js';
import { TimeManager } from '../utils/time.utils.js';
import { RedisService } from '../services/redis.service.js';

// Klaviyo metric IDs for the event types this dashboard tracks.
// NOTE(review): these look account-specific — confirm they match the target
// Klaviyo account before reusing elsewhere.
const METRIC_IDS = {
  PLACED_ORDER: 'Y8cqcF',
  SHIPPED_ORDER: 'VExpdL',
  ACCOUNT_CREATED: 'TeeypV',
  CANCELED_ORDER: 'YjVMNg',
  NEW_BLOG_POST: 'YcxeDr',
  PAYMENT_REFUNDED: 'R7XUYh'
};

/**
 * Build the events router.
 *
 * Review fixes: the time-range resolution block was duplicated verbatim in
 * six routes and the serialized `timeRange` response object in seven; both
 * are now shared helpers. `/products` also computed an unused cache key.
 *
 * @param {string} apiKey - Klaviyo private API key
 * @param {string} apiRevision - Klaviyo API revision header value
 * @returns {express.Router}
 */
export function createEventsRouter(apiKey, apiRevision) {
  const router = express.Router();
  const timeManager = new TimeManager();
  const eventsService = new EventsService(apiKey, apiRevision);
  const redisService = new RedisService();

  /**
   * Resolve a time range from query params. Explicit startDate/endDate wins
   * over a named timeRange. On invalid input a 400 has already been sent and
   * null is returned, so callers just `if (!range) return;`.
   */
  function resolveRange(req, res) {
    const { timeRange, startDate, endDate } = req.query;

    let range;
    if (startDate && endDate) {
      range = timeManager.getCustomRange(startDate, endDate);
    } else if (timeRange) {
      range = timeManager.getDateRange(timeRange);
    } else {
      res.status(400).json({ error: 'Must provide either timeRange or startDate and endDate' });
      return null;
    }

    if (!range) {
      res.status(400).json({ error: 'Invalid time range' });
      return null;
    }

    return range;
  }

  // Serialized range object used in every response payload.
  const rangeInfo = (range) => ({
    start: range.start.toISO(),
    end: range.end.toISO(),
    displayStart: timeManager.formatForDisplay(range.start),
    displayEnd: timeManager.formatForDisplay(range.end)
  });

  // Get events with optional filtering.
  router.get('/', async (req, res) => {
    try {
      const params = {
        pageSize: parseInt(req.query.pageSize, 10) || 50,
        sort: req.query.sort || '-datetime',
        metricId: req.query.metricId,
        startDate: req.query.startDate,
        endDate: req.query.endDate,
        pageCursor: req.query.pageCursor,
        fields: {}
      };

      // Optional sparse-fieldsets parameter, JSON-encoded by the client;
      // invalid JSON is ignored (fields stays {}) rather than failing the call.
      if (req.query.fields) {
        try {
          params.fields = JSON.parse(req.query.fields);
        } catch (e) {
          console.warn('[Events Route] Invalid fields parameter:', e);
        }
      }

      console.log('[Events Route] Fetching events with params:', params);
      const data = await eventsService.getEvents(params);
      console.log('[Events Route] Success:', {
        count: data.data?.length || 0,
        included: data.included?.length || 0
      });
      res.json(data);
    } catch (error) {
      console.error('[Events Route] Error:', error);
      res.status(500).json({
        status: 'error',
        message: error.message,
        details: error.response?.data || null
      });
    }
  });

  // Get events by named time range ('custom' requires startDate/endDate).
  router.get('/by-time/:timeRange', async (req, res) => {
    try {
      const { timeRange } = req.params;
      const { metricId, startDate, endDate } = req.query;

      let result;
      if (timeRange === 'custom') {
        if (!startDate || !endDate) {
          return res.status(400).json({ error: 'Custom range requires startDate and endDate' });
        }

        const range = timeManager.getCustomRange(startDate, endDate);
        if (!range) {
          return res.status(400).json({ error: 'Invalid date range' });
        }

        result = await eventsService.getEvents({
          metricId,
          startDate: range.start.toISO(),
          endDate: range.end.toISO()
        });
      } else {
        result = await eventsService.getEventsByTimeRange(
          timeRange,
          { metricId }
        );
      }

      res.json(result);
    } catch (error) {
      console.error("[Events Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Comprehensive statistics for a time period.
  router.get('/stats', async (req, res) => {
    try {
      const { timeRange, startDate, endDate } = req.query;
      console.log('[Events Route] Stats request:', {
        timeRange,
        startDate,
        endDate
      });

      const range = resolveRange(req, res);
      if (!range) return;

      const params = {
        timeRange,
        startDate: range.start.toISO(),
        endDate: range.end.toISO()
      };

      console.log('[Events Route] Calculating period stats with params:', params);
      const stats = await eventsService.calculatePeriodStats(params);
      console.log('[Events Route] Stats response:', {
        timeRange: {
          start: range.start.toISO(),
          end: range.end.toISO()
        },
        shippedCount: stats?.shipping?.shippedCount,
        totalOrders: stats?.orderCount
      });

      res.json({
        timeRange: rangeInfo(range),
        stats
      });
    } catch (error) {
      console.error("[Events Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Smart revenue projection, cached with a short TTL.
  router.get('/projection', async (req, res) => {
    try {
      const { timeRange, startDate, endDate } = req.query;
      console.log('[Events Route] Projection request:', {
        timeRange,
        startDate,
        endDate
      });

      const range = resolveRange(req, res);
      if (!range) return;

      const params = {
        timeRange,
        startDate: range.start.toISO(),
        endDate: range.end.toISO()
      };

      // Short-TTL cache: projections are expensive but go stale quickly.
      const cacheKey = redisService._getCacheKey('projection', params);
      const cachedData = await redisService.get(cacheKey);

      if (cachedData) {
        console.log('[Events Route] Cache hit for projection');
        return res.json(cachedData);
      }

      console.log('[Events Route] Calculating smart projection with params:', params);
      const projection = await eventsService.calculateSmartProjection(params);

      // Cache the results with a short TTL (5 minutes).
      await redisService.set(cacheKey, projection, 300);

      res.json(projection);
    } catch (error) {
      console.error("[Events Route] Error calculating projection:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Detailed stats for a single metric, optionally broken out per day.
  router.get('/stats/details', async (req, res) => {
    try {
      const { timeRange, startDate, endDate, metric, daily = false } = req.query;

      const range = resolveRange(req, res);
      if (!range) return;

      const params = {
        timeRange,
        startDate: range.start.toISO(),
        endDate: range.end.toISO(),
        metric,
        // query-string booleans arrive as strings; accept both forms
        daily: daily === 'true' || daily === true
      };

      const cacheKey = redisService._getCacheKey('stats:details', params);
      const cachedData = await redisService.get(cacheKey);

      if (cachedData) {
        console.log('[Events Route] Cache hit for detailed stats');
        return res.json({
          timeRange: rangeInfo(range),
          stats: cachedData
        });
      }

      const stats = await eventsService.calculateDetailedStats(params);

      // TTL scales with the requested range (see RedisService._getTTL).
      const ttl = redisService._getTTL(timeRange);
      await redisService.set(cacheKey, stats, ttl);

      res.json({
        timeRange: rangeInfo(range),
        stats
      });
    } catch (error) {
      console.error("[Events Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Product statistics for a time period.
  router.get('/products', async (req, res) => {
    try {
      const { timeRange } = req.query;

      const range = resolveRange(req, res);
      if (!range) return;

      const params = {
        timeRange,
        startDate: range.start.toISO(),
        endDate: range.end.toISO()
      };

      // (Removed an unused `_getCacheKey('events', ...)` call from the
      // original — the lookup goes through getEventData instead.)
      const cachedData = await redisService.getEventData('products', params);

      if (cachedData) {
        console.log('[Events Route] Cache hit for products');
        return res.json({
          timeRange: rangeInfo(range),
          stats: {
            products: cachedData
          }
        });
      }

      // Cache miss: compute fresh stats (note: the fresh result is not
      // written back to the cache here, matching the original behavior).
      const stats = await eventsService.calculatePeriodStats(params);

      res.json({
        timeRange: rangeInfo(range),
        stats
      });
    } catch (error) {
      console.error("[Events Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Event feed: multiple event types merged and sorted by time.
  router.get('/feed', async (req, res) => {
    try {
      const { timeRange, metricIds } = req.query;

      const range = resolveRange(req, res);
      if (!range) return;

      const params = {
        timeRange,
        startDate: range.start.toISO(),
        endDate: range.end.toISO(),
        metricIds: metricIds ? JSON.parse(metricIds) : null
      };

      const result = await eventsService.getMultiMetricEvents(params);

      res.json({
        timeRange: rangeInfo(range),
        ...result
      });
    } catch (error) {
      console.error("[Events Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Aggregated events bucketed by interval ('day' by default).
  router.get('/aggregate', async (req, res) => {
    try {
      const { timeRange, interval = 'day', metricId, property } = req.query;

      const range = resolveRange(req, res);
      if (!range) return;

      const params = {
        timeRange,
        startDate: range.start.toISO(),
        endDate: range.end.toISO(),
        metricId,
        interval,
        property
      };

      const result = await eventsService.getEvents(params);
      const groupedData = timeManager.groupEventsByInterval(result.data, interval, property);

      res.json({
        timeRange: rangeInfo(range),
        data: groupedData
      });
    } catch (error) {
      console.error("[Events Route] Error:", error);
      res.status(500).json({ error: error.message });
    }
  });

  // Resolve and echo the concrete date range for a named period.
  router.get("/dateRange", async (req, res) => {
    try {
      const { timeRange, startDate, endDate } = req.query;

      // Unlike the other routes, this one defaults to 'today' when nothing
      // is supplied, so it does not use resolveRange().
      let range;
      if (startDate && endDate) {
        range = timeManager.getCustomRange(startDate, endDate);
      } else {
        range = timeManager.getDateRange(timeRange || 'today');
      }

      if (!range) {
        return res.status(400).json({
          error: "Invalid time range parameters"
        });
      }

      res.json(rangeInfo(range));
    } catch (error) {
      console.error('Error getting date range:', error);
      res.status(500).json({
        error: "Failed to get date range"
      });
    }
  });

  // Clear cached data for a specific time range.
  router.post("/clearCache", async (req, res) => {
    try {
      const { timeRange, startDate, endDate } = req.body;
      await redisService.clearCache({ timeRange, startDate, endDate });
      res.json({ message: "Cache cleared successfully" });
    } catch (error) {
      console.error('Error clearing cache:', error);
      res.status(500).json({ error: "Failed to clear cache" });
    }
  });

  // Batch metrics endpoint: fetch several metrics in one request.
  router.get('/batch', async (req, res) => {
    try {
      const { timeRange, startDate, endDate, metrics } = req.query;

      // Parse metrics array from query
      const metricsList = metrics ? JSON.parse(metrics) : [];

      const params = timeRange === 'custom'
        ? { startDate, endDate, metrics: metricsList }
        : { timeRange, metrics: metricsList };

      const results = await eventsService.getBatchMetrics(params);

      res.json(results);
    } catch (error) {
      console.error('[Events Route] Error in batch request:', error);
      res.status(500).json({ error: error.message });
    }
  });

  return router;
}
|
||||||
17
inventory-server/dashboard/klaviyo-server/routes/index.js
Normal file
17
inventory-server/dashboard/klaviyo-server/routes/index.js
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import { createEventsRouter } from './events.routes.js';
|
||||||
|
import { createMetricsRoutes } from './metrics.routes.js';
|
||||||
|
import { createCampaignsRouter } from './campaigns.routes.js';
|
||||||
|
import { createReportingRouter } from './reporting.routes.js';
|
||||||
|
|
||||||
|
export function createApiRouter(apiKey, apiRevision) {
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Mount routers
|
||||||
|
router.use('/events', createEventsRouter(apiKey, apiRevision));
|
||||||
|
router.use('/metrics', createMetricsRoutes(apiKey, apiRevision));
|
||||||
|
router.use('/campaigns', createCampaignsRouter(apiKey, apiRevision));
|
||||||
|
router.use('/reporting', createReportingRouter(apiKey, apiRevision));
|
||||||
|
|
||||||
|
return router;
|
||||||
|
}
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import { MetricsService } from '../services/metrics.service.js';
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
export function createMetricsRoutes(apiKey, apiRevision) {
|
||||||
|
const metricsService = new MetricsService(apiKey, apiRevision);
|
||||||
|
|
||||||
|
// Get all metrics
|
||||||
|
router.get('/', async (req, res) => {
|
||||||
|
try {
|
||||||
|
console.log('[Metrics Route] Fetching metrics');
|
||||||
|
const data = await metricsService.getMetrics();
|
||||||
|
console.log('[Metrics Route] Success:', {
|
||||||
|
count: data.data?.length || 0
|
||||||
|
});
|
||||||
|
res.json(data);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[Metrics Route] Error:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
status: 'error',
|
||||||
|
message: error.message,
|
||||||
|
details: error.response?.data || null
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return router;
|
||||||
|
}
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import { ReportingService } from '../services/reporting.service.js';
|
||||||
|
import { TimeManager } from '../utils/time.utils.js';
|
||||||
|
|
||||||
|
export function createReportingRouter(apiKey, apiRevision) {
|
||||||
|
const router = express.Router();
|
||||||
|
const reportingService = new ReportingService(apiKey, apiRevision);
|
||||||
|
const timeManager = new TimeManager();
|
||||||
|
|
||||||
|
// Get campaign reports by time range
|
||||||
|
router.get('/campaigns/:timeRange', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { timeRange } = req.params;
|
||||||
|
const { channel } = req.query;
|
||||||
|
|
||||||
|
const reports = await reportingService.getCampaignReports({
|
||||||
|
timeRange,
|
||||||
|
channel
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(reports);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[ReportingRoutes] Error fetching campaign reports:', error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return router;
|
||||||
|
}
|
||||||
78
inventory-server/dashboard/klaviyo-server/server.js
Normal file
78
inventory-server/dashboard/klaviyo-server/server.js
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import express from 'express';
|
||||||
|
import cors from 'cors';
|
||||||
|
import dotenv from 'dotenv';
|
||||||
|
import rateLimit from 'express-rate-limit';
|
||||||
|
import { createApiRouter } from './routes/index.js';
|
||||||
|
import path from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
// Get directory name in ES modules
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = path.dirname(__filename);
|
||||||
|
|
||||||
|
// Load environment variables
|
||||||
|
const envPath = path.resolve(__dirname, '.env');
|
||||||
|
console.log('[Server] Loading .env file from:', envPath);
|
||||||
|
dotenv.config({ path: envPath });
|
||||||
|
|
||||||
|
// Debug environment variables (without exposing sensitive data)
|
||||||
|
console.log('[Server] Environment variables loaded:', {
|
||||||
|
REDIS_HOST: process.env.REDIS_HOST || '(not set)',
|
||||||
|
REDIS_PORT: process.env.REDIS_PORT || '(not set)',
|
||||||
|
REDIS_USERNAME: process.env.REDIS_USERNAME || '(not set)',
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD ? '(set)' : '(not set)',
|
||||||
|
NODE_ENV: process.env.NODE_ENV || '(not set)',
|
||||||
|
});
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const port = process.env.KLAVIYO_PORT || 3004;
|
||||||
|
|
||||||
|
// Rate limiting for reporting endpoints
|
||||||
|
const reportingLimiter = rateLimit({
|
||||||
|
windowMs: 10 * 60 * 1000, // 10 minutes
|
||||||
|
max: 10, // limit each IP to 10 requests per windowMs
|
||||||
|
message: 'Too many requests to reporting endpoint, please try again later',
|
||||||
|
keyGenerator: (req) => {
|
||||||
|
// Use a combination of IP and endpoint for more granular control
|
||||||
|
return `${req.ip}-reporting`;
|
||||||
|
},
|
||||||
|
skip: (req) => {
|
||||||
|
// Only apply to campaign-values-reports endpoint
|
||||||
|
return !req.path.includes('campaign-values-reports');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Middleware
|
||||||
|
app.use(cors());
|
||||||
|
app.use(express.json());
|
||||||
|
|
||||||
|
// Debug middleware to log all requests
|
||||||
|
app.use((req, res, next) => {
|
||||||
|
console.log(`[${new Date().toISOString()}] ${req.method} ${req.url}`);
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Apply rate limiting to reporting endpoints
|
||||||
|
app.use('/api/klaviyo/reporting', reportingLimiter);
|
||||||
|
|
||||||
|
// Create and mount API routes
|
||||||
|
const apiRouter = createApiRouter(
|
||||||
|
process.env.KLAVIYO_API_KEY,
|
||||||
|
process.env.KLAVIYO_API_REVISION || '2024-02-15'
|
||||||
|
);
|
||||||
|
app.use('/api/klaviyo', apiRouter);
|
||||||
|
|
||||||
|
// Error handling middleware
|
||||||
|
app.use((err, req, res, next) => {
|
||||||
|
console.error('Unhandled error:', err);
|
||||||
|
res.status(500).json({
|
||||||
|
status: 'error',
|
||||||
|
message: 'Internal server error',
|
||||||
|
details: process.env.NODE_ENV === 'development' ? err.message : undefined
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
app.listen(port, '0.0.0.0', () => {
|
||||||
|
console.log(`Klaviyo server listening at http://0.0.0.0:${port}`);
|
||||||
|
});
|
||||||
@@ -0,0 +1,206 @@
|
|||||||
|
import fetch from 'node-fetch';
|
||||||
|
import { TimeManager } from '../utils/time.utils.js';
|
||||||
|
import { RedisService } from './redis.service.js';
|
||||||
|
|
||||||
|
export class CampaignsService {
|
||||||
|
constructor(apiKey, apiRevision) {
|
||||||
|
this.apiKey = apiKey;
|
||||||
|
this.apiRevision = apiRevision;
|
||||||
|
this.baseUrl = 'https://a.klaviyo.com/api';
|
||||||
|
this.timeManager = new TimeManager();
|
||||||
|
this.redisService = new RedisService();
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCampaigns(params = {}) {
|
||||||
|
try {
|
||||||
|
// Add request debouncing
|
||||||
|
const requestKey = JSON.stringify(params);
|
||||||
|
if (this._pendingRequests && this._pendingRequests[requestKey]) {
|
||||||
|
return this._pendingRequests[requestKey];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to get from cache first
|
||||||
|
const cacheKey = this.redisService._getCacheKey('campaigns', params);
|
||||||
|
let cachedData = null;
|
||||||
|
try {
|
||||||
|
cachedData = await this.redisService.get(`${cacheKey}:raw`);
|
||||||
|
if (cachedData) {
|
||||||
|
return cachedData;
|
||||||
|
}
|
||||||
|
} catch (cacheError) {
|
||||||
|
console.warn('[CampaignsService] Cache error:', cacheError);
|
||||||
|
}
|
||||||
|
|
||||||
|
this._pendingRequests = this._pendingRequests || {};
|
||||||
|
this._pendingRequests[requestKey] = (async () => {
|
||||||
|
let allCampaigns = [];
|
||||||
|
let nextCursor = params.pageCursor;
|
||||||
|
let pageCount = 0;
|
||||||
|
|
||||||
|
const filter = params.filter || this._buildFilter(params);
|
||||||
|
|
||||||
|
do {
|
||||||
|
const queryParams = new URLSearchParams();
|
||||||
|
if (filter) {
|
||||||
|
queryParams.append('filter', filter);
|
||||||
|
}
|
||||||
|
queryParams.append('sort', params.sort || '-send_time');
|
||||||
|
|
||||||
|
if (nextCursor) {
|
||||||
|
queryParams.append('page[cursor]', nextCursor);
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = `${this.baseUrl}/campaigns?${queryParams.toString()}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Accept': 'application/json',
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
|
||||||
|
'revision': this.apiRevision
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json();
|
||||||
|
console.error('[CampaignsService] API Error:', errorData);
|
||||||
|
throw new Error(`Klaviyo API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const responseData = await response.json();
|
||||||
|
allCampaigns = allCampaigns.concat(responseData.data || []);
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
nextCursor = responseData.links?.next ?
|
||||||
|
new URL(responseData.links.next).searchParams.get('page[cursor]') : null;
|
||||||
|
|
||||||
|
if (nextCursor) {
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 50));
|
||||||
|
}
|
||||||
|
} catch (fetchError) {
|
||||||
|
console.error('[CampaignsService] Fetch error:', fetchError);
|
||||||
|
throw fetchError;
|
||||||
|
}
|
||||||
|
|
||||||
|
} while (nextCursor);
|
||||||
|
|
||||||
|
const transformedCampaigns = this._transformCampaigns(allCampaigns);
|
||||||
|
|
||||||
|
const result = {
|
||||||
|
data: transformedCampaigns,
|
||||||
|
meta: {
|
||||||
|
total_count: transformedCampaigns.length,
|
||||||
|
page_count: pageCount
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ttl = this.redisService._getTTL(params.timeRange);
|
||||||
|
await this.redisService.set(`${cacheKey}:raw`, result, ttl);
|
||||||
|
} catch (cacheError) {
|
||||||
|
console.warn('[CampaignsService] Cache set error:', cacheError);
|
||||||
|
}
|
||||||
|
|
||||||
|
delete this._pendingRequests[requestKey];
|
||||||
|
return result;
|
||||||
|
})();
|
||||||
|
|
||||||
|
return await this._pendingRequests[requestKey];
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[CampaignsService] Error fetching campaigns:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_buildFilter(params) {
|
||||||
|
const filters = [];
|
||||||
|
|
||||||
|
if (params.startDate && params.endDate) {
|
||||||
|
const startUtc = this.timeManager.formatForAPI(params.startDate);
|
||||||
|
const endUtc = this.timeManager.formatForAPI(params.endDate);
|
||||||
|
|
||||||
|
filters.push(`greater-or-equal(send_time,${startUtc})`);
|
||||||
|
filters.push(`less-than(send_time,${endUtc})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.status) {
|
||||||
|
filters.push(`equals(status,"${params.status}")`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.customFilters) {
|
||||||
|
filters.push(...params.customFilters);
|
||||||
|
}
|
||||||
|
|
||||||
|
return filters.length > 0 ? (filters.length > 1 ? `and(${filters.join(',')})` : filters[0]) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCampaignsByTimeRange(timeRange, options = {}) {
|
||||||
|
const range = this.timeManager.getDateRange(timeRange);
|
||||||
|
if (!range) {
|
||||||
|
throw new Error('Invalid time range specified');
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
timeRange,
|
||||||
|
startDate: range.start.toISO(),
|
||||||
|
endDate: range.end.toISO(),
|
||||||
|
...options
|
||||||
|
};
|
||||||
|
|
||||||
|
// Try to get from cache first
|
||||||
|
const cacheKey = this.redisService._getCacheKey('campaigns', params);
|
||||||
|
let cachedData = null;
|
||||||
|
try {
|
||||||
|
cachedData = await this.redisService.get(`${cacheKey}:raw`);
|
||||||
|
if (cachedData) {
|
||||||
|
return cachedData;
|
||||||
|
}
|
||||||
|
} catch (cacheError) {
|
||||||
|
console.warn('[CampaignsService] Cache error:', cacheError);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.getCampaigns(params);
|
||||||
|
}
|
||||||
|
|
||||||
|
_transformCampaigns(campaigns) {
|
||||||
|
if (!Array.isArray(campaigns)) {
|
||||||
|
console.warn('[CampaignsService] Campaigns is not an array:', campaigns);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return campaigns.map(campaign => {
|
||||||
|
try {
|
||||||
|
const stats = campaign.attributes?.campaign_message?.stats || {};
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: campaign.id,
|
||||||
|
name: campaign.attributes?.name || "Unnamed Campaign",
|
||||||
|
subject: campaign.attributes?.campaign_message?.subject || "",
|
||||||
|
send_time: campaign.attributes?.send_time,
|
||||||
|
stats: {
|
||||||
|
delivery_rate: stats.delivery_rate || 0,
|
||||||
|
delivered: stats.delivered || 0,
|
||||||
|
recipients: stats.recipients || 0,
|
||||||
|
open_rate: stats.open_rate || 0,
|
||||||
|
opens_unique: stats.opens_unique || 0,
|
||||||
|
opens: stats.opens || 0,
|
||||||
|
clicks_unique: stats.clicks_unique || 0,
|
||||||
|
click_rate: stats.click_rate || 0,
|
||||||
|
click_to_open_rate: stats.click_to_open_rate || 0,
|
||||||
|
conversion_value: stats.conversion_value || 0,
|
||||||
|
conversion_uniques: stats.conversion_uniques || 0
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[CampaignsService] Error transforming campaign:', error, campaign);
|
||||||
|
return {
|
||||||
|
id: campaign.id || 'unknown',
|
||||||
|
name: 'Error Processing Campaign',
|
||||||
|
stats: {}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
2202
inventory-server/dashboard/klaviyo-server/services/events.service.js
Normal file
2202
inventory-server/dashboard/klaviyo-server/services/events.service.js
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,38 @@
|
|||||||
|
import fetch from 'node-fetch';
|
||||||
|
|
||||||
|
export class MetricsService {
|
||||||
|
constructor(apiKey, apiRevision) {
|
||||||
|
this.apiKey = apiKey;
|
||||||
|
this.apiRevision = apiRevision;
|
||||||
|
this.baseUrl = 'https://a.klaviyo.com/api';
|
||||||
|
}
|
||||||
|
async getMetrics() {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${this.baseUrl}/metrics/`, {
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
|
||||||
|
'revision': this.apiRevision,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Accept': 'application/json'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json();
|
||||||
|
console.error('[MetricsService] API Error:', errorData);
|
||||||
|
throw new Error(`Klaviyo API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
// Sort the results by name before returning
|
||||||
|
if (data.data) {
|
||||||
|
data.data.sort((a, b) => a.attributes.name.localeCompare(b.attributes.name));
|
||||||
|
}
|
||||||
|
|
||||||
|
return data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[MetricsService] Error fetching metrics:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,262 @@
|
|||||||
|
import Redis from 'ioredis';
|
||||||
|
import { TimeManager } from '../utils/time.utils.js';
|
||||||
|
import dotenv from 'dotenv';
|
||||||
|
import path from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
// Get directory name in ES modules
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = path.dirname(__filename);
|
||||||
|
|
||||||
|
// Load environment variables again (redundant but safe)
|
||||||
|
const envPath = path.resolve(__dirname, '../.env');
|
||||||
|
console.log('[RedisService] Loading .env file from:', envPath);
|
||||||
|
dotenv.config({ path: envPath });
|
||||||
|
|
||||||
|
export class RedisService {
|
||||||
|
constructor() {
|
||||||
|
this.timeManager = new TimeManager();
|
||||||
|
this.DEFAULT_TTL = 5 * 60; // 5 minutes default TTL
|
||||||
|
this.isConnected = false;
|
||||||
|
this._initializeRedis();
|
||||||
|
}
|
||||||
|
|
||||||
|
_initializeRedis() {
|
||||||
|
try {
|
||||||
|
// Debug: Print all environment variables we're looking for
|
||||||
|
console.log('[RedisService] Environment variables state:', {
|
||||||
|
REDIS_HOST: process.env.REDIS_HOST ? '(set)' : '(not set)',
|
||||||
|
REDIS_PORT: process.env.REDIS_PORT ? '(set)' : '(not set)',
|
||||||
|
REDIS_USERNAME: process.env.REDIS_USERNAME ? '(set)' : '(not set)',
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD ? '(set)' : '(not set)',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log Redis configuration (without password)
|
||||||
|
const host = process.env.REDIS_HOST || 'localhost';
|
||||||
|
const port = parseInt(process.env.REDIS_PORT) || 6379;
|
||||||
|
const username = process.env.REDIS_USERNAME || 'default';
|
||||||
|
const password = process.env.REDIS_PASSWORD;
|
||||||
|
|
||||||
|
console.log('[RedisService] Initializing Redis with config:', {
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
username,
|
||||||
|
hasPassword: !!password
|
||||||
|
});
|
||||||
|
|
||||||
|
const config = {
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
username,
|
||||||
|
retryStrategy: (times) => {
|
||||||
|
const delay = Math.min(times * 50, 2000);
|
||||||
|
return delay;
|
||||||
|
},
|
||||||
|
maxRetriesPerRequest: 3,
|
||||||
|
enableReadyCheck: true,
|
||||||
|
connectTimeout: 10000,
|
||||||
|
showFriendlyErrorStack: true,
|
||||||
|
retryUnfulfilled: true,
|
||||||
|
maxRetryAttempts: 5
|
||||||
|
};
|
||||||
|
|
||||||
|
// Only add password if it exists
|
||||||
|
if (password) {
|
||||||
|
console.log('[RedisService] Adding password to config');
|
||||||
|
config.password = password;
|
||||||
|
} else {
|
||||||
|
console.warn('[RedisService] No Redis password found in environment variables!');
|
||||||
|
}
|
||||||
|
|
||||||
|
this.client = new Redis(config);
|
||||||
|
|
||||||
|
// Handle connection events
|
||||||
|
this.client.on('connect', () => {
|
||||||
|
console.log('[RedisService] Connected to Redis');
|
||||||
|
this.isConnected = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
this.client.on('ready', () => {
|
||||||
|
console.log('[RedisService] Redis is ready');
|
||||||
|
this.isConnected = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
this.client.on('error', (err) => {
|
||||||
|
console.error('[RedisService] Redis error:', err);
|
||||||
|
this.isConnected = false;
|
||||||
|
// Log more details about the error
|
||||||
|
if (err.code === 'WRONGPASS') {
|
||||||
|
console.error('[RedisService] Authentication failed. Please check your Redis password.');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.client.on('close', () => {
|
||||||
|
console.log('[RedisService] Redis connection closed');
|
||||||
|
this.isConnected = false;
|
||||||
|
});
|
||||||
|
|
||||||
|
this.client.on('reconnecting', (params) => {
|
||||||
|
console.log('[RedisService] Reconnecting to Redis:', params);
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[RedisService] Error initializing Redis:', error);
|
||||||
|
this.isConnected = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key) {
|
||||||
|
if (!this.isConnected) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const data = await this.client.get(key);
|
||||||
|
return data ? JSON.parse(data) : null;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[RedisService] Error getting data:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key, data, ttl = this.DEFAULT_TTL) {
|
||||||
|
if (!this.isConnected) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.client.setex(key, ttl, JSON.stringify(data));
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[RedisService] Error setting data:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper to generate cache keys
|
||||||
|
_getCacheKey(type, params = {}) {
|
||||||
|
const {
|
||||||
|
timeRange,
|
||||||
|
startDate,
|
||||||
|
endDate,
|
||||||
|
metricId,
|
||||||
|
metric,
|
||||||
|
daily,
|
||||||
|
cacheKey,
|
||||||
|
isPreviousPeriod,
|
||||||
|
customFilters
|
||||||
|
} = params;
|
||||||
|
|
||||||
|
let key = `klaviyo:${type}`;
|
||||||
|
|
||||||
|
// Handle "stats:details" for daily or metric-based keys
|
||||||
|
if (type === 'stats:details') {
|
||||||
|
// Add metric to key
|
||||||
|
key += `:${metric || 'all'}`;
|
||||||
|
|
||||||
|
// Add daily flag if present
|
||||||
|
if (daily) {
|
||||||
|
key += ':daily';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add custom filters hash if present
|
||||||
|
if (customFilters?.length) {
|
||||||
|
const filterHash = customFilters.join('').replace(/[^a-zA-Z0-9]/g, '');
|
||||||
|
key += `:${filterHash}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If a specific cache key is provided, use it (highest priority)
|
||||||
|
if (cacheKey) {
|
||||||
|
key += `:${cacheKey}`;
|
||||||
|
}
|
||||||
|
// Otherwise, build a default cache key
|
||||||
|
else if (timeRange) {
|
||||||
|
key += `:${timeRange}`;
|
||||||
|
if (metricId) {
|
||||||
|
key += `:${metricId}`;
|
||||||
|
}
|
||||||
|
if (isPreviousPeriod) {
|
||||||
|
key += ':prev';
|
||||||
|
}
|
||||||
|
} else if (startDate && endDate) {
|
||||||
|
// For custom date ranges, include both dates in the key
|
||||||
|
key += `:custom:${startDate}:${endDate}`;
|
||||||
|
if (metricId) {
|
||||||
|
key += `:${metricId}`;
|
||||||
|
}
|
||||||
|
if (isPreviousPeriod) {
|
||||||
|
key += ':prev';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add order type to key if present
|
||||||
|
if (['pre_orders', 'local_pickup', 'on_hold'].includes(metric)) {
|
||||||
|
key += `:${metric}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return key;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Get TTL based on time range
|
||||||
|
_getTTL(timeRange) {
|
||||||
|
const TTL_MAP = {
|
||||||
|
'today': 2 * 60, // 2 minutes
|
||||||
|
'yesterday': 30 * 60, // 30 minutes
|
||||||
|
'thisWeek': 5 * 60, // 5 minutes
|
||||||
|
'lastWeek': 60 * 60, // 1 hour
|
||||||
|
'thisMonth': 10 * 60, // 10 minutes
|
||||||
|
'lastMonth': 2 * 60 * 60, // 2 hours
|
||||||
|
'last7days': 5 * 60, // 5 minutes
|
||||||
|
'last30days': 15 * 60, // 15 minutes
|
||||||
|
'custom': 15 * 60 // 15 minutes
|
||||||
|
};
|
||||||
|
return TTL_MAP[timeRange] || this.DEFAULT_TTL;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getEventData(type, params) {
|
||||||
|
if (!this.isConnected) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const baseKey = this._getCacheKey('events', params);
|
||||||
|
const data = await this.get(`${baseKey}:${type}`);
|
||||||
|
return data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[RedisService] Error getting event data:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async cacheEventData(type, params, data) {
|
||||||
|
if (!this.isConnected) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ttl = this._getTTL(params.timeRange);
|
||||||
|
const baseKey = this._getCacheKey('events', params);
|
||||||
|
|
||||||
|
// Cache raw event data
|
||||||
|
await this.set(`${baseKey}:${type}`, data, ttl);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[RedisService] Error caching event data:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async clearCache(params = {}) {
|
||||||
|
if (!this.isConnected) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const pattern = this._getCacheKey('events', params) + '*';
|
||||||
|
const keys = await this.client.keys(pattern);
|
||||||
|
if (keys.length > 0) {
|
||||||
|
await this.client.del(...keys);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[RedisService] Error clearing cache:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,254 @@
|
|||||||
|
import fetch from 'node-fetch';
|
||||||
|
import { TimeManager } from '../utils/time.utils.js';
|
||||||
|
import { RedisService } from './redis.service.js';
|
||||||
|
|
||||||
|
const METRIC_IDS = {
|
||||||
|
PLACED_ORDER: 'Y8cqcF'
|
||||||
|
};
|
||||||
|
|
||||||
|
export class ReportingService {
|
||||||
|
constructor(apiKey, apiRevision) {
|
||||||
|
this.apiKey = apiKey;
|
||||||
|
this.apiRevision = apiRevision;
|
||||||
|
this.baseUrl = 'https://a.klaviyo.com/api';
|
||||||
|
this.timeManager = new TimeManager();
|
||||||
|
this.redisService = new RedisService();
|
||||||
|
this._pendingReportRequest = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCampaignReports(params = {}) {
|
||||||
|
try {
|
||||||
|
// Check if there's a pending request
|
||||||
|
if (this._pendingReportRequest) {
|
||||||
|
console.log('[ReportingService] Using pending campaign report request');
|
||||||
|
return this._pendingReportRequest;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to get from cache first
|
||||||
|
const cacheKey = this.redisService._getCacheKey('campaign_reports', params);
|
||||||
|
let cachedData = null;
|
||||||
|
try {
|
||||||
|
cachedData = await this.redisService.get(`${cacheKey}:raw`);
|
||||||
|
if (cachedData) {
|
||||||
|
console.log('[ReportingService] Using cached campaign report data');
|
||||||
|
return cachedData;
|
||||||
|
}
|
||||||
|
} catch (cacheError) {
|
||||||
|
console.warn('[ReportingService] Cache error:', cacheError);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create new request promise
|
||||||
|
this._pendingReportRequest = (async () => {
|
||||||
|
console.log('[ReportingService] Fetching fresh campaign report data');
|
||||||
|
|
||||||
|
const range = this.timeManager.getDateRange(params.timeRange || 'last30days');
|
||||||
|
|
||||||
|
// Determine which channels to fetch based on params
|
||||||
|
const channelsToFetch = params.channel === 'all' || !params.channel
|
||||||
|
? ['email', 'sms']
|
||||||
|
: [params.channel];
|
||||||
|
|
||||||
|
const allResults = [];
|
||||||
|
|
||||||
|
// Fetch each channel
|
||||||
|
for (const channel of channelsToFetch) {
|
||||||
|
const payload = {
|
||||||
|
data: {
|
||||||
|
type: "campaign-values-report",
|
||||||
|
attributes: {
|
||||||
|
timeframe: {
|
||||||
|
start: range.start.toISO(),
|
||||||
|
end: range.end.toISO()
|
||||||
|
},
|
||||||
|
statistics: [
|
||||||
|
"delivery_rate",
|
||||||
|
"delivered",
|
||||||
|
"recipients",
|
||||||
|
"open_rate",
|
||||||
|
"opens_unique",
|
||||||
|
"opens",
|
||||||
|
"click_rate",
|
||||||
|
"clicks_unique",
|
||||||
|
"click_to_open_rate",
|
||||||
|
"conversion_value",
|
||||||
|
"conversion_uniques"
|
||||||
|
],
|
||||||
|
conversion_metric_id: METRIC_IDS.PLACED_ORDER,
|
||||||
|
filter: `equals(send_channel,"${channel}")`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await fetch(`${this.baseUrl}/campaign-values-reports`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Accept': 'application/json',
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
|
||||||
|
'revision': this.apiRevision
|
||||||
|
},
|
||||||
|
body: JSON.stringify(payload)
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json();
|
||||||
|
console.error('[ReportingService] API Error:', errorData);
|
||||||
|
throw new Error(`Klaviyo API error: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const reportData = await response.json();
|
||||||
|
console.log(`[ReportingService] Raw ${channel} report data:`, JSON.stringify(reportData, null, 2));
|
||||||
|
|
||||||
|
// Get campaign IDs from the report
|
||||||
|
const campaignIds = reportData.data?.attributes?.results?.map(result =>
|
||||||
|
result.groupings?.campaign_id
|
||||||
|
).filter(Boolean) || [];
|
||||||
|
|
||||||
|
if (campaignIds.length > 0) {
|
||||||
|
// Get campaign details including send time and subject lines
|
||||||
|
const campaignDetails = await this.getCampaignDetails(campaignIds);
|
||||||
|
|
||||||
|
// Process results for this channel
|
||||||
|
const channelResults = reportData.data.attributes.results.map(result => {
|
||||||
|
const campaignId = result.groupings.campaign_id;
|
||||||
|
const details = campaignDetails.find(detail => detail.id === campaignId);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: campaignId,
|
||||||
|
name: details.attributes.name,
|
||||||
|
subject: details.attributes.subject,
|
||||||
|
send_time: details.attributes.send_time,
|
||||||
|
channel: channel, // Use the channel we're currently processing
|
||||||
|
stats: {
|
||||||
|
delivery_rate: result.statistics.delivery_rate,
|
||||||
|
delivered: result.statistics.delivered,
|
||||||
|
recipients: result.statistics.recipients,
|
||||||
|
open_rate: result.statistics.open_rate,
|
||||||
|
opens_unique: result.statistics.opens_unique,
|
||||||
|
opens: result.statistics.opens,
|
||||||
|
click_rate: result.statistics.click_rate,
|
||||||
|
clicks_unique: result.statistics.clicks_unique,
|
||||||
|
click_to_open_rate: result.statistics.click_to_open_rate,
|
||||||
|
conversion_value: result.statistics.conversion_value,
|
||||||
|
conversion_uniques: result.statistics.conversion_uniques
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
allResults.push(...channelResults);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort all results by date
|
||||||
|
const enrichedData = {
|
||||||
|
data: allResults.sort((a, b) => {
|
||||||
|
const dateA = new Date(a.send_time);
|
||||||
|
const dateB = new Date(b.send_time);
|
||||||
|
return dateB - dateA; // Sort by date descending
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
|
console.log('[ReportingService] Enriched data:', JSON.stringify(enrichedData, null, 2));
|
||||||
|
|
||||||
|
// Cache the enriched response for 10 minutes
|
||||||
|
try {
|
||||||
|
await this.redisService.set(`${cacheKey}:raw`, enrichedData, 600);
|
||||||
|
} catch (cacheError) {
|
||||||
|
console.warn('[ReportingService] Cache set error:', cacheError);
|
||||||
|
}
|
||||||
|
|
||||||
|
return enrichedData;
|
||||||
|
})();
|
||||||
|
|
||||||
|
const result = await this._pendingReportRequest;
|
||||||
|
this._pendingReportRequest = null;
|
||||||
|
return result;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[ReportingService] Error fetching campaign reports:', error);
|
||||||
|
this._pendingReportRequest = null;
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCampaignDetails(campaignIds = []) {
|
||||||
|
if (!Array.isArray(campaignIds) || campaignIds.length === 0) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const fetchWithTimeout = async (campaignId, retries = 3) => {
|
||||||
|
for (let i = 0; i < retries; i++) {
|
||||||
|
try {
|
||||||
|
const controller = new AbortController();
|
||||||
|
const timeoutId = setTimeout(() => controller.abort(), 10000); // 10 second timeout
|
||||||
|
|
||||||
|
const response = await fetch(
|
||||||
|
`${this.baseUrl}/campaigns/${campaignId}?include=campaign-messages`,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'Accept': 'application/json',
|
||||||
|
'Authorization': `Klaviyo-API-Key ${this.apiKey}`,
|
||||||
|
'revision': this.apiRevision
|
||||||
|
},
|
||||||
|
signal: controller.signal
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to fetch campaign ${campaignId}: ${response.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
if (!data.data) {
|
||||||
|
throw new Error(`Invalid response for campaign ${campaignId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const message = data.included?.find(item => item.type === 'campaign-message');
|
||||||
|
|
||||||
|
console.log('[ReportingService] Campaign details for ID:', campaignId, {
|
||||||
|
send_channel: data.data.attributes.send_channel,
|
||||||
|
raw_attributes: data.data.attributes
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: data.data.id,
|
||||||
|
type: data.data.type,
|
||||||
|
attributes: {
|
||||||
|
...data.data.attributes,
|
||||||
|
name: data.data.attributes.name,
|
||||||
|
send_time: data.data.attributes.send_time,
|
||||||
|
subject: message?.attributes?.content?.subject,
|
||||||
|
send_channel: data.data.attributes.send_channel || 'email'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
if (i === retries - 1) throw error;
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); // Exponential backoff
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Process in smaller chunks to avoid overwhelming the API
|
||||||
|
const chunkSize = 10;
|
||||||
|
const campaignDetails = [];
|
||||||
|
|
||||||
|
for (let i = 0; i < campaignIds.length; i += chunkSize) {
|
||||||
|
const chunk = campaignIds.slice(i, i + chunkSize);
|
||||||
|
const results = await Promise.all(
|
||||||
|
chunk.map(id => fetchWithTimeout(id).catch(error => {
|
||||||
|
console.error(`Failed to fetch campaign ${id}:`, error);
|
||||||
|
return null;
|
||||||
|
}))
|
||||||
|
);
|
||||||
|
campaignDetails.push(...results.filter(Boolean));
|
||||||
|
|
||||||
|
if (i + chunkSize < campaignIds.length) {
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000)); // 1 second delay between chunks
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return campaignDetails;
|
||||||
|
}
|
||||||
|
}
|
||||||
448
inventory-server/dashboard/klaviyo-server/utils/time.utils.js
Normal file
448
inventory-server/dashboard/klaviyo-server/utils/time.utils.js
Normal file
@@ -0,0 +1,448 @@
|
|||||||
|
import { DateTime, Duration } from 'luxon';
|
||||||
|
|
||||||
|
/**
 * Business-time helper for the dashboard.
 *
 * All computations happen in America/New_York, with a configurable
 * "business day" start hour (e.g. dayStartHour = 1 means the business day
 * runs 1:00 AM to 12:59 AM the next calendar day) and weeks starting on
 * Sunday.
 */
export class TimeManager {
  /**
   * @param {number} dayStartHour - Hour (0-23) when the business day starts.
   */
  constructor(dayStartHour = 1) {
    this.timezone = 'America/New_York';
    this.dayStartHour = dayStartHour; // Hour (0-23) when the business day starts
    this.weekStartDay = 7; // 7 = Sunday in Luxon
  }

  /**
   * Get the start of the current business day.
   * If current time is before dayStartHour, return previous day at dayStartHour.
   */
  getDayStart(dt = this.getNow()) {
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid datetime provided to getDayStart");
      return this.getNow();
    }
    const dayStart = dt.set({ hour: this.dayStartHour, minute: 0, second: 0, millisecond: 0 });
    return dt.hour < this.dayStartHour ? dayStart.minus({ days: 1 }) : dayStart;
  }

  /**
   * Get the end of the current business day.
   * End is defined as dayStartHour - 1 minute on the next day.
   */
  getDayEnd(dt = this.getNow()) {
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid datetime provided to getDayEnd");
      return this.getNow();
    }
    const nextDay = this.getDayStart(dt).plus({ days: 1 });
    return nextDay.minus({ minutes: 1 });
  }

  /**
   * Get the start of the week containing the given date.
   * Aligns with custom day start time and starts on Sunday.
   */
  getWeekStart(dt = this.getNow()) {
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid datetime provided to getWeekStart");
      return this.getNow();
    }
    // Set to start of week (Sunday) and adjust hour.
    // NOTE: Luxon's ISO week runs Mon-Sun, so weekday 7 is the Sunday at the
    // END of the current ISO week; the check below walks back when needed.
    const weekStart = dt.set({ weekday: this.weekStartDay }).startOf('day');
    // If the week start time would be after the given time, go back a week
    if (weekStart > dt) {
      return weekStart.minus({ weeks: 1 }).set({ hour: this.dayStartHour });
    }
    return weekStart.set({ hour: this.dayStartHour });
  }

  /**
   * Convert any date input (DateTime, Date, or ISO string) to a Luxon
   * DateTime in Eastern time. Returns null for missing/unparseable input.
   */
  toDateTime(date) {
    if (!date) return null;

    if (date instanceof DateTime) {
      return date.setZone(this.timezone);
    }

    // If it's an ISO string or Date object, parse it
    const dt = DateTime.fromISO(date instanceof Date ? date.toISOString() : date);
    if (!dt.isValid) {
      console.error("[TimeManager] Invalid date input:", date);
      return null;
    }

    return dt.setZone(this.timezone);
  }

  /**
   * Format a date for API requests (UTC ISO string). Returns null on
   * invalid input.
   */
  formatForAPI(date) {
    if (!date) return null;

    // Parse the input date
    const dt = this.toDateTime(date);
    if (!dt || !dt.isValid) {
      console.error("[TimeManager] Invalid date for API:", date);
      return null;
    }

    // Convert to UTC for API request
    const utc = dt.toUTC();

    console.log("[TimeManager] API date conversion:", {
      input: date,
      eastern: dt.toISO(),
      utc: utc.toISO(),
      offset: dt.offset
    });

    return utc.toISO();
  }

  /**
   * Format a date for display (in Eastern time), e.g. "Jan 5, 2025 3:04 PM".
   */
  formatForDisplay(date) {
    const dt = this.toDateTime(date);
    if (!dt || !dt.isValid) return '';
    return dt.toFormat('LLL d, yyyy h:mm a');
  }

  /**
   * Validate if a date range is valid (both parseable and end after start).
   */
  isValidDateRange(start, end) {
    const startDt = this.toDateTime(start);
    const endDt = this.toDateTime(end);
    return startDt && endDt && endDt > startDt;
  }

  /**
   * Get the current time in Eastern timezone.
   */
  getNow() {
    return DateTime.now().setZone(this.timezone);
  }

  /**
   * Get a date range for the last N hours (ending now).
   */
  getLastNHours(hours) {
    const now = this.getNow();
    return {
      start: now.minus({ hours }),
      end: now
    };
  }

  /**
   * Get a date range for the last N days.
   * Aligns with custom day start time.
   */
  getLastNDays(days) {
    const now = this.getNow();
    const dayStart = this.getDayStart(now);
    return {
      start: dayStart.minus({ days }),
      end: this.getDayEnd(now)
    };
  }

  /**
   * Get a date range for a named time period ('today', 'last7days', ...).
   * All ranges align with custom day start time. Returns null for unknown
   * or 'custom' periods.
   */
  getDateRange(period) {
    const now = this.getNow();

    // Normalize period to handle both 'last' and 'previous' prefixes
    const normalizedPeriod = period.startsWith('previous') ? period.replace('previous', 'last') : period;

    switch (normalizedPeriod) {
      case 'custom': {
        // Custom ranges are handled separately via getCustomRange
        console.warn('[TimeManager] Custom ranges should use getCustomRange method');
        return null;
      }
      case 'today': {
        const dayStart = this.getDayStart(now);
        return {
          start: dayStart,
          end: this.getDayEnd(now)
        };
      }
      case 'yesterday': {
        const yesterday = now.minus({ days: 1 });
        return {
          start: this.getDayStart(yesterday),
          end: this.getDayEnd(yesterday)
        };
      }
      case 'last7days': {
        // For last 7 days, we want to include today and the previous 6 days
        const dayStart = this.getDayStart(now);
        const weekStart = dayStart.minus({ days: 6 });
        return {
          start: weekStart,
          end: this.getDayEnd(now)
        };
      }
      case 'last30days': {
        // Include today and previous 29 days
        const dayStart = this.getDayStart(now);
        const monthStart = dayStart.minus({ days: 29 });
        return {
          start: monthStart,
          end: this.getDayEnd(now)
        };
      }
      case 'last90days': {
        // Include today and previous 89 days
        const dayStart = this.getDayStart(now);
        const start = dayStart.minus({ days: 89 });
        return {
          start,
          end: this.getDayEnd(now)
        };
      }
      case 'thisWeek': {
        // Get the start of the week (Sunday) with custom hour
        const weekStart = this.getWeekStart(now);
        return {
          start: weekStart,
          end: this.getDayEnd(now)
        };
      }
      case 'lastWeek': {
        const lastWeek = now.minus({ weeks: 1 });
        const weekStart = this.getWeekStart(lastWeek);
        const weekEnd = weekStart.plus({ days: 6 }); // 6 days after start = Saturday
        return {
          start: weekStart,
          end: this.getDayEnd(weekEnd)
        };
      }
      case 'thisMonth': {
        const dayStart = this.getDayStart(now);
        const monthStart = dayStart.startOf('month').set({ hour: this.dayStartHour });
        return {
          start: monthStart,
          end: this.getDayEnd(now)
        };
      }
      case 'lastMonth': {
        const lastMonth = now.minus({ months: 1 });
        const monthStart = lastMonth.startOf('month').set({ hour: this.dayStartHour });
        const monthEnd = monthStart.plus({ months: 1 }).minus({ days: 1 });
        return {
          start: monthStart,
          end: this.getDayEnd(monthEnd)
        };
      }
      default:
        console.warn(`[TimeManager] Unknown period: ${period}`);
        return null;
    }
  }

  /**
   * Format a duration in milliseconds to a human-readable string
   * such as "01h 05m 30s".
   */
  formatDuration(ms) {
    // Use a Duration, not a DateTime: DateTime.fromMillis would interpret
    // `ms` as an epoch timestamp and render a wall-clock time in the local
    // zone, which is wrong for a duration.
    return Duration.fromMillis(ms).toFormat("hh'h' mm'm' ss's'");
  }

  /**
   * Get relative time string (e.g., "2 hours ago").
   */
  getRelativeTime(date) {
    const dt = this.toDateTime(date);
    if (!dt) return '';
    return dt.toRelative();
  }

  /**
   * Get a custom date range using exact dates and times provided.
   * @param {string} startDate - ISO string or Date for range start
   * @param {string} endDate - ISO string or Date for range end
   * @returns {Object} Object with start and end DateTime objects, or null
   *   when either bound is missing, unparseable, or out of order.
   */
  getCustomRange(startDate, endDate) {
    if (!startDate || !endDate) {
      console.error("[TimeManager] Custom range requires both start and end dates");
      return null;
    }

    const start = this.toDateTime(startDate);
    const end = this.toDateTime(endDate);

    if (!start || !end || !start.isValid || !end.isValid) {
      console.error("[TimeManager] Invalid dates provided for custom range");
      return null;
    }

    // Validate the range
    if (end < start) {
      console.error("[TimeManager] End date must be after start date");
      return null;
    }

    return {
      start,
      end
    };
  }

  /**
   * Get the previous period's date range based on the current period.
   * @param {string} period - The current period
   * @param {DateTime} now - The current datetime (optional)
   * @returns {Object} Object with start and end DateTime objects, or null
   *   when no previous period is defined for the given name.
   */
  getPreviousPeriod(period, now = this.getNow()) {
    const normalizedPeriod = period.startsWith('previous') ? period.replace('previous', 'last') : period;

    switch (normalizedPeriod) {
      case 'today': {
        const yesterday = now.minus({ days: 1 });
        return {
          start: this.getDayStart(yesterday),
          end: this.getDayEnd(yesterday)
        };
      }
      case 'yesterday': {
        const twoDaysAgo = now.minus({ days: 2 });
        return {
          start: this.getDayStart(twoDaysAgo),
          end: this.getDayEnd(twoDaysAgo)
        };
      }
      case 'last7days': {
        const dayStart = this.getDayStart(now);
        const currentStart = dayStart.minus({ days: 6 });
        const prevEnd = currentStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.minus({ days: 6 });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'last30days': {
        const dayStart = this.getDayStart(now);
        const currentStart = dayStart.minus({ days: 29 });
        const prevEnd = currentStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.minus({ days: 29 });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'last90days': {
        const dayStart = this.getDayStart(now);
        const currentStart = dayStart.minus({ days: 89 });
        const prevEnd = currentStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.minus({ days: 89 });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'thisWeek': {
        const weekStart = this.getWeekStart(now);
        const prevEnd = weekStart.minus({ milliseconds: 1 });
        const prevStart = this.getWeekStart(prevEnd);
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'lastWeek': {
        const lastWeekStart = this.getWeekStart(now.minus({ weeks: 1 }));
        const prevEnd = lastWeekStart.minus({ milliseconds: 1 });
        const prevStart = this.getWeekStart(prevEnd);
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'thisMonth': {
        const monthStart = now.startOf('month').set({ hour: this.dayStartHour });
        const prevEnd = monthStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.startOf('month').set({ hour: this.dayStartHour });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      case 'lastMonth': {
        const lastMonthStart = now.minus({ months: 1 }).startOf('month').set({ hour: this.dayStartHour });
        const prevEnd = lastMonthStart.minus({ milliseconds: 1 });
        const prevStart = prevEnd.startOf('month').set({ hour: this.dayStartHour });
        return {
          start: prevStart,
          end: prevEnd
        };
      }
      default:
        console.warn(`[TimeManager] No previous period defined for: ${period}`);
        return null;
    }
  }

  /**
   * Bucket Klaviyo event objects by time interval.
   *
   * @param {Array} events - Events with `attributes.datetime` ISO strings.
   * @param {string} interval - 'hour' | 'day' | 'week' | 'month' (default 'day').
   * @param {string|null} property - Optional numeric property to sum per
   *   bucket; '$value' sums `attributes.value`, otherwise the named key from
   *   `event_properties`/`properties`.
   * @returns {Array<{datetime: string, count: number, value: number}>}
   *   Buckets sorted ascending by datetime.
   */
  groupEventsByInterval(events, interval = 'day', property = null) {
    if (!events?.length) return [];

    const groupedData = new Map();
    const now = DateTime.now().setZone('America/New_York');

    for (const event of events) {
      const datetime = DateTime.fromISO(event.attributes.datetime);
      let groupKey;

      switch (interval) {
        case 'hour':
          groupKey = datetime.startOf('hour').toISO();
          break;
        case 'day':
          groupKey = datetime.startOf('day').toISO();
          break;
        case 'week':
          groupKey = datetime.startOf('week').toISO();
          break;
        case 'month':
          groupKey = datetime.startOf('month').toISO();
          break;
        default:
          groupKey = datetime.startOf('day').toISO();
      }

      const existingGroup = groupedData.get(groupKey) || {
        datetime: groupKey,
        count: 0,
        value: 0
      };

      existingGroup.count++;

      if (property) {
        // Extract property value from event
        const props = event.attributes?.event_properties || event.attributes?.properties || {};
        let value = 0;

        if (property === '$value') {
          // Special case for $value - use event value
          value = Number(event.attributes?.value || 0);
        } else {
          // Otherwise get from properties
          value = Number(props[property] || 0);
        }

        existingGroup.value = (existingGroup.value || 0) + value;
      }

      groupedData.set(groupKey, existingGroup);
    }

    // Convert to array and sort by datetime
    return Array.from(groupedData.values())
      .sort((a, b) => DateTime.fromISO(a.datetime) - DateTime.fromISO(b.datetime));
  }
}
|
||||||
935
inventory-server/dashboard/meta-server/package-lock.json
generated
Normal file
935
inventory-server/dashboard/meta-server/package-lock.json
generated
Normal file
@@ -0,0 +1,935 @@
|
|||||||
|
{
|
||||||
|
"name": "meta-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"name": "meta-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^1.7.9",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.4.7",
|
||||||
|
"express": "^4.21.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/accepts": {
|
||||||
|
"version": "1.3.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
|
||||||
|
"integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"mime-types": "~2.1.34",
|
||||||
|
"negotiator": "0.6.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/array-flatten": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/asynckit": {
|
||||||
|
"version": "0.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/axios": {
|
||||||
|
"version": "1.7.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||||
|
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"follow-redirects": "^1.15.6",
|
||||||
|
"form-data": "^4.0.0",
|
||||||
|
"proxy-from-env": "^1.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/body-parser": {
|
||||||
|
"version": "1.20.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
|
||||||
|
"integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"bytes": "3.1.2",
|
||||||
|
"content-type": "~1.0.5",
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"destroy": "1.2.0",
|
||||||
|
"http-errors": "2.0.0",
|
||||||
|
"iconv-lite": "0.4.24",
|
||||||
|
"on-finished": "2.4.1",
|
||||||
|
"qs": "6.13.0",
|
||||||
|
"raw-body": "2.5.2",
|
||||||
|
"type-is": "~1.6.18",
|
||||||
|
"unpipe": "1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8",
|
||||||
|
"npm": "1.2.8000 || >= 1.4.16"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/bytes": {
|
||||||
|
"version": "3.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||||
|
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/call-bind-apply-helpers": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"function-bind": "^1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/call-bound": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bind-apply-helpers": "^1.0.1",
|
||||||
|
"get-intrinsic": "^1.2.6"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/combined-stream": {
|
||||||
|
"version": "1.0.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"delayed-stream": "~1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/content-disposition": {
|
||||||
|
"version": "0.5.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
|
||||||
|
"integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"safe-buffer": "5.2.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/content-type": {
|
||||||
|
"version": "1.0.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
|
||||||
|
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cookie": {
|
||||||
|
"version": "0.7.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz",
|
||||||
|
"integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cookie-signature": {
|
||||||
|
"version": "1.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
|
||||||
|
"integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/cors": {
|
||||||
|
"version": "2.8.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
|
||||||
|
"integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"object-assign": "^4",
|
||||||
|
"vary": "^1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/debug": {
|
||||||
|
"version": "2.6.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||||
|
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"ms": "2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/delayed-stream": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.4.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/depd": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/destroy": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8",
|
||||||
|
"npm": "1.2.8000 || >= 1.4.16"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/dotenv": {
|
||||||
|
"version": "16.4.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
|
||||||
|
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
|
||||||
|
"license": "BSD-2-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://dotenvx.com"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/dunder-proto": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bind-apply-helpers": "^1.0.1",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"gopd": "^1.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ee-first": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/encodeurl": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-define-property": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-errors": {
|
||||||
|
"version": "1.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
|
||||||
|
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/es-object-atoms": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/escape-html": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/etag": {
|
||||||
|
"version": "1.8.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
|
||||||
|
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/express": {
|
||||||
|
"version": "4.21.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
|
||||||
|
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"accepts": "~1.3.8",
|
||||||
|
"array-flatten": "1.1.1",
|
||||||
|
"body-parser": "1.20.3",
|
||||||
|
"content-disposition": "0.5.4",
|
||||||
|
"content-type": "~1.0.4",
|
||||||
|
"cookie": "0.7.1",
|
||||||
|
"cookie-signature": "1.0.6",
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"etag": "~1.8.1",
|
||||||
|
"finalhandler": "1.3.1",
|
||||||
|
"fresh": "0.5.2",
|
||||||
|
"http-errors": "2.0.0",
|
||||||
|
"merge-descriptors": "1.0.3",
|
||||||
|
"methods": "~1.1.2",
|
||||||
|
"on-finished": "2.4.1",
|
||||||
|
"parseurl": "~1.3.3",
|
||||||
|
"path-to-regexp": "0.1.12",
|
||||||
|
"proxy-addr": "~2.0.7",
|
||||||
|
"qs": "6.13.0",
|
||||||
|
"range-parser": "~1.2.1",
|
||||||
|
"safe-buffer": "5.2.1",
|
||||||
|
"send": "0.19.0",
|
||||||
|
"serve-static": "1.16.2",
|
||||||
|
"setprototypeof": "1.2.0",
|
||||||
|
"statuses": "2.0.1",
|
||||||
|
"type-is": "~1.6.18",
|
||||||
|
"utils-merge": "1.0.1",
|
||||||
|
"vary": "~1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/express"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/finalhandler": {
|
||||||
|
"version": "1.3.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz",
|
||||||
|
"integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"on-finished": "2.4.1",
|
||||||
|
"parseurl": "~1.3.3",
|
||||||
|
"statuses": "2.0.1",
|
||||||
|
"unpipe": "~1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/follow-redirects": {
|
||||||
|
"version": "1.15.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
|
||||||
|
"integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "individual",
|
||||||
|
"url": "https://github.com/sponsors/RubenVerborgh"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"debug": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/form-data": {
|
||||||
|
"version": "4.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||||
|
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"asynckit": "^0.4.0",
|
||||||
|
"combined-stream": "^1.0.8",
|
||||||
|
"mime-types": "^2.1.12"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/forwarded": {
|
||||||
|
"version": "0.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
||||||
|
"integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/fresh": {
|
||||||
|
"version": "0.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
|
||||||
|
"integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/function-bind": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/get-intrinsic": {
|
||||||
|
"version": "1.2.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.6.tgz",
|
||||||
|
"integrity": "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bind-apply-helpers": "^1.0.1",
|
||||||
|
"dunder-proto": "^1.0.0",
|
||||||
|
"es-define-property": "^1.0.1",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"es-object-atoms": "^1.0.0",
|
||||||
|
"function-bind": "^1.1.2",
|
||||||
|
"gopd": "^1.2.0",
|
||||||
|
"has-symbols": "^1.1.0",
|
||||||
|
"hasown": "^2.0.2",
|
||||||
|
"math-intrinsics": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/gopd": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/has-symbols": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/hasown": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"function-bind": "^1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/http-errors": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"inherits": "2.0.4",
|
||||||
|
"setprototypeof": "1.2.0",
|
||||||
|
"statuses": "2.0.1",
|
||||||
|
"toidentifier": "1.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/iconv-lite": {
|
||||||
|
"version": "0.4.24",
|
||||||
|
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||||
|
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"safer-buffer": ">= 2.1.2 < 3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/inherits": {
|
||||||
|
"version": "2.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||||
|
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
|
"node_modules/ipaddr.js": {
|
||||||
|
"version": "1.9.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
|
||||||
|
"integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/math-intrinsics": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/media-typer": {
|
||||||
|
"version": "0.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||||
|
"integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/merge-descriptors": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/methods": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mime": {
|
||||||
|
"version": "1.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
|
||||||
|
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"bin": {
|
||||||
|
"mime": "cli.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mime-db": {
|
||||||
|
"version": "1.52.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mime-types": {
|
||||||
|
"version": "2.1.35",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"mime-db": "1.52.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ms": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/negotiator": {
|
||||||
|
"version": "0.6.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
|
||||||
|
"integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/object-assign": {
|
||||||
|
"version": "4.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
|
||||||
|
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/object-inspect": {
|
||||||
|
"version": "1.13.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz",
|
||||||
|
"integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/on-finished": {
|
||||||
|
"version": "2.4.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
|
||||||
|
"integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"ee-first": "1.1.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/parseurl": {
|
||||||
|
"version": "1.3.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
|
||||||
|
"integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/path-to-regexp": {
|
||||||
|
"version": "0.1.12",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
|
||||||
|
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/proxy-addr": {
|
||||||
|
"version": "2.0.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
|
||||||
|
"integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"forwarded": "0.2.0",
|
||||||
|
"ipaddr.js": "1.9.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/proxy-from-env": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/qs": {
|
||||||
|
"version": "6.13.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
|
||||||
|
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
|
||||||
|
"license": "BSD-3-Clause",
|
||||||
|
"dependencies": {
|
||||||
|
"side-channel": "^1.0.6"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.6"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/range-parser": {
|
||||||
|
"version": "1.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
|
||||||
|
"integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/raw-body": {
|
||||||
|
"version": "2.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
|
||||||
|
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"bytes": "3.1.2",
|
||||||
|
"http-errors": "2.0.0",
|
||||||
|
"iconv-lite": "0.4.24",
|
||||||
|
"unpipe": "1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/safe-buffer": {
|
||||||
|
"version": "5.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||||
|
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
|
||||||
|
"funding": [
|
||||||
|
{
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/feross"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "patreon",
|
||||||
|
"url": "https://www.patreon.com/feross"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "consulting",
|
||||||
|
"url": "https://feross.org/support"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/safer-buffer": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/send": {
|
||||||
|
"version": "0.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz",
|
||||||
|
"integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"debug": "2.6.9",
|
||||||
|
"depd": "2.0.0",
|
||||||
|
"destroy": "1.2.0",
|
||||||
|
"encodeurl": "~1.0.2",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"etag": "~1.8.1",
|
||||||
|
"fresh": "0.5.2",
|
||||||
|
"http-errors": "2.0.0",
|
||||||
|
"mime": "1.6.0",
|
||||||
|
"ms": "2.1.3",
|
||||||
|
"on-finished": "2.4.1",
|
||||||
|
"range-parser": "~1.2.1",
|
||||||
|
"statuses": "2.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/send/node_modules/encodeurl": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/send/node_modules/ms": {
|
||||||
|
"version": "2.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||||
|
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/serve-static": {
|
||||||
|
"version": "1.16.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
|
||||||
|
"integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"encodeurl": "~2.0.0",
|
||||||
|
"escape-html": "~1.0.3",
|
||||||
|
"parseurl": "~1.3.3",
|
||||||
|
"send": "0.19.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/setprototypeof": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
|
"node_modules/side-channel": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"object-inspect": "^1.13.3",
|
||||||
|
"side-channel-list": "^1.0.0",
|
||||||
|
"side-channel-map": "^1.0.1",
|
||||||
|
"side-channel-weakmap": "^1.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/side-channel-list": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"object-inspect": "^1.13.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/side-channel-map": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bound": "^1.0.2",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"get-intrinsic": "^1.2.5",
|
||||||
|
"object-inspect": "^1.13.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/side-channel-weakmap": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"call-bound": "^1.0.2",
|
||||||
|
"es-errors": "^1.3.0",
|
||||||
|
"get-intrinsic": "^1.2.5",
|
||||||
|
"object-inspect": "^1.13.3",
|
||||||
|
"side-channel-map": "^1.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/statuses": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/toidentifier": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/type-is": {
|
||||||
|
"version": "1.6.18",
|
||||||
|
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
||||||
|
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"media-typer": "0.3.0",
|
||||||
|
"mime-types": "~2.1.24"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/unpipe": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/utils-merge": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.4.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/vary": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
20
inventory-server/dashboard/meta-server/package.json
Normal file
20
inventory-server/dashboard/meta-server/package.json
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"name": "meta-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1",
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"keywords": [],
|
||||||
|
"author": "",
|
||||||
|
"license": "ISC",
|
||||||
|
"description": "",
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^1.7.9",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.4.7",
|
||||||
|
"express": "^4.21.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,91 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const router = express.Router();
|
||||||
|
const {
|
||||||
|
fetchCampaigns,
|
||||||
|
fetchAccountInsights,
|
||||||
|
updateCampaignBudget,
|
||||||
|
updateCampaignStatus,
|
||||||
|
} = require('../services/meta.service');
|
||||||
|
|
||||||
|
// Get all campaigns with insights
|
||||||
|
router.get('/campaigns', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { since, until } = req.query;
|
||||||
|
|
||||||
|
if (!since || !until) {
|
||||||
|
return res.status(400).json({ error: 'Date range is required (since, until)' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const campaigns = await fetchCampaigns(since, until);
|
||||||
|
res.json(campaigns);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Campaign fetch error:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Failed to fetch campaigns',
|
||||||
|
details: error.response?.data?.error?.message || error.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get account insights
|
||||||
|
router.get('/account-insights', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { since, until } = req.query;
|
||||||
|
|
||||||
|
if (!since || !until) {
|
||||||
|
return res.status(400).json({ error: 'Date range is required (since, until)' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const insights = await fetchAccountInsights(since, until);
|
||||||
|
res.json(insights);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Account insights fetch error:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Failed to fetch account insights',
|
||||||
|
details: error.response?.data?.error?.message || error.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update campaign budget
|
||||||
|
router.patch('/campaigns/:campaignId/budget', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { campaignId } = req.params;
|
||||||
|
const { budget } = req.body;
|
||||||
|
|
||||||
|
if (!budget) {
|
||||||
|
return res.status(400).json({ error: 'Budget is required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await updateCampaignBudget(campaignId, budget);
|
||||||
|
res.json(result);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Budget update error:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Failed to update campaign budget',
|
||||||
|
details: error.response?.data?.error?.message || error.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update campaign status (pause/unpause)
|
||||||
|
router.post('/campaigns/:campaignId/:action', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { campaignId, action } = req.params;
|
||||||
|
|
||||||
|
if (!['pause', 'unpause'].includes(action)) {
|
||||||
|
return res.status(400).json({ error: 'Invalid action. Use "pause" or "unpause"' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await updateCampaignStatus(campaignId, action);
|
||||||
|
res.json(result);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Status update error:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Failed to update campaign status',
|
||||||
|
details: error.response?.data?.error?.message || error.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
31
inventory-server/dashboard/meta-server/server.js
Normal file
31
inventory-server/dashboard/meta-server/server.js
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const cors = require('cors');
|
||||||
|
const path = require('path');
|
||||||
|
require('dotenv').config({
|
||||||
|
path: path.resolve(__dirname, '.env')
|
||||||
|
});
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const port = process.env.PORT || 3005;
|
||||||
|
|
||||||
|
app.use(cors());
|
||||||
|
app.use(express.json());
|
||||||
|
|
||||||
|
// Import routes
|
||||||
|
const campaignRoutes = require('./routes/campaigns.routes');
|
||||||
|
|
||||||
|
// Use routes
|
||||||
|
app.use('/api/meta', campaignRoutes);
|
||||||
|
|
||||||
|
// Error handling middleware
|
||||||
|
app.use((err, req, res, next) => {
|
||||||
|
console.error(err.stack);
|
||||||
|
res.status(500).json({ error: 'Something went wrong!' });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
app.listen(port, () => {
|
||||||
|
console.log(`Meta API server running on port ${port}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = app;
|
||||||
@@ -0,0 +1,99 @@
|
|||||||
|
const { default: axios } = require('axios');
|
||||||
|
|
||||||
|
const META_API_VERSION = process.env.META_API_VERSION || 'v21.0';
|
||||||
|
const META_API_BASE_URL = `https://graph.facebook.com/${META_API_VERSION}`;
|
||||||
|
const META_ACCESS_TOKEN = process.env.META_ACCESS_TOKEN;
|
||||||
|
const AD_ACCOUNT_ID = process.env.META_AD_ACCOUNT_ID;
|
||||||
|
|
||||||
|
const metaApiRequest = async (endpoint, params = {}) => {
|
||||||
|
try {
|
||||||
|
const response = await axios.get(`${META_API_BASE_URL}/${endpoint}`, {
|
||||||
|
params: {
|
||||||
|
access_token: META_ACCESS_TOKEN,
|
||||||
|
time_zone: 'America/New_York',
|
||||||
|
...params,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Meta API Error:', {
|
||||||
|
message: error.message,
|
||||||
|
response: error.response?.data,
|
||||||
|
endpoint,
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const fetchCampaigns = async (since, until) => {
|
||||||
|
const campaigns = await metaApiRequest(`act_${AD_ACCOUNT_ID}/campaigns`, {
|
||||||
|
fields: [
|
||||||
|
'id',
|
||||||
|
'name',
|
||||||
|
'status',
|
||||||
|
'objective',
|
||||||
|
'daily_budget',
|
||||||
|
'lifetime_budget',
|
||||||
|
'adsets{daily_budget,lifetime_budget}',
|
||||||
|
`insights.time_range({'since':'${since}','until':'${until}'}).level(campaign){
|
||||||
|
spend,
|
||||||
|
impressions,
|
||||||
|
clicks,
|
||||||
|
ctr,
|
||||||
|
reach,
|
||||||
|
frequency,
|
||||||
|
cpm,
|
||||||
|
cpc,
|
||||||
|
actions,
|
||||||
|
action_values,
|
||||||
|
cost_per_action_type
|
||||||
|
}`,
|
||||||
|
].join(','),
|
||||||
|
limit: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
return campaigns.data.filter(c => c.insights?.data?.[0]?.spend > 0);
|
||||||
|
};
|
||||||
|
|
||||||
|
const fetchAccountInsights = async (since, until) => {
|
||||||
|
const accountInsights = await metaApiRequest(`act_${AD_ACCOUNT_ID}/insights`, {
|
||||||
|
fields: 'reach,spend,impressions,clicks,ctr,cpm,actions,action_values',
|
||||||
|
time_range: JSON.stringify({ since, until }),
|
||||||
|
});
|
||||||
|
|
||||||
|
return accountInsights.data[0] || null;
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateCampaignBudget = async (campaignId, budget) => {
|
||||||
|
try {
|
||||||
|
const response = await axios.post(`${META_API_BASE_URL}/${campaignId}`, {
|
||||||
|
access_token: META_ACCESS_TOKEN,
|
||||||
|
daily_budget: budget * 100, // Convert to cents
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Update campaign budget error:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateCampaignStatus = async (campaignId, action) => {
|
||||||
|
try {
|
||||||
|
const status = action === 'pause' ? 'PAUSED' : 'ACTIVE';
|
||||||
|
const response = await axios.post(`${META_API_BASE_URL}/${campaignId}`, {
|
||||||
|
access_token: META_ACCESS_TOKEN,
|
||||||
|
status,
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Update campaign status error:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
fetchCampaigns,
|
||||||
|
fetchAccountInsights,
|
||||||
|
updateCampaignBudget,
|
||||||
|
updateCampaignStatus,
|
||||||
|
};
|
||||||
24
inventory-server/dashboard/package-lock.json
generated
Normal file
24
inventory-server/dashboard/package-lock.json
generated
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"name": "dashboard",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"dependencies": {
|
||||||
|
"dotenv": "^16.4.7"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/dotenv": {
|
||||||
|
"version": "16.4.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
|
||||||
|
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
|
||||||
|
"license": "BSD-2-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://dotenvx.com"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
5
inventory-server/dashboard/package.json
Normal file
5
inventory-server/dashboard/package.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"dependencies": {
|
||||||
|
"dotenv": "^16.4.7"
|
||||||
|
}
|
||||||
|
}
|
||||||
13
inventory-server/dashboard/typeform-server/.env.example
Normal file
13
inventory-server/dashboard/typeform-server/.env.example
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Server Configuration
|
||||||
|
NODE_ENV=development
|
||||||
|
TYPEFORM_PORT=3008
|
||||||
|
|
||||||
|
# Redis Configuration
|
||||||
|
REDIS_URL=redis://localhost:6379
|
||||||
|
|
||||||
|
# Typeform API Configuration
|
||||||
|
TYPEFORM_ACCESS_TOKEN=your_typeform_access_token_here
|
||||||
|
|
||||||
|
# Optional: Form IDs (if you want to store them in env)
|
||||||
|
TYPEFORM_FORM_ID_1=your_first_form_id
|
||||||
|
TYPEFORM_FORM_ID_2=your_second_form_id
|
||||||
1411
inventory-server/dashboard/typeform-server/package-lock.json
generated
Normal file
1411
inventory-server/dashboard/typeform-server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
20
inventory-server/dashboard/typeform-server/package.json
Normal file
20
inventory-server/dashboard/typeform-server/package.json
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"name": "typeform-server",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Typeform API integration server",
|
||||||
|
"main": "server.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node server.js",
|
||||||
|
"dev": "nodemon server.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^1.6.2",
|
||||||
|
"cors": "^2.8.5",
|
||||||
|
"dotenv": "^16.3.1",
|
||||||
|
"express": "^4.18.2",
|
||||||
|
"redis": "^4.6.11"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"nodemon": "^3.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,121 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const router = express.Router();
|
||||||
|
const typeformService = require('../services/typeform.service');
|
||||||
|
|
||||||
|
// Get form responses
|
||||||
|
router.get('/forms/:formId/responses', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { formId } = req.params;
|
||||||
|
const filters = req.query;
|
||||||
|
|
||||||
|
console.log(`Fetching responses for form ${formId} with filters:`, filters);
|
||||||
|
|
||||||
|
if (!formId) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: 'Missing form ID',
|
||||||
|
details: 'The form ID parameter is required'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await typeformService.getFormResponsesWithFilters(formId, filters);
|
||||||
|
|
||||||
|
if (!data) {
|
||||||
|
return res.status(404).json({
|
||||||
|
error: 'No data found',
|
||||||
|
details: `No responses found for form ${formId}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json(data);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Form responses error:', {
|
||||||
|
formId: req.params.formId,
|
||||||
|
filters: req.query,
|
||||||
|
error: error.message,
|
||||||
|
stack: error.stack,
|
||||||
|
response: error.response?.data
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle specific error cases
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
return res.status(401).json({
|
||||||
|
error: 'Authentication failed',
|
||||||
|
details: 'Invalid Typeform API credentials'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.response?.status === 404) {
|
||||||
|
return res.status(404).json({
|
||||||
|
error: 'Not found',
|
||||||
|
details: `Form '${req.params.formId}' not found`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.response?.status === 400) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: 'Invalid request',
|
||||||
|
details: error.response?.data?.message || 'The request was invalid',
|
||||||
|
data: error.response?.data
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Failed to fetch form responses',
|
||||||
|
details: error.response?.data?.message || error.message,
|
||||||
|
data: error.response?.data
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get form insights
|
||||||
|
router.get('/forms/:formId/insights', async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { formId } = req.params;
|
||||||
|
|
||||||
|
if (!formId) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: 'Missing form ID',
|
||||||
|
details: 'The form ID parameter is required'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await typeformService.getFormInsights(formId);
|
||||||
|
|
||||||
|
if (!data) {
|
||||||
|
return res.status(404).json({
|
||||||
|
error: 'No data found',
|
||||||
|
details: `No insights found for form ${formId}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json(data);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Form insights error:', {
|
||||||
|
formId: req.params.formId,
|
||||||
|
error: error.message,
|
||||||
|
response: error.response?.data
|
||||||
|
});
|
||||||
|
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
return res.status(401).json({
|
||||||
|
error: 'Authentication failed',
|
||||||
|
details: 'Invalid Typeform API credentials'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error.response?.status === 404) {
|
||||||
|
return res.status(404).json({
|
||||||
|
error: 'Not found',
|
||||||
|
details: `Form '${req.params.formId}' not found`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
res.status(500).json({
|
||||||
|
error: 'Failed to fetch form insights',
|
||||||
|
details: error.response?.data?.message || error.message,
|
||||||
|
data: error.response?.data
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
31
inventory-server/dashboard/typeform-server/server.js
Normal file
31
inventory-server/dashboard/typeform-server/server.js
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
const express = require('express');
|
||||||
|
const cors = require('cors');
|
||||||
|
const path = require('path');
|
||||||
|
require('dotenv').config({
|
||||||
|
path: path.resolve(__dirname, '.env')
|
||||||
|
});
|
||||||
|
|
||||||
|
const app = express();
|
||||||
|
const port = process.env.TYPEFORM_PORT || 3008;
|
||||||
|
|
||||||
|
app.use(cors());
|
||||||
|
app.use(express.json());
|
||||||
|
|
||||||
|
// Import routes
|
||||||
|
const typeformRoutes = require('./routes/typeform.routes');
|
||||||
|
|
||||||
|
// Use routes
|
||||||
|
app.use('/api/typeform', typeformRoutes);
|
||||||
|
|
||||||
|
// Error handling middleware
|
||||||
|
app.use((err, req, res, next) => {
|
||||||
|
console.error(err.stack);
|
||||||
|
res.status(500).json({ error: 'Something went wrong!' });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start server
|
||||||
|
app.listen(port, () => {
|
||||||
|
console.log(`Typeform API server running on port ${port}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = app;
|
||||||
@@ -0,0 +1,142 @@
|
|||||||
|
const axios = require('axios');
|
||||||
|
const { createClient } = require('redis');
|
||||||
|
|
||||||
|
class TypeformService {
|
||||||
|
constructor() {
|
||||||
|
this.redis = createClient({
|
||||||
|
url: process.env.REDIS_URL
|
||||||
|
});
|
||||||
|
|
||||||
|
this.redis.on('error', err => console.error('Redis Client Error:', err));
|
||||||
|
this.redis.connect().catch(err => console.error('Redis connection error:', err));
|
||||||
|
|
||||||
|
const token = process.env.TYPEFORM_ACCESS_TOKEN;
|
||||||
|
console.log('Initializing Typeform client with token:', token ? `${token.slice(0, 10)}...` : 'missing');
|
||||||
|
|
||||||
|
this.apiClient = axios.create({
|
||||||
|
baseURL: 'https://api.typeform.com',
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${token}`,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Test the token
|
||||||
|
this.testConnection();
|
||||||
|
}
|
||||||
|
|
||||||
|
async testConnection() {
|
||||||
|
try {
|
||||||
|
const response = await this.apiClient.get('/forms');
|
||||||
|
console.log('Typeform connection test successful:', {
|
||||||
|
status: response.status,
|
||||||
|
headers: response.headers,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Typeform connection test failed:', {
|
||||||
|
error: error.message,
|
||||||
|
response: error.response?.data,
|
||||||
|
status: error.response?.status,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getFormResponses(formId, params = {}) {
|
||||||
|
const cacheKey = `typeform:responses:${formId}:${JSON.stringify(params)}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Try Redis first
|
||||||
|
const cachedData = await this.redis.get(cacheKey);
|
||||||
|
if (cachedData) {
|
||||||
|
console.log(`Form responses for ${formId} found in Redis cache`);
|
||||||
|
return JSON.parse(cachedData);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch from API
|
||||||
|
const response = await this.apiClient.get(`/forms/${formId}/responses`, { params });
|
||||||
|
const data = response.data;
|
||||||
|
|
||||||
|
// Save to Redis with 5 minute expiry
|
||||||
|
await this.redis.set(cacheKey, JSON.stringify(data), {
|
||||||
|
EX: 300 // 5 minutes
|
||||||
|
});
|
||||||
|
|
||||||
|
return data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error fetching form responses for ${formId}:`, {
|
||||||
|
error: error.message,
|
||||||
|
params,
|
||||||
|
response: error.response?.data
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getFormInsights(formId) {
|
||||||
|
const cacheKey = `typeform:insights:${formId}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Try Redis first
|
||||||
|
const cachedData = await this.redis.get(cacheKey);
|
||||||
|
if (cachedData) {
|
||||||
|
console.log(`Form insights for ${formId} found in Redis cache`);
|
||||||
|
return JSON.parse(cachedData);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log the request details
|
||||||
|
console.log(`Fetching insights for form ${formId}...`, {
|
||||||
|
url: `/insights/${formId}/summary`,
|
||||||
|
headers: this.apiClient.defaults.headers
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch from API
|
||||||
|
const response = await this.apiClient.get(`/insights/${formId}/summary`);
|
||||||
|
console.log('Typeform insights response:', {
|
||||||
|
status: response.status,
|
||||||
|
headers: response.headers,
|
||||||
|
data: response.data
|
||||||
|
});
|
||||||
|
const data = response.data;
|
||||||
|
|
||||||
|
// Save to Redis with 5 minute expiry
|
||||||
|
await this.redis.set(cacheKey, JSON.stringify(data), {
|
||||||
|
EX: 300 // 5 minutes
|
||||||
|
});
|
||||||
|
|
||||||
|
return data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error fetching form insights for ${formId}:`, {
|
||||||
|
error: error.message,
|
||||||
|
response: error.response?.data,
|
||||||
|
status: error.response?.status,
|
||||||
|
headers: error.response?.headers,
|
||||||
|
requestUrl: `/insights/${formId}/summary`,
|
||||||
|
requestHeaders: this.apiClient.defaults.headers
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getFormResponsesWithFilters(formId, { since, until, pageSize = 25, ...otherParams } = {}) {
|
||||||
|
try {
|
||||||
|
const params = {
|
||||||
|
page_size: pageSize,
|
||||||
|
...otherParams
|
||||||
|
};
|
||||||
|
|
||||||
|
if (since) {
|
||||||
|
params.since = new Date(since).toISOString();
|
||||||
|
}
|
||||||
|
if (until) {
|
||||||
|
params.until = new Date(until).toISOString();
|
||||||
|
}
|
||||||
|
|
||||||
|
return await this.getFormResponses(formId, params);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error in getFormResponsesWithFilters:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = new TypeformService();
|
||||||
@@ -169,6 +169,9 @@ CREATE TABLE IF NOT EXISTS import_history (
|
|||||||
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
|
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
|
||||||
records_added INTEGER DEFAULT 0,
|
records_added INTEGER DEFAULT 0,
|
||||||
records_updated INTEGER DEFAULT 0,
|
records_updated INTEGER DEFAULT 0,
|
||||||
|
records_deleted INTEGER DEFAULT 0,
|
||||||
|
records_skipped INTEGER DEFAULT 0,
|
||||||
|
total_processed INTEGER DEFAULT 0,
|
||||||
is_incremental BOOLEAN DEFAULT FALSE,
|
is_incremental BOOLEAN DEFAULT FALSE,
|
||||||
status calculation_status DEFAULT 'running',
|
status calculation_status DEFAULT 'running',
|
||||||
error_message TEXT,
|
error_message TEXT,
|
||||||
@@ -179,3 +182,15 @@ CREATE TABLE IF NOT EXISTS import_history (
|
|||||||
CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
|
CREATE INDEX IF NOT EXISTS idx_last_calc ON calculate_status(last_calculation_timestamp);
|
||||||
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
|
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status(last_sync_timestamp);
|
||||||
CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);
|
CREATE INDEX IF NOT EXISTS idx_table_time ON import_history(table_name, start_time);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_import_history_status ON import_history(status);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_calculate_history_status ON calculate_history(status);
|
||||||
|
|
||||||
|
-- Add comments for documentation
|
||||||
|
COMMENT ON TABLE import_history IS 'Tracks history of data import operations with detailed statistics';
|
||||||
|
COMMENT ON COLUMN import_history.records_deleted IS 'Number of records deleted during this import';
|
||||||
|
COMMENT ON COLUMN import_history.records_skipped IS 'Number of records skipped (e.g., unchanged, invalid)';
|
||||||
|
COMMENT ON COLUMN import_history.total_processed IS 'Total number of records examined/processed, including skipped';
|
||||||
|
|
||||||
|
COMMENT ON TABLE calculate_history IS 'Tracks history of metrics calculation runs with performance data';
|
||||||
|
COMMENT ON COLUMN calculate_history.duration_seconds IS 'Total duration of the calculation in seconds';
|
||||||
|
COMMENT ON COLUMN calculate_history.additional_info IS 'JSON object containing step timings, row counts, and other detailed metrics';
|
||||||
@@ -116,6 +116,7 @@ CREATE TABLE public.product_metrics (
|
|||||||
-- Lifetime Metrics (Recalculated Hourly/Daily from daily_product_snapshots)
|
-- Lifetime Metrics (Recalculated Hourly/Daily from daily_product_snapshots)
|
||||||
lifetime_sales INT,
|
lifetime_sales INT,
|
||||||
lifetime_revenue NUMERIC(16, 4),
|
lifetime_revenue NUMERIC(16, 4),
|
||||||
|
lifetime_revenue_quality VARCHAR(10), -- 'exact', 'partial', 'estimated'
|
||||||
|
|
||||||
-- First Period Metrics (Calculated Once/Periodically from daily_product_snapshots)
|
-- First Period Metrics (Calculated Once/Periodically from daily_product_snapshots)
|
||||||
first_7_days_sales INT, first_7_days_revenue NUMERIC(14, 4),
|
first_7_days_sales INT, first_7_days_revenue NUMERIC(14, 4),
|
||||||
@@ -176,6 +177,29 @@ CREATE TABLE public.product_metrics (
|
|||||||
-- Product Status (Calculated from metrics)
|
-- Product Status (Calculated from metrics)
|
||||||
status VARCHAR, -- Stores status values like: Critical, Reorder Soon, Healthy, Overstock, At Risk, New
|
status VARCHAR, -- Stores status values like: Critical, Reorder Soon, Healthy, Overstock, At Risk, New
|
||||||
|
|
||||||
|
-- Growth Metrics (P3)
|
||||||
|
sales_growth_30d_vs_prev NUMERIC(10, 2), -- % growth current 30d vs prev 30d
|
||||||
|
revenue_growth_30d_vs_prev NUMERIC(10, 2), -- % growth current 30d vs prev 30d
|
||||||
|
sales_growth_yoy NUMERIC(10, 2), -- Year-over-year sales growth %
|
||||||
|
revenue_growth_yoy NUMERIC(10, 2), -- Year-over-year revenue growth %
|
||||||
|
|
||||||
|
-- Demand Variability Metrics (P3)
|
||||||
|
sales_variance_30d NUMERIC(10, 2), -- Variance of daily sales
|
||||||
|
sales_std_dev_30d NUMERIC(10, 2), -- Standard deviation of daily sales
|
||||||
|
sales_cv_30d NUMERIC(10, 2), -- Coefficient of variation
|
||||||
|
demand_pattern VARCHAR(20), -- 'stable', 'variable', 'sporadic', 'lumpy'
|
||||||
|
|
||||||
|
-- Service Level & Fill Rate (P5)
|
||||||
|
fill_rate_30d NUMERIC(8, 2), -- % of demand fulfilled from stock
|
||||||
|
stockout_incidents_30d INT, -- Days with stockouts
|
||||||
|
service_level_30d NUMERIC(8, 2), -- % of days without stockouts
|
||||||
|
lost_sales_incidents_30d INT, -- Days with potential lost sales
|
||||||
|
|
||||||
|
-- Seasonality (P5)
|
||||||
|
seasonality_index NUMERIC(10, 2), -- Current vs average (100 = average)
|
||||||
|
seasonal_pattern VARCHAR(20), -- 'none', 'weekly', 'monthly', 'quarterly', 'yearly'
|
||||||
|
peak_season VARCHAR(20), -- e.g., 'Q4', 'summer', 'holiday'
|
||||||
|
|
||||||
CONSTRAINT fk_product_metrics_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT fk_product_metrics_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -242,7 +266,8 @@ CREATE TABLE public.category_metrics (
|
|||||||
-- Calculated KPIs (Based on 30d aggregates) - Apply to rolled-up metrics
|
-- Calculated KPIs (Based on 30d aggregates) - Apply to rolled-up metrics
|
||||||
avg_margin_30d NUMERIC(7, 3), -- (profit / revenue) * 100
|
avg_margin_30d NUMERIC(7, 3), -- (profit / revenue) * 100
|
||||||
stock_turn_30d NUMERIC(10, 3), -- sales_units / avg_stock_units (Needs avg stock calc)
|
stock_turn_30d NUMERIC(10, 3), -- sales_units / avg_stock_units (Needs avg stock calc)
|
||||||
-- growth_rate_30d NUMERIC(7, 3), -- (current 30d rev - prev 30d rev) / prev 30d rev
|
sales_growth_30d_vs_prev NUMERIC(10, 2), -- % growth in sales units
|
||||||
|
revenue_growth_30d_vs_prev NUMERIC(10, 2), -- % growth in revenue
|
||||||
|
|
||||||
CONSTRAINT fk_category_metrics_cat_id FOREIGN KEY (category_id) REFERENCES public.categories(cat_id) ON DELETE CASCADE ON UPDATE CASCADE
|
CONSTRAINT fk_category_metrics_cat_id FOREIGN KEY (category_id) REFERENCES public.categories(cat_id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
);
|
);
|
||||||
@@ -280,7 +305,9 @@ CREATE TABLE public.vendor_metrics (
|
|||||||
lifetime_sales INT NOT NULL DEFAULT 0, lifetime_revenue NUMERIC(18, 4) NOT NULL DEFAULT 0.00,
|
lifetime_sales INT NOT NULL DEFAULT 0, lifetime_revenue NUMERIC(18, 4) NOT NULL DEFAULT 0.00,
|
||||||
|
|
||||||
-- Calculated KPIs (Based on 30d aggregates)
|
-- Calculated KPIs (Based on 30d aggregates)
|
||||||
avg_margin_30d NUMERIC(14, 4) -- (profit / revenue) * 100
|
avg_margin_30d NUMERIC(14, 4), -- (profit / revenue) * 100
|
||||||
|
sales_growth_30d_vs_prev NUMERIC(10, 2), -- % growth in sales units
|
||||||
|
revenue_growth_30d_vs_prev NUMERIC(10, 2), -- % growth in revenue
|
||||||
-- Add more KPIs if needed (e.g., avg product value, sell-through rate for vendor)
|
-- Add more KPIs if needed (e.g., avg product value, sell-through rate for vendor)
|
||||||
);
|
);
|
||||||
CREATE INDEX idx_vendor_metrics_active_count ON public.vendor_metrics(active_product_count);
|
CREATE INDEX idx_vendor_metrics_active_count ON public.vendor_metrics(active_product_count);
|
||||||
@@ -309,7 +336,9 @@ CREATE TABLE public.brand_metrics (
|
|||||||
lifetime_sales INT NOT NULL DEFAULT 0, lifetime_revenue NUMERIC(18, 4) NOT NULL DEFAULT 0.00,
|
lifetime_sales INT NOT NULL DEFAULT 0, lifetime_revenue NUMERIC(18, 4) NOT NULL DEFAULT 0.00,
|
||||||
|
|
||||||
-- Calculated KPIs (Based on 30d aggregates)
|
-- Calculated KPIs (Based on 30d aggregates)
|
||||||
avg_margin_30d NUMERIC(7, 3) -- (profit / revenue) * 100
|
avg_margin_30d NUMERIC(7, 3), -- (profit / revenue) * 100
|
||||||
|
sales_growth_30d_vs_prev NUMERIC(10, 2), -- % growth in sales units
|
||||||
|
revenue_growth_30d_vs_prev NUMERIC(10, 2), -- % growth in revenue
|
||||||
-- Add more KPIs if needed (e.g., avg product value, sell-through rate for brand)
|
-- Add more KPIs if needed (e.g., avg product value, sell-through rate for brand)
|
||||||
);
|
);
|
||||||
CREATE INDEX idx_brand_metrics_active_count ON public.brand_metrics(active_product_count);
|
CREATE INDEX idx_brand_metrics_active_count ON public.brand_metrics(active_product_count);
|
||||||
@@ -7,7 +7,7 @@ BEGIN
|
|||||||
-- Check which table is being updated and use the appropriate column
|
-- Check which table is being updated and use the appropriate column
|
||||||
IF TG_TABLE_NAME = 'categories' THEN
|
IF TG_TABLE_NAME = 'categories' THEN
|
||||||
NEW.updated_at = CURRENT_TIMESTAMP;
|
NEW.updated_at = CURRENT_TIMESTAMP;
|
||||||
ELSIF TG_TABLE_NAME IN ('products', 'orders', 'purchase_orders') THEN
|
ELSIF TG_TABLE_NAME IN ('products', 'orders', 'purchase_orders', 'receivings') THEN
|
||||||
NEW.updated = CURRENT_TIMESTAMP;
|
NEW.updated = CURRENT_TIMESTAMP;
|
||||||
END IF;
|
END IF;
|
||||||
RETURN NEW;
|
RETURN NEW;
|
||||||
@@ -159,27 +159,24 @@ CREATE INDEX idx_orders_pid_date ON orders(pid, date);
|
|||||||
CREATE INDEX idx_orders_updated ON orders(updated);
|
CREATE INDEX idx_orders_updated ON orders(updated);
|
||||||
|
|
||||||
-- Create purchase_orders table with its indexes
|
-- Create purchase_orders table with its indexes
|
||||||
|
-- This table now focuses solely on purchase order intent, not receivings
|
||||||
CREATE TABLE purchase_orders (
|
CREATE TABLE purchase_orders (
|
||||||
id BIGSERIAL PRIMARY KEY,
|
id BIGSERIAL PRIMARY KEY,
|
||||||
po_id TEXT NOT NULL,
|
po_id TEXT NOT NULL,
|
||||||
vendor TEXT NOT NULL,
|
vendor TEXT NOT NULL,
|
||||||
date DATE NOT NULL,
|
date TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||||
expected_date DATE,
|
expected_date DATE,
|
||||||
pid BIGINT NOT NULL,
|
pid BIGINT NOT NULL,
|
||||||
sku TEXT NOT NULL,
|
sku TEXT NOT NULL,
|
||||||
name TEXT NOT NULL,
|
name TEXT NOT NULL,
|
||||||
cost_price NUMERIC(14, 4) NOT NULL,
|
|
||||||
po_cost_price NUMERIC(14, 4) NOT NULL,
|
po_cost_price NUMERIC(14, 4) NOT NULL,
|
||||||
status TEXT DEFAULT 'created',
|
status TEXT DEFAULT 'created',
|
||||||
receiving_status TEXT DEFAULT 'created',
|
|
||||||
notes TEXT,
|
notes TEXT,
|
||||||
long_note TEXT,
|
long_note TEXT,
|
||||||
ordered INTEGER NOT NULL,
|
ordered INTEGER NOT NULL,
|
||||||
received INTEGER DEFAULT 0,
|
supplier_id INTEGER,
|
||||||
received_date DATE,
|
date_created TIMESTAMP WITH TIME ZONE,
|
||||||
last_received_date DATE,
|
date_ordered TIMESTAMP WITH TIME ZONE,
|
||||||
received_by TEXT,
|
|
||||||
receiving_history JSONB,
|
|
||||||
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
|
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
|
||||||
UNIQUE (po_id, pid)
|
UNIQUE (po_id, pid)
|
||||||
@@ -192,21 +189,61 @@ CREATE TRIGGER update_purchase_orders_updated
|
|||||||
EXECUTE FUNCTION update_updated_column();
|
EXECUTE FUNCTION update_updated_column();
|
||||||
|
|
||||||
COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
|
COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
|
||||||
COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
|
COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO';
|
||||||
COMMENT ON COLUMN purchase_orders.status IS 'canceled, created, electronically_ready_send, ordered, preordered, electronically_sent, receiving_started, done';
|
COMMENT ON COLUMN purchase_orders.status IS 'canceled, created, electronically_ready_send, ordered, preordered, electronically_sent, receiving_started, done';
|
||||||
COMMENT ON COLUMN purchase_orders.receiving_status IS 'canceled, created, partial_received, full_received, paid';
|
|
||||||
COMMENT ON COLUMN purchase_orders.receiving_history IS 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag';
|
|
||||||
|
|
||||||
CREATE INDEX idx_po_id ON purchase_orders(po_id);
|
CREATE INDEX idx_po_id ON purchase_orders(po_id);
|
||||||
CREATE INDEX idx_po_sku ON purchase_orders(sku);
|
CREATE INDEX idx_po_sku ON purchase_orders(sku);
|
||||||
CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
|
CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
|
||||||
CREATE INDEX idx_po_status ON purchase_orders(status);
|
CREATE INDEX idx_po_status ON purchase_orders(status);
|
||||||
CREATE INDEX idx_po_receiving_status ON purchase_orders(receiving_status);
|
|
||||||
CREATE INDEX idx_po_expected_date ON purchase_orders(expected_date);
|
CREATE INDEX idx_po_expected_date ON purchase_orders(expected_date);
|
||||||
CREATE INDEX idx_po_last_received_date ON purchase_orders(last_received_date);
|
|
||||||
CREATE INDEX idx_po_pid_status ON purchase_orders(pid, status);
|
CREATE INDEX idx_po_pid_status ON purchase_orders(pid, status);
|
||||||
CREATE INDEX idx_po_pid_date ON purchase_orders(pid, date);
|
CREATE INDEX idx_po_pid_date ON purchase_orders(pid, date);
|
||||||
CREATE INDEX idx_po_updated ON purchase_orders(updated);
|
CREATE INDEX idx_po_updated ON purchase_orders(updated);
|
||||||
|
CREATE INDEX idx_po_supplier_id ON purchase_orders(supplier_id);
|
||||||
|
|
||||||
|
-- Create receivings table to track actual receipt of goods
|
||||||
|
CREATE TABLE receivings (
|
||||||
|
id BIGSERIAL PRIMARY KEY,
|
||||||
|
receiving_id TEXT NOT NULL,
|
||||||
|
pid BIGINT NOT NULL,
|
||||||
|
sku TEXT NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
vendor TEXT,
|
||||||
|
qty_each INTEGER NOT NULL,
|
||||||
|
qty_each_orig INTEGER,
|
||||||
|
cost_each NUMERIC(14, 5) NOT NULL,
|
||||||
|
cost_each_orig NUMERIC(14, 5),
|
||||||
|
received_by INTEGER,
|
||||||
|
received_by_name TEXT,
|
||||||
|
received_date TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||||
|
receiving_created_date TIMESTAMP WITH TIME ZONE,
|
||||||
|
supplier_id INTEGER,
|
||||||
|
status TEXT DEFAULT 'created',
|
||||||
|
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
|
||||||
|
UNIQUE (receiving_id, pid)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Create trigger for receivings
|
||||||
|
CREATE TRIGGER update_receivings_updated
|
||||||
|
BEFORE UPDATE ON receivings
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION update_updated_column();
|
||||||
|
|
||||||
|
COMMENT ON COLUMN receivings.status IS 'canceled, created, partial_received, full_received, paid';
|
||||||
|
COMMENT ON COLUMN receivings.qty_each_orig IS 'Original quantity from the source system';
|
||||||
|
COMMENT ON COLUMN receivings.cost_each_orig IS 'Original cost from the source system';
|
||||||
|
COMMENT ON COLUMN receivings.vendor IS 'Vendor name, same as in purchase_orders';
|
||||||
|
|
||||||
|
CREATE INDEX idx_receivings_id ON receivings(receiving_id);
|
||||||
|
CREATE INDEX idx_receivings_pid ON receivings(pid);
|
||||||
|
CREATE INDEX idx_receivings_sku ON receivings(sku);
|
||||||
|
CREATE INDEX idx_receivings_status ON receivings(status);
|
||||||
|
CREATE INDEX idx_receivings_received_date ON receivings(received_date);
|
||||||
|
CREATE INDEX idx_receivings_supplier_id ON receivings(supplier_id);
|
||||||
|
CREATE INDEX idx_receivings_vendor ON receivings(vendor);
|
||||||
|
CREATE INDEX idx_receivings_updated ON receivings(updated);
|
||||||
|
|
||||||
SET session_replication_role = 'origin'; -- Re-enable foreign key checks
|
SET session_replication_role = 'origin'; -- Re-enable foreign key checks
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics-new/utils/progress');
|
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../scripts/metrics-new/utils/progress');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { pipeline } = require('stream');
|
const { pipeline } = require('stream');
|
||||||
@@ -24,7 +24,7 @@ process.on('unhandledRejection', (reason, promise) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Load progress module
|
// Load progress module
|
||||||
const progress = require('../utils/progress');
|
const progress = require('../scripts/metrics-new/utils/progress');
|
||||||
|
|
||||||
// Store progress functions in global scope to ensure availability
|
// Store progress functions in global scope to ensure availability
|
||||||
global.formatElapsedTime = progress.formatElapsedTime;
|
global.formatElapsedTime = progress.formatElapsedTime;
|
||||||
@@ -36,7 +36,7 @@ global.getProgress = progress.getProgress;
|
|||||||
global.logError = progress.logError;
|
global.logError = progress.logError;
|
||||||
|
|
||||||
// Load database module
|
// Load database module
|
||||||
const { getConnection, closePool } = require('../utils/db');
|
const { getConnection, closePool } = require('../scripts/metrics-new/utils/db');
|
||||||
|
|
||||||
// Add cancel handler
|
// Add cancel handler
|
||||||
let isCancelled = false;
|
let isCancelled = false;
|
||||||
@@ -91,6 +91,287 @@ function cancelCalculation() {
|
|||||||
process.on('SIGTERM', cancelCalculation);
|
process.on('SIGTERM', cancelCalculation);
|
||||||
process.on('SIGINT', cancelCalculation);
|
process.on('SIGINT', cancelCalculation);
|
||||||
|
|
||||||
|
const calculateInitialMetrics = (client, onProgress) => {
|
||||||
|
return client.query(`
|
||||||
|
-- Truncate the existing metrics tables to ensure clean data
|
||||||
|
TRUNCATE TABLE public.daily_product_snapshots;
|
||||||
|
TRUNCATE TABLE public.product_metrics;
|
||||||
|
|
||||||
|
-- First let's create daily snapshots for all products with order activity
|
||||||
|
WITH SalesData AS (
|
||||||
|
SELECT
|
||||||
|
p.pid,
|
||||||
|
p.sku,
|
||||||
|
o.date::date AS order_date,
|
||||||
|
-- Count orders to ensure we only include products with real activity
|
||||||
|
COUNT(o.id) as order_count,
|
||||||
|
-- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.landing_cost_price, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,
|
||||||
|
|
||||||
|
-- Aggregate Returns (Quantity < 0 or Status = Returned)
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN ABS(o.quantity) ELSE 0 END), 0) AS units_returned,
|
||||||
|
COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN o.price * ABS(o.quantity) ELSE 0 END), 0.00) AS returns_revenue
|
||||||
|
FROM public.products p
|
||||||
|
LEFT JOIN public.orders o ON p.pid = o.pid
|
||||||
|
GROUP BY p.pid, p.sku, o.date::date
|
||||||
|
HAVING COUNT(o.id) > 0 -- Only include products with actual orders
|
||||||
|
),
|
||||||
|
ReceivingData AS (
|
||||||
|
SELECT
|
||||||
|
r.pid,
|
||||||
|
r.received_date::date AS receiving_date,
|
||||||
|
-- Count receiving documents to ensure we only include products with real activity
|
||||||
|
COUNT(DISTINCT r.receiving_id) as receiving_count,
|
||||||
|
-- Calculate received quantity for this day
|
||||||
|
SUM(r.received_quantity) AS units_received,
|
||||||
|
-- Calculate received cost for this day
|
||||||
|
SUM(r.received_quantity * r.unit_cost) AS cost_received
|
||||||
|
FROM public.receivings r
|
||||||
|
GROUP BY r.pid, r.received_date::date
|
||||||
|
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.received_quantity) > 0
|
||||||
|
),
|
||||||
|
-- Get current stock quantities
|
||||||
|
StockData AS (
|
||||||
|
SELECT
|
||||||
|
p.pid,
|
||||||
|
p.stock_quantity,
|
||||||
|
COALESCE(p.landing_cost_price, p.cost_price, 0.00) as effective_cost_price,
|
||||||
|
COALESCE(p.price, 0.00) as current_price,
|
||||||
|
COALESCE(p.regular_price, 0.00) as current_regular_price
|
||||||
|
FROM public.products p
|
||||||
|
),
|
||||||
|
-- Combine sales and receiving dates to get all activity dates
|
||||||
|
DatePidCombos AS (
|
||||||
|
SELECT DISTINCT pid, order_date AS activity_date FROM SalesData
|
||||||
|
UNION
|
||||||
|
SELECT DISTINCT pid, receiving_date FROM ReceivingData
|
||||||
|
),
|
||||||
|
-- Insert daily snapshots for all product-date combinations
|
||||||
|
SnapshotInsert AS (
|
||||||
|
INSERT INTO public.daily_product_snapshots (
|
||||||
|
snapshot_date,
|
||||||
|
pid,
|
||||||
|
sku,
|
||||||
|
eod_stock_quantity,
|
||||||
|
eod_stock_cost,
|
||||||
|
eod_stock_retail,
|
||||||
|
eod_stock_gross,
|
||||||
|
stockout_flag,
|
||||||
|
units_sold,
|
||||||
|
units_returned,
|
||||||
|
gross_revenue,
|
||||||
|
discounts,
|
||||||
|
returns_revenue,
|
||||||
|
net_revenue,
|
||||||
|
cogs,
|
||||||
|
gross_regular_revenue,
|
||||||
|
profit,
|
||||||
|
units_received,
|
||||||
|
cost_received,
|
||||||
|
calculation_timestamp
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
d.activity_date AS snapshot_date,
|
||||||
|
d.pid,
|
||||||
|
p.sku,
|
||||||
|
-- Use current stock as approximation, since historical stock data is not available
|
||||||
|
s.stock_quantity AS eod_stock_quantity,
|
||||||
|
s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
|
||||||
|
s.stock_quantity * s.current_price AS eod_stock_retail,
|
||||||
|
s.stock_quantity * s.current_regular_price AS eod_stock_gross,
|
||||||
|
(s.stock_quantity <= 0) AS stockout_flag,
|
||||||
|
-- Sales metrics
|
||||||
|
COALESCE(sd.units_sold, 0),
|
||||||
|
COALESCE(sd.units_returned, 0),
|
||||||
|
COALESCE(sd.gross_revenue_unadjusted, 0.00),
|
||||||
|
COALESCE(sd.discounts, 0.00),
|
||||||
|
COALESCE(sd.returns_revenue, 0.00),
|
||||||
|
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
|
||||||
|
COALESCE(sd.cogs, 0.00),
|
||||||
|
COALESCE(sd.gross_regular_revenue, 0.00),
|
||||||
|
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||||
|
-- Receiving metrics
|
||||||
|
COALESCE(rd.units_received, 0),
|
||||||
|
COALESCE(rd.cost_received, 0.00),
|
||||||
|
now() -- calculation timestamp
|
||||||
|
FROM DatePidCombos d
|
||||||
|
JOIN public.products p ON d.pid = p.pid
|
||||||
|
LEFT JOIN SalesData sd ON d.pid = sd.pid AND d.activity_date = sd.order_date
|
||||||
|
LEFT JOIN ReceivingData rd ON d.pid = rd.pid AND d.activity_date = rd.receiving_date
|
||||||
|
LEFT JOIN StockData s ON d.pid = s.pid
|
||||||
|
RETURNING pid, snapshot_date
|
||||||
|
),
|
||||||
|
-- Now build the aggregated product metrics from the daily snapshots
|
||||||
|
MetricsInsert AS (
|
||||||
|
INSERT INTO public.product_metrics (
|
||||||
|
pid,
|
||||||
|
sku,
|
||||||
|
current_stock_quantity,
|
||||||
|
current_stock_cost,
|
||||||
|
current_stock_retail,
|
||||||
|
current_stock_msrp,
|
||||||
|
is_out_of_stock,
|
||||||
|
total_units_sold,
|
||||||
|
total_units_returned,
|
||||||
|
return_rate,
|
||||||
|
gross_revenue,
|
||||||
|
total_discounts,
|
||||||
|
total_returns,
|
||||||
|
net_revenue,
|
||||||
|
total_cogs,
|
||||||
|
total_gross_revenue,
|
||||||
|
total_profit,
|
||||||
|
profit_margin,
|
||||||
|
avg_daily_units,
|
||||||
|
reorder_point,
|
||||||
|
reorder_alert,
|
||||||
|
days_of_supply,
|
||||||
|
sales_velocity,
|
||||||
|
sales_velocity_score,
|
||||||
|
rank_by_revenue,
|
||||||
|
rank_by_quantity,
|
||||||
|
rank_by_profit,
|
||||||
|
total_received_quantity,
|
||||||
|
total_received_cost,
|
||||||
|
last_sold_date,
|
||||||
|
last_received_date,
|
||||||
|
days_since_last_sale,
|
||||||
|
days_since_last_received,
|
||||||
|
calculation_timestamp
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
p.pid,
|
||||||
|
p.sku,
|
||||||
|
p.stock_quantity AS current_stock_quantity,
|
||||||
|
p.stock_quantity * COALESCE(p.landing_cost_price, p.cost_price, 0) AS current_stock_cost,
|
||||||
|
p.stock_quantity * COALESCE(p.price, 0) AS current_stock_retail,
|
||||||
|
p.stock_quantity * COALESCE(p.regular_price, 0) AS current_stock_msrp,
|
||||||
|
(p.stock_quantity <= 0) AS is_out_of_stock,
|
||||||
|
-- Aggregate metrics
|
||||||
|
COALESCE(SUM(ds.units_sold), 0) AS total_units_sold,
|
||||||
|
COALESCE(SUM(ds.units_returned), 0) AS total_units_returned,
|
||||||
|
CASE
|
||||||
|
WHEN COALESCE(SUM(ds.units_sold), 0) > 0
|
||||||
|
THEN COALESCE(SUM(ds.units_returned), 0)::float / NULLIF(COALESCE(SUM(ds.units_sold), 0), 0)
|
||||||
|
ELSE 0
|
||||||
|
END AS return_rate,
|
||||||
|
COALESCE(SUM(ds.gross_revenue), 0) AS gross_revenue,
|
||||||
|
COALESCE(SUM(ds.discounts), 0) AS total_discounts,
|
||||||
|
COALESCE(SUM(ds.returns_revenue), 0) AS total_returns,
|
||||||
|
COALESCE(SUM(ds.net_revenue), 0) AS net_revenue,
|
||||||
|
COALESCE(SUM(ds.cogs), 0) AS total_cogs,
|
||||||
|
COALESCE(SUM(ds.gross_regular_revenue), 0) AS total_gross_revenue,
|
||||||
|
COALESCE(SUM(ds.profit), 0) AS total_profit,
|
||||||
|
CASE
|
||||||
|
WHEN COALESCE(SUM(ds.net_revenue), 0) > 0
|
||||||
|
THEN COALESCE(SUM(ds.profit), 0) / NULLIF(COALESCE(SUM(ds.net_revenue), 0), 0)
|
||||||
|
ELSE 0
|
||||||
|
END AS profit_margin,
|
||||||
|
-- Calculate average daily units
|
||||||
|
COALESCE(AVG(ds.units_sold), 0) AS avg_daily_units,
|
||||||
|
-- Calculate reorder point (simplified, can be enhanced with lead time and safety stock)
|
||||||
|
CEILING(COALESCE(AVG(ds.units_sold) * 14, 0)) AS reorder_point,
|
||||||
|
(p.stock_quantity <= CEILING(COALESCE(AVG(ds.units_sold) * 14, 0))) AS reorder_alert,
|
||||||
|
-- Days of supply based on average daily sales
|
||||||
|
CASE
|
||||||
|
WHEN COALESCE(AVG(ds.units_sold), 0) > 0
|
||||||
|
THEN p.stock_quantity / NULLIF(COALESCE(AVG(ds.units_sold), 0), 0)
|
||||||
|
ELSE NULL
|
||||||
|
END AS days_of_supply,
|
||||||
|
-- Sales velocity (average units sold per day over last 30 days)
|
||||||
|
(SELECT COALESCE(AVG(recent.units_sold), 0)
|
||||||
|
FROM public.daily_product_snapshots recent
|
||||||
|
WHERE recent.pid = p.pid
|
||||||
|
AND recent.snapshot_date >= CURRENT_DATE - INTERVAL '30 days'
|
||||||
|
) AS sales_velocity,
|
||||||
|
-- Placeholder for sales velocity score (can be calculated based on velocity)
|
||||||
|
0 AS sales_velocity_score,
|
||||||
|
-- Will be updated later by ranking procedure
|
||||||
|
0 AS rank_by_revenue,
|
||||||
|
0 AS rank_by_quantity,
|
||||||
|
0 AS rank_by_profit,
|
||||||
|
-- Receiving data
|
||||||
|
COALESCE(SUM(ds.units_received), 0) AS total_received_quantity,
|
||||||
|
COALESCE(SUM(ds.cost_received), 0) AS total_received_cost,
|
||||||
|
-- Date metrics
|
||||||
|
(SELECT MAX(sd.snapshot_date)
|
||||||
|
FROM public.daily_product_snapshots sd
|
||||||
|
WHERE sd.pid = p.pid AND sd.units_sold > 0
|
||||||
|
) AS last_sold_date,
|
||||||
|
(SELECT MAX(rd.snapshot_date)
|
||||||
|
FROM public.daily_product_snapshots rd
|
||||||
|
WHERE rd.pid = p.pid AND rd.units_received > 0
|
||||||
|
) AS last_received_date,
|
||||||
|
-- Calculate days since last sale/received
|
||||||
|
CASE
|
||||||
|
WHEN (SELECT MAX(sd.snapshot_date)
|
||||||
|
FROM public.daily_product_snapshots sd
|
||||||
|
WHERE sd.pid = p.pid AND sd.units_sold > 0) IS NOT NULL
|
||||||
|
THEN (CURRENT_DATE - (SELECT MAX(sd.snapshot_date)
|
||||||
|
FROM public.daily_product_snapshots sd
|
||||||
|
WHERE sd.pid = p.pid AND sd.units_sold > 0))::integer
|
||||||
|
ELSE NULL
|
||||||
|
END AS days_since_last_sale,
|
||||||
|
CASE
|
||||||
|
WHEN (SELECT MAX(rd.snapshot_date)
|
||||||
|
FROM public.daily_product_snapshots rd
|
||||||
|
WHERE rd.pid = p.pid AND rd.units_received > 0) IS NOT NULL
|
||||||
|
THEN (CURRENT_DATE - (SELECT MAX(rd.snapshot_date)
|
||||||
|
FROM public.daily_product_snapshots rd
|
||||||
|
WHERE rd.pid = p.pid AND rd.units_received > 0))::integer
|
||||||
|
ELSE NULL
|
||||||
|
END AS days_since_last_received,
|
||||||
|
now() -- calculation timestamp
|
||||||
|
FROM public.products p
|
||||||
|
LEFT JOIN public.daily_product_snapshots ds ON p.pid = ds.pid
|
||||||
|
GROUP BY p.pid, p.sku, p.stock_quantity, p.landing_cost_price, p.cost_price, p.price, p.regular_price
|
||||||
|
)
|
||||||
|
|
||||||
|
-- Update the calculate_status table
|
||||||
|
INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
|
||||||
|
VALUES
|
||||||
|
('daily_snapshots', now()),
|
||||||
|
('product_metrics', now())
|
||||||
|
ON CONFLICT (module_name) DO UPDATE
|
||||||
|
SET last_calculation_timestamp = now();
|
||||||
|
|
||||||
|
-- Finally, update the ranks for products
|
||||||
|
UPDATE public.product_metrics pm SET
|
||||||
|
rank_by_revenue = rev_ranks.rank
|
||||||
|
FROM (
|
||||||
|
SELECT pid, RANK() OVER (ORDER BY net_revenue DESC) AS rank
|
||||||
|
FROM public.product_metrics
|
||||||
|
WHERE net_revenue > 0
|
||||||
|
) rev_ranks
|
||||||
|
WHERE pm.pid = rev_ranks.pid;
|
||||||
|
|
||||||
|
UPDATE public.product_metrics pm SET
|
||||||
|
rank_by_quantity = qty_ranks.rank
|
||||||
|
FROM (
|
||||||
|
SELECT pid, RANK() OVER (ORDER BY total_units_sold DESC) AS rank
|
||||||
|
FROM public.product_metrics
|
||||||
|
WHERE total_units_sold > 0
|
||||||
|
) qty_ranks
|
||||||
|
WHERE pm.pid = qty_ranks.pid;
|
||||||
|
|
||||||
|
UPDATE public.product_metrics pm SET
|
||||||
|
rank_by_profit = profit_ranks.rank
|
||||||
|
FROM (
|
||||||
|
SELECT pid, RANK() OVER (ORDER BY total_profit DESC) AS rank
|
||||||
|
FROM public.product_metrics
|
||||||
|
WHERE total_profit > 0
|
||||||
|
) profit_ranks
|
||||||
|
WHERE pm.pid = profit_ranks.pid;
|
||||||
|
|
||||||
|
-- Return count of products with metrics
|
||||||
|
SELECT COUNT(*) AS product_count FROM public.product_metrics
|
||||||
|
`);
|
||||||
|
};
|
||||||
|
|
||||||
async function populateInitialMetrics() {
|
async function populateInitialMetrics() {
|
||||||
let connection;
|
let connection;
|
||||||
const startTime = Date.now();
|
const startTime = Date.now();
|
||||||
@@ -357,7 +357,7 @@ async function syncSettingsProductTable() {
|
|||||||
* @param {string} config.historyType - Type identifier for calculate_history.
|
* @param {string} config.historyType - Type identifier for calculate_history.
|
||||||
* @param {string} config.statusModule - Module name for calculate_status.
|
* @param {string} config.statusModule - Module name for calculate_status.
|
||||||
* @param {object} progress - Progress utility functions.
|
* @param {object} progress - Progress utility functions.
|
||||||
* @returns {Promise<{success: boolean, message: string, duration: number}>}
|
* @returns {Promise<{success: boolean, message: string, duration: number, rowsAffected: number}>}
|
||||||
*/
|
*/
|
||||||
async function executeSqlStep(config, progress) {
|
async function executeSqlStep(config, progress) {
|
||||||
if (isCancelled) throw new Error(`Calculation skipped step ${config.name} due to prior cancellation.`);
|
if (isCancelled) throw new Error(`Calculation skipped step ${config.name} due to prior cancellation.`);
|
||||||
@@ -366,6 +366,7 @@ async function executeSqlStep(config, progress) {
|
|||||||
console.log(`\n--- Starting Step: ${config.name} ---`);
|
console.log(`\n--- Starting Step: ${config.name} ---`);
|
||||||
const stepStartTime = Date.now();
|
const stepStartTime = Date.now();
|
||||||
let connection = null;
|
let connection = null;
|
||||||
|
let rowsAffected = 0; // Track rows affected by this step
|
||||||
|
|
||||||
// Set timeout for this specific step
|
// Set timeout for this specific step
|
||||||
if (stepTimeoutHandle) clearTimeout(stepTimeoutHandle); // Clear previous step's timeout
|
if (stepTimeoutHandle) clearTimeout(stepTimeoutHandle); // Clear previous step's timeout
|
||||||
@@ -414,7 +415,10 @@ async function executeSqlStep(config, progress) {
|
|||||||
current: 0, total: 100,
|
current: 0, total: 100,
|
||||||
elapsed: progress.formatElapsedTime(stepStartTime),
|
elapsed: progress.formatElapsedTime(stepStartTime),
|
||||||
remaining: 'Calculating...', rate: 0, percentage: '0',
|
remaining: 'Calculating...', rate: 0, percentage: '0',
|
||||||
timing: { start_time: new Date(stepStartTime).toISOString() }
|
timing: {
|
||||||
|
start_time: new Date(stepStartTime).toISOString(),
|
||||||
|
step_start_ms: stepStartTime
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// 5. Execute the Main SQL Query
|
// 5. Execute the Main SQL Query
|
||||||
@@ -423,15 +427,35 @@ async function executeSqlStep(config, progress) {
|
|||||||
operation: `Executing SQL: ${config.name}`,
|
operation: `Executing SQL: ${config.name}`,
|
||||||
current: 25, total: 100,
|
current: 25, total: 100,
|
||||||
elapsed: progress.formatElapsedTime(stepStartTime),
|
elapsed: progress.formatElapsedTime(stepStartTime),
|
||||||
remaining: 'Executing...', rate: 0, percentage: '25',
|
remaining: 'Executing query...', rate: 0, percentage: '25',
|
||||||
timing: { start_time: new Date(stepStartTime).toISOString() }
|
timing: {
|
||||||
|
start_time: new Date(stepStartTime).toISOString(),
|
||||||
|
step_start_ms: stepStartTime
|
||||||
|
}
|
||||||
});
|
});
|
||||||
console.log(`Executing SQL for ${config.name}...`);
|
console.log(`Executing SQL for ${config.name}...`);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Try executing exactly as individual scripts do
|
// Try executing exactly as individual scripts do
|
||||||
console.log('Executing SQL with simple query method...');
|
const result = await connection.query(sqlQuery);
|
||||||
await connection.query(sqlQuery);
|
|
||||||
|
// Try to extract row count from result
|
||||||
|
if (result && result.rowCount !== undefined) {
|
||||||
|
rowsAffected = result.rowCount;
|
||||||
|
} else if (Array.isArray(result) && result[0] && result[0].rowCount !== undefined) {
|
||||||
|
rowsAffected = result[0].rowCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the query returned a result set with row count info
|
||||||
|
if (result && result.rows && result.rows.length > 0 && result.rows[0].rows_processed) {
|
||||||
|
rowsAffected = parseInt(result.rows[0].rows_processed) || rowsAffected;
|
||||||
|
console.log(`SQL returned metrics: ${JSON.stringify(result.rows[0])}`);
|
||||||
|
} else if (Array.isArray(result) && result[0] && result[0].rows && result[0].rows[0] && result[0].rows[0].rows_processed) {
|
||||||
|
rowsAffected = parseInt(result[0].rows[0].rows_processed) || rowsAffected;
|
||||||
|
console.log(`SQL returned metrics: ${JSON.stringify(result[0].rows[0])}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`SQL affected ${rowsAffected} rows`);
|
||||||
} catch (sqlError) {
|
} catch (sqlError) {
|
||||||
if (sqlError.message.includes('could not determine data type of parameter')) {
|
if (sqlError.message.includes('could not determine data type of parameter')) {
|
||||||
console.log('Simple query failed with parameter type error, trying alternative method...');
|
console.log('Simple query failed with parameter type error, trying alternative method...');
|
||||||
@@ -492,7 +516,8 @@ async function executeSqlStep(config, progress) {
|
|||||||
return {
|
return {
|
||||||
success: true,
|
success: true,
|
||||||
message: `${config.name} completed successfully`,
|
message: `${config.name} completed successfully`,
|
||||||
duration: stepDuration
|
duration: stepDuration,
|
||||||
|
rowsAffected: rowsAffected
|
||||||
};
|
};
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -664,6 +689,17 @@ async function runAllCalculations() {
|
|||||||
combinedHistoryId = historyResult.rows[0].id;
|
combinedHistoryId = historyResult.rows[0].id;
|
||||||
console.log(`Created combined history record ID: ${combinedHistoryId}`);
|
console.log(`Created combined history record ID: ${combinedHistoryId}`);
|
||||||
|
|
||||||
|
// Get initial counts for tracking
|
||||||
|
const productCount = await connection.query('SELECT COUNT(*) as count FROM products');
|
||||||
|
const totalProducts = parseInt(productCount.rows[0].count);
|
||||||
|
|
||||||
|
// Update history with initial counts
|
||||||
|
await connection.query(`
|
||||||
|
UPDATE calculate_history
|
||||||
|
SET additional_info = additional_info || jsonb_build_object('total_products', $1::integer)
|
||||||
|
WHERE id = $2
|
||||||
|
`, [totalProducts, combinedHistoryId]);
|
||||||
|
|
||||||
connection.release();
|
connection.release();
|
||||||
} catch (historyError) {
|
} catch (historyError) {
|
||||||
console.error('Error creating combined history record:', historyError);
|
console.error('Error creating combined history record:', historyError);
|
||||||
@@ -692,16 +728,37 @@ async function runAllCalculations() {
|
|||||||
|
|
||||||
// Track completed steps
|
// Track completed steps
|
||||||
const completedSteps = [];
|
const completedSteps = [];
|
||||||
|
const stepTimings = {};
|
||||||
|
const stepRowCounts = {};
|
||||||
|
let currentStepIndex = 0;
|
||||||
|
|
||||||
// Now run the calculation steps
|
// Now run the calculation steps
|
||||||
for (const step of steps) {
|
for (const step of stepsToRun) {
|
||||||
if (step.run) {
|
|
||||||
if (isCancelled) {
|
if (isCancelled) {
|
||||||
console.log(`Skipping step "${step.name}" due to cancellation.`);
|
console.log(`Skipping step "${step.name}" due to cancellation.`);
|
||||||
overallSuccess = false; // Mark as not fully successful if steps are skipped due to cancel
|
overallSuccess = false; // Mark as not fully successful if steps are skipped due to cancel
|
||||||
continue; // Skip to next step
|
continue; // Skip to next step
|
||||||
}
|
}
|
||||||
|
|
||||||
|
currentStepIndex++;
|
||||||
|
|
||||||
|
// Update overall progress
|
||||||
|
progressUtils.outputProgress({
|
||||||
|
status: 'running',
|
||||||
|
operation: 'Running calculations',
|
||||||
|
message: `Step ${currentStepIndex} of ${stepsToRun.length}: ${step.name}`,
|
||||||
|
current: currentStepIndex - 1,
|
||||||
|
total: stepsToRun.length,
|
||||||
|
elapsed: progressUtils.formatElapsedTime(overallStartTime),
|
||||||
|
remaining: progressUtils.estimateRemaining(overallStartTime, currentStepIndex - 1, stepsToRun.length),
|
||||||
|
percentage: Math.round(((currentStepIndex - 1) / stepsToRun.length) * 100).toString(),
|
||||||
|
timing: {
|
||||||
|
overall_start_time: new Date(overallStartTime).toISOString(),
|
||||||
|
current_step: step.name,
|
||||||
|
completed_steps: completedSteps.length
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// Pass the progress utilities to the step executor
|
// Pass the progress utilities to the step executor
|
||||||
const result = await executeSqlStep(step, progressUtils);
|
const result = await executeSqlStep(step, progressUtils);
|
||||||
|
|
||||||
@@ -709,11 +766,11 @@ async function runAllCalculations() {
|
|||||||
completedSteps.push({
|
completedSteps.push({
|
||||||
name: step.name,
|
name: step.name,
|
||||||
duration: result.duration,
|
duration: result.duration,
|
||||||
status: 'completed'
|
status: 'completed',
|
||||||
|
rowsAffected: result.rowsAffected
|
||||||
});
|
});
|
||||||
}
|
stepTimings[step.name] = result.duration;
|
||||||
} else {
|
stepRowCounts[step.name] = result.rowsAffected;
|
||||||
console.log(`Skipping step "${step.name}" (disabled by configuration).`);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -726,18 +783,32 @@ async function runAllCalculations() {
|
|||||||
connection = await getConnection();
|
connection = await getConnection();
|
||||||
const totalDuration = Math.round((Date.now() - overallStartTime) / 1000);
|
const totalDuration = Math.round((Date.now() - overallStartTime) / 1000);
|
||||||
|
|
||||||
|
// Get final processed counts
|
||||||
|
const processedCounts = await connection.query(`
|
||||||
|
SELECT
|
||||||
|
(SELECT COUNT(*) FROM product_metrics WHERE last_calculated >= $1) as processed_products
|
||||||
|
`, [new Date(overallStartTime)]);
|
||||||
|
|
||||||
await connection.query(`
|
await connection.query(`
|
||||||
UPDATE calculate_history
|
UPDATE calculate_history
|
||||||
SET
|
SET
|
||||||
end_time = NOW(),
|
end_time = NOW(),
|
||||||
duration_seconds = $1::integer,
|
duration_seconds = $1::integer,
|
||||||
status = $2::calculation_status,
|
status = $2::calculation_status,
|
||||||
additional_info = additional_info || jsonb_build_object('completed_steps', $3::jsonb)
|
additional_info = additional_info || jsonb_build_object(
|
||||||
WHERE id = $4::integer;
|
'processed_products', $3::integer,
|
||||||
|
'completed_steps', $4::jsonb,
|
||||||
|
'step_timings', $5::jsonb,
|
||||||
|
'step_row_counts', $6::jsonb
|
||||||
|
)
|
||||||
|
WHERE id = $7::integer;
|
||||||
`, [
|
`, [
|
||||||
totalDuration,
|
totalDuration,
|
||||||
isCancelled ? 'cancelled' : 'completed',
|
isCancelled ? 'cancelled' : 'completed',
|
||||||
|
processedCounts.rows[0].processed_products,
|
||||||
JSON.stringify(completedSteps),
|
JSON.stringify(completedSteps),
|
||||||
|
JSON.stringify(stepTimings),
|
||||||
|
JSON.stringify(stepRowCounts),
|
||||||
combinedHistoryId
|
combinedHistoryId
|
||||||
]);
|
]);
|
||||||
|
|
||||||
@@ -753,6 +824,26 @@ async function runAllCalculations() {
|
|||||||
overallSuccess = false;
|
overallSuccess = false;
|
||||||
} else {
|
} else {
|
||||||
console.log("\n--- All enabled calculations finished successfully ---");
|
console.log("\n--- All enabled calculations finished successfully ---");
|
||||||
|
|
||||||
|
// Send final completion progress
|
||||||
|
progressUtils.outputProgress({
|
||||||
|
status: 'complete',
|
||||||
|
operation: 'All calculations completed',
|
||||||
|
message: `Successfully completed ${completedSteps.length} of ${stepsToRun.length} steps`,
|
||||||
|
current: stepsToRun.length,
|
||||||
|
total: stepsToRun.length,
|
||||||
|
elapsed: progressUtils.formatElapsedTime(overallStartTime),
|
||||||
|
remaining: '0s',
|
||||||
|
percentage: '100',
|
||||||
|
timing: {
|
||||||
|
overall_start_time: new Date(overallStartTime).toISOString(),
|
||||||
|
overall_end_time: new Date().toISOString(),
|
||||||
|
total_duration_seconds: Math.round((Date.now() - overallStartTime) / 1000),
|
||||||
|
step_timings: stepTimings,
|
||||||
|
completed_steps: completedSteps.length
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
progressUtils.clearProgress(); // Clear progress only on full success
|
progressUtils.clearProgress(); // Clear progress only on full success
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ const importCategories = require('./import/categories');
|
|||||||
const { importProducts } = require('./import/products');
|
const { importProducts } = require('./import/products');
|
||||||
const importOrders = require('./import/orders');
|
const importOrders = require('./import/orders');
|
||||||
const importPurchaseOrders = require('./import/purchase-orders');
|
const importPurchaseOrders = require('./import/purchase-orders');
|
||||||
const importHistoricalData = require('./import/historical-data');
|
|
||||||
|
|
||||||
dotenv.config({ path: path.join(__dirname, "../.env") });
|
dotenv.config({ path: path.join(__dirname, "../.env") });
|
||||||
|
|
||||||
@@ -15,7 +14,6 @@ const IMPORT_CATEGORIES = true;
|
|||||||
const IMPORT_PRODUCTS = true;
|
const IMPORT_PRODUCTS = true;
|
||||||
const IMPORT_ORDERS = true;
|
const IMPORT_ORDERS = true;
|
||||||
const IMPORT_PURCHASE_ORDERS = true;
|
const IMPORT_PURCHASE_ORDERS = true;
|
||||||
const IMPORT_HISTORICAL_DATA = false;
|
|
||||||
|
|
||||||
// Add flag for incremental updates
|
// Add flag for incremental updates
|
||||||
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
|
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
|
||||||
@@ -80,8 +78,7 @@ async function main() {
|
|||||||
IMPORT_CATEGORIES,
|
IMPORT_CATEGORIES,
|
||||||
IMPORT_PRODUCTS,
|
IMPORT_PRODUCTS,
|
||||||
IMPORT_ORDERS,
|
IMPORT_ORDERS,
|
||||||
IMPORT_PURCHASE_ORDERS,
|
IMPORT_PURCHASE_ORDERS
|
||||||
IMPORT_HISTORICAL_DATA
|
|
||||||
].filter(Boolean).length;
|
].filter(Boolean).length;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -129,11 +126,10 @@ async function main() {
|
|||||||
'categories_enabled', $2::boolean,
|
'categories_enabled', $2::boolean,
|
||||||
'products_enabled', $3::boolean,
|
'products_enabled', $3::boolean,
|
||||||
'orders_enabled', $4::boolean,
|
'orders_enabled', $4::boolean,
|
||||||
'purchase_orders_enabled', $5::boolean,
|
'purchase_orders_enabled', $5::boolean
|
||||||
'historical_data_enabled', $6::boolean
|
|
||||||
)
|
)
|
||||||
) RETURNING id
|
) RETURNING id
|
||||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_HISTORICAL_DATA]);
|
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]);
|
||||||
importHistoryId = historyResult.rows[0].id;
|
importHistoryId = historyResult.rows[0].id;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error creating import history record:", error);
|
console.error("Error creating import history record:", error);
|
||||||
@@ -150,16 +146,21 @@ async function main() {
|
|||||||
categories: null,
|
categories: null,
|
||||||
products: null,
|
products: null,
|
||||||
orders: null,
|
orders: null,
|
||||||
purchaseOrders: null,
|
purchaseOrders: null
|
||||||
historicalData: null
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let totalRecordsAdded = 0;
|
let totalRecordsAdded = 0;
|
||||||
let totalRecordsUpdated = 0;
|
let totalRecordsUpdated = 0;
|
||||||
|
let totalRecordsDeleted = 0; // Add tracking for deleted records
|
||||||
|
let totalRecordsSkipped = 0; // Track skipped/filtered records
|
||||||
|
const stepTimings = {};
|
||||||
|
|
||||||
// Run each import based on constants
|
// Run each import based on constants
|
||||||
if (IMPORT_CATEGORIES) {
|
if (IMPORT_CATEGORIES) {
|
||||||
|
const stepStart = Date.now();
|
||||||
results.categories = await importCategories(prodConnection, localConnection);
|
results.categories = await importCategories(prodConnection, localConnection);
|
||||||
|
stepTimings.categories = Math.round((Date.now() - stepStart) / 1000);
|
||||||
|
|
||||||
if (isImportCancelled) throw new Error("Import cancelled");
|
if (isImportCancelled) throw new Error("Import cancelled");
|
||||||
completedSteps++;
|
completedSteps++;
|
||||||
console.log('Categories import result:', results.categories);
|
console.log('Categories import result:', results.categories);
|
||||||
@@ -168,26 +169,37 @@ async function main() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (IMPORT_PRODUCTS) {
|
if (IMPORT_PRODUCTS) {
|
||||||
|
const stepStart = Date.now();
|
||||||
results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
||||||
|
stepTimings.products = Math.round((Date.now() - stepStart) / 1000);
|
||||||
|
|
||||||
if (isImportCancelled) throw new Error("Import cancelled");
|
if (isImportCancelled) throw new Error("Import cancelled");
|
||||||
completedSteps++;
|
completedSteps++;
|
||||||
console.log('Products import result:', results.products);
|
console.log('Products import result:', results.products);
|
||||||
totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
|
totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
|
||||||
totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
|
totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
|
||||||
|
totalRecordsSkipped += parseInt(results.products?.skippedUnchanged || 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (IMPORT_ORDERS) {
|
if (IMPORT_ORDERS) {
|
||||||
|
const stepStart = Date.now();
|
||||||
results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
||||||
|
stepTimings.orders = Math.round((Date.now() - stepStart) / 1000);
|
||||||
|
|
||||||
if (isImportCancelled) throw new Error("Import cancelled");
|
if (isImportCancelled) throw new Error("Import cancelled");
|
||||||
completedSteps++;
|
completedSteps++;
|
||||||
console.log('Orders import result:', results.orders);
|
console.log('Orders import result:', results.orders);
|
||||||
totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
|
totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
|
||||||
totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
|
totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
|
||||||
|
totalRecordsSkipped += parseInt(results.orders?.totalSkipped || 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (IMPORT_PURCHASE_ORDERS) {
|
if (IMPORT_PURCHASE_ORDERS) {
|
||||||
try {
|
try {
|
||||||
|
const stepStart = Date.now();
|
||||||
results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
||||||
|
stepTimings.purchaseOrders = Math.round((Date.now() - stepStart) / 1000);
|
||||||
|
|
||||||
if (isImportCancelled) throw new Error("Import cancelled");
|
if (isImportCancelled) throw new Error("Import cancelled");
|
||||||
completedSteps++;
|
completedSteps++;
|
||||||
console.log('Purchase orders import result:', results.purchaseOrders);
|
console.log('Purchase orders import result:', results.purchaseOrders);
|
||||||
@@ -198,6 +210,7 @@ async function main() {
|
|||||||
} else {
|
} else {
|
||||||
totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
|
totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
|
||||||
totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
|
totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
|
||||||
|
totalRecordsDeleted += parseInt(results.purchaseOrders?.recordsDeleted || 0);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error during purchase orders import:', error);
|
console.error('Error during purchase orders import:', error);
|
||||||
@@ -211,32 +224,6 @@ async function main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (IMPORT_HISTORICAL_DATA) {
|
|
||||||
try {
|
|
||||||
results.historicalData = await importHistoricalData(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
|
||||||
if (isImportCancelled) throw new Error("Import cancelled");
|
|
||||||
completedSteps++;
|
|
||||||
console.log('Historical data import result:', results.historicalData);
|
|
||||||
|
|
||||||
// Handle potential error status
|
|
||||||
if (results.historicalData?.status === 'error') {
|
|
||||||
console.error('Historical data import had an error:', results.historicalData.error);
|
|
||||||
} else {
|
|
||||||
totalRecordsAdded += parseInt(results.historicalData?.recordsAdded || 0);
|
|
||||||
totalRecordsUpdated += parseInt(results.historicalData?.recordsUpdated || 0);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error during historical data import:', error);
|
|
||||||
// Continue with other imports, don't fail the whole process
|
|
||||||
results.historicalData = {
|
|
||||||
status: 'error',
|
|
||||||
error: error.message,
|
|
||||||
recordsAdded: 0,
|
|
||||||
recordsUpdated: 0
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const endTime = Date.now();
|
const endTime = Date.now();
|
||||||
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
|
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
|
||||||
|
|
||||||
@@ -254,14 +241,15 @@ async function main() {
|
|||||||
'products_enabled', $5::boolean,
|
'products_enabled', $5::boolean,
|
||||||
'orders_enabled', $6::boolean,
|
'orders_enabled', $6::boolean,
|
||||||
'purchase_orders_enabled', $7::boolean,
|
'purchase_orders_enabled', $7::boolean,
|
||||||
'historical_data_enabled', $8::boolean,
|
'categories_result', COALESCE($8::jsonb, 'null'::jsonb),
|
||||||
'categories_result', COALESCE($9::jsonb, 'null'::jsonb),
|
'products_result', COALESCE($9::jsonb, 'null'::jsonb),
|
||||||
'products_result', COALESCE($10::jsonb, 'null'::jsonb),
|
'orders_result', COALESCE($10::jsonb, 'null'::jsonb),
|
||||||
'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
|
'purchase_orders_result', COALESCE($11::jsonb, 'null'::jsonb),
|
||||||
'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
|
'total_deleted', $12::integer,
|
||||||
'historical_data_result', COALESCE($13::jsonb, 'null'::jsonb)
|
'total_skipped', $13::integer,
|
||||||
|
'step_timings', $14::jsonb
|
||||||
)
|
)
|
||||||
WHERE id = $14
|
WHERE id = $15
|
||||||
`, [
|
`, [
|
||||||
totalElapsedSeconds,
|
totalElapsedSeconds,
|
||||||
parseInt(totalRecordsAdded),
|
parseInt(totalRecordsAdded),
|
||||||
@@ -270,12 +258,13 @@ async function main() {
|
|||||||
IMPORT_PRODUCTS,
|
IMPORT_PRODUCTS,
|
||||||
IMPORT_ORDERS,
|
IMPORT_ORDERS,
|
||||||
IMPORT_PURCHASE_ORDERS,
|
IMPORT_PURCHASE_ORDERS,
|
||||||
IMPORT_HISTORICAL_DATA,
|
|
||||||
JSON.stringify(results.categories),
|
JSON.stringify(results.categories),
|
||||||
JSON.stringify(results.products),
|
JSON.stringify(results.products),
|
||||||
JSON.stringify(results.orders),
|
JSON.stringify(results.orders),
|
||||||
JSON.stringify(results.purchaseOrders),
|
JSON.stringify(results.purchaseOrders),
|
||||||
JSON.stringify(results.historicalData),
|
totalRecordsDeleted,
|
||||||
|
totalRecordsSkipped,
|
||||||
|
JSON.stringify(stepTimings),
|
||||||
importHistoryId
|
importHistoryId
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|||||||
@@ -92,6 +92,12 @@ async function importCategories(prodConnection, localConnection) {
|
|||||||
description = EXCLUDED.description,
|
description = EXCLUDED.description,
|
||||||
status = EXCLUDED.status,
|
status = EXCLUDED.status,
|
||||||
updated_at = EXCLUDED.updated_at
|
updated_at = EXCLUDED.updated_at
|
||||||
|
WHERE -- Only update if at least one field has changed
|
||||||
|
categories.name IS DISTINCT FROM EXCLUDED.name OR
|
||||||
|
categories.type IS DISTINCT FROM EXCLUDED.type OR
|
||||||
|
categories.parent_id IS DISTINCT FROM EXCLUDED.parent_id OR
|
||||||
|
categories.description IS DISTINCT FROM EXCLUDED.description OR
|
||||||
|
categories.status IS DISTINCT FROM EXCLUDED.status
|
||||||
RETURNING
|
RETURNING
|
||||||
cat_id,
|
cat_id,
|
||||||
CASE
|
CASE
|
||||||
@@ -133,7 +139,7 @@ async function importCategories(prodConnection, localConnection) {
|
|||||||
message: `Imported ${inserted} (updated ${updated}) categories of type ${type}`,
|
message: `Imported ${inserted} (updated ${updated}) categories of type ${type}`,
|
||||||
current: totalInserted + totalUpdated,
|
current: totalInserted + totalUpdated,
|
||||||
total: categories.length,
|
total: categories.length,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Rollback to the savepoint for this type
|
// Rollback to the savepoint for this type
|
||||||
@@ -161,7 +167,7 @@ async function importCategories(prodConnection, localConnection) {
|
|||||||
operation: "Categories import completed",
|
operation: "Categories import completed",
|
||||||
current: totalInserted + totalUpdated,
|
current: totalInserted + totalUpdated,
|
||||||
total: totalInserted + totalUpdated,
|
total: totalInserted + totalUpdated,
|
||||||
duration: formatElapsedTime((Date.now() - startTime) / 1000),
|
duration: formatElapsedTime(startTime),
|
||||||
warnings: skippedCategories.length > 0 ? {
|
warnings: skippedCategories.length > 0 ? {
|
||||||
message: "Some categories were skipped due to missing parents",
|
message: "Some categories were skipped due to missing parents",
|
||||||
skippedCategories
|
skippedCategories
|
||||||
|
|||||||
@@ -221,7 +221,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
|||||||
message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
|
message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
|
||||||
current: processedCount,
|
current: processedCount,
|
||||||
total: totalOrderItems,
|
total: totalOrderItems,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
|
remaining: estimateRemaining(startTime, processedCount, totalOrderItems),
|
||||||
rate: calculateRate(startTime, processedCount)
|
rate: calculateRate(startTime, processedCount)
|
||||||
});
|
});
|
||||||
@@ -530,7 +530,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
|||||||
message: `Loading order data: ${processedCount} of ${totalUniqueOrders}`,
|
message: `Loading order data: ${processedCount} of ${totalUniqueOrders}`,
|
||||||
current: processedCount,
|
current: processedCount,
|
||||||
total: totalUniqueOrders,
|
total: totalUniqueOrders,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, processedCount, totalUniqueOrders),
|
remaining: estimateRemaining(startTime, processedCount, totalUniqueOrders),
|
||||||
rate: calculateRate(startTime, processedCount)
|
rate: calculateRate(startTime, processedCount)
|
||||||
});
|
});
|
||||||
@@ -681,6 +681,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
|||||||
status = EXCLUDED.status,
|
status = EXCLUDED.status,
|
||||||
canceled = EXCLUDED.canceled,
|
canceled = EXCLUDED.canceled,
|
||||||
costeach = EXCLUDED.costeach
|
costeach = EXCLUDED.costeach
|
||||||
|
WHERE -- Only update if at least one key field has changed
|
||||||
|
orders.price IS DISTINCT FROM EXCLUDED.price OR
|
||||||
|
orders.quantity IS DISTINCT FROM EXCLUDED.quantity OR
|
||||||
|
orders.discount IS DISTINCT FROM EXCLUDED.discount OR
|
||||||
|
orders.tax IS DISTINCT FROM EXCLUDED.tax OR
|
||||||
|
orders.status IS DISTINCT FROM EXCLUDED.status OR
|
||||||
|
orders.canceled IS DISTINCT FROM EXCLUDED.canceled OR
|
||||||
|
orders.costeach IS DISTINCT FROM EXCLUDED.costeach OR
|
||||||
|
orders.date IS DISTINCT FROM EXCLUDED.date
|
||||||
RETURNING xmax = 0 as inserted
|
RETURNING xmax = 0 as inserted
|
||||||
)
|
)
|
||||||
SELECT
|
SELECT
|
||||||
@@ -704,7 +713,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
|||||||
message: `Importing orders: ${cumulativeProcessedOrders} of ${totalUniqueOrders}`,
|
message: `Importing orders: ${cumulativeProcessedOrders} of ${totalUniqueOrders}`,
|
||||||
current: cumulativeProcessedOrders,
|
current: cumulativeProcessedOrders,
|
||||||
total: totalUniqueOrders,
|
total: totalUniqueOrders,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
|
remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
|
||||||
rate: calculateRate(startTime, cumulativeProcessedOrders)
|
rate: calculateRate(startTime, cumulativeProcessedOrders)
|
||||||
});
|
});
|
||||||
@@ -751,8 +760,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
|||||||
recordsUpdated: parseInt(recordsUpdated) || 0,
|
recordsUpdated: parseInt(recordsUpdated) || 0,
|
||||||
totalSkipped: skippedOrders.size || 0,
|
totalSkipped: skippedOrders.size || 0,
|
||||||
missingProducts: missingProducts.size || 0,
|
missingProducts: missingProducts.size || 0,
|
||||||
|
totalProcessed: orderItems.length, // Total order items in source
|
||||||
incrementalUpdate,
|
incrementalUpdate,
|
||||||
lastSyncTime
|
lastSyncTime,
|
||||||
|
details: {
|
||||||
|
uniqueOrdersProcessed: cumulativeProcessedOrders,
|
||||||
|
totalOrderItems: orderItems.length,
|
||||||
|
skippedDueToMissingProducts: skippedOrders.size,
|
||||||
|
missingProductIds: Array.from(missingProducts).slice(0, 100) // First 100 for debugging
|
||||||
|
}
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error during orders import:", error);
|
console.error("Error during orders import:", error);
|
||||||
|
|||||||
@@ -576,7 +576,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
|||||||
message: `Imported ${i + batch.length} of ${prodData.length} products`,
|
message: `Imported ${i + batch.length} of ${prodData.length} products`,
|
||||||
current: i + batch.length,
|
current: i + batch.length,
|
||||||
total: prodData.length,
|
total: prodData.length,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, i + batch.length, prodData.length),
|
remaining: estimateRemaining(startTime, i + batch.length, prodData.length),
|
||||||
rate: calculateRate(startTime, i + batch.length)
|
rate: calculateRate(startTime, i + batch.length)
|
||||||
});
|
});
|
||||||
@@ -587,6 +587,59 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
|||||||
operation: "Products import",
|
operation: "Products import",
|
||||||
message: "Finished materializing calculations"
|
message: "Finished materializing calculations"
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Add step to identify which products actually need updating
|
||||||
|
outputProgress({
|
||||||
|
status: "running",
|
||||||
|
operation: "Products import",
|
||||||
|
message: "Identifying changed products"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Mark products that haven't changed as needs_update = false
|
||||||
|
await localConnection.query(`
|
||||||
|
UPDATE temp_products t
|
||||||
|
SET needs_update = FALSE
|
||||||
|
FROM products p
|
||||||
|
WHERE t.pid = p.pid
|
||||||
|
AND t.title IS NOT DISTINCT FROM p.title
|
||||||
|
AND t.description IS NOT DISTINCT FROM p.description
|
||||||
|
AND t.sku IS NOT DISTINCT FROM p.sku
|
||||||
|
AND t.stock_quantity = p.stock_quantity
|
||||||
|
AND t.price = p.price
|
||||||
|
AND t.regular_price = p.regular_price
|
||||||
|
AND t.cost_price IS NOT DISTINCT FROM p.cost_price
|
||||||
|
AND t.vendor IS NOT DISTINCT FROM p.vendor
|
||||||
|
AND t.brand IS NOT DISTINCT FROM p.brand
|
||||||
|
AND t.visible = p.visible
|
||||||
|
AND t.replenishable = p.replenishable
|
||||||
|
AND t.barcode IS NOT DISTINCT FROM p.barcode
|
||||||
|
AND t.updated_at IS NOT DISTINCT FROM p.updated_at
|
||||||
|
AND t.total_sold IS NOT DISTINCT FROM p.total_sold
|
||||||
|
-- Check key fields that are likely to change
|
||||||
|
-- We don't need to check every single field, just the important ones
|
||||||
|
`);
|
||||||
|
|
||||||
|
// Get count of products that need updating
|
||||||
|
const [countResult] = await localConnection.query(`
|
||||||
|
SELECT
|
||||||
|
COUNT(*) FILTER (WHERE needs_update = true) as update_count,
|
||||||
|
COUNT(*) FILTER (WHERE needs_update = false) as skip_count,
|
||||||
|
COUNT(*) as total_count
|
||||||
|
FROM temp_products
|
||||||
|
`);
|
||||||
|
|
||||||
|
outputProgress({
|
||||||
|
status: "running",
|
||||||
|
operation: "Products import",
|
||||||
|
message: `Found ${countResult.rows[0].update_count} products that need updating, ${countResult.rows[0].skip_count} unchanged`
|
||||||
|
});
|
||||||
|
|
||||||
|
// Return the total products processed
|
||||||
|
return {
|
||||||
|
totalProcessed: prodData.length,
|
||||||
|
needsUpdate: parseInt(countResult.rows[0].update_count),
|
||||||
|
skipped: parseInt(countResult.rows[0].skip_count)
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
async function importProducts(prodConnection, localConnection, incrementalUpdate = true) {
|
async function importProducts(prodConnection, localConnection, incrementalUpdate = true) {
|
||||||
@@ -612,7 +665,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
|||||||
await setupTemporaryTables(localConnection);
|
await setupTemporaryTables(localConnection);
|
||||||
|
|
||||||
// Materialize calculations into temp table
|
// Materialize calculations into temp table
|
||||||
await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime, startTime);
|
const materializeResult = await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime, startTime);
|
||||||
|
|
||||||
// Get the list of products that need updating
|
// Get the list of products that need updating
|
||||||
const [products] = await localConnection.query(`
|
const [products] = await localConnection.query(`
|
||||||
@@ -847,7 +900,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
|||||||
message: `Processing products: ${i + batch.length} of ${products.rows.length}`,
|
message: `Processing products: ${i + batch.length} of ${products.rows.length}`,
|
||||||
current: i + batch.length,
|
current: i + batch.length,
|
||||||
total: products.rows.length,
|
total: products.rows.length,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, i + batch.length, products.rows.length),
|
remaining: estimateRemaining(startTime, i + batch.length, products.rows.length),
|
||||||
rate: calculateRate(startTime, i + batch.length)
|
rate: calculateRate(startTime, i + batch.length)
|
||||||
});
|
});
|
||||||
@@ -872,7 +925,10 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
|||||||
recordsAdded,
|
recordsAdded,
|
||||||
recordsUpdated,
|
recordsUpdated,
|
||||||
totalRecords: products.rows.length,
|
totalRecords: products.rows.length,
|
||||||
duration: formatElapsedTime(Date.now() - startTime)
|
totalProcessed: materializeResult.totalProcessed,
|
||||||
|
duration: formatElapsedTime(startTime),
|
||||||
|
needsUpdate: materializeResult.needsUpdate,
|
||||||
|
skippedUnchanged: materializeResult.skipped
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Rollback on error
|
// Rollback on error
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ function validateDate(mysqlDate) {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Imports purchase orders and receivings from a production MySQL database to a local PostgreSQL database.
|
* Imports purchase orders and receivings from a production MySQL database to a local PostgreSQL database.
|
||||||
* Implements FIFO allocation of receivings to purchase orders.
|
* Handles these as separate data streams without complex FIFO allocation.
|
||||||
*
|
*
|
||||||
* @param {object} prodConnection - A MySQL connection to production DB
|
* @param {object} prodConnection - A MySQL connection to production DB
|
||||||
* @param {object} localConnection - A PostgreSQL connection to local DB
|
* @param {object} localConnection - A PostgreSQL connection to local DB
|
||||||
@@ -44,8 +44,12 @@ function validateDate(mysqlDate) {
|
|||||||
*/
|
*/
|
||||||
async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) {
|
async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) {
|
||||||
const startTime = Date.now();
|
const startTime = Date.now();
|
||||||
let recordsAdded = 0;
|
let poRecordsAdded = 0;
|
||||||
let recordsUpdated = 0;
|
let poRecordsUpdated = 0;
|
||||||
|
let poRecordsDeleted = 0;
|
||||||
|
let receivingRecordsAdded = 0;
|
||||||
|
let receivingRecordsUpdated = 0;
|
||||||
|
let receivingRecordsDeleted = 0;
|
||||||
let totalProcessed = 0;
|
let totalProcessed = 0;
|
||||||
|
|
||||||
// Batch size constants
|
// Batch size constants
|
||||||
@@ -68,8 +72,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
await localConnection.query(`
|
await localConnection.query(`
|
||||||
DROP TABLE IF EXISTS temp_purchase_orders;
|
DROP TABLE IF EXISTS temp_purchase_orders;
|
||||||
DROP TABLE IF EXISTS temp_receivings;
|
DROP TABLE IF EXISTS temp_receivings;
|
||||||
DROP TABLE IF EXISTS temp_receiving_allocations;
|
|
||||||
DROP TABLE IF EXISTS employee_names;
|
DROP TABLE IF EXISTS employee_names;
|
||||||
|
DROP TABLE IF EXISTS temp_supplier_names;
|
||||||
|
|
||||||
-- Temporary table for purchase orders
|
-- Temporary table for purchase orders
|
||||||
CREATE TEMP TABLE temp_purchase_orders (
|
CREATE TEMP TABLE temp_purchase_orders (
|
||||||
@@ -94,11 +98,16 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
-- Temporary table for receivings
|
-- Temporary table for receivings
|
||||||
CREATE TEMP TABLE temp_receivings (
|
CREATE TEMP TABLE temp_receivings (
|
||||||
receiving_id TEXT NOT NULL,
|
receiving_id TEXT NOT NULL,
|
||||||
po_id TEXT,
|
|
||||||
pid BIGINT NOT NULL,
|
pid BIGINT NOT NULL,
|
||||||
|
sku TEXT,
|
||||||
|
name TEXT,
|
||||||
|
vendor TEXT,
|
||||||
qty_each INTEGER,
|
qty_each INTEGER,
|
||||||
cost_each NUMERIC(14, 4),
|
qty_each_orig INTEGER,
|
||||||
|
cost_each NUMERIC(14, 5),
|
||||||
|
cost_each_orig NUMERIC(14, 5),
|
||||||
received_by INTEGER,
|
received_by INTEGER,
|
||||||
|
received_by_name TEXT,
|
||||||
received_date TIMESTAMP WITH TIME ZONE,
|
received_date TIMESTAMP WITH TIME ZONE,
|
||||||
receiving_created_date TIMESTAMP WITH TIME ZONE,
|
receiving_created_date TIMESTAMP WITH TIME ZONE,
|
||||||
supplier_id INTEGER,
|
supplier_id INTEGER,
|
||||||
@@ -106,18 +115,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
PRIMARY KEY (receiving_id, pid)
|
PRIMARY KEY (receiving_id, pid)
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Temporary table for tracking FIFO allocations
|
|
||||||
CREATE TEMP TABLE temp_receiving_allocations (
|
|
||||||
po_id TEXT NOT NULL,
|
|
||||||
pid BIGINT NOT NULL,
|
|
||||||
receiving_id TEXT NOT NULL,
|
|
||||||
allocated_qty INTEGER NOT NULL,
|
|
||||||
cost_each NUMERIC(14, 4) NOT NULL,
|
|
||||||
received_date TIMESTAMP WITH TIME ZONE NOT NULL,
|
|
||||||
received_by INTEGER,
|
|
||||||
PRIMARY KEY (po_id, pid, receiving_id)
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Temporary table for employee names
|
-- Temporary table for employee names
|
||||||
CREATE TEMP TABLE employee_names (
|
CREATE TEMP TABLE employee_names (
|
||||||
employeeid INTEGER PRIMARY KEY,
|
employeeid INTEGER PRIMARY KEY,
|
||||||
@@ -128,7 +125,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
-- Create indexes for efficient joins
|
-- Create indexes for efficient joins
|
||||||
CREATE INDEX idx_temp_po_pid ON temp_purchase_orders(pid);
|
CREATE INDEX idx_temp_po_pid ON temp_purchase_orders(pid);
|
||||||
CREATE INDEX idx_temp_receiving_pid ON temp_receivings(pid);
|
CREATE INDEX idx_temp_receiving_pid ON temp_receivings(pid);
|
||||||
CREATE INDEX idx_temp_receiving_po_id ON temp_receivings(po_id);
|
|
||||||
`);
|
`);
|
||||||
|
|
||||||
// Map status codes to text values
|
// Map status codes to text values
|
||||||
@@ -191,7 +187,56 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
`, employeeValues);
|
`, employeeValues);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 1. First, fetch all relevant POs
|
// Add this section before the PO import to create a supplier names mapping
|
||||||
|
outputProgress({
|
||||||
|
status: "running",
|
||||||
|
operation: "Purchase orders import",
|
||||||
|
message: "Fetching supplier data for vendor mapping"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch supplier data from production and store in a temp table
|
||||||
|
const [suppliers] = await prodConnection.query(`
|
||||||
|
SELECT
|
||||||
|
supplierid,
|
||||||
|
companyname
|
||||||
|
FROM suppliers
|
||||||
|
WHERE companyname IS NOT NULL AND companyname != ''
|
||||||
|
`);
|
||||||
|
|
||||||
|
if (suppliers.length > 0) {
|
||||||
|
// Create temp table for supplier names
|
||||||
|
await localConnection.query(`
|
||||||
|
DROP TABLE IF EXISTS temp_supplier_names;
|
||||||
|
CREATE TEMP TABLE temp_supplier_names (
|
||||||
|
supplier_id INTEGER PRIMARY KEY,
|
||||||
|
company_name TEXT NOT NULL
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
// Insert supplier data in batches
|
||||||
|
for (let i = 0; i < suppliers.length; i += INSERT_BATCH_SIZE) {
|
||||||
|
const batch = suppliers.slice(i, i + INSERT_BATCH_SIZE);
|
||||||
|
|
||||||
|
const placeholders = batch.map((_, idx) => {
|
||||||
|
const base = idx * 2;
|
||||||
|
return `($${base + 1}, $${base + 2})`;
|
||||||
|
}).join(',');
|
||||||
|
|
||||||
|
const values = batch.flatMap(s => [
|
||||||
|
s.supplierid,
|
||||||
|
s.companyname || 'Unnamed Supplier'
|
||||||
|
]);
|
||||||
|
|
||||||
|
await localConnection.query(`
|
||||||
|
INSERT INTO temp_supplier_names (supplier_id, company_name)
|
||||||
|
VALUES ${placeholders}
|
||||||
|
ON CONFLICT (supplier_id) DO UPDATE SET
|
||||||
|
company_name = EXCLUDED.company_name
|
||||||
|
`, values);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Fetch and process purchase orders
|
||||||
outputProgress({
|
outputProgress({
|
||||||
status: "running",
|
status: "running",
|
||||||
operation: "Purchase orders import",
|
operation: "Purchase orders import",
|
||||||
@@ -214,6 +259,10 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
const totalPOs = poCount[0].total;
|
const totalPOs = poCount[0].total;
|
||||||
console.log(`Found ${totalPOs} relevant purchase orders`);
|
console.log(`Found ${totalPOs} relevant purchase orders`);
|
||||||
|
|
||||||
|
// Skip processing if no POs to process
|
||||||
|
if (totalPOs === 0) {
|
||||||
|
console.log('No purchase orders to process, skipping PO import step');
|
||||||
|
} else {
|
||||||
// Fetch and process POs in batches
|
// Fetch and process POs in batches
|
||||||
let offset = 0;
|
let offset = 0;
|
||||||
let allPOsProcessed = false;
|
let allPOsProcessed = false;
|
||||||
@@ -349,7 +398,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
message: `Processed ${offset} of ${totalPOs} purchase orders (${totalProcessed} line items)`,
|
message: `Processed ${offset} of ${totalPOs} purchase orders (${totalProcessed} line items)`,
|
||||||
current: offset,
|
current: offset,
|
||||||
total: totalPOs,
|
total: totalPOs,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, offset, totalPOs),
|
remaining: estimateRemaining(startTime, offset, totalPOs),
|
||||||
rate: calculateRate(startTime, offset)
|
rate: calculateRate(startTime, offset)
|
||||||
});
|
});
|
||||||
@@ -358,6 +407,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
allPOsProcessed = true;
|
allPOsProcessed = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// 2. Next, fetch all relevant receivings
|
// 2. Next, fetch all relevant receivings
|
||||||
outputProgress({
|
outputProgress({
|
||||||
@@ -381,6 +431,10 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
const totalReceivings = receivingCount[0].total;
|
const totalReceivings = receivingCount[0].total;
|
||||||
console.log(`Found ${totalReceivings} relevant receivings`);
|
console.log(`Found ${totalReceivings} relevant receivings`);
|
||||||
|
|
||||||
|
// Skip processing if no receivings to process
|
||||||
|
if (totalReceivings === 0) {
|
||||||
|
console.log('No receivings to process, skipping receivings import step');
|
||||||
|
} else {
|
||||||
// Fetch and process receivings in batches
|
// Fetch and process receivings in batches
|
||||||
offset = 0; // Reset offset for receivings
|
offset = 0; // Reset offset for receivings
|
||||||
let allReceivingsProcessed = false;
|
let allReceivingsProcessed = false;
|
||||||
@@ -389,10 +443,16 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
const [receivingList] = await prodConnection.query(`
|
const [receivingList] = await prodConnection.query(`
|
||||||
SELECT
|
SELECT
|
||||||
r.receiving_id,
|
r.receiving_id,
|
||||||
r.po_id,
|
|
||||||
r.supplier_id,
|
r.supplier_id,
|
||||||
r.status,
|
r.status,
|
||||||
r.date_created
|
r.notes,
|
||||||
|
r.shipping,
|
||||||
|
r.total_amount,
|
||||||
|
r.hold,
|
||||||
|
r.for_storefront,
|
||||||
|
r.date_created,
|
||||||
|
r.date_paid,
|
||||||
|
r.date_checked
|
||||||
FROM receivings r
|
FROM receivings r
|
||||||
WHERE r.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL ${yearInterval} YEAR)
|
WHERE r.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL ${yearInterval} YEAR)
|
||||||
${incrementalUpdate ? `
|
${incrementalUpdate ? `
|
||||||
@@ -418,12 +478,17 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
rp.receiving_id,
|
rp.receiving_id,
|
||||||
rp.pid,
|
rp.pid,
|
||||||
rp.qty_each,
|
rp.qty_each,
|
||||||
|
rp.qty_each_orig,
|
||||||
rp.cost_each,
|
rp.cost_each,
|
||||||
|
rp.cost_each_orig,
|
||||||
rp.received_by,
|
rp.received_by,
|
||||||
rp.received_date,
|
rp.received_date,
|
||||||
r.date_created as receiving_created_date
|
r.date_created as receiving_created_date,
|
||||||
|
COALESCE(p.itemnumber, 'NO-SKU') AS sku,
|
||||||
|
COALESCE(p.description, 'Unknown Product') AS name
|
||||||
FROM receivings_products rp
|
FROM receivings_products rp
|
||||||
JOIN receivings r ON rp.receiving_id = r.receiving_id
|
JOIN receivings r ON rp.receiving_id = r.receiving_id
|
||||||
|
LEFT JOIN products p ON rp.pid = p.pid
|
||||||
WHERE rp.receiving_id IN (?)
|
WHERE rp.receiving_id IN (?)
|
||||||
`, [receivingIds]);
|
`, [receivingIds]);
|
||||||
|
|
||||||
@@ -433,13 +498,46 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
const receiving = receivingList.find(r => r.receiving_id == product.receiving_id);
|
const receiving = receivingList.find(r => r.receiving_id == product.receiving_id);
|
||||||
if (!receiving) continue;
|
if (!receiving) continue;
|
||||||
|
|
||||||
|
// Get employee name if available
|
||||||
|
let receivedByName = null;
|
||||||
|
if (product.received_by) {
|
||||||
|
const [employeeResult] = await localConnection.query(`
|
||||||
|
SELECT CONCAT(firstname, ' ', lastname) as full_name
|
||||||
|
FROM employee_names
|
||||||
|
WHERE employeeid = $1
|
||||||
|
`, [product.received_by]);
|
||||||
|
|
||||||
|
if (employeeResult.rows.length > 0) {
|
||||||
|
receivedByName = employeeResult.rows[0].full_name;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get vendor name if available
|
||||||
|
let vendorName = 'Unknown Vendor';
|
||||||
|
if (receiving.supplier_id) {
|
||||||
|
const [vendorResult] = await localConnection.query(`
|
||||||
|
SELECT company_name
|
||||||
|
FROM temp_supplier_names
|
||||||
|
WHERE supplier_id = $1
|
||||||
|
`, [receiving.supplier_id]);
|
||||||
|
|
||||||
|
if (vendorResult.rows.length > 0) {
|
||||||
|
vendorName = vendorResult.rows[0].company_name;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
completeReceivings.push({
|
completeReceivings.push({
|
||||||
receiving_id: receiving.receiving_id.toString(),
|
receiving_id: receiving.receiving_id.toString(),
|
||||||
po_id: receiving.po_id ? receiving.po_id.toString() : null,
|
|
||||||
pid: product.pid,
|
pid: product.pid,
|
||||||
|
sku: product.sku,
|
||||||
|
name: product.name,
|
||||||
|
vendor: vendorName,
|
||||||
qty_each: product.qty_each,
|
qty_each: product.qty_each,
|
||||||
|
qty_each_orig: product.qty_each_orig,
|
||||||
cost_each: product.cost_each,
|
cost_each: product.cost_each,
|
||||||
|
cost_each_orig: product.cost_each_orig,
|
||||||
received_by: product.received_by,
|
received_by: product.received_by,
|
||||||
|
received_by_name: receivedByName,
|
||||||
received_date: validateDate(product.received_date) || validateDate(product.receiving_created_date),
|
received_date: validateDate(product.received_date) || validateDate(product.receiving_created_date),
|
||||||
receiving_created_date: validateDate(product.receiving_created_date),
|
receiving_created_date: validateDate(product.receiving_created_date),
|
||||||
supplier_id: receiving.supplier_id,
|
supplier_id: receiving.supplier_id,
|
||||||
@@ -452,17 +550,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
const batch = completeReceivings.slice(i, i + INSERT_BATCH_SIZE);
|
const batch = completeReceivings.slice(i, i + INSERT_BATCH_SIZE);
|
||||||
|
|
||||||
const placeholders = batch.map((_, idx) => {
|
const placeholders = batch.map((_, idx) => {
|
||||||
const base = idx * 10;
|
const base = idx * 15;
|
||||||
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10})`;
|
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
|
||||||
}).join(',');
|
}).join(',');
|
||||||
|
|
||||||
const values = batch.flatMap(r => [
|
const values = batch.flatMap(r => [
|
||||||
r.receiving_id,
|
r.receiving_id,
|
||||||
r.po_id,
|
|
||||||
r.pid,
|
r.pid,
|
||||||
|
r.sku,
|
||||||
|
r.name,
|
||||||
|
r.vendor,
|
||||||
r.qty_each,
|
r.qty_each,
|
||||||
|
r.qty_each_orig,
|
||||||
r.cost_each,
|
r.cost_each,
|
||||||
|
r.cost_each_orig,
|
||||||
r.received_by,
|
r.received_by,
|
||||||
|
r.received_by_name,
|
||||||
r.received_date,
|
r.received_date,
|
||||||
r.receiving_created_date,
|
r.receiving_created_date,
|
||||||
r.supplier_id,
|
r.supplier_id,
|
||||||
@@ -471,15 +574,21 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
|
|
||||||
await localConnection.query(`
|
await localConnection.query(`
|
||||||
INSERT INTO temp_receivings (
|
INSERT INTO temp_receivings (
|
||||||
receiving_id, po_id, pid, qty_each, cost_each, received_by,
|
receiving_id, pid, sku, name, vendor, qty_each, qty_each_orig,
|
||||||
|
cost_each, cost_each_orig, received_by, received_by_name,
|
||||||
received_date, receiving_created_date, supplier_id, status
|
received_date, receiving_created_date, supplier_id, status
|
||||||
)
|
)
|
||||||
VALUES ${placeholders}
|
VALUES ${placeholders}
|
||||||
ON CONFLICT (receiving_id, pid) DO UPDATE SET
|
ON CONFLICT (receiving_id, pid) DO UPDATE SET
|
||||||
po_id = EXCLUDED.po_id,
|
sku = EXCLUDED.sku,
|
||||||
|
name = EXCLUDED.name,
|
||||||
|
vendor = EXCLUDED.vendor,
|
||||||
qty_each = EXCLUDED.qty_each,
|
qty_each = EXCLUDED.qty_each,
|
||||||
|
qty_each_orig = EXCLUDED.qty_each_orig,
|
||||||
cost_each = EXCLUDED.cost_each,
|
cost_each = EXCLUDED.cost_each,
|
||||||
|
cost_each_orig = EXCLUDED.cost_each_orig,
|
||||||
received_by = EXCLUDED.received_by,
|
received_by = EXCLUDED.received_by,
|
||||||
|
received_by_name = EXCLUDED.received_by_name,
|
||||||
received_date = EXCLUDED.received_date,
|
received_date = EXCLUDED.received_date,
|
||||||
receiving_created_date = EXCLUDED.receiving_created_date,
|
receiving_created_date = EXCLUDED.receiving_created_date,
|
||||||
supplier_id = EXCLUDED.supplier_id,
|
supplier_id = EXCLUDED.supplier_id,
|
||||||
@@ -496,7 +605,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
message: `Processed ${offset} of ${totalReceivings} receivings (${totalProcessed} line items total)`,
|
message: `Processed ${offset} of ${totalReceivings} receivings (${totalProcessed} line items total)`,
|
||||||
current: offset,
|
current: offset,
|
||||||
total: totalReceivings,
|
total: totalReceivings,
|
||||||
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
|
elapsed: formatElapsedTime(startTime),
|
||||||
remaining: estimateRemaining(startTime, offset, totalReceivings),
|
remaining: estimateRemaining(startTime, offset, totalReceivings),
|
||||||
rate: calculateRate(startTime, offset)
|
rate: calculateRate(startTime, offset)
|
||||||
});
|
});
|
||||||
@@ -505,16 +614,15 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
allReceivingsProcessed = true;
|
allReceivingsProcessed = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// 3. Implement FIFO allocation of receivings to purchase orders
|
// Add this section to filter out invalid PIDs before final import
|
||||||
outputProgress({
|
outputProgress({
|
||||||
status: "running",
|
status: "running",
|
||||||
operation: "Purchase orders import",
|
operation: "Purchase orders import",
|
||||||
message: "Validating product IDs before allocation"
|
message: "Validating product IDs before final import"
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add this section to filter out invalid PIDs before allocation
|
|
||||||
// This will check all PIDs in our temp tables against the products table
|
|
||||||
await localConnection.query(`
|
await localConnection.query(`
|
||||||
-- Create temp table to store invalid PIDs
|
-- Create temp table to store invalid PIDs
|
||||||
DROP TABLE IF EXISTS temp_invalid_pids;
|
DROP TABLE IF EXISTS temp_invalid_pids;
|
||||||
@@ -552,362 +660,170 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
console.log(`Filtered out ${filteredCount} items with invalid product IDs`);
|
console.log(`Filtered out ${filteredCount} items with invalid product IDs`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Break FIFO allocation into steps with progress tracking
|
// 3. Insert final purchase order records to the actual table
|
||||||
const fifoSteps = [
|
|
||||||
{
|
|
||||||
name: "Direct allocations",
|
|
||||||
query: `
|
|
||||||
INSERT INTO temp_receiving_allocations (
|
|
||||||
po_id, pid, receiving_id, allocated_qty, cost_each, received_date, received_by
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
r.po_id,
|
|
||||||
r.pid,
|
|
||||||
r.receiving_id,
|
|
||||||
LEAST(r.qty_each, po.ordered) as allocated_qty,
|
|
||||||
r.cost_each,
|
|
||||||
COALESCE(r.received_date, NOW()) as received_date,
|
|
||||||
r.received_by
|
|
||||||
FROM temp_receivings r
|
|
||||||
JOIN temp_purchase_orders po ON r.po_id = po.po_id AND r.pid = po.pid
|
|
||||||
WHERE r.po_id IS NOT NULL
|
|
||||||
`
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Handling standalone receivings",
|
|
||||||
query: `
|
|
||||||
INSERT INTO temp_purchase_orders (
|
|
||||||
po_id, pid, sku, name, vendor, date, status,
|
|
||||||
ordered, po_cost_price, supplier_id, date_created, date_ordered
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
r.receiving_id::text as po_id,
|
|
||||||
r.pid,
|
|
||||||
COALESCE(p.sku, 'NO-SKU') as sku,
|
|
||||||
COALESCE(p.name, 'Unknown Product') as name,
|
|
||||||
COALESCE(
|
|
||||||
(SELECT vendor FROM temp_purchase_orders
|
|
||||||
WHERE supplier_id = r.supplier_id LIMIT 1),
|
|
||||||
'Unknown Vendor'
|
|
||||||
) as vendor,
|
|
||||||
COALESCE(r.received_date, r.receiving_created_date) as date,
|
|
||||||
'created' as status,
|
|
||||||
NULL as ordered,
|
|
||||||
r.cost_each as po_cost_price,
|
|
||||||
r.supplier_id,
|
|
||||||
COALESCE(r.receiving_created_date, r.received_date) as date_created,
|
|
||||||
NULL as date_ordered
|
|
||||||
FROM temp_receivings r
|
|
||||||
LEFT JOIN (
|
|
||||||
SELECT DISTINCT pid, sku, name FROM temp_purchase_orders
|
|
||||||
) p ON r.pid = p.pid
|
|
||||||
WHERE r.po_id IS NULL
|
|
||||||
OR NOT EXISTS (
|
|
||||||
SELECT 1 FROM temp_purchase_orders po
|
|
||||||
WHERE po.po_id = r.po_id AND po.pid = r.pid
|
|
||||||
)
|
|
||||||
ON CONFLICT (po_id, pid) DO NOTHING
|
|
||||||
`
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Allocating standalone receivings",
|
|
||||||
query: `
|
|
||||||
INSERT INTO temp_receiving_allocations (
|
|
||||||
po_id, pid, receiving_id, allocated_qty, cost_each, received_date, received_by
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
r.receiving_id::text as po_id,
|
|
||||||
r.pid,
|
|
||||||
r.receiving_id,
|
|
||||||
r.qty_each as allocated_qty,
|
|
||||||
r.cost_each,
|
|
||||||
COALESCE(r.received_date, NOW()) as received_date,
|
|
||||||
r.received_by
|
|
||||||
FROM temp_receivings r
|
|
||||||
WHERE r.po_id IS NULL
|
|
||||||
OR NOT EXISTS (
|
|
||||||
SELECT 1 FROM temp_purchase_orders po
|
|
||||||
WHERE po.po_id = r.po_id AND po.pid = r.pid
|
|
||||||
)
|
|
||||||
`
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "FIFO allocation logic",
|
|
||||||
query: `
|
|
||||||
WITH
|
|
||||||
-- Calculate remaining quantities after direct allocations
|
|
||||||
remaining_po_quantities AS (
|
|
||||||
SELECT
|
|
||||||
po.po_id,
|
|
||||||
po.pid,
|
|
||||||
po.ordered,
|
|
||||||
COALESCE(SUM(ra.allocated_qty), 0) as already_allocated,
|
|
||||||
po.ordered - COALESCE(SUM(ra.allocated_qty), 0) as remaining_qty,
|
|
||||||
po.date_ordered,
|
|
||||||
po.date_created
|
|
||||||
FROM temp_purchase_orders po
|
|
||||||
LEFT JOIN temp_receiving_allocations ra ON po.po_id = ra.po_id AND po.pid = ra.pid
|
|
||||||
WHERE po.ordered IS NOT NULL
|
|
||||||
GROUP BY po.po_id, po.pid, po.ordered, po.date_ordered, po.date_created
|
|
||||||
HAVING po.ordered > COALESCE(SUM(ra.allocated_qty), 0)
|
|
||||||
),
|
|
||||||
remaining_receiving_quantities AS (
|
|
||||||
SELECT
|
|
||||||
r.receiving_id,
|
|
||||||
r.pid,
|
|
||||||
r.qty_each,
|
|
||||||
COALESCE(SUM(ra.allocated_qty), 0) as already_allocated,
|
|
||||||
r.qty_each - COALESCE(SUM(ra.allocated_qty), 0) as remaining_qty,
|
|
||||||
r.received_date,
|
|
||||||
r.cost_each,
|
|
||||||
r.received_by
|
|
||||||
FROM temp_receivings r
|
|
||||||
LEFT JOIN temp_receiving_allocations ra ON r.receiving_id = ra.receiving_id AND r.pid = ra.pid
|
|
||||||
GROUP BY r.receiving_id, r.pid, r.qty_each, r.received_date, r.cost_each, r.received_by
|
|
||||||
HAVING r.qty_each > COALESCE(SUM(ra.allocated_qty), 0)
|
|
||||||
),
|
|
||||||
-- Rank POs by age, with a cutoff for very old POs (1 year)
|
|
||||||
ranked_pos AS (
|
|
||||||
SELECT
|
|
||||||
po.po_id,
|
|
||||||
po.pid,
|
|
||||||
po.remaining_qty,
|
|
||||||
CASE
|
|
||||||
WHEN po.date_ordered IS NULL OR po.date_ordered < NOW() - INTERVAL '1 year' THEN 2
|
|
||||||
ELSE 1
|
|
||||||
END as age_group,
|
|
||||||
ROW_NUMBER() OVER (
|
|
||||||
PARTITION BY po.pid, (CASE WHEN po.date_ordered IS NULL OR po.date_ordered < NOW() - INTERVAL '1 year' THEN 2 ELSE 1 END)
|
|
||||||
ORDER BY COALESCE(po.date_ordered, po.date_created, NOW())
|
|
||||||
) as rank_in_group
|
|
||||||
FROM remaining_po_quantities po
|
|
||||||
),
|
|
||||||
-- Rank receivings by date
|
|
||||||
ranked_receivings AS (
|
|
||||||
SELECT
|
|
||||||
r.receiving_id,
|
|
||||||
r.pid,
|
|
||||||
r.remaining_qty,
|
|
||||||
r.received_date,
|
|
||||||
r.cost_each,
|
|
||||||
r.received_by,
|
|
||||||
ROW_NUMBER() OVER (PARTITION BY r.pid ORDER BY COALESCE(r.received_date, NOW())) as rank
|
|
||||||
FROM remaining_receiving_quantities r
|
|
||||||
),
|
|
||||||
-- First allocate to recent POs
|
|
||||||
allocations_recent AS (
|
|
||||||
SELECT
|
|
||||||
po.po_id,
|
|
||||||
po.pid,
|
|
||||||
r.receiving_id,
|
|
||||||
LEAST(po.remaining_qty, r.remaining_qty) as allocated_qty,
|
|
||||||
r.cost_each,
|
|
||||||
COALESCE(r.received_date, NOW()) as received_date,
|
|
||||||
r.received_by,
|
|
||||||
po.age_group,
|
|
||||||
po.rank_in_group,
|
|
||||||
r.rank,
|
|
||||||
'recent' as allocation_type
|
|
||||||
FROM ranked_pos po
|
|
||||||
JOIN ranked_receivings r ON po.pid = r.pid
|
|
||||||
WHERE po.age_group = 1
|
|
||||||
ORDER BY po.pid, po.rank_in_group, r.rank
|
|
||||||
),
|
|
||||||
-- Then allocate to older POs
|
|
||||||
remaining_after_recent AS (
|
|
||||||
SELECT
|
|
||||||
r.receiving_id,
|
|
||||||
r.pid,
|
|
||||||
r.remaining_qty - COALESCE(SUM(a.allocated_qty), 0) as remaining_qty,
|
|
||||||
r.received_date,
|
|
||||||
r.cost_each,
|
|
||||||
r.received_by,
|
|
||||||
r.rank
|
|
||||||
FROM ranked_receivings r
|
|
||||||
LEFT JOIN allocations_recent a ON r.receiving_id = a.receiving_id AND r.pid = a.pid
|
|
||||||
GROUP BY r.receiving_id, r.pid, r.remaining_qty, r.received_date, r.cost_each, r.received_by, r.rank
|
|
||||||
HAVING r.remaining_qty > COALESCE(SUM(a.allocated_qty), 0)
|
|
||||||
),
|
|
||||||
allocations_old AS (
|
|
||||||
SELECT
|
|
||||||
po.po_id,
|
|
||||||
po.pid,
|
|
||||||
r.receiving_id,
|
|
||||||
LEAST(po.remaining_qty, r.remaining_qty) as allocated_qty,
|
|
||||||
r.cost_each,
|
|
||||||
COALESCE(r.received_date, NOW()) as received_date,
|
|
||||||
r.received_by,
|
|
||||||
po.age_group,
|
|
||||||
po.rank_in_group,
|
|
||||||
r.rank,
|
|
||||||
'old' as allocation_type
|
|
||||||
FROM ranked_pos po
|
|
||||||
JOIN remaining_after_recent r ON po.pid = r.pid
|
|
||||||
WHERE po.age_group = 2
|
|
||||||
ORDER BY po.pid, po.rank_in_group, r.rank
|
|
||||||
),
|
|
||||||
-- Combine allocations
|
|
||||||
combined_allocations AS (
|
|
||||||
SELECT * FROM allocations_recent
|
|
||||||
UNION ALL
|
|
||||||
SELECT * FROM allocations_old
|
|
||||||
)
|
|
||||||
-- Insert into allocations table
|
|
||||||
INSERT INTO temp_receiving_allocations (
|
|
||||||
po_id, pid, receiving_id, allocated_qty, cost_each, received_date, received_by
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
po_id, pid, receiving_id, allocated_qty, cost_each,
|
|
||||||
COALESCE(received_date, NOW()) as received_date,
|
|
||||||
received_by
|
|
||||||
FROM combined_allocations
|
|
||||||
WHERE allocated_qty > 0
|
|
||||||
`
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
// Execute FIFO steps with progress tracking
|
|
||||||
for (let i = 0; i < fifoSteps.length; i++) {
|
|
||||||
const step = fifoSteps[i];
|
|
||||||
outputProgress({
|
outputProgress({
|
||||||
status: "running",
|
status: "running",
|
||||||
operation: "Purchase orders import",
|
operation: "Purchase orders import",
|
||||||
message: `FIFO allocation step ${i+1}/${fifoSteps.length}: ${step.name}`,
|
message: "Inserting final purchase order records"
|
||||||
current: i,
|
|
||||||
total: fifoSteps.length
|
|
||||||
});
|
});
|
||||||
|
|
||||||
await localConnection.query(step.query);
|
// Create a temp table to track PO IDs being processed
|
||||||
}
|
await localConnection.query(`
|
||||||
|
DROP TABLE IF EXISTS processed_po_ids;
|
||||||
|
CREATE TEMP TABLE processed_po_ids AS (
|
||||||
|
SELECT DISTINCT po_id FROM temp_purchase_orders
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
// 4. Generate final purchase order records with receiving data
|
// Delete products that were removed from POs and count them
|
||||||
outputProgress({
|
const [poDeletedResult] = await localConnection.query(`
|
||||||
status: "running",
|
WITH deleted AS (
|
||||||
operation: "Purchase orders import",
|
DELETE FROM purchase_orders
|
||||||
message: "Generating final purchase order records"
|
WHERE po_id IN (SELECT po_id FROM processed_po_ids)
|
||||||
});
|
AND NOT EXISTS (
|
||||||
|
SELECT 1 FROM temp_purchase_orders tp
|
||||||
const [finalResult] = await localConnection.query(`
|
WHERE purchase_orders.po_id = tp.po_id AND purchase_orders.pid = tp.pid
|
||||||
WITH
|
|
||||||
receiving_summaries AS (
|
|
||||||
SELECT
|
|
||||||
po_id,
|
|
||||||
pid,
|
|
||||||
SUM(allocated_qty) as total_received,
|
|
||||||
JSONB_AGG(
|
|
||||||
JSONB_BUILD_OBJECT(
|
|
||||||
'receiving_id', receiving_id,
|
|
||||||
'qty', allocated_qty,
|
|
||||||
'date', COALESCE(received_date, NOW()),
|
|
||||||
'cost', cost_each,
|
|
||||||
'received_by', received_by,
|
|
||||||
'received_by_name', CASE
|
|
||||||
WHEN received_by IS NOT NULL AND received_by > 0 THEN
|
|
||||||
(SELECT CONCAT(firstname, ' ', lastname)
|
|
||||||
FROM employee_names
|
|
||||||
WHERE employeeid = received_by)
|
|
||||||
ELSE NULL
|
|
||||||
END
|
|
||||||
) ORDER BY COALESCE(received_date, NOW())
|
|
||||||
) as receiving_history,
|
|
||||||
MIN(COALESCE(received_date, NOW())) as first_received_date,
|
|
||||||
MAX(COALESCE(received_date, NOW())) as last_received_date,
|
|
||||||
STRING_AGG(
|
|
||||||
DISTINCT CASE WHEN received_by IS NOT NULL AND received_by > 0
|
|
||||||
THEN CAST(received_by AS TEXT)
|
|
||||||
ELSE NULL
|
|
||||||
END,
|
|
||||||
','
|
|
||||||
) as received_by_list,
|
|
||||||
STRING_AGG(
|
|
||||||
DISTINCT CASE
|
|
||||||
WHEN ra.received_by IS NOT NULL AND ra.received_by > 0 THEN
|
|
||||||
(SELECT CONCAT(firstname, ' ', lastname)
|
|
||||||
FROM employee_names
|
|
||||||
WHERE employeeid = ra.received_by)
|
|
||||||
ELSE NULL
|
|
||||||
END,
|
|
||||||
', '
|
|
||||||
) as received_by_names
|
|
||||||
FROM temp_receiving_allocations ra
|
|
||||||
GROUP BY po_id, pid
|
|
||||||
),
|
|
||||||
cost_averaging AS (
|
|
||||||
SELECT
|
|
||||||
ra.po_id,
|
|
||||||
ra.pid,
|
|
||||||
SUM(ra.allocated_qty * ra.cost_each) / NULLIF(SUM(ra.allocated_qty), 0) as avg_cost
|
|
||||||
FROM temp_receiving_allocations ra
|
|
||||||
GROUP BY ra.po_id, ra.pid
|
|
||||||
)
|
)
|
||||||
|
RETURNING po_id, pid
|
||||||
|
)
|
||||||
|
SELECT COUNT(*) as count FROM deleted
|
||||||
|
`);
|
||||||
|
|
||||||
|
poRecordsDeleted = poDeletedResult.rows[0]?.count || 0;
|
||||||
|
console.log(`Deleted ${poRecordsDeleted} products that were removed from purchase orders`);
|
||||||
|
|
||||||
|
const [poResult] = await localConnection.query(`
|
||||||
INSERT INTO purchase_orders (
|
INSERT INTO purchase_orders (
|
||||||
po_id, vendor, date, expected_date, pid, sku, name,
|
po_id, vendor, date, expected_date, pid, sku, name,
|
||||||
cost_price, po_cost_price, status, receiving_status, notes, long_note,
|
po_cost_price, status, notes, long_note,
|
||||||
ordered, received, received_date, last_received_date, received_by,
|
ordered, supplier_id, date_created, date_ordered
|
||||||
receiving_history
|
|
||||||
)
|
)
|
||||||
SELECT
|
SELECT
|
||||||
po.po_id,
|
po_id,
|
||||||
po.vendor,
|
vendor,
|
||||||
CASE
|
COALESCE(date, date_created, now()) as date,
|
||||||
WHEN po.date IS NOT NULL THEN po.date
|
expected_date,
|
||||||
-- For standalone receivings, try to use the receiving date from history
|
pid,
|
||||||
WHEN po.po_id LIKE 'R%' AND rs.first_received_date IS NOT NULL THEN rs.first_received_date
|
sku,
|
||||||
-- As a last resort for data integrity, use Unix epoch (Jan 1, 1970)
|
name,
|
||||||
ELSE to_timestamp(0)
|
po_cost_price,
|
||||||
END as date,
|
status,
|
||||||
NULLIF(po.expected_date::text, '0000-00-00')::date as expected_date,
|
notes,
|
||||||
po.pid,
|
long_note,
|
||||||
po.sku,
|
ordered,
|
||||||
po.name,
|
supplier_id,
|
||||||
COALESCE(ca.avg_cost, po.po_cost_price) as cost_price,
|
date_created,
|
||||||
po.po_cost_price,
|
date_ordered
|
||||||
COALESCE(po.status, 'created'),
|
FROM temp_purchase_orders
|
||||||
CASE
|
|
||||||
WHEN rs.total_received IS NULL THEN 'created'
|
|
||||||
WHEN rs.total_received = 0 THEN 'created'
|
|
||||||
WHEN rs.total_received < po.ordered THEN 'partial_received'
|
|
||||||
WHEN rs.total_received >= po.ordered THEN 'full_received'
|
|
||||||
ELSE 'created'
|
|
||||||
END as receiving_status,
|
|
||||||
po.notes,
|
|
||||||
po.long_note,
|
|
||||||
COALESCE(po.ordered, 0),
|
|
||||||
COALESCE(rs.total_received, 0),
|
|
||||||
NULLIF(rs.first_received_date::text, '0000-00-00 00:00:00')::timestamp with time zone as received_date,
|
|
||||||
NULLIF(rs.last_received_date::text, '0000-00-00 00:00:00')::timestamp with time zone as last_received_date,
|
|
||||||
CASE
|
|
||||||
WHEN rs.received_by_list IS NULL THEN NULL
|
|
||||||
ELSE rs.received_by_names
|
|
||||||
END as received_by,
|
|
||||||
rs.receiving_history
|
|
||||||
FROM temp_purchase_orders po
|
|
||||||
LEFT JOIN receiving_summaries rs ON po.po_id = rs.po_id AND po.pid = rs.pid
|
|
||||||
LEFT JOIN cost_averaging ca ON po.po_id = ca.po_id AND po.pid = ca.pid
|
|
||||||
ON CONFLICT (po_id, pid) DO UPDATE SET
|
ON CONFLICT (po_id, pid) DO UPDATE SET
|
||||||
vendor = EXCLUDED.vendor,
|
vendor = EXCLUDED.vendor,
|
||||||
date = EXCLUDED.date,
|
date = EXCLUDED.date,
|
||||||
expected_date = EXCLUDED.expected_date,
|
expected_date = EXCLUDED.expected_date,
|
||||||
sku = EXCLUDED.sku,
|
sku = EXCLUDED.sku,
|
||||||
name = EXCLUDED.name,
|
name = EXCLUDED.name,
|
||||||
cost_price = EXCLUDED.cost_price,
|
|
||||||
po_cost_price = EXCLUDED.po_cost_price,
|
po_cost_price = EXCLUDED.po_cost_price,
|
||||||
status = EXCLUDED.status,
|
status = EXCLUDED.status,
|
||||||
receiving_status = EXCLUDED.receiving_status,
|
|
||||||
notes = EXCLUDED.notes,
|
notes = EXCLUDED.notes,
|
||||||
long_note = EXCLUDED.long_note,
|
long_note = EXCLUDED.long_note,
|
||||||
ordered = EXCLUDED.ordered,
|
ordered = EXCLUDED.ordered,
|
||||||
received = EXCLUDED.received,
|
supplier_id = EXCLUDED.supplier_id,
|
||||||
received_date = EXCLUDED.received_date,
|
date_created = EXCLUDED.date_created,
|
||||||
last_received_date = EXCLUDED.last_received_date,
|
date_ordered = EXCLUDED.date_ordered,
|
||||||
received_by = EXCLUDED.received_by,
|
|
||||||
receiving_history = EXCLUDED.receiving_history,
|
|
||||||
updated = CURRENT_TIMESTAMP
|
updated = CURRENT_TIMESTAMP
|
||||||
|
WHERE -- Only update if at least one key field has changed
|
||||||
|
purchase_orders.ordered IS DISTINCT FROM EXCLUDED.ordered OR
|
||||||
|
purchase_orders.po_cost_price IS DISTINCT FROM EXCLUDED.po_cost_price OR
|
||||||
|
purchase_orders.status IS DISTINCT FROM EXCLUDED.status OR
|
||||||
|
purchase_orders.expected_date IS DISTINCT FROM EXCLUDED.expected_date OR
|
||||||
|
purchase_orders.date IS DISTINCT FROM EXCLUDED.date OR
|
||||||
|
purchase_orders.vendor IS DISTINCT FROM EXCLUDED.vendor
|
||||||
RETURNING (xmax = 0) as inserted
|
RETURNING (xmax = 0) as inserted
|
||||||
`);
|
`);
|
||||||
|
|
||||||
recordsAdded = finalResult.rows.filter(r => r.inserted).length;
|
poRecordsAdded = poResult.rows.filter(r => r.inserted).length;
|
||||||
recordsUpdated = finalResult.rows.filter(r => !r.inserted).length;
|
poRecordsUpdated = poResult.rows.filter(r => !r.inserted).length;
|
||||||
|
|
||||||
|
// 4. Insert final receiving records to the actual table
|
||||||
|
outputProgress({
|
||||||
|
status: "running",
|
||||||
|
operation: "Purchase orders import",
|
||||||
|
message: "Inserting final receiving records"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create a temp table to track receiving IDs being processed
|
||||||
|
await localConnection.query(`
|
||||||
|
DROP TABLE IF EXISTS processed_receiving_ids;
|
||||||
|
CREATE TEMP TABLE processed_receiving_ids AS (
|
||||||
|
SELECT DISTINCT receiving_id FROM temp_receivings
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
// Delete products that were removed from receivings and count them
|
||||||
|
const [receivingDeletedResult] = await localConnection.query(`
|
||||||
|
WITH deleted AS (
|
||||||
|
DELETE FROM receivings
|
||||||
|
WHERE receiving_id IN (SELECT receiving_id FROM processed_receiving_ids)
|
||||||
|
AND NOT EXISTS (
|
||||||
|
SELECT 1 FROM temp_receivings tr
|
||||||
|
WHERE receivings.receiving_id = tr.receiving_id AND receivings.pid = tr.pid
|
||||||
|
)
|
||||||
|
RETURNING receiving_id, pid
|
||||||
|
)
|
||||||
|
SELECT COUNT(*) as count FROM deleted
|
||||||
|
`);
|
||||||
|
|
||||||
|
receivingRecordsDeleted = receivingDeletedResult.rows[0]?.count || 0;
|
||||||
|
console.log(`Deleted ${receivingRecordsDeleted} products that were removed from receivings`);
|
||||||
|
|
||||||
|
const [receivingsResult] = await localConnection.query(`
|
||||||
|
INSERT INTO receivings (
|
||||||
|
receiving_id, pid, sku, name, vendor, qty_each, qty_each_orig,
|
||||||
|
cost_each, cost_each_orig, received_by, received_by_name,
|
||||||
|
received_date, receiving_created_date, supplier_id, status
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
receiving_id,
|
||||||
|
pid,
|
||||||
|
sku,
|
||||||
|
name,
|
||||||
|
vendor,
|
||||||
|
qty_each,
|
||||||
|
qty_each_orig,
|
||||||
|
cost_each,
|
||||||
|
cost_each_orig,
|
||||||
|
received_by,
|
||||||
|
received_by_name,
|
||||||
|
COALESCE(received_date, receiving_created_date, now()) as received_date,
|
||||||
|
receiving_created_date,
|
||||||
|
supplier_id,
|
||||||
|
status
|
||||||
|
FROM temp_receivings
|
||||||
|
ON CONFLICT (receiving_id, pid) DO UPDATE SET
|
||||||
|
sku = EXCLUDED.sku,
|
||||||
|
name = EXCLUDED.name,
|
||||||
|
vendor = EXCLUDED.vendor,
|
||||||
|
qty_each = EXCLUDED.qty_each,
|
||||||
|
qty_each_orig = EXCLUDED.qty_each_orig,
|
||||||
|
cost_each = EXCLUDED.cost_each,
|
||||||
|
cost_each_orig = EXCLUDED.cost_each_orig,
|
||||||
|
received_by = EXCLUDED.received_by,
|
||||||
|
received_by_name = EXCLUDED.received_by_name,
|
||||||
|
received_date = EXCLUDED.received_date,
|
||||||
|
receiving_created_date = EXCLUDED.receiving_created_date,
|
||||||
|
supplier_id = EXCLUDED.supplier_id,
|
||||||
|
status = EXCLUDED.status,
|
||||||
|
updated = CURRENT_TIMESTAMP
|
||||||
|
WHERE -- Only update if at least one key field has changed
|
||||||
|
receivings.qty_each IS DISTINCT FROM EXCLUDED.qty_each OR
|
||||||
|
receivings.cost_each IS DISTINCT FROM EXCLUDED.cost_each OR
|
||||||
|
receivings.status IS DISTINCT FROM EXCLUDED.status OR
|
||||||
|
receivings.received_date IS DISTINCT FROM EXCLUDED.received_date OR
|
||||||
|
receivings.received_by IS DISTINCT FROM EXCLUDED.received_by
|
||||||
|
RETURNING (xmax = 0) as inserted
|
||||||
|
`);
|
||||||
|
|
||||||
|
receivingRecordsAdded = receivingsResult.rows.filter(r => r.inserted).length;
|
||||||
|
receivingRecordsUpdated = receivingsResult.rows.filter(r => !r.inserted).length;
|
||||||
|
|
||||||
// Update sync status
|
// Update sync status
|
||||||
await localConnection.query(`
|
await localConnection.query(`
|
||||||
@@ -921,8 +837,11 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
await localConnection.query(`
|
await localConnection.query(`
|
||||||
DROP TABLE IF EXISTS temp_purchase_orders;
|
DROP TABLE IF EXISTS temp_purchase_orders;
|
||||||
DROP TABLE IF EXISTS temp_receivings;
|
DROP TABLE IF EXISTS temp_receivings;
|
||||||
DROP TABLE IF EXISTS temp_receiving_allocations;
|
|
||||||
DROP TABLE IF EXISTS employee_names;
|
DROP TABLE IF EXISTS employee_names;
|
||||||
|
DROP TABLE IF EXISTS temp_supplier_names;
|
||||||
|
DROP TABLE IF EXISTS temp_invalid_pids;
|
||||||
|
DROP TABLE IF EXISTS processed_po_ids;
|
||||||
|
DROP TABLE IF EXISTS processed_receiving_ids;
|
||||||
`);
|
`);
|
||||||
|
|
||||||
// Commit transaction
|
// Commit transaction
|
||||||
@@ -930,8 +849,15 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
status: "complete",
|
status: "complete",
|
||||||
recordsAdded: recordsAdded || 0,
|
recordsAdded: poRecordsAdded + receivingRecordsAdded,
|
||||||
recordsUpdated: recordsUpdated || 0,
|
recordsUpdated: poRecordsUpdated + receivingRecordsUpdated,
|
||||||
|
recordsDeleted: poRecordsDeleted + receivingRecordsDeleted,
|
||||||
|
poRecordsAdded,
|
||||||
|
poRecordsUpdated,
|
||||||
|
poRecordsDeleted,
|
||||||
|
receivingRecordsAdded,
|
||||||
|
receivingRecordsUpdated,
|
||||||
|
receivingRecordsDeleted,
|
||||||
totalRecords: totalProcessed
|
totalRecords: totalProcessed
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -949,6 +875,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
|||||||
error: error.message,
|
error: error.message,
|
||||||
recordsAdded: 0,
|
recordsAdded: 0,
|
||||||
recordsUpdated: 0,
|
recordsUpdated: 0,
|
||||||
|
recordsDeleted: 0,
|
||||||
totalRecords: 0
|
totalRecords: 0
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
-- historically backfilled daily_product_snapshots and current product/PO data.
|
-- historically backfilled daily_product_snapshots and current product/PO data.
|
||||||
-- Calculates all metrics considering the full available history up to 'yesterday'.
|
-- Calculates all metrics considering the full available history up to 'yesterday'.
|
||||||
-- Run ONCE after backfill_historical_snapshots_final.sql completes successfully.
|
-- Run ONCE after backfill_historical_snapshots_final.sql completes successfully.
|
||||||
-- Dependencies: Core import tables (products, purchase_orders), daily_product_snapshots (historically populated),
|
-- Dependencies: Core import tables (products, purchase_orders, receivings), daily_product_snapshots (historically populated),
|
||||||
-- configuration tables (settings_*), product_metrics table must exist.
|
-- configuration tables (settings_*), product_metrics table must exist.
|
||||||
-- Frequency: Run ONCE.
|
-- Frequency: Run ONCE.
|
||||||
DO $$
|
DO $$
|
||||||
@@ -39,35 +39,26 @@ BEGIN
|
|||||||
-- Calculates current on-order quantities and costs
|
-- Calculates current on-order quantities and costs
|
||||||
SELECT
|
SELECT
|
||||||
pid,
|
pid,
|
||||||
COALESCE(SUM(ordered - received), 0) AS on_order_qty,
|
SUM(ordered) AS on_order_qty,
|
||||||
COALESCE(SUM((ordered - received) * cost_price), 0.00) AS on_order_cost,
|
SUM(ordered * po_cost_price) AS on_order_cost,
|
||||||
MIN(expected_date) AS earliest_expected_date
|
MIN(expected_date) AS earliest_expected_date
|
||||||
FROM public.purchase_orders
|
FROM public.purchase_orders
|
||||||
-- Use the most common statuses representing active, unfulfilled POs
|
-- Use the most common statuses representing active, unfulfilled POs
|
||||||
WHERE status IN ('open', 'partially_received', 'ordered', 'preordered', 'receiving_started', 'electronically_sent', 'electronically_ready_send')
|
WHERE status IN ('created', 'ordered', 'preordered', 'electronically_sent', 'electronically_ready_send', 'receiving_started')
|
||||||
AND (ordered - received) > 0
|
AND status NOT IN ('canceled', 'done')
|
||||||
GROUP BY pid
|
GROUP BY pid
|
||||||
),
|
),
|
||||||
HistoricalDates AS (
|
HistoricalDates AS (
|
||||||
-- Determines key historical dates from orders and PO history (receiving_history)
|
-- Determines key historical dates from orders and receivings
|
||||||
SELECT
|
SELECT
|
||||||
p.pid,
|
p.pid,
|
||||||
MIN(o.date)::date AS date_first_sold,
|
MIN(o.date)::date AS date_first_sold,
|
||||||
MAX(o.date)::date AS max_order_date, -- Used as fallback for date_last_sold
|
MAX(o.date)::date AS max_order_date, -- Used as fallback for date_last_sold
|
||||||
MIN(rh.first_receipt_date) AS date_first_received_calc,
|
MIN(r.received_date)::date AS date_first_received_calc,
|
||||||
MAX(rh.last_receipt_date) AS date_last_received_calc
|
MAX(r.received_date)::date AS date_last_received_calc
|
||||||
FROM public.products p
|
FROM public.products p
|
||||||
LEFT JOIN public.orders o ON p.pid = o.pid AND o.quantity > 0 AND o.status NOT IN ('canceled', 'returned')
|
LEFT JOIN public.orders o ON p.pid = o.pid AND o.quantity > 0 AND o.status NOT IN ('canceled', 'returned')
|
||||||
LEFT JOIN (
|
LEFT JOIN public.receivings r ON p.pid = r.pid
|
||||||
SELECT
|
|
||||||
po.pid,
|
|
||||||
MIN((rh.item->>'received_at')::date) as first_receipt_date,
|
|
||||||
MAX((rh.item->>'received_at')::date) as last_receipt_date
|
|
||||||
FROM public.purchase_orders po
|
|
||||||
CROSS JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item)
|
|
||||||
WHERE jsonb_typeof(po.receiving_history) = 'array' AND jsonb_array_length(po.receiving_history) > 0
|
|
||||||
GROUP BY po.pid
|
|
||||||
) rh ON p.pid = rh.pid
|
|
||||||
GROUP BY p.pid
|
GROUP BY p.pid
|
||||||
),
|
),
|
||||||
SnapshotAggregates AS (
|
SnapshotAggregates AS (
|
||||||
@@ -165,22 +156,23 @@ BEGIN
|
|||||||
LEFT JOIN public.settings_vendor sv ON p.vendor = sv.vendor
|
LEFT JOIN public.settings_vendor sv ON p.vendor = sv.vendor
|
||||||
),
|
),
|
||||||
AvgLeadTime AS (
|
AvgLeadTime AS (
|
||||||
-- Calculate Average Lead Time from historical POs
|
-- Calculate Average Lead Time by joining purchase_orders with receivings
|
||||||
SELECT
|
SELECT
|
||||||
pid,
|
po.pid,
|
||||||
AVG(GREATEST(1,
|
AVG(GREATEST(1,
|
||||||
CASE
|
CASE
|
||||||
WHEN last_received_date IS NOT NULL AND date IS NOT NULL
|
WHEN r.received_date IS NOT NULL AND po.date IS NOT NULL
|
||||||
THEN (last_received_date::date - date::date)
|
THEN (r.received_date::date - po.date::date)
|
||||||
ELSE 1
|
ELSE 1
|
||||||
END
|
END
|
||||||
))::int AS avg_lead_time_days_calc
|
))::int AS avg_lead_time_days_calc
|
||||||
FROM public.purchase_orders
|
FROM public.purchase_orders po
|
||||||
WHERE status = 'received' -- Assumes 'received' marks full receipt
|
JOIN public.receivings r ON r.pid = po.pid
|
||||||
AND last_received_date IS NOT NULL
|
WHERE po.status = 'done' -- Completed POs
|
||||||
AND date IS NOT NULL
|
AND r.received_date IS NOT NULL
|
||||||
AND last_received_date >= date
|
AND po.date IS NOT NULL
|
||||||
GROUP BY pid
|
AND r.received_date >= po.date
|
||||||
|
GROUP BY po.pid
|
||||||
),
|
),
|
||||||
RankedForABC AS (
|
RankedForABC AS (
|
||||||
-- Ranks products based on the configured ABC metric (using historical data)
|
-- Ranks products based on the configured ABC metric (using historical data)
|
||||||
@@ -198,7 +190,7 @@ BEGIN
|
|||||||
WHEN 'sales_30d' THEN COALESCE(sa.sales_30d, 0)
|
WHEN 'sales_30d' THEN COALESCE(sa.sales_30d, 0)
|
||||||
WHEN 'lifetime_revenue' THEN COALESCE(sa.lifetime_revenue, 0)::numeric
|
WHEN 'lifetime_revenue' THEN COALESCE(sa.lifetime_revenue, 0)::numeric
|
||||||
ELSE COALESCE(sa.revenue_30d, 0)
|
ELSE COALESCE(sa.revenue_30d, 0)
|
||||||
END) > 0 -- Exclude zero-value products from ranking
|
END) > 0 -- Only include products with non-zero contribution
|
||||||
),
|
),
|
||||||
CumulativeABC AS (
|
CumulativeABC AS (
|
||||||
-- Calculates cumulative metric values for ABC ranking
|
-- Calculates cumulative metric values for ABC ranking
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
-- Description: Rebuilds daily product snapshots from scratch using real orders data.
|
-- Description: Rebuilds daily product snapshots from scratch using real orders data.
|
||||||
-- Fixes issues with duplicated/inflated metrics.
|
-- Fixes issues with duplicated/inflated metrics.
|
||||||
-- Dependencies: Core import tables (products, orders, purchase_orders).
|
-- Dependencies: Core import tables (products, orders, receivings).
|
||||||
-- Frequency: One-time run to clear out problematic data.
|
-- Frequency: One-time run to clear out problematic data.
|
||||||
|
|
||||||
DO $$
|
DO $$
|
||||||
@@ -51,65 +51,17 @@ BEGIN
|
|||||||
),
|
),
|
||||||
ReceivingData AS (
|
ReceivingData AS (
|
||||||
SELECT
|
SELECT
|
||||||
po.pid,
|
r.pid,
|
||||||
-- Count POs to ensure we only include products with real activity
|
-- Count receiving documents to ensure we only include products with real activity
|
||||||
COUNT(po.po_id) as po_count,
|
COUNT(DISTINCT r.receiving_id) as receiving_count,
|
||||||
-- Calculate received quantity for this day
|
-- Calculate received quantity for this day
|
||||||
COALESCE(
|
SUM(r.qty_each) AS units_received,
|
||||||
-- First try the received field from purchase_orders table (if received on this date)
|
-- Calculate received cost for this day
|
||||||
SUM(CASE WHEN po.date::date = _date THEN po.received ELSE 0 END),
|
SUM(r.qty_each * r.cost_each) AS cost_received
|
||||||
|
FROM public.receivings r
|
||||||
-- Otherwise try receiving_history JSON
|
WHERE r.received_date::date = _date
|
||||||
SUM(
|
GROUP BY r.pid
|
||||||
CASE
|
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
|
||||||
WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
ELSE 0
|
|
||||||
END
|
|
||||||
),
|
|
||||||
0
|
|
||||||
) AS units_received,
|
|
||||||
|
|
||||||
COALESCE(
|
|
||||||
-- First try the actual cost_price from purchase_orders
|
|
||||||
SUM(CASE WHEN po.date::date = _date THEN po.received * po.cost_price ELSE 0 END),
|
|
||||||
|
|
||||||
-- Otherwise try receiving_history JSON
|
|
||||||
SUM(
|
|
||||||
CASE
|
|
||||||
WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
ELSE 0
|
|
||||||
END
|
|
||||||
* COALESCE((rh.item->>'cost')::numeric, po.cost_price)
|
|
||||||
),
|
|
||||||
0.00
|
|
||||||
) AS cost_received
|
|
||||||
FROM public.purchase_orders po
|
|
||||||
LEFT JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item) ON
|
|
||||||
jsonb_typeof(po.receiving_history) = 'array' AND
|
|
||||||
jsonb_array_length(po.receiving_history) > 0 AND
|
|
||||||
(
|
|
||||||
(rh.item->>'date')::date = _date OR
|
|
||||||
(rh.item->>'received_at')::date = _date OR
|
|
||||||
(rh.item->>'receipt_date')::date = _date
|
|
||||||
)
|
|
||||||
-- Include POs with the current date or relevant receiving_history
|
|
||||||
WHERE
|
|
||||||
po.date::date = _date OR
|
|
||||||
jsonb_typeof(po.receiving_history) = 'array' AND
|
|
||||||
jsonb_array_length(po.receiving_history) > 0
|
|
||||||
GROUP BY po.pid
|
|
||||||
HAVING COUNT(po.po_id) > 0 OR SUM(
|
|
||||||
CASE
|
|
||||||
WHEN (rh.item->>'date')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
WHEN (rh.item->>'received_at')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
WHEN (rh.item->>'receipt_date')::date = _date THEN (rh.item->>'qty')::numeric
|
|
||||||
ELSE 0
|
|
||||||
END
|
|
||||||
) > 0
|
|
||||||
),
|
),
|
||||||
-- Get stock quantities for the day - note this is approximate since we're using current products data
|
-- Get stock quantities for the day - note this is approximate since we're using current products data
|
||||||
StockData AS (
|
StockData AS (
|
||||||
@@ -170,7 +122,7 @@ BEGIN
|
|||||||
FROM SalesData sd
|
FROM SalesData sd
|
||||||
FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
|
FULL OUTER JOIN ReceivingData rd ON sd.pid = rd.pid
|
||||||
LEFT JOIN StockData s ON COALESCE(sd.pid, rd.pid) = s.pid
|
LEFT JOIN StockData s ON COALESCE(sd.pid, rd.pid) = s.pid
|
||||||
WHERE (COALESCE(sd.order_count, 0) > 0 OR COALESCE(rd.po_count, 0) > 0);
|
WHERE (COALESCE(sd.order_count, 0) > 0 OR COALESCE(rd.receiving_count, 0) > 0);
|
||||||
|
|
||||||
-- Get record count for this day
|
-- Get record count for this day
|
||||||
GET DIAGNOSTICS _count = ROW_COUNT;
|
GET DIAGNOSTICS _count = ROW_COUNT;
|
||||||
|
|||||||
@@ -42,6 +42,20 @@ BEGIN
|
|||||||
JOIN public.products p ON pm.pid = p.pid
|
JOIN public.products p ON pm.pid = p.pid
|
||||||
GROUP BY brand_group
|
GROUP BY brand_group
|
||||||
),
|
),
|
||||||
|
PreviousPeriodBrandMetrics AS (
|
||||||
|
-- Get previous period metrics for growth calculation
|
||||||
|
SELECT
|
||||||
|
COALESCE(p.brand, 'Unbranded') AS brand_group,
|
||||||
|
SUM(CASE WHEN dps.snapshot_date >= CURRENT_DATE - INTERVAL '59 days'
|
||||||
|
AND dps.snapshot_date < CURRENT_DATE - INTERVAL '29 days'
|
||||||
|
THEN dps.units_sold ELSE 0 END) AS sales_prev_30d,
|
||||||
|
SUM(CASE WHEN dps.snapshot_date >= CURRENT_DATE - INTERVAL '59 days'
|
||||||
|
AND dps.snapshot_date < CURRENT_DATE - INTERVAL '29 days'
|
||||||
|
THEN dps.net_revenue ELSE 0 END) AS revenue_prev_30d
|
||||||
|
FROM public.daily_product_snapshots dps
|
||||||
|
JOIN public.products p ON dps.pid = p.pid
|
||||||
|
GROUP BY brand_group
|
||||||
|
),
|
||||||
AllBrands AS (
|
AllBrands AS (
|
||||||
-- Ensure all brands from products table are included, mapping NULL/empty to 'Unbranded'
|
-- Ensure all brands from products table are included, mapping NULL/empty to 'Unbranded'
|
||||||
SELECT DISTINCT COALESCE(brand, 'Unbranded') as brand_group
|
SELECT DISTINCT COALESCE(brand, 'Unbranded') as brand_group
|
||||||
@@ -53,7 +67,8 @@ BEGIN
|
|||||||
current_stock_units, current_stock_cost, current_stock_retail,
|
current_stock_units, current_stock_cost, current_stock_retail,
|
||||||
sales_7d, revenue_7d, sales_30d, revenue_30d, profit_30d, cogs_30d,
|
sales_7d, revenue_7d, sales_30d, revenue_30d, profit_30d, cogs_30d,
|
||||||
sales_365d, revenue_365d, lifetime_sales, lifetime_revenue,
|
sales_365d, revenue_365d, lifetime_sales, lifetime_revenue,
|
||||||
avg_margin_30d
|
avg_margin_30d,
|
||||||
|
sales_growth_30d_vs_prev, revenue_growth_30d_vs_prev
|
||||||
)
|
)
|
||||||
SELECT
|
SELECT
|
||||||
b.brand_group,
|
b.brand_group,
|
||||||
@@ -78,9 +93,13 @@ BEGIN
|
|||||||
-- This is mathematically equivalent to profit/revenue but more explicit
|
-- This is mathematically equivalent to profit/revenue but more explicit
|
||||||
((COALESCE(ba.revenue_30d, 0) - COALESCE(ba.cogs_30d, 0)) / COALESCE(ba.revenue_30d, 1)) * 100.0
|
((COALESCE(ba.revenue_30d, 0) - COALESCE(ba.cogs_30d, 0)) / COALESCE(ba.revenue_30d, 1)) * 100.0
|
||||||
ELSE NULL -- No margin for low/no revenue brands
|
ELSE NULL -- No margin for low/no revenue brands
|
||||||
END
|
END,
|
||||||
|
-- Growth metrics
|
||||||
|
std_numeric(safe_divide((ba.sales_30d - ppbm.sales_prev_30d) * 100.0, ppbm.sales_prev_30d), 2),
|
||||||
|
std_numeric(safe_divide((ba.revenue_30d - ppbm.revenue_prev_30d) * 100.0, ppbm.revenue_prev_30d), 2)
|
||||||
FROM AllBrands b
|
FROM AllBrands b
|
||||||
LEFT JOIN BrandAggregates ba ON b.brand_group = ba.brand_group
|
LEFT JOIN BrandAggregates ba ON b.brand_group = ba.brand_group
|
||||||
|
LEFT JOIN PreviousPeriodBrandMetrics ppbm ON b.brand_group = ppbm.brand_group
|
||||||
|
|
||||||
ON CONFLICT (brand_name) DO UPDATE SET
|
ON CONFLICT (brand_name) DO UPDATE SET
|
||||||
last_calculated = EXCLUDED.last_calculated,
|
last_calculated = EXCLUDED.last_calculated,
|
||||||
@@ -95,7 +114,16 @@ BEGIN
|
|||||||
profit_30d = EXCLUDED.profit_30d, cogs_30d = EXCLUDED.cogs_30d,
|
profit_30d = EXCLUDED.profit_30d, cogs_30d = EXCLUDED.cogs_30d,
|
||||||
sales_365d = EXCLUDED.sales_365d, revenue_365d = EXCLUDED.revenue_365d,
|
sales_365d = EXCLUDED.sales_365d, revenue_365d = EXCLUDED.revenue_365d,
|
||||||
lifetime_sales = EXCLUDED.lifetime_sales, lifetime_revenue = EXCLUDED.lifetime_revenue,
|
lifetime_sales = EXCLUDED.lifetime_sales, lifetime_revenue = EXCLUDED.lifetime_revenue,
|
||||||
avg_margin_30d = EXCLUDED.avg_margin_30d;
|
avg_margin_30d = EXCLUDED.avg_margin_30d,
|
||||||
|
sales_growth_30d_vs_prev = EXCLUDED.sales_growth_30d_vs_prev,
|
||||||
|
revenue_growth_30d_vs_prev = EXCLUDED.revenue_growth_30d_vs_prev
|
||||||
|
WHERE -- Only update if at least one value has changed
|
||||||
|
brand_metrics.product_count IS DISTINCT FROM EXCLUDED.product_count OR
|
||||||
|
brand_metrics.active_product_count IS DISTINCT FROM EXCLUDED.active_product_count OR
|
||||||
|
brand_metrics.current_stock_units IS DISTINCT FROM EXCLUDED.current_stock_units OR
|
||||||
|
brand_metrics.sales_30d IS DISTINCT FROM EXCLUDED.sales_30d OR
|
||||||
|
brand_metrics.revenue_30d IS DISTINCT FROM EXCLUDED.revenue_30d OR
|
||||||
|
brand_metrics.lifetime_sales IS DISTINCT FROM EXCLUDED.lifetime_sales;
|
||||||
|
|
||||||
-- Update calculate_status
|
-- Update calculate_status
|
||||||
INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
|
INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
|
||||||
@@ -104,3 +132,25 @@ BEGIN
|
|||||||
|
|
||||||
RAISE NOTICE 'Finished % calculation. Duration: %', _module_name, clock_timestamp() - _start_time;
|
RAISE NOTICE 'Finished % calculation. Duration: %', _module_name, clock_timestamp() - _start_time;
|
||||||
END $$;
|
END $$;
|
||||||
|
|
||||||
|
-- Return metrics about the update operation for tracking
|
||||||
|
WITH update_stats AS (
|
||||||
|
SELECT
|
||||||
|
COUNT(*) as total_brands,
|
||||||
|
COUNT(*) FILTER (WHERE last_calculated >= NOW() - INTERVAL '5 minutes') as rows_processed,
|
||||||
|
SUM(product_count) as total_products,
|
||||||
|
SUM(active_product_count) as total_active_products,
|
||||||
|
SUM(sales_30d) as total_sales_30d,
|
||||||
|
SUM(revenue_30d) as total_revenue_30d,
|
||||||
|
AVG(avg_margin_30d) as overall_avg_margin_30d
|
||||||
|
FROM public.brand_metrics
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
rows_processed,
|
||||||
|
total_brands,
|
||||||
|
total_products::int,
|
||||||
|
total_active_products::int,
|
||||||
|
total_sales_30d::int,
|
||||||
|
ROUND(total_revenue_30d, 2) as total_revenue_30d,
|
||||||
|
ROUND(overall_avg_margin_30d, 2) as overall_avg_margin_30d
|
||||||
|
FROM update_stats;
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user