Compare commits
72 Commits
6bffcfb0a4
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| c344fdc3b8 | |||
| ebef903f3b | |||
| 16d2399de8 | |||
| c3e09d5fd1 | |||
| bae8c575bc | |||
| 45ded53530 | |||
| f41b5ab0f6 | |||
| 6834a77a80 | |||
| 38b12c188f | |||
| 6aefc1b40d | |||
| 7c41a7f799 | |||
| 12cc7a4639 | |||
| 9b2f9016f6 | |||
| 8044771301 | |||
| b5469440bf | |||
| fd14af0f9e | |||
| a703019b0b | |||
| 2744e82264 | |||
| 450fd96e19 | |||
| 4372dc5e26 | |||
| dd0e989669 | |||
| 89d518b57f | |||
| ac39257a51 | |||
| 003e1ddd61 | |||
| 2dc8152b53 | |||
| 01d4097030 | |||
| f9e8c9265e | |||
| ee2f314775 | |||
| 11d0555eeb | |||
| ec8ab17d3f | |||
| 100e398aae | |||
| aec02e490a | |||
| 3831cef234 | |||
| 1866cbae7e | |||
| 3d1e8862f9 | |||
| 1dcb47cfc5 | |||
| 167c13c572 | |||
| 7218e7cc3f | |||
| 43d76e011d | |||
| 9ce84fe5b9 | |||
| d15360a7d4 | |||
| 630945e901 | |||
| 54ddaa0492 | |||
| 262890a7be | |||
| ef50aec33c | |||
| 0ffd02e22e | |||
| 738ed94ad5 | |||
| f5b2b4e421 | |||
| b81dfb9649 | |||
| 9be0f34f07 | |||
| ad5b797ce6 | |||
| 78932360d1 | |||
| 217abd41af | |||
| d56beb5143 | |||
| 0b5f3162c7 | |||
| 72930bbc73 | |||
| 0ceef144d7 | |||
| f0e2023803 | |||
| 0a20d74bb6 | |||
| 9761c29934 | |||
| e84c7e568f | |||
| 4953355b91 | |||
| dadcf3b6c6 | |||
| 920c33d119 | |||
| 451d5f0b3b | |||
| dd79298b94 | |||
| 7b7274f72c | |||
| 60875c25a6 | |||
| e10df632d8 | |||
| 945e4a8cc3 | |||
| c6e4fc9cff | |||
| ff17b290aa |
172
.claude/CLAUDE.md
Normal file
172
.claude/CLAUDE.md
Normal file
@@ -0,0 +1,172 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
This is a full-stack inventory management system with a React + TypeScript frontend and Node.js/Express backend using PostgreSQL. The system includes product management, analytics, forecasting, purchase orders, and a comprehensive dashboard for business metrics.
|
||||
|
||||
**Monorepo Structure:**
|
||||
- `inventory/` - Vite-based React frontend with TypeScript
|
||||
- `inventory-server/` - Express backend API server
|
||||
- Root `package.json` contains shared dependencies
|
||||
|
||||
## Development Commands
|
||||
|
||||
### Frontend (inventory/)
|
||||
```bash
|
||||
cd inventory
|
||||
npm run dev # Start dev server on port 5175
|
||||
npm run build # Build for production (outputs to build/ then copies to ../inventory-server/frontend/build)
|
||||
npm run lint # Run ESLint
|
||||
npm run preview # Preview production build
|
||||
```
|
||||
|
||||
### Backend (inventory-server/)
|
||||
```bash
|
||||
cd inventory-server
|
||||
npm run dev # Start with nodemon (auto-reload)
|
||||
npm start # Start server (production)
|
||||
npm run prod # Start with PM2 for production
|
||||
npm run prod:stop # Stop PM2 instance
|
||||
npm run prod:restart # Restart PM2 instance
|
||||
npm run prod:logs # View PM2 logs
|
||||
npm run setup # Create required directories (logs, uploads)
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Frontend Architecture
|
||||
|
||||
**Router Structure:** React Router with lazy loading for code splitting:
|
||||
- Main chunks: Core inventory, Dashboard, Product Import, Chat Archive
|
||||
- Authentication flow uses `RequireAuth` and `Protected` components with permission-based access
|
||||
- All routes except `/login` and `/small` require authentication
|
||||
|
||||
**Key Directories:**
|
||||
- `src/pages/` - Top-level page components (Overview, Products, Analytics, Dashboard, etc.)
|
||||
- `src/components/` - Organized by feature (dashboard/, products/, analytics/, etc.)
|
||||
- `src/components/ui/` - shadcn/ui components
|
||||
- `src/types/` - TypeScript type definitions
|
||||
- `src/contexts/` - React contexts (AuthContext, DashboardScrollContext)
|
||||
- `src/hooks/` - Custom React hooks (use-toast, useDebounce, use-mobile)
|
||||
- `src/utils/` - Utility functions (emojiUtils, productUtils, naturalLanguagePeriod)
|
||||
- `src/services/` - API service layer
|
||||
- `src/config/` - Configuration files
|
||||
|
||||
**State Management:**
|
||||
- React Context for auth and global state
|
||||
- @tanstack/react-query for server state management
|
||||
- zustand for client state management
|
||||
- Local storage for auth tokens, session storage for login state
|
||||
|
||||
**Key Dependencies:**
|
||||
- UI: Radix UI primitives, shadcn/ui, Tailwind CSS, Framer Motion
|
||||
- Data: @tanstack/react-table, react-data-grid, @tanstack/react-virtual
|
||||
- Forms: react-hook-form, zod
|
||||
- Charts: recharts, chart.js, react-chartjs-2
|
||||
- File handling: xlsx for Excel export, react-dropzone for uploads
|
||||
- Other: axios for HTTP, date-fns/luxon for dates
|
||||
|
||||
**Path Alias:** `@/` maps to `./src/`
|
||||
|
||||
### Backend Architecture
|
||||
|
||||
**Entry Point:** `inventory-server/src/server.js`
|
||||
|
||||
**Key Directories:**
|
||||
- `src/routes/` - Express route handlers (products, dashboard, analytics, import, etc.)
|
||||
- `src/middleware/` - Express middleware (CORS, auth, etc.)
|
||||
- `src/utils/` - Utility functions (database connection, API helpers)
|
||||
- `src/types/` - Type definitions (e.g., status-codes)
|
||||
|
||||
**Database:**
|
||||
- PostgreSQL with connection pooling (pg library)
|
||||
- Pool initialized in `utils/db.js` via `initPool()`
|
||||
- Pool attached to `app.locals.pool` for route access
|
||||
- Environment variables loaded from `/var/www/html/inventory/.env` (production path)
|
||||
|
||||
**API Routes:** All prefixed with `/api/`
|
||||
- `/api/products` - Product CRUD operations
|
||||
- `/api/dashboard` - Dashboard metrics and data
|
||||
- `/api/analytics` - Analytics and reporting
|
||||
- `/api/orders` - Order management
|
||||
- `/api/purchase-orders` - Purchase order management
|
||||
- `/api/csv` - CSV import/export (data management)
|
||||
- `/api/import` - Product import workflows
|
||||
- `/api/config` - Configuration management
|
||||
- `/api/metrics` - System metrics
|
||||
- `/api/ai-validation` - AI-powered validation
|
||||
- `/api/ai-prompts` - AI prompt management
|
||||
- `/api/templates` - Template management
|
||||
- `/api/reusable-images` - Image management
|
||||
- `/api/categoriesAggregate`, `/api/vendorsAggregate`, `/api/brandsAggregate` - Aggregate data endpoints
|
||||
|
||||
**Authentication:**
|
||||
- External auth service at `/auth-inv` endpoint
|
||||
- Token-based authentication (Bearer tokens)
|
||||
- Frontend stores tokens in localStorage
|
||||
- Protected routes verify tokens via auth service `/me` endpoint
|
||||
|
||||
**File Uploads:**
|
||||
- Multer middleware for file handling
|
||||
- Uploads directory: `inventory-server/uploads/`
|
||||
|
||||
### Development Proxy Setup
|
||||
|
||||
The Vite dev server (port 5175) proxies API requests to `https://inventory.kent.pw`:
|
||||
- `/api/*` → production API
|
||||
- `/auth-inv/*` → authentication service
|
||||
- `/chat-api/*` → chat service
|
||||
- `/uploads/*` → uploaded files
|
||||
- Various third-party services (Aircall, Klaviyo, Meta, Gorgias, Typeform, ACOT, Clarity)
|
||||
|
||||
### Build Process
|
||||
|
||||
When building the frontend:
|
||||
1. TypeScript compilation (`tsc -b`)
|
||||
2. Vite build (outputs to `inventory/build/`)
|
||||
3. Custom Vite plugin copies build to `inventory-server/frontend/build/`
|
||||
4. Manual chunks for vendor splitting (react-vendor, ui-vendor, query-vendor)
|
||||
|
||||
## Testing
|
||||
|
||||
There is currently no automated test suite configured for this project:
|
||||
```bash
|
||||
# No test suite currently configured
|
||||
# Tests would typically use Jest or Vitest with React Testing Library
|
||||
```
|
||||
|
||||
## Common Development Workflows
|
||||
|
||||
### Adding a New Page
|
||||
1. Create page component in `inventory/src/pages/YourPage.tsx`
|
||||
2. Add lazy import in `inventory/src/App.tsx`
|
||||
3. Add route with `<Protected>` wrapper and permission check
|
||||
4. Add corresponding backend route in `inventory-server/src/routes/`
|
||||
5. Update permission system if needed
|
||||
|
||||
### Adding a New API Endpoint
|
||||
1. Create or update route file in `inventory-server/src/routes/`
|
||||
2. Use `executeQuery()` helper for database queries
|
||||
3. Register router in `inventory-server/src/server.js`
|
||||
4. Frontend can access at `/api/{route-name}`
|
||||
|
||||
### Working with Database
|
||||
- Use parameterized queries: `executeQuery(sql, [param1, param2])`
|
||||
- Pool is accessed via `db.getPool()` or `app.locals.pool`
|
||||
- Connection helper: `db.getConnection()` returns a client for transactions
|
||||
|
||||
### Permissions System
|
||||
- User permissions stored in `user.permissions` array (permission codes)
|
||||
- Check permissions in `<Protected page="permission_code">` component
|
||||
- Admin users (`is_admin: true`) have access to all pages
|
||||
|
||||
## Important Notes
|
||||
|
||||
- Environment variables must be configured in `/var/www/html/inventory/.env` for production
|
||||
- The frontend expects the backend at `/api` (proxied in dev, served together in production)
|
||||
- PM2 is used for production process management
|
||||
- Database uses PostgreSQL with SSL support (configurable via `DB_SSL` env var)
|
||||
- File uploads stored in `inventory-server/uploads/` directory
|
||||
- Build artifacts in `inventory/build/` are copied to `inventory-server/frontend/build/`
|
||||
13
.gitignore
vendored
13
.gitignore
vendored
@@ -73,4 +73,15 @@ inventory-server/scripts/.fuse_hidden00000fa20000000a
|
||||
*/chat/db-convert/mongo_converter_env/*
|
||||
|
||||
# Ignore compiled Vite config to avoid duplication
|
||||
vite.config.js
|
||||
vite.config.js
|
||||
inventory-server/inventory_backup.sql
|
||||
chat-files.tar.gz
|
||||
chat-migration*/
|
||||
**/chat-migration*/
|
||||
chat-migration*/**
|
||||
**/chat-migration*/**
|
||||
|
||||
venv/
|
||||
venv/**
|
||||
**/venv/*
|
||||
**/venv/**
|
||||
4
CLAUDE.md
Normal file
4
CLAUDE.md
Normal file
@@ -0,0 +1,4 @@
|
||||
* Avoid using glob tool for search as it may not work properly on this codebase. Search using bash instead.
|
||||
* If you use the task tool to have an agent investigate something, make sure to let it know to avoid using glob
|
||||
* Prefer solving tasks in a single session. Only spawn subagents for genuinely independent workstreams.
|
||||
* The postgres/query tool is not working and not connected to the current version of the database. If you need to query the database for any reason you can use "ssh netcup" and run psql on the server with the `inventory_readonly` account (password: see the server-side credentials store — do not commit credentials to this repository; the previously committed password should be rotated).
|
||||
375
PRODUCT_IMPORT_ENHANCEMENTS.md
Normal file
375
PRODUCT_IMPORT_ENHANCEMENTS.md
Normal file
@@ -0,0 +1,375 @@
|
||||
# Product Import Module - Enhancement & Issues Outline
|
||||
|
||||
This document outlines the investigation and implementation requirements for each requested enhancement to the product import module.
|
||||
|
||||
---
|
||||
|
||||
## 1. UPC Import - Strip Quotes and Spaces ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** When importing UPCs, strip `'`, `"` characters and any spaces, leaving only numbers.
|
||||
|
||||
**Implementation (Completed):**
|
||||
- Modified `normalizeUpcValue()` in [Import.tsx:661-667](inventory/src/pages/Import.tsx#L661-L667)
|
||||
- Strips single quotes, double quotes, smart quotes (`'"`), and whitespace before processing
|
||||
- Then handles scientific notation and extracts only digits
|
||||
|
||||
**Files Modified:**
|
||||
- `inventory/src/pages/Import.tsx` - `normalizeUpcValue()` function
|
||||
|
||||
---
|
||||
|
||||
## 2. AI Context Columns in Validation Payloads ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** The match columns step has a setting to use a field only for AI context (`isAiSupplemental`). Update AI description validation to include any columns selected with this option in the payload. Also include in sanity check payload. Not needed for names.
|
||||
|
||||
**Current Implementation:**
|
||||
- AI Supplemental toggle: [MatchColumnsStep.tsx:102-118](inventory/src/components/product-import/steps/MatchColumnsStep/MatchColumnsStep.tsx#L102-L118)
|
||||
- AI supplemental data stored in `__aiSupplemental` field on each row
|
||||
- Description payload builder: [inlineAiPayload.ts:183-195](inventory/src/components/product-import/steps/ValidationStep/utils/inlineAiPayload.ts#L183-L195)
|
||||
|
||||
**Implementation:**
|
||||
1. **Update `buildDescriptionValidationPayload()` in `inlineAiPayload.ts`** to include AI supplemental data:
|
||||
```typescript
|
||||
export const buildDescriptionValidationPayload = (
|
||||
row: Data<string>,
|
||||
fieldOptions: FieldOptionsMap,
|
||||
productLinesCache: Map<string, SelectOption[]>,
|
||||
sublinesCache: Map<string, SelectOption[]>
|
||||
) => {
|
||||
const payload: Record<string, unknown> = {
|
||||
name: row.name,
|
||||
description: row.description,
|
||||
company_name: getFieldOptionLabel(row.company, fieldOptions, 'company'),
|
||||
company_id: row.company,
|
||||
categories: getFieldOptionLabel(row.category, fieldOptions, 'category'),
|
||||
};
|
||||
|
||||
// Add AI supplemental context if present
|
||||
if (row.__aiSupplemental && typeof row.__aiSupplemental === 'object') {
|
||||
payload.additional_context = row.__aiSupplemental;
|
||||
}
|
||||
|
||||
return payload;
|
||||
};
|
||||
```
|
||||
|
||||
2. **Update sanity check payload** - Locate sanity check submission logic and include `__aiSupplemental` data
|
||||
|
||||
3. **Verify `__aiSupplemental` is properly populated** from MatchColumnsStep when columns are marked as AI context only
|
||||
|
||||
**Files to Modify:**
|
||||
- `inventory/src/components/product-import/steps/ValidationStep/utils/inlineAiPayload.ts`
|
||||
- Backend sanity check endpoint (if separate from description validation)
|
||||
- Verify data flow in `MatchColumnsStep.tsx` → `ValidationStep`
|
||||
|
||||
---
|
||||
|
||||
## 3. Fresh Taxonomy Data Per Session ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** Ensure taxonomy data is brought in fresh with each session - cache should be invalidated if we exit the import flow and start again.
|
||||
|
||||
**Current Implementation:**
|
||||
- Field options cached 5 minutes: [ValidationStep/index.tsx:128-133](inventory/src/components/product-import/steps/ValidationStep/index.tsx#L128-L133)
|
||||
- Product lines cache: `productLinesCache` in Zustand store
|
||||
- Sublines cache: `sublinesCache` in Zustand store
|
||||
- Caches set to 10-minute stale time
|
||||
|
||||
**Implementation:**
|
||||
1. **Add cache invalidation on import flow mount/unmount** in `UploadFlow.tsx`:
|
||||
```typescript
|
||||
useEffect(() => {
|
||||
// On mount - invalidate import-related query cache
|
||||
queryClient.invalidateQueries({ queryKey: ['import-field-options'] });
|
||||
|
||||
return () => {
|
||||
// On unmount - clear caches
|
||||
queryClient.removeQueries({ queryKey: ['import-field-options'] });
|
||||
queryClient.removeQueries({ queryKey: ['product-lines'] });
|
||||
queryClient.removeQueries({ queryKey: ['sublines'] });
|
||||
};
|
||||
}, []);
|
||||
```
|
||||
|
||||
2. **Clear Zustand store caches** when exiting import flow:
|
||||
- Add action to `validationStore.ts` to clear `productLinesCache` and `sublinesCache`
|
||||
- Call this action on unmount of `UploadFlow` or when navigating away
|
||||
|
||||
3. **Consider adding a `sessionId`** that changes on each import flow start, used as part of cache keys
|
||||
|
||||
**Files to Modify:**
|
||||
- `inventory/src/components/product-import/steps/UploadFlow.tsx` - Add cleanup effect
|
||||
- `inventory/src/components/product-import/steps/ValidationStep/store/validationStore.ts` - Add cache clear action
|
||||
- Potentially `inventory/src/components/product-import/steps/ValidationStep/index.tsx` - Query key updates
|
||||
|
||||
---
|
||||
|
||||
## 4. Save Template from Confirmation Page ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** Add option to save rows of submitted data as a new template on the confirmation page after completing the import flow. Verify this works with new validation step changes.
|
||||
|
||||
**Current Implementation:**
|
||||
- **Import Results section already exists** inline in [Import.tsx:968-1150](inventory/src/pages/Import.tsx#L968-L1150)
|
||||
- Shows created products (lines 1021-1097) with image, name, UPC, item number
|
||||
- Shows errored products (lines 1100-1138) with error details
|
||||
- "Fix products with errors" button resumes validation flow for failed items
|
||||
- Template saving logic in ValidationStep: [useTemplateManagement.ts:204-266](inventory/src/components/product-import/steps/ValidationStep/hooks/useTemplateManagement.ts#L204-L266)
|
||||
- Saves via `POST /api/templates`
|
||||
- `importOutcome.submittedProducts` contains the full product data for each row
|
||||
|
||||
**Implementation:**
|
||||
1. **Add "Save as Template" button** to each created product row in the results section (around line 1087-1092 in Import.tsx):
|
||||
```typescript
|
||||
// Add button after the item number display
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => handleSaveAsTemplate(index)}
|
||||
>
|
||||
<BookmarkPlus className="h-4 w-4" />
|
||||
</Button>
|
||||
```
|
||||
|
||||
2. **Add state and dialog** for template saving in Import.tsx:
|
||||
```typescript
|
||||
const [templateSaveDialogOpen, setTemplateSaveDialogOpen] = useState(false);
|
||||
const [selectedProductForTemplate, setSelectedProductForTemplate] = useState<NormalizedProduct | null>(null);
|
||||
```
|
||||
|
||||
3. **Extract/reuse template save logic** from `useTemplateManagement.ts`:
|
||||
- The `saveNewTemplate()` function (lines 204-266) can be extracted into a shared utility
|
||||
- Or create a `SaveTemplateDialog` component that can be used in both places
|
||||
- Key fields needed: `company` (for template name), `product_type`, and all product field values
|
||||
|
||||
4. **Data mapping consideration:**
|
||||
- `importOutcome.submittedProducts` uses `NormalizedProduct` type
|
||||
- Templates expect raw field values - may need to map back from normalized format
|
||||
- Exclude metadata fields: `['id', '__index', '__meta', '__template', '__original', '__corrected', '__changes', '__aiSupplemental']`
|
||||
|
||||
**Files to Modify:**
|
||||
- `inventory/src/pages/Import.tsx` - Add save template button, state, and dialog
|
||||
- Consider creating `inventory/src/components/product-import/SaveTemplateDialog.tsx` for reusability
|
||||
- Potentially extract core save logic from `useTemplateManagement.ts` into shared utility
|
||||
|
||||
---
|
||||
|
||||
## 5. Sheet Preview on Select Sheet Step ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** On the select sheet step, show a preview of the first 10 lines or so of each sheet underneath the options.
|
||||
|
||||
**Implementation (Completed):**
|
||||
- Added `workbook` prop to `SelectSheetStep` component
|
||||
- Added `sheetPreviews` memoized computation using `XLSXLib.utils.sheet_to_json()`
|
||||
- Shows first 10 rows, 8 columns max per sheet
|
||||
- Added `truncateCell()` helper to limit cell content to 30 characters with ellipsis
|
||||
- Each sheet option is now a clickable card with:
|
||||
- Radio button and sheet name
|
||||
- Row count indicator
|
||||
- Scrollable preview table with horizontal scroll
|
||||
- Selected state highlighted with primary border
|
||||
- Updated `UploadFlow.tsx` to pass workbook prop
|
||||
|
||||
**Files Modified:**
|
||||
- `inventory/src/components/product-import/steps/SelectSheetStep/SelectSheetStep.tsx`
|
||||
- `inventory/src/components/product-import/steps/UploadFlow.tsx`
|
||||
|
||||
---
|
||||
|
||||
## 6. Empty Row Removal ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** When importing a sheet, automatically remove completely empty rows.
|
||||
|
||||
**Current Implementation:**
|
||||
- Empty columns are filtered: [MatchColumnsStep.tsx:616-634](inventory/src/components/product-import/steps/MatchColumnsStep/MatchColumnsStep.tsx#L616-L634)
|
||||
- A "Remove empty/duplicates" button exists that removes empty rows, single-value rows, AND duplicates
|
||||
- The automatic removal should ONLY remove completely empty rows, not duplicates or single-value rows
|
||||
|
||||
**Implementation (Completed):**
|
||||
- Added `isRowCompletelyEmpty()` helper function to [SelectHeaderStep.tsx](inventory/src/components/product-import/steps/SelectHeaderStep/SelectHeaderStep.tsx)
|
||||
- Added `useMemo` to filter empty rows on initial data load
|
||||
- Uses `Object.values(row)` to check all cell values (matches existing button logic)
|
||||
- Only removes rows where ALL values are undefined, null, or whitespace-only strings
|
||||
- Manual "Remove Empty/Duplicates" button still available for additional cleanup (duplicates, single-value rows)
|
||||
|
||||
**Files Modified:**
|
||||
- `inventory/src/components/product-import/steps/SelectHeaderStep/SelectHeaderStep.tsx`
|
||||
|
||||
---
|
||||
|
||||
## 7. Unit Conversion for Weight/Dimensions ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** Add unit conversion feature for weight and dimensions columns - similar to calculator button on cost/msrp, add button that opens popover with options to convert grams → oz, lbs → oz for the whole column at once.
|
||||
|
||||
**Current Implementation:**
|
||||
- Calculator button on price columns: [ValidationTable.tsx:1491-1627](inventory/src/components/product-import/steps/ValidationStep/components/ValidationTable.tsx#L1491-L1627)
|
||||
- `PriceColumnHeader` component shows calculator icon on hover
|
||||
- Weight field defined in config with validation
|
||||
|
||||
**Implementation:**
|
||||
1. **Create `UnitConversionColumnHeader` component** (similar to `PriceColumnHeader`):
|
||||
```typescript
|
||||
const UnitConversionColumnHeader = ({ field, table }) => {
|
||||
const [showPopover, setShowPopover] = useState(false);
|
||||
|
||||
const conversions = {
|
||||
weight: [
|
||||
{ label: 'Grams → Ounces', factor: 0.035274 },
|
||||
{ label: 'Pounds → Ounces', factor: 16 },
|
||||
{ label: 'Kilograms → Ounces', factor: 35.274 },
|
||||
],
|
||||
dimensions: [
|
||||
{ label: 'Centimeters → Inches', factor: 0.393701 },
|
||||
{ label: 'Millimeters → Inches', factor: 0.0393701 },
|
||||
]
|
||||
};
|
||||
|
||||
const applyConversion = (factor: number) => {
|
||||
// Batch update all cells in column
|
||||
table.rows.forEach((row, index) => {
|
||||
const currentValue = parseFloat(row[field.key]);
|
||||
if (!isNaN(currentValue)) {
|
||||
updateCell(index, field.key, (currentValue * factor).toFixed(2));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<Popover open={showPopover} onOpenChange={setShowPopover}>
|
||||
<PopoverTrigger>
|
||||
<Scale className="h-4 w-4" /> {/* or similar icon */}
|
||||
</PopoverTrigger>
|
||||
<PopoverContent>
|
||||
{conversions[fieldType].map(conv => (
|
||||
<Button key={conv.label} onClick={() => applyConversion(conv.factor)}>
|
||||
{conv.label}
|
||||
</Button>
|
||||
))}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
2. **Identify weight/dimension fields** in config:
|
||||
- `weight_oz`, `length_in`, `width_in`, `height_in` (check actual field keys)
|
||||
|
||||
3. **Add to column header render logic** in ValidationTable
|
||||
|
||||
**Files to Modify:**
|
||||
- `inventory/src/components/product-import/steps/ValidationStep/components/ValidationTable.tsx`
|
||||
- Potentially create new component file for `UnitConversionColumnHeader`
|
||||
- Update column header rendering to use new component for weight/dimension fields
|
||||
|
||||
---
|
||||
|
||||
## 8. Expanded MSRP Auto-Fill from Cost ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** Expand auto-fill functionality for MSRP from cost - open small popover with options for 2x, 2.1x, 2.2x, 2.3x, 2.4x, 2.5x multipliers, plus checkbox to round up to nearest 9.
|
||||
|
||||
**Current Implementation:**
|
||||
- Calculator on MSRP column: [ValidationTable.tsx:1540-1584](inventory/src/components/product-import/steps/ValidationStep/components/ValidationTable.tsx#L1540-L1584)
|
||||
- Currently only does `Cost × 2` then subtracts 0.01 if whole number
|
||||
|
||||
**Implementation:**
|
||||
1. **Replace simple click with popover** in `PriceColumnHeader`:
|
||||
```typescript
|
||||
const [selectedMultiplier, setSelectedMultiplier] = useState(2.0);
|
||||
const [roundToNine, setRoundToNine] = useState(false);
|
||||
const multipliers = [2.0, 2.1, 2.2, 2.3, 2.4, 2.5];
|
||||
|
||||
const roundUpToNine = (value: number): number => {
|
||||
// 1.41 → 1.49, 2.78 → 2.79, 12.32 → 12.39
|
||||
const wholePart = Math.floor(value);
|
||||
const decimal = value - wholePart;
|
||||
if (decimal <= 0.09) return wholePart + 0.09;
|
||||
if (decimal <= 0.19) return wholePart + 0.19;
|
||||
// ... continue pattern, or:
|
||||
const lastDigit = Math.floor(decimal * 10);
|
||||
return wholePart + (lastDigit / 10) + 0.09;
|
||||
};
|
||||
|
||||
const calculateMsrp = (cost: number): number => {
|
||||
let result = cost * selectedMultiplier;
|
||||
if (roundToNine) {
|
||||
result = roundUpToNine(result);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
```
|
||||
|
||||
2. **Create popover UI**:
|
||||
```tsx
|
||||
<Popover>
|
||||
<PopoverTrigger><Calculator className="h-4 w-4" /></PopoverTrigger>
|
||||
<PopoverContent className="w-48">
|
||||
<div className="space-y-2">
|
||||
<Label>Multiplier</Label>
|
||||
<div className="grid grid-cols-3 gap-1">
|
||||
{multipliers.map(m => (
|
||||
<Button
|
||||
key={m}
|
||||
variant={selectedMultiplier === m ? 'default' : 'outline'}
|
||||
size="sm"
|
||||
onClick={() => setSelectedMultiplier(m)}
|
||||
>
|
||||
{m}x
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox checked={roundToNine} onCheckedChange={setRoundToNine} />
|
||||
<Label>Round to .X9</Label>
|
||||
</div>
|
||||
<Button onClick={applyCalculation} className="w-full">
|
||||
Apply
|
||||
</Button>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
```
|
||||
|
||||
**Files to Modify:**
|
||||
- `inventory/src/components/product-import/steps/ValidationStep/components/ValidationTable.tsx` - `PriceColumnHeader` component
|
||||
|
||||
---
|
||||
|
||||
## 9. Debug Mode - Skip API Submission ✅ IMPLEMENTED
|
||||
|
||||
**Issue:** Add a third switch in the footer of image upload step (visible only to users with `admin:debug` permission) that will not submit data to any API, only complete the process and show results page as if it had worked.
|
||||
|
||||
**Implementation (Completed):**
|
||||
- Added `skipApiSubmission` state to `ImageUploadStep.tsx`
|
||||
- Added amber-colored "Skip API (Debug)" switch (visible only with `admin:debug` permission)
|
||||
- When skip is active, "Use Test API" and "Use Test Database" switches are hidden
|
||||
- Added `skipApiSubmission?: boolean` to `SubmitOptions` type in `types.ts`
|
||||
- In `Import.tsx`, when `skipApiSubmission` is true:
|
||||
- Skips the actual API call entirely
|
||||
- Generates mock success response with mock PIDs
|
||||
- Shows `[DEBUG]` prefix in toast and result message
|
||||
- Displays results page as if submission succeeded
|
||||
|
||||
**Files Modified:**
|
||||
- `inventory/src/components/product-import/types.ts` - Added `skipApiSubmission` to `SubmitOptions`
|
||||
- `inventory/src/components/product-import/steps/ImageUploadStep/ImageUploadStep.tsx` - Added switch UI
|
||||
- `inventory/src/pages/Import.tsx` - Added skip logic in `handleData()`
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
| # | Enhancement | Complexity | Status |
|
||||
|---|-------------|------------|--------|
|
||||
| 1 | Strip UPC quotes/spaces | Low | ✅ Implemented |
|
||||
| 2 | AI context in validation | Medium | ✅ Implemented |
|
||||
| 3 | Fresh taxonomy per session | Medium | ✅ Implemented |
|
||||
| 4 | Save template from confirmation | Medium-High | ✅ Implemented |
|
||||
| 5 | Sheet preview | Low-Medium | ✅ Implemented |
|
||||
| 6 | Remove empty rows | Low | ✅ Implemented |
|
||||
| 7 | Unit conversion | Medium | ✅ Implemented |
|
||||
| 8 | MSRP multiplier options | Medium | ✅ Implemented |
|
||||
| 9 | Debug skip API | Low-Medium | ✅ Implemented |
|
||||
|
||||
**Implemented:** 9 of 9 items - All enhancements complete!
|
||||
|
||||
---
|
||||
|
||||
*Document generated: 2026-01-25*
|
||||
346
docs/METRICS_AUDIT.md
Normal file
346
docs/METRICS_AUDIT.md
Normal file
@@ -0,0 +1,346 @@
|
||||
# Metrics Calculation Pipeline Audit
|
||||
|
||||
**Date:** 2026-02-07
|
||||
**Scope:** All 6 SQL calculation scripts, custom DB functions, import pipeline, and live data verification
|
||||
|
||||
## Overview
|
||||
|
||||
The metrics pipeline in `inventory-server/scripts/calculate-metrics-new.js` runs 6 SQL scripts sequentially:
|
||||
|
||||
1. `update_daily_snapshots.sql` — Aggregates daily per-product sales/receiving data
|
||||
2. `update_product_metrics.sql` — Calculates the main product_metrics table (KPIs, forecasting, status)
|
||||
3. `update_periodic_metrics.sql` — ABC classification, average lead time
|
||||
4. `calculate_brand_metrics.sql` — Brand-level aggregated metrics
|
||||
5. `calculate_vendor_metrics.sql` — Vendor-level aggregated metrics
|
||||
6. `calculate_category_metrics.sql` — Category-level metrics with hierarchy rollups
|
||||
|
||||
### Database Scale
|
||||
| Table | Row Count |
|
||||
|---|---|
|
||||
| products | 681,912 |
|
||||
| orders | 2,883,982 |
|
||||
| purchase_orders | 256,809 |
|
||||
| receivings | 313,036 |
|
||||
| daily_product_snapshots | 678,312 (601 distinct dates, since 2024-06-01) |
|
||||
| product_metrics | 681,912 |
|
||||
| brand_metrics | 1,789 |
|
||||
| vendor_metrics | 281 |
|
||||
| category_metrics | 610 |
|
||||
|
||||
---
|
||||
|
||||
## Issues Found
|
||||
|
||||
### ISSUE 1: [HIGH] Order status filter is non-functional — numeric codes vs text comparison
|
||||
|
||||
**Files:** `update_daily_snapshots.sql` lines 86-101, `update_product_metrics.sql` lines 89, 178-183
|
||||
**Confirmed by data:** All order statuses are numeric strings ('100', '50', '55', etc.)
|
||||
**Status mappings from:** `docs/prod_registry.class.php`
|
||||
|
||||
**Description:** The SQL filters `COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned')` and `o.status NOT IN ('canceled', 'returned')` are used throughout the pipeline to exclude canceled/returned orders. However, the import pipeline stores order statuses as their **raw numeric codes** from the production MySQL database (e.g., '100', '50', '55', '90', '92'). There are **zero text status values** in the orders table.
|
||||
|
||||
This means these filters **never exclude any rows** — every comparison is `'100' NOT IN ('canceled', 'returned')` which is always true.
|
||||
|
||||
**Actual status distribution (with confirmed meanings):**
|
||||
| Status | Meaning | Count | Negative Qty | Assessment |
|
||||
|---|---|---|---|---|
|
||||
| 100 | shipped | 2,862,792 | 3,352 | Completed — correct to include |
|
||||
| 50 | awaiting_products | 11,109 | 0 | In-progress — not yet shipped |
|
||||
| 55 | shipping_later | 5,689 | 0 | In-progress — not yet shipped |
|
||||
| 56 | shipping_together | 2,863 | 0 | In-progress — not yet shipped |
|
||||
| 90 | awaiting_shipment | 38 | 0 | Near-complete — not yet shipped |
|
||||
| 92 | awaiting_pickup | 71 | 0 | Near-complete — awaiting customer |
|
||||
| 95 | shipped_confirmed | 5 | 0 | Completed — correct to include |
|
||||
| 15 | cancelled | 1 | 0 | Should be excluded |
|
||||
|
||||
**Full status reference (from prod_registry.class.php):**
|
||||
- 0=created, 10=unfinished, **15=cancelled**, 16=combined, 20=placed, 22=placed_incomplete
|
||||
- 30=cancelled_old (historical), 40=awaiting_payment, 50=awaiting_products
|
||||
- 55=shipping_later, 56=shipping_together, 60=ready, 61=flagged
|
||||
- 62=fix_before_pick, 65=manual_picking, 70=in_pt, 80=picked
|
||||
- 90=awaiting_shipment, 91=remote_wait, **92=awaiting_pickup**, 93=fix_before_ship
|
||||
- **95=shipped_confirmed**, **100=shipped**
|
||||
|
||||
**Severity revised to HIGH (from CRITICAL):** Now that we know the actual meanings, no cancelled/refunded orders are being miscounted (only 1 cancelled order exists, status=15). The real concern is twofold:
|
||||
1. **The text-based filter is dead code** — it can never match any row. Either map statuses to text during import (like POs do) or change SQL to use numeric comparisons.
|
||||
2. **19,770 unfulfilled orders** (statuses 50/55/56/90/92, per the table above) are counted as completed sales. These are orders in various stages of fulfillment that haven't shipped yet. While most will eventually ship, counting them now inflates current-period metrics. At 0.69% of total orders, the financial impact is modest but the filter should work correctly on principle.
|
||||
|
||||
**Note:** PO statuses ARE properly mapped to text ('canceled', 'done', etc.) in the import pipeline. Only order statuses are numeric.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 2: [CRITICAL] Daily Snapshots use current stock instead of historical EOD stock
|
||||
|
||||
**File:** `update_daily_snapshots.sql`, lines 126-135, 173
|
||||
**Confirmed by data:** Top product (pid 666925) shows `eod_stock_quantity = 0` for ALL dates even though it sold 28 units on Jan 28 (clearly had stock then)
|
||||
|
||||
**Description:** The `CurrentStock` CTE reads `stock_quantity` directly from the `products` table at query execution time. When the script processes historical dates (today minus 1-4 days), it writes **today's stock** as if it were the end-of-day stock for those past dates.
|
||||
|
||||
**Cascading impact on product_metrics:**
|
||||
- `avg_stock_units_30d` / `avg_stock_cost_30d` — Wrong averages
|
||||
- `stockout_days_30d` — Undercounts (only based on current stock state, not historical)
|
||||
- `stockout_rate_30d`, `service_level_30d`, `fill_rate_30d` — All derived from wrong stockout data
|
||||
- `gmroi_30d` — Wrong denominator (avg stock cost)
|
||||
- `stockturn_30d` — Wrong denominator (avg stock units)
|
||||
- `sell_through_30d` — Affected by stock level inaccuracy
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 3: [CRITICAL] Snapshot coverage is 0.17% — most products have no snapshot data
|
||||
|
||||
**Confirmed by data:** 678,312 snapshot rows across 601 dates = ~1,128 products/day out of 681,912 total
|
||||
|
||||
**Description:** The daily snapshots script only creates rows for products with sales or receiving activity on that date (`ProductsWithActivity` CTE, line 136). This means:
|
||||
- 91.1% of products (621,221) have NULL `sales_30d` — they had no orders in the last 30 days so no snapshot rows exist
|
||||
- `AVG(eod_stock_quantity)` averages only across days with activity, not 30 days
|
||||
- `stockout_days_30d` only counts stockout days where there was ALSO some activity
|
||||
- A product out of stock with zero sales gets zero stockout_days even though it was stocked out
|
||||
|
||||
This is by design (to avoid creating 681K rows/day) but means stock-related metrics are systematically biased.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 4: [HIGH] `costeach` fallback to 50% of price in import pipeline
|
||||
|
||||
**File:** `inventory-server/scripts/import/orders.js` (line ~573)
|
||||
|
||||
**Description:** When the MySQL `order_costs` table has no record for an order item, `costeach` defaults to `price * 0.5`. There is **no flag** in the PostgreSQL data to distinguish actual costs from estimated ones.
|
||||
|
||||
**Data impact:** 385,545 products (56.5%) have `current_cost_price = 0` AND `current_landing_cost_price = 0`. For these products, the COGS calculation in daily_snapshots falls through the chain:
|
||||
1. `o.costeach` — May be the 50% estimate from import
|
||||
2. `get_weighted_avg_cost()` — Returns NULL if no receivings exist
|
||||
3. `p.landing_cost_price` — Always NULL (hardcoded in import)
|
||||
4. `p.cost_price` — 0 for 56.5% of products
|
||||
|
||||
Only 27 products have zero COGS with positive sales, meaning the `costeach` field is doing its job for products that sell, but the 50% fallback means margins for those products are estimates, not actuals.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 5: [HIGH] `landing_cost_price` is always NULL
|
||||
|
||||
**File:** `inventory-server/scripts/import/products.js` (line ~175)
|
||||
|
||||
**Description:** The import explicitly sets `landing_cost_price = NULL` for all products. The daily_snapshots COGS calculation uses it as a fallback: `COALESCE(o.costeach, get_weighted_avg_cost(...), p.landing_cost_price, p.cost_price)`. Since it's always NULL, this fallback step is useless and the chain jumps straight to `cost_price`.
|
||||
|
||||
The `product_metrics` field `current_landing_cost_price` is populated as `COALESCE(p.landing_cost_price, p.cost_price, 0.00)`, so it equals `cost_price` for all products. Any UI showing "landing cost" is actually just showing `cost_price`.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 6: [HIGH] Vendor lead time is drastically wrong — missing supplier_id join
|
||||
|
||||
**File:** `calculate_vendor_metrics.sql`, lines 62-82
|
||||
**Confirmed by data:** Vendor-level lead times are 2-10x higher than product-level lead times
|
||||
|
||||
**Description:** The vendor metrics lead time joins POs to receivings only by `pid`:
|
||||
```sql
|
||||
LEFT JOIN public.receivings r ON r.pid = po.pid
|
||||
```
|
||||
But the periodic metrics lead time correctly matches supplier:
|
||||
```sql
|
||||
JOIN public.receivings r ON r.pid = po.pid AND r.supplier_id = po.supplier_id
|
||||
```
|
||||
|
||||
Without supplier matching, a PO for product X from Vendor A can match a receiving of product X from Vendor B, creating inflated/wrong lead times.
|
||||
|
||||
**Measured discrepancies:**
|
||||
| Vendor | Vendor Metrics Lead Time | Avg Product Lead Time |
|
||||
|---|---|---|
|
||||
| doodlebug design inc. | 66 days | 14 days |
|
||||
| Notions | 55 days | 4 days |
|
||||
| Simple Stories | 59 days | 27 days |
|
||||
| Ranger Industries | 31 days | 5 days |
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 7: [MEDIUM] Net revenue does not subtract returns
|
||||
|
||||
**File:** `update_daily_snapshots.sql`, line 184
|
||||
|
||||
**Description:** `net_revenue = gross_revenue - discounts`. Standard accounting: `net_revenue = gross_revenue - discounts - returns`. The `returns_revenue` is calculated separately but not deducted.
|
||||
|
||||
**Data impact:** There are 3,352 orders with negative quantities (returns), totaling -5,499 units. These returns are tracked in `returns_revenue` but not reflected in `net_revenue`, which means all downstream revenue-based metrics are slightly overstated.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 8: [MEDIUM] Lifetime revenue subquery references wrong table columns
|
||||
|
||||
**File:** `update_product_metrics.sql`, lines 323-329
|
||||
|
||||
**Description:** The lifetime revenue estimation fallback queries:
|
||||
```sql
|
||||
SELECT revenue_7d / NULLIF(sales_7d, 0)
|
||||
FROM daily_product_snapshots
|
||||
WHERE pid = ci.pid AND sales_7d > 0
|
||||
```
|
||||
But `daily_product_snapshots` does NOT have `revenue_7d` or `sales_7d` columns — those exist in `product_metrics`. This subquery either errors silently or returns NULL. The effect is that the estimation always falls back to `current_price * total_sold`.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 9: [MEDIUM] Brand/Vendor metrics COGS filter inflates margins
|
||||
|
||||
**Files:** `calculate_brand_metrics.sql` line 31, `calculate_vendor_metrics.sql` line 32
|
||||
|
||||
**Description:** `SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END)` excludes products with zero COGS. But if a product has sales revenue and zero COGS (missing cost data), the brand/vendor totals will include the revenue but not the COGS, artificially inflating the margin.
|
||||
|
||||
**Data context:** Brand metrics revenue matches product_metrics aggregation exactly for sales counts, but shows small discrepancies in revenue (e.g., Stamperia: $7,613.98 brand vs $7,611.11 actual). These tiny diffs come from the `> 0` filtering excluding products with negative revenue.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 10: [MEDIUM] Extreme margin values from $0.01 price orders
|
||||
|
||||
**Confirmed by data:** 73 products with margin > 100%, 119 with margin < -100%
|
||||
|
||||
**Examples:**
|
||||
| Product | Revenue | COGS | Margin |
|
||||
|---|---|---|---|
|
||||
| Flower Gift Box Die (pid 624756) | $0.02 | $29.98 | -149,800% |
|
||||
| Special Flowers Stamp Set (pid 614513) | $0.01 | $11.97 | -119,632% |
|
||||
|
||||
These are products with extremely low prices (likely samples, promos, or data errors) where the order price was $0.01. The margin calculation is mathematically correct but these outliers skew any aggregate margin statistics.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 11: [MEDIUM] Sell-through rate has edge cases yielding negative/extreme values
|
||||
|
||||
**File:** `update_product_metrics.sql`, lines 358-361
|
||||
**Confirmed by data:** 30 products with negative sell-through, 10 with sell-through > 200%
|
||||
|
||||
**Description:** Beginning inventory is approximated as `current_stock + sales - received + returns`. When inventory adjustments, shrinkage, or manual corrections occur, this approximation breaks. Edge cases:
|
||||
- Products with many manual stock adjustments → negative denominator → negative sell-through
|
||||
- Products with beginning stock near zero but decent sales → sell-through > 100%
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 12: [MEDIUM] `total_sold` uses different status filter than orders import
|
||||
|
||||
**Import pipeline confirmed:**
|
||||
- Orders import: `order_status >= 15` (includes processing/pending orders)
|
||||
- `total_sold` in products: `order_status >= 20` (more restrictive)
|
||||
|
||||
This means `lifetime_sales` (from `total_sold`) is systematically lower than what you'd calculate by summing the orders table. The discrepancy is confirmed:
|
||||
| Product | total_sold | orders sum | Gap |
|
||||
|---|---|---|---|
|
||||
| pid 31286 | 13,786 | 4,241 | 9,545 |
|
||||
| pid 44309 | 11,978 | 3,119 | 8,859 |
|
||||
|
||||
The large gaps are because the orders table only has data from the import start date (~2024), while `total_sold` includes all-time sales from MySQL. This is expected behavior, not a bug, but it means the `lifetime_revenue_quality` flag is important — most products show 'estimated' quality.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 13: [MEDIUM] Category rollup may double-count products in multiple hierarchy levels
|
||||
|
||||
**File:** `calculate_category_metrics.sql`, lines 42-66
|
||||
|
||||
**Description:** The `RolledUpMetrics` CTE uses:
|
||||
```sql
|
||||
dcm.cat_id = ch.cat_id OR dcm.cat_id = ANY(SELECT cat_id FROM category_hierarchy WHERE ch.cat_id = ANY(ancestor_ids))
|
||||
```
|
||||
If products are assigned to categories at multiple levels in the same branch (e.g., both "Paper Crafts" and "Scrapbook Paper" which is a child of "Paper Crafts"), those products' metrics would be counted twice in the parent's rollup.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 14: [LOW] `exclude_forecast` removes products from metrics entirely
|
||||
|
||||
**File:** `update_product_metrics.sql`, line 509
|
||||
|
||||
**Description:** `WHERE s.exclude_forecast IS FALSE OR s.exclude_forecast IS NULL` is on the main INSERT's WHERE clause. Products with `exclude_forecast = TRUE` won't appear in `product_metrics` at all, rather than just having forecast fields nulled. Currently all 681,912 products are in product_metrics so this appears to not affect any products yet.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 15: [LOW] Daily snapshots only look back 5 days
|
||||
|
||||
**File:** `update_daily_snapshots.sql`, line 14 — `_process_days INT := 5`
|
||||
|
||||
If import data arrives late (>5 days), those days will never get snapshots populated. There is a separate `backfill/rebuild_daily_snapshots.sql` for historical rebuilds.
|
||||
|
||||
---
|
||||
|
||||
### ISSUE 16: [INFO] Timezone risk in order date import
|
||||
|
||||
**File:** `inventory-server/scripts/import/orders.js`
|
||||
|
||||
MySQL `DATETIME` values are timezone-naive. The import uses `new Date(order.date)` which interprets them using the import server's local timezone. The SSH config specifies `timezone: '-05:00'` for MySQL (always EST). If the import server is in a different timezone, orders near midnight could land on the wrong date in the daily snapshots calculation.
|
||||
|
||||
---
|
||||
|
||||
## Custom Functions Review
|
||||
|
||||
### `calculate_sales_velocity(sales_30d, stockout_days_30d)`
|
||||
- Divides `sales_30d` by effective selling days: `GREATEST(30 - stockout_days, CASE WHEN sales > 0 THEN 14 ELSE 30 END)`
|
||||
- The 14-day floor prevents extreme velocity for products mostly out of stock
|
||||
- **Sound approach** — the only concern is that stockout_days is unreliable (Issues 2, 3)
|
||||
|
||||
### `get_weighted_avg_cost(pid, date)`
|
||||
- Quantity-weighted average cost of the last 10 receivings: `SUM(cost × qty) / SUM(qty)`
|
||||
- Returns NULL if no receivings — sound fallback behavior
|
||||
- **Correct implementation**
|
||||
|
||||
### `safe_divide(numerator, denominator)`
|
||||
- Returns NULL on divide-by-zero — **correct**
|
||||
|
||||
### `std_numeric(value, precision)`
|
||||
- Rounds to precision digits — **correct**
|
||||
|
||||
### `classify_demand_pattern(avg_demand, cv)`
|
||||
- Uses coefficient of variation thresholds: ≤0.2 = stable, ≤0.5 = variable, low-volume+high-CV = sporadic, else lumpy
|
||||
- **Reasonable classification**, though only based on 30-day window
|
||||
|
||||
### `detect_seasonal_pattern(pid)`
|
||||
- CROSS JOIN LATERAL (runs per product) — **expensive**: queries `daily_product_snapshots` twice per product
|
||||
- Compares current month average to yearly average — very simplistic
|
||||
- **Functional but could be a performance bottleneck** with 681K products
|
||||
|
||||
### `category_hierarchy` (materialized view)
|
||||
- Recursive CTE building tree from categories — **correct implementation**
|
||||
- Refreshed concurrently before category metrics calculation — **good practice**
|
||||
|
||||
---
|
||||
|
||||
## Data Health Summary
|
||||
|
||||
| Metric | Count | % of Total |
|
||||
|---|---|---|
|
||||
| Products with zero cost_price | 385,545 | 56.5% |
|
||||
| Products with NULL sales_30d | 621,221 | 91.1% |
|
||||
| Products with no lifetime_sales | 321,321 | 47.1% |
|
||||
| Products with zero COGS but positive sales | 27 | <0.01% |
|
||||
| Products with margin > 100% | 73 | <0.01% |
|
||||
| Products with margin < -100% | 119 | <0.01% |
|
||||
| Products with negative sell-through | 30 | <0.01% |
|
||||
| Products with NULL status | 0 | 0% |
|
||||
| Duplicate daily snapshots (same pid+date) | 0 | 0% |
|
||||
| Net revenue formula mismatches | 0 | 0% |
|
||||
|
||||
### ABC Classification Distribution (replenishable products only)
|
||||
| Class | Products | Revenue % |
|
||||
|---|---|---|
|
||||
| A | 7,727 | 80.72% |
|
||||
| B | 12,048 | 15.10% |
|
||||
| C | 113,647 | 4.18% |
|
||||
|
||||
ABC distribution looks healthy — A ≈ 80%, A+B ≈ 96%.
|
||||
|
||||
### Brand Metrics Consistency
|
||||
Product counts and sales_30d match exactly between `brand_metrics` and direct aggregation from `product_metrics`. Revenue shows sub-dollar discrepancies due to the `> 0` filter excluding products with negative revenue. **Consistent within expected tolerance.**
|
||||
|
||||
---
|
||||
|
||||
## Priority Recommendations
|
||||
|
||||
### Must Fix (Correctness Issues)
|
||||
1. **Issue 1: Fix order status handling** — The text-based filter (`NOT IN ('canceled', 'returned')`) is dead code against numeric statuses. Two options: (a) map numeric statuses to text during import (like POs already do), or (b) change SQL to filter on numeric codes (e.g., `o.status::int >= 20` to exclude cancelled/unfinished, or `o.status IN ('100', '95')` for shipped-only). The ~19.7K unfulfilled orders (0.69%) are a minor financial impact but the filter should be functional.
|
||||
2. **Issue 6: Add supplier_id join to vendor lead time** — One-line fix in `calculate_vendor_metrics.sql`
|
||||
3. **Issue 8: Fix lifetime revenue subquery** — Use correct column names from `daily_product_snapshots` (e.g., `net_revenue / NULLIF(units_sold, 0)`)
|
||||
|
||||
### Should Fix (Data Quality)
|
||||
4. **Issue 2/3: Snapshot coverage** — Consider creating snapshot rows for all in-stock products, not just those with activity. Or at minimum, calculate stockout metrics by comparing snapshot existence to product existence.
|
||||
5. **Issue 5: Populate landing_cost_price** — If available in the source system, import it. Otherwise remove references to avoid confusion.
|
||||
6. **Issue 7: Subtract returns from net_revenue** — `net_revenue = gross_revenue - discounts - returns_revenue`
|
||||
7. **Issue 9: Remove > 0 filter on COGS** — Use `SUM(pm.cogs_30d)` instead of conditional sums
|
||||
|
||||
### Nice to Fix (Edge Cases)
|
||||
8. **Issue 4: Flag estimated costs** — Add a `costeach_estimated BOOLEAN` to orders during import
|
||||
9. **Issue 10: Cap or flag extreme margins** — Exclude $0.01-price orders from margin calculations
|
||||
10. **Issue 11: Clamp sell-through** — `GREATEST(0, LEAST(sell_through_30d, 200))` or flag outliers
|
||||
11. **Issue 13: Verify category assignment policy** — Check whether products are assigned to leaf categories only (rules out double-counting in parent rollups)
|
||||
12. **Issue 13: Category rollup query** — Verify no double-counting with actual data
|
||||
276
docs/METRICS_AUDIT2.md
Normal file
276
docs/METRICS_AUDIT2.md
Normal file
@@ -0,0 +1,276 @@
|
||||
# Metrics Pipeline Audit Report
|
||||
|
||||
**Date:** 2026-02-08
|
||||
**Scope:** All 6 SQL scripts in `inventory-server/scripts/metrics-new/`, import pipeline, custom functions, and post-calculation data verification.
|
||||
|
||||
---
|
||||
|
||||
## Executive Summary
|
||||
|
||||
The metrics pipeline is architecturally sound and the core calculations are mostly correct. The 30-day sales, revenue, replenishment, and aggregate metrics (brand/vendor/category) all cross-check accurately between the snapshots, product_metrics, and direct orders queries. However, several issues were found ranging from **critical data bugs** to **design limitations** that affect accuracy of specific metrics.
|
||||
|
||||
**Issues found: 13** (3 Critical, 4 Medium, 6 Low/Informational)
|
||||
|
||||
---
|
||||
|
||||
## CRITICAL Issues
|
||||
|
||||
### C1. `net_revenue` in daily snapshots never subtracts returns ($35.6K affected)
|
||||
|
||||
**Location:** `update_daily_snapshots.sql`, line 181
|
||||
**Symptom:** `net_revenue` is stored as `gross_revenue - discounts` but should be `gross_revenue - discounts - returns_revenue`.
|
||||
|
||||
The SQL formula on line 181 appears correct:
|
||||
```sql
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue
|
||||
```
|
||||
|
||||
However, actual data shows `net_revenue = gross_revenue - discounts` for ALL 3,252 snapshots that have returns. Total returns not subtracted: **$35,630.03** across 2,946 products. This may be caused by the `returns_revenue` in the SalesData CTE not properly flowing through to the INSERT, or by a prior version of the code that stored these values differently. The profit column (line 184) has the same issue: `(gross - discounts) - cogs` instead of `(gross - discounts - returns) - cogs`.
|
||||
|
||||
**Impact:** Net revenue and profit are overstated by the amount of returns. This cascades to all metrics derived from snapshots: `revenue_30d`, `profit_30d`, `margin_30d`, `avg_ros_30d`, and all brand/vendor/category aggregate revenue.
|
||||
|
||||
**Recommended fix:** Debug why the returns subtraction isn't taking effect. The formula in the SQL looks correct, so this may be a data-type issue or an execution path issue. After fixing, rebuild snapshots.
|
||||
|
||||
**Status:** Owner will resolve. Code formula is correct; snapshots need rebuilding after prior fix deployment.
|
||||
|
||||
---
|
||||
|
||||
### C2. `eod_stock_quantity` uses CURRENT stock, not historical end-of-day stock
|
||||
|
||||
**Location:** `update_daily_snapshots.sql`, lines 123-132 (CurrentStock CTE)
|
||||
**Symptom:** Every snapshot for a given product shows the same stock quantity regardless of the snapshot date.
|
||||
|
||||
The `CurrentStock` CTE simply reads `stock_quantity` from the `products` table:
|
||||
```sql
|
||||
SELECT pid, stock_quantity, ... FROM public.products
|
||||
```
|
||||
|
||||
This means a snapshot from January 10 shows the SAME stock as today (February 8). Verified in data:
|
||||
- Product 662561: stock = 36 on every date (Feb 1-7)
|
||||
- Product 665397: stock = 25 on every date (Feb 1-7)
|
||||
- All products checked show identical stock across all snapshot dates
|
||||
|
||||
**Impact:** All stock-derived metrics are inaccurate for historical analysis:
|
||||
- `eod_stock_cost`, `eod_stock_retail`, `eod_stock_gross` (all wrong for past dates)
|
||||
- `stockout_flag` (based on current stock, not historical)
|
||||
- `stockout_days_30d` (undercounted since stockout_flag uses current stock)
|
||||
- `avg_stock_units_30d`, `avg_stock_cost_30d` (no variance, just current stock repeated)
|
||||
- `gmroi_30d`, `stockturn_30d` (based on avg_stock which is flat)
|
||||
- `sell_through_30d` (denominator uses current stock assumption)
|
||||
- `service_level_30d`, `fill_rate_30d`
|
||||
|
||||
**This is a known architectural limitation** noted in MEMORY.md. Fixing requires either:
|
||||
1. Storing stock snapshots separately at end-of-day (ideally via a cron job that records stock before any changes)
|
||||
2. Reconstructing historical stock from orders and receivings (complex but possible)
|
||||
|
||||
**Status: FIXED.** MySQL's `snap_product_value` table (daily EOD stock per product since 2012) is now imported into PostgreSQL `stock_snapshots` table via `scripts/import/stock-snapshots.js`. The `CurrentStock` CTE in `update_daily_snapshots.sql` now uses `LEFT JOIN stock_snapshots` for historical stock, falling back to `products.stock_quantity` when no historical data exists. Requires: run import, then rebuild daily snapshots.
|
||||
|
||||
---
|
||||
|
||||
### C3. `ON CONFLICT DO UPDATE WHERE` check skips 91%+ of product_metrics updates
|
||||
|
||||
**Location:** `update_product_metrics.sql`, lines 558-574
|
||||
**Symptom:** 623,205 of 681,912 products (91.4%) have `last_calculated` older than 1 day. 592,369 are over 30 days old. 914 products with active 30-day sales haven't been updated in over 7 days.
|
||||
|
||||
The upsert's `WHERE` clause only updates if specific fields changed:
|
||||
```sql
|
||||
WHERE product_metrics.current_stock IS DISTINCT FROM EXCLUDED.current_stock OR
|
||||
product_metrics.current_price IS DISTINCT FROM EXCLUDED.current_price OR ...
|
||||
```
|
||||
|
||||
Fields NOT checked include: `stockout_days_30d`, `margin_30d`, `gmroi_30d`, `demand_pattern`, `seasonality_index`, `sales_growth_*`, `service_level_30d`, and many others. If a product's stock, price, sales, and revenue haven't changed, the entire row is skipped even though growth metrics, variability, and other derived fields may need updating.
|
||||
|
||||
**Impact:** Most derived metrics (growth, demand patterns, seasonality) are stale for the majority of products. Products with steady sales but unchanged stock/price never get their growth metrics recalculated.
|
||||
|
||||
**Recommended fix:** Either:
|
||||
1. Remove the `WHERE` clause entirely (accept the performance cost of writing all rows every run)
|
||||
2. Add `last_calculated` age check: `OR product_metrics.last_calculated < NOW() - INTERVAL '7 days'`
|
||||
3. Add the missing fields to the change-detection check
|
||||
|
||||
**Status: FIXED.** Added 12 derived fields to the `IS DISTINCT FROM` check (`profit_30d`, `cogs_30d`, `margin_30d`, `stockout_days_30d`, `sell_through_30d`, `sales_growth_30d_vs_prev`, `revenue_growth_30d_vs_prev`, `demand_pattern`, `seasonal_pattern`, `seasonality_index`, `service_level_30d`, `fill_rate_30d`) plus a time-based safety net: `OR product_metrics.last_calculated < NOW() - INTERVAL '1 day'`. This guarantees every row is refreshed at least daily.
|
||||
|
||||
---
|
||||
|
||||
## MEDIUM Issues
|
||||
|
||||
### M1. Demand variability calculated only over activity days, not full 30-day window
|
||||
|
||||
**Location:** `update_product_metrics.sql`, DemandVariability CTE (lines 206-223)
|
||||
**Symptom:** Variance, std_dev, and CV are computed over only the days that appear in snapshots (activity days), not the full 30-day period including zero-sales days.
|
||||
|
||||
Example: Product 41141 (Mexican Poppy) sold 102 units in 30 days across only 3 snapshot days (1, 1, 100). The variance/CV is calculated over just those 3 data points instead of 30 (with 27 zero-sales days).
|
||||
|
||||
**Impact:**
|
||||
- CV is computed on sparse data (3-10 points instead of 30), making it statistically unreliable
|
||||
- Products with sporadic large orders appear less variable than they really are
|
||||
- `demand_pattern` classification is affected (stable/variable/sporadic/lumpy)
|
||||
|
||||
**Recommended fix:** Join against a generated 30-day date series and COALESCE missing days to 0 units sold before computing variance/stddev/CV.
|
||||
|
||||
**Status: FIXED.** Rewrote `DemandVariability` CTE to use `generate_series()` for the full 30-day date range, `CROSS JOIN` with distinct PIDs from snapshots, and `LEFT JOIN` actual snapshot data with `COALESCE(dps.units_sold, 0)` for missing days. Variance/stddev/CV now computed over all 30 data points.
|
||||
|
||||
---
|
||||
|
||||
### M2. `costeach` fallback to `price * 0.5` affects 32.5% of recent orders
|
||||
|
||||
**Location:** `orders.js`, line 600 and 634
|
||||
**Symptom:** When no cost record exists in `order_costs`, the import falls back to `price * 0.5`.
|
||||
|
||||
Data shows 9,839 of 30,266 recent orders (32.5%) use this fallback. Among these, 79 products have `costeach = 0` because the order line price was 0 (so the fallback computes `0 * 0.5 = 0`), even though the product has a real cost_price.
|
||||
|
||||
The daily snapshot has a second line of defense (using `get_weighted_avg_cost()` and then `p.cost_price`), but the orders table's `costeach` column itself contains inaccurate data for ~1/3 of orders.
|
||||
|
||||
**Impact:** COGS calculations at the order level are approximate for 1/3 of orders. The snapshot's fallback chain mitigates this somewhat, but any analytics using `orders.costeach` directly will be affected.
|
||||
|
||||
**Status: FIXED.** Added `products.cost_price` as intermediate fallback: `COALESCE(oc.costeach, p.cost_price, oi.price * 0.5)`. The products table join was added to both the `order_totals` CTE and the outer SELECT in `orders.js`. Requires a full orders re-import to apply retroactively.
|
||||
|
||||
---
|
||||
|
||||
### M3. `lifetime_sales` uses MySQL `total_sold` (status >= 20) but orders import uses status >= 15
|
||||
|
||||
**Location:** `products.js` line 200 vs `orders.js` line 69
|
||||
**Symptom:** `total_sold` in the products table comes from MySQL with `order_status >= 20`, excluding status 15 (canceled) and 16 (combined). But the orders import fetches orders with `order_status >= 15`.
|
||||
|
||||
Verified in MySQL: For product 31286, `total_sold` (>=20) = 13,786 vs (>=15) = 13,905 (difference of 119 units).
|
||||
|
||||
**Impact:** `lifetime_sales` in product_metrics (sourced from `products.total_sold`) slightly understates compared to what the orders table contains. The `lifetime_revenue_quality` field correctly flags most as "estimated" since the orders table only covers ~5 years while `total_sold` is all-time. This is a minor inconsistency (< 1% difference).
|
||||
|
||||
**Status:** Accepted. < 1% difference, not worth the complexity of aligning thresholds.
|
||||
|
||||
---
|
||||
|
||||
### M4. `sell_through_30d` has 868 NULL values and 547 anomalous values for products with sales
|
||||
|
||||
**Location:** `update_product_metrics.sql`, lines 356-361
|
||||
**Formula:** `(sales_30d / (current_stock + sales_30d + returns_units_30d - received_qty_30d)) * 100`
|
||||
|
||||
- 868 products with sales but NULL sell_through (denominator = 0, which happens when `current_stock + sales - received = 0`, i.e. all stock came from receiving and was sold)
|
||||
- 259 products with sell_through > 100%
|
||||
- 288 products with negative sell_through
|
||||
|
||||
**Impact:** Sell-through rate is unreliable for products with significant receiving activity in the same period. The formula tries to approximate "beginning inventory," but the approximation breaks whenever current stock differs from actual beginning stock — which, per issue C2, is effectively always the case.
|
||||
|
||||
**Status:** Will improve once C2 fix (historical stock) is deployed and snapshots are rebuilt, since `current_stock` in the formula will then reflect actual beginning inventory.
|
||||
|
||||
---
|
||||
|
||||
## LOW / INFORMATIONAL Issues
|
||||
|
||||
### L1. Snapshots only cover ~1,167 products/day out of 681K
|
||||
|
||||
Only products with order or receiving activity on a given day get snapshots. This is by design (the `ProductsWithActivity` CTE on line 133 of `update_daily_snapshots.sql`), but it means:
|
||||
- 560K+ products have zero snapshot history
|
||||
- Stockout tracking is impossible for products with no sales (they can't appear in snapshots)
|
||||
- The "avg_stock" metrics (avg_stock_units_30d, etc.) only average over activity days, not all 30 days
|
||||
|
||||
This is acceptable for storage efficiency but should be understood when interpreting metrics.
|
||||
|
||||
**Status:** Accepted (by design).
|
||||
|
||||
---
|
||||
|
||||
### L2. `detect_seasonal_pattern` function only compares current month to yearly average
|
||||
|
||||
The seasonality detection is simplistic: it compares current month's avg daily sales to yearly avg. This means:
|
||||
- It can only detect if the CURRENT month is above average, not identify historical seasonal patterns
|
||||
- Running in January vs July will give completely different results for the same product
|
||||
- The "peak_season" field always shows the current month/quarter when seasonal (not the actual peak)
|
||||
|
||||
This is noted as a P5 (low priority) feature and is adequate for a first pass but should not be relied upon for demand planning.
|
||||
|
||||
**Status: FIXED.** Rewrote `detect_seasonal_pattern` function to compare monthly average sales across the full last 12 months. Uses CV across months + peak-to-average ratio for classification: `strong` (CV > 0.5, peak > 150%), `moderate` (CV > 0.3, peak > 120%), `none`. Peak season now identifies the actual highest-sales month. Requires at least 3 months of data. Saved in `db/functions.sql`.
|
||||
|
||||
---
|
||||
|
||||
### L3. Free product with negative revenue in top sellers
|
||||
|
||||
Product 476848 ("Thank You, From ACOT!") shows 254 sales with -$1.00 revenue because one order applied a $1 discount to a $0 product. This is a data oddity, not a calculation bug. Could be addressed by excluding $0-price products from revenue metrics or by data cleanup.
|
||||
|
||||
**Status:** Accepted (data oddity, not a bug).
|
||||
|
||||
---
|
||||
|
||||
### L4. `landing_cost_price` is always NULL
|
||||
|
||||
`current_landing_cost_price` in product_metrics is mapped from `current_effective_cost` which is just `cost_price`. The `landing_cost_price` concept (cost + shipping + duties) is not implemented. The field exists but has no meaningful data.
|
||||
|
||||
**Status: FIXED.** Removed `landing_cost_price` from `db/schema.sql`, `current_landing_cost_price` from `db/metrics-schema-new.sql`, `update_product_metrics.sql`, and `backfill/populate_initial_product_metrics.sql`. Column should be dropped from the live database via `ALTER TABLE`.
|
||||
|
||||
---
|
||||
|
||||
### L5. Custom SQL functions not tracked in version control
|
||||
|
||||
All 6 custom functions (`calculate_sales_velocity`, `get_weighted_avg_cost`, `safe_divide`, `std_numeric`, `classify_demand_pattern`, `detect_seasonal_pattern`) and the `category_hierarchy` materialized view exist only in the database. They are not defined in any migration or schema file in the repository.
|
||||
|
||||
If the database needs to be recreated, these would be lost.
|
||||
|
||||
**Status: FIXED.** All 6 functions and the `category_hierarchy` materialized view definition saved to `inventory-server/db/functions.sql`. File is re-runnable via `psql -f functions.sql`.
|
||||
|
||||
---
|
||||
|
||||
### L6. `get_weighted_avg_cost` limited to last 10 receivings
|
||||
|
||||
The function uses `LIMIT 10` for performance, but this means products with many small receivings may not accurately reflect the true weighted average cost if the cost has changed significantly beyond the last 10 receiving records.
|
||||
|
||||
**Status: FIXED.** Removed `LIMIT 10` from `get_weighted_avg_cost`. Data shows max receivings per product is 142 (p95 = 11, avg = 3), so performance impact is negligible. Updated definition in `db/functions.sql`.
|
||||
|
||||
---
|
||||
|
||||
## Verification Summary
|
||||
|
||||
### What's Working Correctly
|
||||
|
||||
| Check | Result |
|
||||
|-------|--------|
|
||||
| 30d sales: product_metrics vs orders vs snapshots | **MATCH** (verified top 10 sellers) |
|
||||
| Replenishment formula: manual calc vs stored | **MATCH** (verified 10 products) |
|
||||
| Brand metrics vs sum of product_metrics | **MATCH** (0 difference across all brands) |
|
||||
| Order status mapping (numeric → text) | **CORRECT** (all statuses mapped, no numeric remain) |
|
||||
| Cost price: PostgreSQL vs MySQL source | **MATCH** (within rounding, verified 5 products) |
|
||||
| total_sold: PostgreSQL vs MySQL source | **MATCH** (verified 5 products) |
|
||||
| Category rollups (rolled-up > direct for parents) | **CORRECT** |
|
||||
| ABC classification distribution | **REASONABLE** (A: 8K, B: 12.5K, C: 113K) |
|
||||
| Lead time calculation (PO → receiving) | **CORRECT** (verified examples) |
|
||||
|
||||
### Data Overview
|
||||
|
||||
| Metric | Value |
|
||||
|--------|-------|
|
||||
| Total products | 681,912 |
|
||||
| Products in product_metrics | 681,912 (100%) |
|
||||
| Products with 30d sales | 10,291 (1.5%) |
|
||||
| Products with negative profit & revenue | 139 (mostly cost > price) |
|
||||
| Products with negative stock | 0 |
|
||||
| Snapshot date range | 2020-06-18 to 2026-02-08 |
|
||||
| Avg products per snapshot day | 1,167 |
|
||||
| Order date range | 2020-06-18 to 2026-02-08 |
|
||||
| Total orders | 2,885,825 |
|
||||
| 'returned' status orders | 0 (returns via negative quantity only) |
|
||||
|
||||
---
|
||||
|
||||
## Fix Status Summary
|
||||
|
||||
| Issue | Severity | Status | Deployment Action Needed |
|
||||
|-------|----------|--------|--------------------------|
|
||||
| C1 | Critical | Owner resolving | Rebuild daily snapshots |
|
||||
| C2 | Critical | **FIXED** | Run import, rebuild daily snapshots |
|
||||
| C3 | Critical | **FIXED** | Deploy updated `update_product_metrics.sql` |
|
||||
| M1 | Medium | **FIXED** | Deploy updated `update_product_metrics.sql` |
|
||||
| M2 | Medium | **FIXED** | Full orders re-import (`--full`) |
|
||||
| M3 | Medium | Accepted | None |
|
||||
| M4 | Medium | Pending C2 | Will improve after C2 deployment |
|
||||
| L1 | Low | Accepted | None |
|
||||
| L2 | Low | **FIXED** | Deploy `db/functions.sql` to database |
|
||||
| L3 | Low | Accepted | None |
|
||||
| L4 | Low | **FIXED** | `ALTER TABLE` to drop columns |
|
||||
| L5 | Low | **FIXED** | None (file committed) |
|
||||
| L6 | Low | **FIXED** | Deploy `db/functions.sql` to database |
|
||||
|
||||
### Deployment Steps
|
||||
|
||||
1. Deploy `db/functions.sql` to PostgreSQL: `psql -d inventory_db -f db/functions.sql` (L2, L6)
|
||||
2. Run import (includes stock snapshots first load) (C2, M2)
|
||||
3. Drop stale columns: `ALTER TABLE products DROP COLUMN IF EXISTS landing_cost_price; ALTER TABLE product_metrics DROP COLUMN IF EXISTS current_landing_cost_price;` (L4)
|
||||
4. Rebuild daily snapshots (C1, C2)
|
||||
5. Re-run metrics calculation (C3, M1 take effect automatically)
|
||||
2846
docs/ai-validation-redesign.md
Normal file
2846
docs/ai-validation-redesign.md
Normal file
File diff suppressed because it is too large
Load Diff
73544
docs/klaviyoopenapi.json
Normal file
73544
docs/klaviyoopenapi.json
Normal file
File diff suppressed because it is too large
Load Diff
1106
docs/prod_registry.class.php
Normal file
1106
docs/prod_registry.class.php
Normal file
File diff suppressed because it is too large
Load Diff
106
inventory-server/auth/package-lock.json
generated
106
inventory-server/auth/package-lock.json
generated
@@ -18,6 +18,43 @@
|
||||
"pg": "^8.11.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@inquirer/external-editor": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.2.tgz",
|
||||
"integrity": "sha512-yy9cOoBnx58TlsPrIxauKIFQTiyH+0MK4e97y4sV9ERbI+zDxw7i2hxHLCIEGIE/8PPvDxGhgzIOTSOWcs6/MQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chardet": "^2.1.0",
|
||||
"iconv-lite": "^0.7.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/node": ">=18"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/node": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@inquirer/external-editor/node_modules/iconv-lite": {
|
||||
"version": "0.7.0",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz",
|
||||
"integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safer-buffer": ">= 2.1.2 < 3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/express"
|
||||
}
|
||||
},
|
||||
"node_modules/@mapbox/node-pre-gyp": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
|
||||
@@ -251,9 +288,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
@@ -345,9 +382,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/chardet": {
|
||||
"version": "0.7.0",
|
||||
"resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz",
|
||||
"integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==",
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.0.tgz",
|
||||
"integrity": "sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/chownr": {
|
||||
@@ -700,20 +737,6 @@
|
||||
"url": "https://opencollective.com/express"
|
||||
}
|
||||
},
|
||||
"node_modules/external-editor": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz",
|
||||
"integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chardet": "^0.7.0",
|
||||
"iconv-lite": "^0.4.24",
|
||||
"tmp": "^0.0.33"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/figures": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
|
||||
@@ -1036,16 +1059,16 @@
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/inquirer": {
|
||||
"version": "8.2.6",
|
||||
"resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.6.tgz",
|
||||
"integrity": "sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==",
|
||||
"version": "8.2.7",
|
||||
"resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.7.tgz",
|
||||
"integrity": "sha512-UjOaSel/iddGZJ5xP/Eixh6dY1XghiBw4XK13rCCIJcJfyhhoul/7KhLLUGtebEj6GDYM6Vnx/mVsjx2L/mFIA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@inquirer/external-editor": "^1.0.0",
|
||||
"ansi-escapes": "^4.2.1",
|
||||
"chalk": "^4.1.1",
|
||||
"cli-cursor": "^3.1.0",
|
||||
"cli-width": "^3.0.0",
|
||||
"external-editor": "^3.0.3",
|
||||
"figures": "^3.0.0",
|
||||
"lodash": "^4.17.21",
|
||||
"mute-stream": "0.0.8",
|
||||
@@ -1374,16 +1397,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/morgan": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz",
|
||||
"integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==",
|
||||
"version": "1.10.1",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.1.tgz",
|
||||
"integrity": "sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"basic-auth": "~2.0.1",
|
||||
"debug": "2.6.9",
|
||||
"depd": "~2.0.0",
|
||||
"on-finished": "~2.3.0",
|
||||
"on-headers": "~1.0.2"
|
||||
"on-headers": "~1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
@@ -1510,9 +1533,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/on-headers": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
|
||||
"integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
|
||||
"integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
@@ -1565,15 +1588,6 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/os-tmpdir": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
|
||||
"integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/parseurl": {
|
||||
"version": "1.3.3",
|
||||
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
|
||||
@@ -2109,18 +2123,6 @@
|
||||
"integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tmp": {
|
||||
"version": "0.0.33",
|
||||
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
|
||||
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"os-tmpdir": "~1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/toidentifier": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
|
||||
|
||||
@@ -229,11 +229,14 @@ router.post('/users', authenticate, requirePermission('create:users'), async (re
|
||||
const hashedPassword = await bcrypt.hash(password, saltRounds);
|
||||
|
||||
// Insert new user
|
||||
// Convert rocket_chat_user_id to integer if provided
|
||||
const rcUserId = rocket_chat_user_id ? parseInt(rocket_chat_user_id, 10) : null;
|
||||
|
||||
const userResult = await client.query(`
|
||||
INSERT INTO users (username, email, password, is_admin, is_active, rocket_chat_user_id, created_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, CURRENT_TIMESTAMP)
|
||||
RETURNING id
|
||||
`, [username, email || null, hashedPassword, !!is_admin, is_active !== false, rocket_chat_user_id || null]);
|
||||
`, [username, email || null, hashedPassword, !!is_admin, is_active !== false, rcUserId]);
|
||||
|
||||
const userId = userResult.rows[0].id;
|
||||
|
||||
@@ -360,7 +363,9 @@ router.put('/users/:id', authenticate, requirePermission('edit:users'), async (r
|
||||
|
||||
if (rocket_chat_user_id !== undefined) {
|
||||
updateFields.push(`rocket_chat_user_id = $${paramIndex++}`);
|
||||
updateValues.push(rocket_chat_user_id || null);
|
||||
// Convert to integer if not null/undefined, otherwise null
|
||||
const rcUserId = rocket_chat_user_id ? parseInt(rocket_chat_user_id, 10) : null;
|
||||
updateValues.push(rcUserId);
|
||||
}
|
||||
|
||||
// Update password if provided
|
||||
|
||||
@@ -35,7 +35,7 @@ global.pool = pool;
|
||||
app.use(express.json());
|
||||
app.use(morgan('combined'));
|
||||
app.use(cors({
|
||||
origin: ['http://localhost:5175', 'http://localhost:5174', 'https://inventory.kent.pw'],
|
||||
origin: ['http://localhost:5175', 'http://localhost:5174', 'https://inventory.kent.pw', 'https://acot.site', 'https://tools.acherryontop.com'],
|
||||
credentials: true
|
||||
}));
|
||||
|
||||
@@ -62,6 +62,12 @@ app.post('/login', async (req, res) => {
|
||||
return res.status(403).json({ error: 'Account is inactive' });
|
||||
}
|
||||
|
||||
// Update last login timestamp
|
||||
await pool.query(
|
||||
'UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = $1',
|
||||
[user.id]
|
||||
);
|
||||
|
||||
// Generate JWT token
|
||||
const token = jwt.sign(
|
||||
{ userId: user.id, username: user.username },
|
||||
@@ -76,7 +82,7 @@ app.post('/login', async (req, res) => {
|
||||
JOIN user_permissions up ON p.id = up.permission_id
|
||||
WHERE up.user_id = $1
|
||||
`, [user.id]);
|
||||
|
||||
|
||||
const permissions = permissionsResult.rows.map(row => row.code);
|
||||
|
||||
res.json({
|
||||
|
||||
45
inventory-server/chat/create-new-database.sql
Normal file
45
inventory-server/chat/create-new-database.sql
Normal file
@@ -0,0 +1,45 @@
|
||||
-- PostgreSQL Database Creation Script for New Server
|
||||
-- Run as: sudo -u postgres psql -f create-new-database.sql
|
||||
|
||||
-- Terminate all connections to the database (if it exists)
|
||||
SELECT pg_terminate_backend(pid)
|
||||
FROM pg_stat_activity
|
||||
WHERE datname = 'rocketchat_converted' AND pid <> pg_backend_pid();
|
||||
|
||||
-- Drop the database if it exists
|
||||
DROP DATABASE IF EXISTS rocketchat_converted;
|
||||
|
||||
-- Create fresh database
|
||||
CREATE DATABASE rocketchat_converted;
|
||||
|
||||
-- Create user (if not exists) - UPDATE PASSWORD BEFORE RUNNING!
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT FROM pg_user WHERE usename = 'rocketchat_user') THEN
|
||||
CREATE USER rocketchat_user WITH PASSWORD 'HKjLgt23gWuPXzEAn3rW';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- Grant database privileges
|
||||
GRANT CONNECT ON DATABASE rocketchat_converted TO rocketchat_user;
|
||||
GRANT CREATE ON DATABASE rocketchat_converted TO rocketchat_user;
|
||||
|
||||
-- Connect to the new database
|
||||
\c rocketchat_converted;
|
||||
|
||||
-- Grant schema privileges
|
||||
GRANT CREATE ON SCHEMA public TO rocketchat_user;
|
||||
GRANT USAGE ON SCHEMA public TO rocketchat_user;
|
||||
|
||||
-- Grant privileges on all future tables and sequences
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO rocketchat_user;
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE, SELECT ON SEQUENCES TO rocketchat_user;
|
||||
|
||||
-- Display success message
|
||||
\echo 'Database created successfully!'
|
||||
\echo 'IMPORTANT: Update the password for rocketchat_user before proceeding'
|
||||
\echo 'Next steps:'
|
||||
\echo '1. Update the password in this file'
|
||||
\echo '2. Run export-chat-data.sh on your current server'
|
||||
\echo '3. Transfer the exported files to this server'
|
||||
\echo '4. Run import-chat-data.sh on this server'
|
||||
147
inventory-server/chat/export-chat-data.sh
Executable file
147
inventory-server/chat/export-chat-data.sh
Executable file
@@ -0,0 +1,147 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Chat Database Export Script
|
||||
# This script exports the chat database schema and data for migration
|
||||
|
||||
set -e # Exit on any error
|
||||
|
||||
echo "🚀 Starting chat database export..."
|
||||
|
||||
# Configuration - Update these values for your setup
|
||||
DB_HOST="${CHAT_DB_HOST:-localhost}"
|
||||
DB_PORT="${CHAT_DB_PORT:-5432}"
|
||||
DB_NAME="${CHAT_DB_NAME:-rocketchat_converted}"
|
||||
DB_USER="${CHAT_DB_USER:-rocketchat_user}"
|
||||
|
||||
# Check if database connection info is available
|
||||
if [ -z "$CHAT_DB_PASSWORD" ]; then
|
||||
echo "⚠️ CHAT_DB_PASSWORD environment variable not set"
|
||||
echo "Please set it with: export CHAT_DB_PASSWORD='your_password'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "📊 Database: $DB_NAME on $DB_HOST:$DB_PORT"
|
||||
|
||||
# Create export directory
|
||||
EXPORT_DIR="chat-migration-$(date +%Y%m%d-%H%M%S)"
|
||||
mkdir -p "$EXPORT_DIR"
|
||||
|
||||
echo "📁 Export directory: $EXPORT_DIR"
|
||||
|
||||
# Export database schema
|
||||
echo "📋 Exporting database schema..."
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" pg_dump \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
--schema-only \
|
||||
--no-owner \
|
||||
--no-privileges \
|
||||
-f "$EXPORT_DIR/chat-schema.sql"
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "✅ Schema exported successfully"
|
||||
else
|
||||
echo "❌ Schema export failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Export database data
|
||||
echo "💾 Exporting database data..."
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" pg_dump \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
--data-only \
|
||||
--no-owner \
|
||||
--no-privileges \
|
||||
--disable-triggers \
|
||||
--column-inserts \
|
||||
-f "$EXPORT_DIR/chat-data.sql"
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "✅ Data exported successfully"
|
||||
else
|
||||
echo "❌ Data export failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Export file uploads and avatars
|
||||
echo "📎 Exporting chat files (uploads and avatars)..."
|
||||
if [ -d "db-convert/db/files" ]; then
|
||||
cd db-convert/db
|
||||
tar -czf "../../$EXPORT_DIR/chat-files.tar.gz" files/
|
||||
cd ../..
|
||||
echo "✅ Files exported successfully"
|
||||
else
|
||||
echo "⚠️ No files directory found at db-convert/db/files"
|
||||
echo " This is normal if you have no file uploads"
|
||||
touch "$EXPORT_DIR/chat-files.tar.gz"
|
||||
fi
|
||||
|
||||
# Get table statistics for verification
|
||||
echo "📈 Generating export statistics..."
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
-c "
|
||||
SELECT
|
||||
schemaname,
|
||||
tablename,
|
||||
n_tup_ins as inserted_rows,
|
||||
n_tup_upd as updated_rows,
|
||||
n_tup_del as deleted_rows,
|
||||
n_live_tup as live_rows,
|
||||
n_dead_tup as dead_rows
|
||||
FROM pg_stat_user_tables
|
||||
ORDER BY n_live_tup DESC;
|
||||
" > "$EXPORT_DIR/table-stats.txt"
|
||||
|
||||
# Create export summary
|
||||
cat > "$EXPORT_DIR/export-summary.txt" << EOF
|
||||
Chat Database Export Summary
|
||||
===========================
|
||||
|
||||
Export Date: $(date)
|
||||
Database: $DB_NAME
|
||||
Host: $DB_HOST:$DB_PORT
|
||||
User: $DB_USER
|
||||
|
||||
Files Generated:
|
||||
- chat-schema.sql: Database schema (tables, indexes, constraints)
|
||||
- chat-data.sql: All table data
|
||||
- chat-files.tar.gz: Uploaded files and avatars
|
||||
- table-stats.txt: Database statistics
|
||||
- export-summary.txt: This summary
|
||||
|
||||
Next Steps:
|
||||
1. Transfer these files to your new server
|
||||
2. Run create-new-database.sql on the new server first
|
||||
3. Run import-chat-data.sh on the new server
|
||||
4. Update your application configuration
|
||||
5. Run verify-migration.js to validate the migration
|
||||
|
||||
Important Notes:
|
||||
- Keep these files secure as they contain your chat data
|
||||
- Ensure the new server has enough disk space
|
||||
- Plan for application downtime during the migration
|
||||
EOF
|
||||
|
||||
echo ""
|
||||
echo "🎉 Export completed successfully!"
|
||||
echo "📁 Files are in: $EXPORT_DIR/"
|
||||
echo ""
|
||||
echo "📋 Export Summary:"
|
||||
ls -lh "$EXPORT_DIR/"
|
||||
echo ""
|
||||
echo "🚚 Next steps:"
|
||||
echo "1. Transfer the $EXPORT_DIR/ directory to your new server"
|
||||
echo "2. Run create-new-database.sql on the new server (update password first!)"
|
||||
echo "3. Run import-chat-data.sh on the new server"
|
||||
echo ""
|
||||
echo "💡 To transfer files to new server:"
|
||||
echo " scp -r $EXPORT_DIR/ user@new-server:/tmp/"
|
||||
167
inventory-server/chat/import-chat-data.sh
Normal file
167
inventory-server/chat/import-chat-data.sh
Normal file
@@ -0,0 +1,167 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Chat Database Import Script
|
||||
# This script imports the chat database schema and data on the new server
|
||||
|
||||
set -e # Exit on any error
|
||||
|
||||
echo "🚀 Starting chat database import..."
|
||||
|
||||
# Configuration - Update these values for your new server
|
||||
DB_HOST="${CHAT_DB_HOST:-localhost}"
|
||||
DB_PORT="${CHAT_DB_PORT:-5432}"
|
||||
DB_NAME="${CHAT_DB_NAME:-rocketchat_converted}"
|
||||
DB_USER="${CHAT_DB_USER:-rocketchat_user}"
|
||||
|
||||
# Check if database connection info is available
|
||||
if [ -z "$CHAT_DB_PASSWORD" ]; then
|
||||
echo "⚠️ CHAT_DB_PASSWORD environment variable not set"
|
||||
echo "Please set it with: export CHAT_DB_PASSWORD='your_password'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Find the migration directory
|
||||
MIGRATION_DIR=""
|
||||
if [ -d "/tmp" ]; then
|
||||
MIGRATION_DIR=$(find /tmp -maxdepth 1 -name "chat-migration-*" -type d | head -1)
|
||||
fi
|
||||
|
||||
if [ -z "$MIGRATION_DIR" ]; then
|
||||
echo "❌ No migration directory found in /tmp/"
|
||||
echo "Please specify the migration directory:"
|
||||
read -p "Enter full path to migration directory: " MIGRATION_DIR
|
||||
fi
|
||||
|
||||
if [ ! -d "$MIGRATION_DIR" ]; then
|
||||
echo "❌ Migration directory not found: $MIGRATION_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "📁 Using migration directory: $MIGRATION_DIR"
|
||||
echo "📊 Target database: $DB_NAME on $DB_HOST:$DB_PORT"
|
||||
|
||||
# Verify required files exist
|
||||
REQUIRED_FILES=("chat-schema.sql" "chat-data.sql" "chat-files.tar.gz")
|
||||
for file in "${REQUIRED_FILES[@]}"; do
|
||||
if [ ! -f "$MIGRATION_DIR/$file" ]; then
|
||||
echo "❌ Required file not found: $MIGRATION_DIR/$file"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
echo "✅ All required files found"
|
||||
|
||||
# Test database connection
|
||||
echo "🔗 Testing database connection..."
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
-c "SELECT version();" > /dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "✅ Database connection successful"
|
||||
else
|
||||
echo "❌ Database connection failed"
|
||||
echo "Please ensure:"
|
||||
echo " 1. PostgreSQL is running"
|
||||
echo " 2. Database '$DB_NAME' exists"
|
||||
echo " 3. User '$DB_USER' has access"
|
||||
echo " 4. Password is correct"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Import database schema
|
||||
echo "📋 Importing database schema..."
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
-f "$MIGRATION_DIR/chat-schema.sql"
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "✅ Schema imported successfully"
|
||||
else
|
||||
echo "❌ Schema import failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Import database data
|
||||
echo "💾 Importing database data..."
|
||||
echo " This may take a while depending on data size..."
|
||||
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
-f "$MIGRATION_DIR/chat-data.sql"
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "✅ Data imported successfully"
|
||||
else
|
||||
echo "❌ Data import failed"
|
||||
echo "Check the error messages above for details"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create files directory and import files
|
||||
echo "📎 Setting up files directory..."
|
||||
mkdir -p "db-convert/db"
|
||||
|
||||
if [ -s "$MIGRATION_DIR/chat-files.tar.gz" ]; then
|
||||
echo "📂 Extracting chat files..."
|
||||
cd db-convert/db
|
||||
tar -xzf "$MIGRATION_DIR/chat-files.tar.gz"
|
||||
cd ../..
|
||||
|
||||
# Set proper permissions
|
||||
if [ -d "db-convert/db/files" ]; then
|
||||
chmod -R 755 db-convert/db/files
|
||||
echo "✅ Files imported and permissions set"
|
||||
else
|
||||
echo "⚠️ Files directory not created properly"
|
||||
fi
|
||||
else
|
||||
echo "ℹ️ No files to import (empty archive)"
|
||||
mkdir -p "db-convert/db/files/uploads"
|
||||
mkdir -p "db-convert/db/files/avatars"
|
||||
fi
|
||||
|
||||
# Get final table statistics
|
||||
echo "📈 Generating import statistics..."
|
||||
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
|
||||
-h "$DB_HOST" \
|
||||
-p "$DB_PORT" \
|
||||
-U "$DB_USER" \
|
||||
-d "$DB_NAME" \
|
||||
-c "
|
||||
SELECT
|
||||
tablename,
|
||||
n_live_tup as row_count
|
||||
FROM pg_stat_user_tables
|
||||
WHERE schemaname = 'public'
|
||||
ORDER BY n_live_tup DESC;
|
||||
"
|
||||
|
||||
# Create import summary
|
||||
echo ""
|
||||
echo "🎉 Import completed successfully!"
|
||||
echo ""
|
||||
echo "📋 Import Summary:"
|
||||
echo " Database: $DB_NAME"
|
||||
echo " Host: $DB_HOST:$DB_PORT"
|
||||
echo " Files location: $(pwd)/db-convert/db/files/"
|
||||
echo ""
|
||||
echo "🔍 Next steps:"
|
||||
echo "1. Update your application configuration to use this database"
|
||||
echo "2. Run verify-migration.js to validate the migration"
|
||||
echo "3. Test your application thoroughly"
|
||||
echo "4. Update DNS/load balancer to point to new server"
|
||||
echo ""
|
||||
echo "⚠️ Important:"
|
||||
echo "- Keep the original data as backup until migration is fully validated"
|
||||
echo "- Monitor the application closely after switching"
|
||||
echo "- Have a rollback plan ready"
|
||||
86
inventory-server/chat/migrate-to-new-server.md
Normal file
86
inventory-server/chat/migrate-to-new-server.md
Normal file
@@ -0,0 +1,86 @@
|
||||
# Chat Database Migration Guide
|
||||
|
||||
This guide will help you migrate your chat database from the current server to a new PostgreSQL server.
|
||||
|
||||
## Overview
|
||||
Your chat system uses:
|
||||
- Database: `rocketchat_converted` (PostgreSQL)
|
||||
- Main tables: users, message, room, uploads, avatars, subscription
|
||||
- File storage: db-convert/db/files/ directory with uploads and avatars
|
||||
- Environment configuration for database connection
|
||||
|
||||
## Migration Steps
|
||||
|
||||
### 1. Pre-Migration Setup
|
||||
|
||||
On your **new server**, ensure PostgreSQL is installed and running:
|
||||
```bash
|
||||
# Install PostgreSQL (if not already done)
|
||||
sudo apt update
|
||||
sudo apt install postgresql postgresql-contrib
|
||||
|
||||
# Start PostgreSQL service
|
||||
sudo systemctl start postgresql
|
||||
sudo systemctl enable postgresql
|
||||
```
|
||||
|
||||
### 2. Create Database Schema on New Server
|
||||
|
||||
Run the provided migration script:
|
||||
```bash
|
||||
# On new server
|
||||
sudo -u postgres psql -f create-new-database.sql
|
||||
```
|
||||
|
||||
### 3. Export Data from Current Server
|
||||
|
||||
Run the export script:
|
||||
```bash
|
||||
# On current server
|
||||
./export-chat-data.sh
|
||||
```
|
||||
|
||||
This will create:
|
||||
- `chat-schema.sql` - Database schema
|
||||
- `chat-data.sql` - All table data
|
||||
- `chat-files.tar.gz` - All uploaded files and avatars
|
||||
|
||||
### 4. Transfer Data to New Server
|
||||
|
||||
```bash
|
||||
# Copy files to new server
|
||||
scp chat-schema.sql chat-data.sql chat-files.tar.gz user@new-server:/tmp/
|
||||
```
|
||||
|
||||
### 5. Import Data on New Server
|
||||
|
||||
```bash
|
||||
# On new server
|
||||
./import-chat-data.sh
|
||||
```
|
||||
|
||||
### 6. Update Configuration
|
||||
|
||||
Update your environment variables to point to the new database server.
|
||||
|
||||
### 7. Verify Migration
|
||||
|
||||
Run the verification script to ensure everything transferred correctly:
|
||||
```bash
|
||||
node verify-migration.js
|
||||
```
|
||||
|
||||
## Files Provided
|
||||
|
||||
1. `create-new-database.sql` - Creates database and user on new server
|
||||
2. `export-chat-data.sh` - Exports data from current server
|
||||
3. `import-chat-data.sh` - Imports data to new server
|
||||
4. `verify-migration.js` - Verifies data integrity
|
||||
5. `update-config-template.env` - Template for new configuration
|
||||
|
||||
## Important Notes
|
||||
|
||||
- **Backup first**: Always backup your current database before migration
|
||||
- **Downtime**: Plan for application downtime during migration
|
||||
- **File permissions**: Ensure file permissions are preserved during transfer
|
||||
- **Network access**: Ensure new server can accept connections from your application
|
||||
77
inventory-server/chat/package-lock.json
generated
77
inventory-server/chat/package-lock.json
generated
@@ -15,7 +15,7 @@
|
||||
"pg": "^8.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.1.10"
|
||||
"nodemon": "^2.0.22"
|
||||
}
|
||||
},
|
||||
"node_modules/accepts": {
|
||||
@@ -764,16 +764,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/morgan": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz",
|
||||
"integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==",
|
||||
"version": "1.10.1",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.1.tgz",
|
||||
"integrity": "sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"basic-auth": "~2.0.1",
|
||||
"debug": "2.6.9",
|
||||
"depd": "~2.0.0",
|
||||
"on-finished": "~2.3.0",
|
||||
"on-headers": "~1.0.2"
|
||||
"on-headers": "~1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
@@ -807,19 +807,19 @@
|
||||
}
|
||||
},
|
||||
"node_modules/nodemon": {
|
||||
"version": "3.1.10",
|
||||
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.10.tgz",
|
||||
"integrity": "sha512-WDjw3pJ0/0jMFmyNDp3gvY2YizjLmmOUQo6DEBY+JgdvW/yQ9mEeSw6H5ythl5Ny2ytb7f9C2nIbjSxMNzbJXw==",
|
||||
"version": "2.0.22",
|
||||
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz",
|
||||
"integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chokidar": "^3.5.2",
|
||||
"debug": "^4",
|
||||
"debug": "^3.2.7",
|
||||
"ignore-by-default": "^1.0.1",
|
||||
"minimatch": "^3.1.2",
|
||||
"pstree.remy": "^1.1.8",
|
||||
"semver": "^7.5.3",
|
||||
"simple-update-notifier": "^2.0.0",
|
||||
"semver": "^5.7.1",
|
||||
"simple-update-notifier": "^1.0.7",
|
||||
"supports-color": "^5.5.0",
|
||||
"touch": "^3.1.0",
|
||||
"undefsafe": "^2.0.5"
|
||||
@@ -828,7 +828,7 @@
|
||||
"nodemon": "bin/nodemon.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
"node": ">=8.10.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
@@ -836,21 +836,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/nodemon/node_modules/debug": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
|
||||
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
|
||||
"version": "3.2.7",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
|
||||
"integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
"ms": "^2.1.1"
|
||||
}
|
||||
},
|
||||
"node_modules/nodemon/node_modules/ms": {
|
||||
@@ -904,9 +896,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/on-headers": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
|
||||
"integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
|
||||
"integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
@@ -1167,16 +1159,13 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.7.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
|
||||
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
|
||||
"version": "5.7.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
|
||||
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
"semver": "bin/semver"
|
||||
}
|
||||
},
|
||||
"node_modules/send": {
|
||||
@@ -1312,16 +1301,26 @@
|
||||
}
|
||||
},
|
||||
"node_modules/simple-update-notifier": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
|
||||
"integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz",
|
||||
"integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"semver": "^7.5.3"
|
||||
"semver": "~7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
"node": ">=8.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/simple-update-notifier/node_modules/semver": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
|
||||
"integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
}
|
||||
},
|
||||
"node_modules/split2": {
|
||||
|
||||
@@ -33,7 +33,7 @@ global.pool = pool;
|
||||
app.use(express.json());
|
||||
app.use(morgan('combined'));
|
||||
app.use(cors({
|
||||
origin: ['http://localhost:5175', 'http://localhost:5174', 'https://inventory.kent.pw'],
|
||||
origin: ['http://localhost:5175', 'http://localhost:5174', 'https://inventory.kent.pw', 'https://acot.site', 'https://tools.acherryontop.com'],
|
||||
credentials: true
|
||||
}));
|
||||
|
||||
|
||||
26
inventory-server/chat/update-config-template.env
Normal file
26
inventory-server/chat/update-config-template.env
Normal file
@@ -0,0 +1,26 @@
|
||||
# Chat Server Database Configuration Template
|
||||
# Copy this to your .env file and update the values for your new server
|
||||
|
||||
# Database Configuration for New Server
|
||||
CHAT_DB_HOST=your-new-server-ip-or-hostname
|
||||
CHAT_DB_PORT=5432
|
||||
CHAT_DB_NAME=rocketchat_converted
|
||||
CHAT_DB_USER=rocketchat_user
|
||||
CHAT_DB_PASSWORD=your-secure-password
|
||||
|
||||
# Chat Server Port
|
||||
CHAT_PORT=3014
|
||||
|
||||
# Example configuration:
|
||||
# CHAT_DB_HOST=192.168.1.100
|
||||
# CHAT_DB_PORT=5432
|
||||
# CHAT_DB_NAME=rocketchat_converted
|
||||
# CHAT_DB_USER=rocketchat_user
|
||||
# CHAT_DB_PASSWORD=MySecureP@ssw0rd123
|
||||
|
||||
# Notes:
|
||||
# - Replace 'your-new-server-ip-or-hostname' with actual server address
|
||||
# - Use a strong password for CHAT_DB_PASSWORD
|
||||
# - Ensure the new server allows connections from your application server
|
||||
# - Update any firewall rules to allow PostgreSQL connections (port 5432)
|
||||
# - Test connectivity before updating production configuration
|
||||
231
inventory-server/chat/verify-migration.js
Executable file
231
inventory-server/chat/verify-migration.js
Executable file
@@ -0,0 +1,231 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Chat Database Migration Verification Script
|
||||
*
|
||||
* This script verifies that the chat database migration was successful
|
||||
* by comparing record counts and testing basic functionality.
|
||||
*/
|
||||
|
||||
require('dotenv').config({ path: '../.env' });
|
||||
const { Pool } = require('pg');
|
||||
|
||||
// Database configuration
|
||||
const pool = new Pool({
|
||||
host: process.env.CHAT_DB_HOST || 'localhost',
|
||||
user: process.env.CHAT_DB_USER || 'rocketchat_user',
|
||||
password: process.env.CHAT_DB_PASSWORD,
|
||||
database: process.env.CHAT_DB_NAME || 'rocketchat_converted',
|
||||
port: process.env.CHAT_DB_PORT || 5432,
|
||||
});
|
||||
|
||||
const originalStats = process.argv[2] ? JSON.parse(process.argv[2]) : null;
|
||||
|
||||
async function verifyMigration() {
|
||||
console.log('🔍 Starting migration verification...\n');
|
||||
|
||||
try {
|
||||
// Test basic connection
|
||||
console.log('🔗 Testing database connection...');
|
||||
const versionResult = await pool.query('SELECT version()');
|
||||
console.log('✅ Database connection successful');
|
||||
console.log(` PostgreSQL version: ${versionResult.rows[0].version.split(' ')[1]}\n`);
|
||||
|
||||
// Get table statistics
|
||||
console.log('📊 Checking table statistics...');
|
||||
const statsResult = await pool.query(`
|
||||
SELECT
|
||||
tablename,
|
||||
n_live_tup as row_count,
|
||||
n_dead_tup as dead_rows,
|
||||
schemaname
|
||||
FROM pg_stat_user_tables
|
||||
WHERE schemaname = 'public'
|
||||
ORDER BY n_live_tup DESC
|
||||
`);
|
||||
|
||||
if (statsResult.rows.length === 0) {
|
||||
console.log('❌ No tables found! Migration may have failed.');
|
||||
return false;
|
||||
}
|
||||
|
||||
console.log('📋 Table Statistics:');
|
||||
console.log(' Table Name | Row Count | Dead Rows');
|
||||
console.log(' -------------------|-----------|----------');
|
||||
|
||||
let totalRows = 0;
|
||||
const tableStats = {};
|
||||
|
||||
for (const row of statsResult.rows) {
|
||||
const rowCount = parseInt(row.row_count) || 0;
|
||||
const deadRows = parseInt(row.dead_rows) || 0;
|
||||
totalRows += rowCount;
|
||||
tableStats[row.tablename] = rowCount;
|
||||
|
||||
console.log(` ${row.tablename.padEnd(18)} | ${rowCount.toString().padStart(9)} | ${deadRows.toString().padStart(8)}`);
|
||||
}
|
||||
|
||||
console.log(`\n Total rows across all tables: ${totalRows}\n`);
|
||||
|
||||
// Verify critical tables exist and have data
|
||||
const criticalTables = ['users', 'message', 'room'];
|
||||
console.log('🔑 Checking critical tables...');
|
||||
|
||||
for (const table of criticalTables) {
|
||||
if (tableStats[table] > 0) {
|
||||
console.log(`✅ ${table}: ${tableStats[table]} rows`);
|
||||
} else if (tableStats[table] === 0) {
|
||||
console.log(`⚠️ ${table}: table exists but is empty`);
|
||||
} else {
|
||||
console.log(`❌ ${table}: table not found`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Test specific functionality
|
||||
console.log('\n🧪 Testing specific functionality...');
|
||||
|
||||
// Test users table
|
||||
const userTest = await pool.query(`
|
||||
SELECT COUNT(*) as total_users,
|
||||
COUNT(*) FILTER (WHERE active = true) as active_users,
|
||||
COUNT(*) FILTER (WHERE type = 'user') as regular_users
|
||||
FROM users
|
||||
`);
|
||||
|
||||
if (userTest.rows[0]) {
|
||||
const { total_users, active_users, regular_users } = userTest.rows[0];
|
||||
console.log(`✅ Users: ${total_users} total, ${active_users} active, ${regular_users} regular users`);
|
||||
}
|
||||
|
||||
// Test messages table
|
||||
const messageTest = await pool.query(`
|
||||
SELECT COUNT(*) as total_messages,
|
||||
COUNT(DISTINCT rid) as unique_rooms,
|
||||
MIN(ts) as oldest_message,
|
||||
MAX(ts) as newest_message
|
||||
FROM message
|
||||
`);
|
||||
|
||||
if (messageTest.rows[0]) {
|
||||
const { total_messages, unique_rooms, oldest_message, newest_message } = messageTest.rows[0];
|
||||
console.log(`✅ Messages: ${total_messages} total across ${unique_rooms} rooms`);
|
||||
if (oldest_message && newest_message) {
|
||||
console.log(` Date range: ${oldest_message.toISOString().split('T')[0]} to ${newest_message.toISOString().split('T')[0]}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Test rooms table
|
||||
const roomTest = await pool.query(`
|
||||
SELECT COUNT(*) as total_rooms,
|
||||
COUNT(*) FILTER (WHERE t = 'c') as channels,
|
||||
COUNT(*) FILTER (WHERE t = 'p') as private_groups,
|
||||
COUNT(*) FILTER (WHERE t = 'd') as direct_messages
|
||||
FROM room
|
||||
`);
|
||||
|
||||
if (roomTest.rows[0]) {
|
||||
const { total_rooms, channels, private_groups, direct_messages } = roomTest.rows[0];
|
||||
console.log(`✅ Rooms: ${total_rooms} total (${channels} channels, ${private_groups} private, ${direct_messages} DMs)`);
|
||||
}
|
||||
|
||||
// Test file uploads if table exists
|
||||
if (tableStats.uploads > 0) {
|
||||
const uploadTest = await pool.query(`
|
||||
SELECT COUNT(*) as total_uploads,
|
||||
COUNT(DISTINCT typegroup) as file_types,
|
||||
pg_size_pretty(SUM(size)) as total_size
|
||||
FROM uploads
|
||||
WHERE size IS NOT NULL
|
||||
`);
|
||||
|
||||
if (uploadTest.rows[0]) {
|
||||
const { total_uploads, file_types, total_size } = uploadTest.rows[0];
|
||||
console.log(`✅ Uploads: ${total_uploads} files, ${file_types} types, ${total_size || 'unknown size'}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Test server health endpoint simulation
|
||||
console.log('\n🏥 Testing application endpoints simulation...');
|
||||
|
||||
try {
|
||||
const healthTest = await pool.query(`
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM users WHERE active = true) as active_users,
|
||||
(SELECT COUNT(*) FROM message) as total_messages,
|
||||
(SELECT COUNT(*) FROM room) as total_rooms
|
||||
`);
|
||||
|
||||
if (healthTest.rows[0]) {
|
||||
const stats = healthTest.rows[0];
|
||||
console.log('✅ Health check simulation passed');
|
||||
console.log(` Active users: ${stats.active_users}`);
|
||||
console.log(` Total messages: ${stats.total_messages}`);
|
||||
console.log(` Total rooms: ${stats.total_rooms}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`⚠️ Health check simulation failed: ${error.message}`);
|
||||
}
|
||||
|
||||
// Check indexes
|
||||
console.log('\n📇 Checking database indexes...');
|
||||
const indexResult = await pool.query(`
|
||||
SELECT
|
||||
schemaname,
|
||||
tablename,
|
||||
indexname,
|
||||
indexdef
|
||||
FROM pg_indexes
|
||||
WHERE schemaname = 'public'
|
||||
ORDER BY tablename, indexname
|
||||
`);
|
||||
|
||||
const indexesByTable = {};
|
||||
for (const idx of indexResult.rows) {
|
||||
if (!indexesByTable[idx.tablename]) {
|
||||
indexesByTable[idx.tablename] = [];
|
||||
}
|
||||
indexesByTable[idx.tablename].push(idx.indexname);
|
||||
}
|
||||
|
||||
for (const [table, indexes] of Object.entries(indexesByTable)) {
|
||||
console.log(` ${table}: ${indexes.length} indexes`);
|
||||
}
|
||||
|
||||
console.log('\n🎉 Migration verification completed successfully!');
|
||||
console.log('\n✅ Summary:');
|
||||
console.log(` - Database connection: Working`);
|
||||
console.log(` - Tables created: ${statsResult.rows.length}`);
|
||||
console.log(` - Total data rows: ${totalRows}`);
|
||||
console.log(` - Critical tables: All present`);
|
||||
console.log(` - Indexes: ${indexResult.rows.length} total`);
|
||||
|
||||
console.log('\n🚀 Next steps:');
|
||||
console.log(' 1. Update your application configuration');
|
||||
console.log(' 2. Start your chat server');
|
||||
console.log(' 3. Test chat functionality in the browser');
|
||||
console.log(' 4. Monitor logs for any issues');
|
||||
|
||||
return true;
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Migration verification failed:', error.message);
|
||||
console.error('\n🔧 Troubleshooting steps:');
|
||||
console.error(' 1. Check database connection settings');
|
||||
console.error(' 2. Verify database and user exist');
|
||||
console.error(' 3. Check PostgreSQL logs');
|
||||
console.error(' 4. Ensure import completed without errors');
|
||||
return false;
|
||||
} finally {
|
||||
await pool.end();
|
||||
}
|
||||
}
|
||||
|
||||
// Run verification
|
||||
if (require.main === module) {
|
||||
verifyMigration().then(success => {
|
||||
process.exit(success ? 0 : 1);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = { verifyMigration };
|
||||
@@ -153,9 +153,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -261,16 +261,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/compression": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/compression/-/compression-1.8.0.tgz",
|
||||
"integrity": "sha512-k6WLKfunuqCYD3t6AsuPGvQWaKwuLLh2/xHNcX4qE+vIfDNXpSqnrhwA7O53R7WVQUnt8dVAIW+YHr7xTgOgGA==",
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz",
|
||||
"integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"bytes": "3.1.2",
|
||||
"compressible": "~2.0.18",
|
||||
"debug": "2.6.9",
|
||||
"negotiator": "~0.6.4",
|
||||
"on-headers": "~1.0.2",
|
||||
"on-headers": "~1.1.0",
|
||||
"safe-buffer": "5.2.1",
|
||||
"vary": "~1.1.2"
|
||||
},
|
||||
@@ -928,16 +928,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/morgan": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz",
|
||||
"integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==",
|
||||
"version": "1.10.1",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.1.tgz",
|
||||
"integrity": "sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"basic-auth": "~2.0.1",
|
||||
"debug": "2.6.9",
|
||||
"depd": "~2.0.0",
|
||||
"on-finished": "~2.3.0",
|
||||
"on-headers": "~1.0.2"
|
||||
"on-headers": "~1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
@@ -1119,9 +1119,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/on-headers": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
|
||||
"integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
|
||||
"integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
|
||||
@@ -163,6 +163,7 @@ router.post('/simulate', async (req, res) => {
|
||||
productPromo = {},
|
||||
shippingPromo = {},
|
||||
shippingTiers = [],
|
||||
surcharges = [],
|
||||
merchantFeePercent,
|
||||
fixedCostPerOrder,
|
||||
cogsCalculationMode = 'actual',
|
||||
@@ -219,6 +220,17 @@ router.post('/simulate', async (req, res) => {
|
||||
.filter(tier => tier.threshold >= 0 && tier.value >= 0)
|
||||
.sort((a, b) => a.threshold - b.threshold)
|
||||
: [],
|
||||
surcharges: Array.isArray(surcharges)
|
||||
? surcharges
|
||||
.map(s => ({
|
||||
threshold: Number(s.threshold || 0),
|
||||
maxThreshold: typeof s.maxThreshold === 'number' && s.maxThreshold > 0 ? s.maxThreshold : null,
|
||||
target: s.target === 'shipping' || s.target === 'order' ? s.target : 'shipping',
|
||||
amount: Number(s.amount || 0)
|
||||
}))
|
||||
.filter(s => s.threshold >= 0 && s.amount >= 0)
|
||||
.sort((a, b) => a.threshold - b.threshold)
|
||||
: [],
|
||||
points: {
|
||||
pointsPerDollar: typeof pointsConfig.pointsPerDollar === 'number' ? pointsConfig.pointsPerDollar : null,
|
||||
redemptionRate: typeof pointsConfig.redemptionRate === 'number' ? pointsConfig.redemptionRate : null,
|
||||
@@ -407,7 +419,7 @@ router.post('/simulate', async (req, res) => {
|
||||
};
|
||||
|
||||
const orderValue = data.avgSubtotal > 0 ? data.avgSubtotal : getMidpoint(range);
|
||||
const shippingChargeBase = data.avgShipRate > 0 ? data.avgShipRate : 0;
|
||||
const shippingChargeBase = data.avgShipCost > 0 ? data.avgShipCost : 0;
|
||||
const actualShippingCost = data.avgShipCost > 0 ? data.avgShipCost : 0;
|
||||
|
||||
// Calculate COGS based on the selected mode
|
||||
@@ -459,8 +471,23 @@ router.post('/simulate', async (req, res) => {
|
||||
shipPromoDiscount = Math.min(shipPromoDiscount, shippingAfterAuto);
|
||||
}
|
||||
|
||||
const customerShipCost = Math.max(0, shippingAfterAuto - shipPromoDiscount);
|
||||
const customerItemCost = Math.max(0, orderValue - promoProductDiscount);
|
||||
// Calculate surcharges
|
||||
let shippingSurcharge = 0;
|
||||
let orderSurcharge = 0;
|
||||
for (const surcharge of config.surcharges) {
|
||||
const meetsMin = orderValue >= surcharge.threshold;
|
||||
const meetsMax = surcharge.maxThreshold == null || orderValue < surcharge.maxThreshold;
|
||||
if (meetsMin && meetsMax) {
|
||||
if (surcharge.target === 'shipping') {
|
||||
shippingSurcharge += surcharge.amount;
|
||||
} else if (surcharge.target === 'order') {
|
||||
orderSurcharge += surcharge.amount;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const customerShipCost = Math.max(0, shippingAfterAuto - shipPromoDiscount + shippingSurcharge);
|
||||
const customerItemCost = Math.max(0, orderValue - promoProductDiscount + orderSurcharge);
|
||||
const totalRevenue = customerItemCost + customerShipCost;
|
||||
|
||||
const merchantFees = totalRevenue * (config.merchantFeePercent / 100);
|
||||
@@ -488,6 +515,8 @@ router.post('/simulate', async (req, res) => {
|
||||
shippingChargeBase,
|
||||
shippingAfterAuto,
|
||||
shipPromoDiscount,
|
||||
shippingSurcharge,
|
||||
orderSurcharge,
|
||||
customerShipCost,
|
||||
actualShippingCost,
|
||||
totalRevenue,
|
||||
|
||||
@@ -0,0 +1,683 @@
|
||||
const express = require('express');
|
||||
const { DateTime } = require('luxon');
|
||||
|
||||
const router = express.Router();
|
||||
const { getDbConnection, getPoolStatus } = require('../db/connection');
|
||||
const {
|
||||
getTimeRangeConditions,
|
||||
_internal: timeHelpers
|
||||
} = require('../utils/timeUtils');
|
||||
|
||||
const TIMEZONE = 'America/New_York';

// Punch event codes as stored in the timeclock table.
const PUNCH_TYPES = {
  OUT: 0,
  IN: 1,
  BREAK_START: 2,
  BREAK_END: 3,
};

// Standard hours for FTE calculation (40 hours per week)
const STANDARD_WEEKLY_HOURS = 40;

/**
 * Convert raw timeclock punch rows into worked hours per employee.
 *
 * Punches are grouped by employee, ordered chronologically, and paired:
 * IN..OUT spans accumulate `hours`, BREAK_START..BREAK_END spans accumulate
 * `breakHours`. An IN or BREAK_START with no matching closing punch is
 * silently dropped. `hours` therefore includes break time (used for FTE),
 * while `productiveHours` = hours - breakHours (used for productivity).
 *
 * @param {Array<{EmployeeID: *, TimeStamp: *, PunchType: number}>} punches
 * @returns {{employeeHours: Array, totalHours: number, totalBreakHours: number, totalProductiveHours: number}}
 */
function calculateHoursFromPunches(punches) {
  // Bucket raw punch rows by employee id.
  const punchesByEmployee = new Map();
  for (const row of punches) {
    const bucket = punchesByEmployee.get(row.EmployeeID);
    if (bucket) {
      bucket.push(row);
    } else {
      punchesByEmployee.set(row.EmployeeID, [row]);
    }
  }

  const employeeHours = [];
  let totalHours = 0;
  let totalBreakHours = 0;

  for (const [employeeId, rows] of punchesByEmployee) {
    // Chronological order is required so open/close punches pair up correctly.
    rows.sort((a, b) => new Date(a.TimeStamp) - new Date(b.TimeStamp));

    let workedHours = 0;
    let breakHours = 0;
    let openShiftStart = null;
    let openBreakStart = null;

    for (const row of rows) {
      const when = new Date(row.TimeStamp);

      if (row.PunchType === PUNCH_TYPES.IN) {
        openShiftStart = when;
      } else if (row.PunchType === PUNCH_TYPES.OUT) {
        if (openShiftStart) {
          // Milliseconds -> hours.
          workedHours += (when - openShiftStart) / (1000 * 60 * 60);
          openShiftStart = null;
        }
      } else if (row.PunchType === PUNCH_TYPES.BREAK_START) {
        openBreakStart = when;
      } else if (row.PunchType === PUNCH_TYPES.BREAK_END) {
        if (openBreakStart) {
          breakHours += (when - openBreakStart) / (1000 * 60 * 60);
          openBreakStart = null;
        }
      }
    }

    totalHours += workedHours;
    totalBreakHours += breakHours;

    employeeHours.push({
      employeeId,
      hours: workedHours,
      breakHours,
      productiveHours: workedHours - breakHours,
    });
  }

  return {
    employeeHours,
    totalHours,
    totalBreakHours,
    totalProductiveHours: totalHours - totalBreakHours,
  };
}
|
||||
|
||||
/**
 * Calculate FTE (Full Time Equivalents) for a period.
 *
 * FTE = actual hours / expected hours, where expected hours assume one
 * full-time employee working STANDARD_WEEKLY_HOURS per week for the
 * duration of the period (minimum one day).
 *
 * @param {number} totalHours - Total hours worked in the period
 * @param {Date|string} startDate - Period start
 * @param {Date|string} endDate - Period end
 * @returns {number} FTE count (0 when expected hours is not positive)
 */
function calculateFTE(totalHours, startDate, endDate) {
  const periodMs = new Date(endDate) - new Date(startDate);
  // Clamp to at least one day so a same-day range still yields a sane divisor.
  const periodDays = Math.max(1, periodMs / (1000 * 60 * 60 * 24));
  const expectedHours = (periodDays / 7) * STANDARD_WEEKLY_HOURS;

  return expectedHours > 0 ? totalHours / expectedHours : 0;
}
|
||||
|
||||
// Main employee metrics endpoint.
// GET / — aggregates timeclock, picking and shipping activity for the
// requested time range (?timeRange or ?startDate/?endDate) and returns
// totals, previous-period comparisons, per-employee breakdowns and a
// daily trend series.
router.get('/', async (req, res) => {
  const startTime = Date.now();
  console.log(`[EMPLOYEE-METRICS] Starting request for timeRange: ${req.query.timeRange}`);

  // Safety net: fail the request rather than hang on slow queries.
  // NOTE(review): losing this race does not cancel the in-flight DB work,
  // and the connection acquired inside mainOperation() is only released on
  // the success path below — confirm the pool reclaims leaked connections.
  const timeoutPromise = new Promise((_, reject) => {
    setTimeout(() => reject(new Error('Request timeout after 30 seconds')), 30000);
  });

  try {
    const mainOperation = async () => {
      const { timeRange, startDate, endDate } = req.query;
      console.log(`[EMPLOYEE-METRICS] Getting DB connection...`);
      const { connection, release } = await getDbConnection();
      console.log(`[EMPLOYEE-METRICS] DB connection obtained in ${Date.now() - startTime}ms`);

      const { whereClause, params, dateRange } = getTimeRangeConditions(timeRange, startDate, endDate);

      // Adapt where clause for timeclock table (uses TimeStamp instead of date_placed)
      const timeclockWhere = whereClause.replace(/date_placed/g, 'tc.TimeStamp');

      // Query for timeclock data with employee names
      const timeclockQuery = `
        SELECT
          tc.EmployeeID,
          tc.TimeStamp,
          tc.PunchType,
          e.firstname,
          e.lastname
        FROM timeclock tc
        LEFT JOIN employees e ON tc.EmployeeID = e.employeeid
        WHERE ${timeclockWhere}
          AND e.hidden = 0
          AND e.disabled = 0
        ORDER BY tc.EmployeeID, tc.TimeStamp
      `;

      const [timeclockRows] = await connection.execute(timeclockQuery, params);

      // Calculate hours (includes both total hours for FTE and productive hours for productivity)
      const { employeeHours, totalHours, totalBreakHours, totalProductiveHours } = calculateHoursFromPunches(timeclockRows);

      // Get employee names for the results (first row seen per employee wins)
      const employeeNames = new Map();
      timeclockRows.forEach(row => {
        if (!employeeNames.has(row.EmployeeID)) {
          employeeNames.set(row.EmployeeID, {
            firstname: row.firstname || '',
            lastname: row.lastname || '',
          });
        }
      });

      // Enrich employee hours with names, sorted by hours descending
      const enrichedEmployeeHours = employeeHours.map(eh => ({
        ...eh,
        name: employeeNames.has(eh.employeeId)
          ? `${employeeNames.get(eh.employeeId).firstname} ${employeeNames.get(eh.employeeId).lastname}`.trim()
          : `Employee ${eh.employeeId}`,
      })).sort((a, b) => b.hours - a.hours);

      // Query for picking tickets - using subquery to avoid duplication from bucket join
      // Ship-together orders: only count main orders (is_sub = 0 or NULL), not sub-orders
      const pickingWhere = whereClause.replace(/date_placed/g, 'pt.createddate');

      // First get picking ticket stats without the bucket join (to avoid duplication)
      const pickingStatsQuery = `
        SELECT
          pt.createdby as employeeId,
          e.firstname,
          e.lastname,
          COUNT(DISTINCT pt.pickingid) as ticketCount,
          SUM(pt.totalpieces_picked) as piecesPicked,
          SUM(TIMESTAMPDIFF(SECOND, pt.createddate, pt.closeddate)) as pickingTimeSeconds,
          AVG(NULLIF(pt.picking_speed, 0)) as avgPickingSpeed
        FROM picking_ticket pt
        LEFT JOIN employees e ON pt.createdby = e.employeeid
        WHERE ${pickingWhere}
          AND pt.closeddate IS NOT NULL
        GROUP BY pt.createdby, e.firstname, e.lastname
      `;

      // Separate query for order counts (needs bucket join for ship-together handling)
      const orderCountQuery = `
        SELECT
          pt.createdby as employeeId,
          COUNT(DISTINCT CASE WHEN ptb.is_sub = 0 OR ptb.is_sub IS NULL THEN ptb.orderid END) as ordersPicked
        FROM picking_ticket pt
        LEFT JOIN picking_ticket_buckets ptb ON pt.pickingid = ptb.pickingid
        WHERE ${pickingWhere}
          AND pt.closeddate IS NOT NULL
        GROUP BY pt.createdby
      `;

      const [[pickingStatsRows], [orderCountRows]] = await Promise.all([
        connection.execute(pickingStatsQuery, params),
        connection.execute(orderCountQuery, params)
      ]);

      // Merge the results: employeeId -> distinct orders picked
      const orderCountMap = new Map();
      orderCountRows.forEach(row => {
        orderCountMap.set(row.employeeId, parseInt(row.ordersPicked || 0));
      });

      // Aggregate picking totals
      let totalOrdersPicked = 0;
      let totalPiecesPicked = 0;
      let totalTickets = 0;
      let totalPickingTimeSeconds = 0;
      let pickingSpeedSum = 0;
      let pickingSpeedCount = 0;

      // NOTE: this map both builds the per-employee rows and accumulates the
      // totals above as a side effect.
      const pickingByEmployee = pickingStatsRows.map(row => {
        const ordersPicked = orderCountMap.get(row.employeeId) || 0;
        totalOrdersPicked += ordersPicked;
        totalPiecesPicked += parseInt(row.piecesPicked || 0);
        totalTickets += parseInt(row.ticketCount || 0);
        totalPickingTimeSeconds += parseInt(row.pickingTimeSeconds || 0);
        if (row.avgPickingSpeed && row.avgPickingSpeed > 0) {
          pickingSpeedSum += parseFloat(row.avgPickingSpeed);
          pickingSpeedCount++;
        }

        const empPickingHours = parseInt(row.pickingTimeSeconds || 0) / 3600;

        return {
          employeeId: row.employeeId,
          name: `${row.firstname || ''} ${row.lastname || ''}`.trim() || `Employee ${row.employeeId}`,
          ticketCount: parseInt(row.ticketCount || 0),
          ordersPicked,
          piecesPicked: parseInt(row.piecesPicked || 0),
          pickingHours: empPickingHours,
          avgPickingSpeed: row.avgPickingSpeed ? parseFloat(row.avgPickingSpeed) : null,
        };
      });

      const totalPickingHours = totalPickingTimeSeconds / 3600;
      // Unweighted mean of per-employee average speeds.
      const avgPickingSpeed = pickingSpeedCount > 0 ? pickingSpeedSum / pickingSpeedCount : 0;

      // Query for shipped orders - totals
      // Ship-together orders: only count main orders (order_type != 8 for sub-orders, or use parent tracking)
      const shippingWhere = whereClause.replace(/date_placed/g, 'o.date_shipped');

      const shippingQuery = `
        SELECT
          COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
          COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
        FROM _order o
        WHERE ${shippingWhere}
          AND o.order_status IN (100, 92)
      `;

      const [shippingRows] = await connection.execute(shippingQuery, params);
      const shipping = shippingRows[0] || { ordersShipped: 0, piecesShipped: 0 };

      // Query for shipped orders by employee
      const shippingByEmployeeQuery = `
        SELECT
          e.employeeid,
          e.firstname,
          e.lastname,
          COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
          COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
        FROM _order o
        JOIN employees e ON o.stats_cid_shipped = e.cid
        WHERE ${shippingWhere}
          AND o.order_status IN (100, 92)
          AND e.hidden = 0
          AND e.disabled = 0
        GROUP BY e.employeeid, e.firstname, e.lastname
        ORDER BY ordersShipped DESC
      `;

      const [shippingByEmployeeRows] = await connection.execute(shippingByEmployeeQuery, params);
      const shippingByEmployee = shippingByEmployeeRows.map(row => ({
        employeeId: row.employeeid,
        name: `${row.firstname || ''} ${row.lastname || ''}`.trim() || `Employee ${row.employeeid}`,
        ordersShipped: parseInt(row.ordersShipped || 0),
        piecesShipped: parseInt(row.piecesShipped || 0),
      }));

      // Calculate period dates for FTE calculation.
      // Preference order: explicit dateRange from timeUtils, then raw query
      // params (assumed to be [start, end] — TODO confirm against timeUtils),
      // then a trailing-30-day fallback.
      let periodStart, periodEnd;
      if (dateRange?.start) {
        periodStart = new Date(dateRange.start);
      } else if (params[0]) {
        periodStart = new Date(params[0]);
      } else {
        periodStart = new Date();
        periodStart.setDate(periodStart.getDate() - 30);
      }

      if (dateRange?.end) {
        periodEnd = new Date(dateRange.end);
      } else if (params[1]) {
        periodEnd = new Date(params[1]);
      } else {
        periodEnd = new Date();
      }

      const fte = calculateFTE(totalHours, periodStart, periodEnd);
      const activeEmployees = enrichedEmployeeHours.filter(e => e.hours > 0).length;

      // Calculate weeks in period for weekly averages
      const periodDays = Math.max(1, (periodEnd - periodStart) / (1000 * 60 * 60 * 24));
      const weeksInPeriod = periodDays / 7;

      // Get daily trend data for hours
      // Use DATE_FORMAT to get date string in Eastern timezone, avoiding JS timezone conversion issues
      // Business day starts at 1 AM, so subtract 1 hour before taking the date
      const trendWhere = whereClause.replace(/date_placed/g, 'tc.TimeStamp');
      const trendQuery = `
        SELECT
          DATE_FORMAT(DATE_SUB(tc.TimeStamp, INTERVAL 1 HOUR), '%Y-%m-%d') as date,
          tc.EmployeeID,
          tc.TimeStamp,
          tc.PunchType
        FROM timeclock tc
        LEFT JOIN employees e ON tc.EmployeeID = e.employeeid
        WHERE ${trendWhere}
          AND e.hidden = 0
          AND e.disabled = 0
        ORDER BY date, tc.EmployeeID, tc.TimeStamp
      `;

      const [trendRows] = await connection.execute(trendQuery, params);

      // Get daily picking data for trend
      // Ship-together orders: only count main orders (is_sub = 0 or NULL)
      // Use DATE_FORMAT for consistent date string format
      const pickingTrendWhere = whereClause.replace(/date_placed/g, 'pt.createddate');
      const pickingTrendQuery = `
        SELECT
          DATE_FORMAT(DATE_SUB(pt.createddate, INTERVAL 1 HOUR), '%Y-%m-%d') as date,
          COUNT(DISTINCT CASE WHEN ptb.is_sub = 0 OR ptb.is_sub IS NULL THEN ptb.orderid END) as ordersPicked,
          COALESCE(SUM(pt.totalpieces_picked), 0) as piecesPicked
        FROM picking_ticket pt
        LEFT JOIN picking_ticket_buckets ptb ON pt.pickingid = ptb.pickingid
        WHERE ${pickingTrendWhere}
          AND pt.closeddate IS NOT NULL
        GROUP BY DATE_FORMAT(DATE_SUB(pt.createddate, INTERVAL 1 HOUR), '%Y-%m-%d')
        ORDER BY date
      `;

      const [pickingTrendRows] = await connection.execute(pickingTrendQuery, params);

      // Create a map of picking data by date
      const pickingByDate = new Map();
      pickingTrendRows.forEach(row => {
        // Date is already a string in YYYY-MM-DD format from DATE_FORMAT
        const date = String(row.date);
        pickingByDate.set(date, {
          ordersPicked: parseInt(row.ordersPicked || 0),
          piecesPicked: parseInt(row.piecesPicked || 0),
        });
      });

      // Group timeclock by date for trend
      const byDate = new Map();
      trendRows.forEach(row => {
        // Date is already a string in YYYY-MM-DD format from DATE_FORMAT
        const date = String(row.date);
        if (!byDate.has(date)) {
          byDate.set(date, []);
        }
        byDate.get(date).push(row);
      });

      // Generate all dates in the period range for complete trend data
      const allDatesInRange = [];
      const startDt = DateTime.fromJSDate(periodStart).setZone(TIMEZONE).startOf('day');
      const endDt = DateTime.fromJSDate(periodEnd).setZone(TIMEZONE).startOf('day');

      let currentDt = startDt;
      while (currentDt <= endDt) {
        allDatesInRange.push(currentDt.toFormat('yyyy-MM-dd'));
        currentDt = currentDt.plus({ days: 1 });
      }

      // Build trend data for all dates in range, filling zeros for missing days
      const trend = allDatesInRange.map(date => {
        const punches = byDate.get(date) || [];
        const { totalHours: dayHours, employeeHours: dayEmployeeHours } = calculateHoursFromPunches(punches);
        const picking = pickingByDate.get(date) || { ordersPicked: 0, piecesPicked: 0 };

        // Parse date string in Eastern timezone to get proper ISO timestamp
        const dateDt = DateTime.fromFormat(date, 'yyyy-MM-dd', { zone: TIMEZONE });

        return {
          date,
          timestamp: dateDt.toISO(),
          hours: dayHours,
          activeEmployees: dayEmployeeHours.filter(e => e.hours > 0).length,
          ordersPicked: picking.ordersPicked,
          piecesPicked: picking.piecesPicked,
        };
      });

      // Get previous period data for comparison
      const previousRange = getPreviousPeriodRange(timeRange, startDate, endDate);
      let comparison = null;
      let previousTotals = null;

      if (previousRange) {
        const prevTimeclockWhere = previousRange.whereClause.replace(/date_placed/g, 'tc.TimeStamp');

        const [prevTimeclockRows] = await connection.execute(
          `SELECT tc.EmployeeID, tc.TimeStamp, tc.PunchType
           FROM timeclock tc
           LEFT JOIN employees e ON tc.EmployeeID = e.employeeid
           WHERE ${prevTimeclockWhere}
             AND e.hidden = 0
             AND e.disabled = 0
           ORDER BY tc.EmployeeID, tc.TimeStamp`,
          previousRange.params
        );

        const {
          totalHours: prevTotalHours,
          totalProductiveHours: prevProductiveHours,
          employeeHours: prevEmployeeHours
        } = calculateHoursFromPunches(prevTimeclockRows);
        const prevActiveEmployees = prevEmployeeHours.filter(e => e.hours > 0).length;

        // Previous picking data (ship-together fix applied)
        // Use separate queries to avoid duplication from bucket join
        const prevPickingWhere = previousRange.whereClause.replace(/date_placed/g, 'pt.createddate');

        const [[prevPickingStatsRows], [prevOrderCountRows]] = await Promise.all([
          connection.execute(
            `SELECT
               SUM(pt.totalpieces_picked) as piecesPicked,
               SUM(TIMESTAMPDIFF(SECOND, pt.createddate, pt.closeddate)) as pickingTimeSeconds
             FROM picking_ticket pt
             WHERE ${prevPickingWhere}
               AND pt.closeddate IS NOT NULL`,
            previousRange.params
          ),
          connection.execute(
            `SELECT
               COUNT(DISTINCT CASE WHEN ptb.is_sub = 0 OR ptb.is_sub IS NULL THEN ptb.orderid END) as ordersPicked
             FROM picking_ticket pt
             LEFT JOIN picking_ticket_buckets ptb ON pt.pickingid = ptb.pickingid
             WHERE ${prevPickingWhere}
               AND pt.closeddate IS NOT NULL`,
            previousRange.params
          )
        ]);

        const prevPickingStats = prevPickingStatsRows[0] || { piecesPicked: 0, pickingTimeSeconds: 0 };
        const prevOrderCount = prevOrderCountRows[0] || { ordersPicked: 0 };
        const prevPicking = {
          ordersPicked: parseInt(prevOrderCount.ordersPicked || 0),
          piecesPicked: parseInt(prevPickingStats.piecesPicked || 0),
          pickingTimeSeconds: parseInt(prevPickingStats.pickingTimeSeconds || 0)
        };
        const prevPickingHours = prevPicking.pickingTimeSeconds / 3600;

        // Previous shipping data
        const prevShippingWhere = previousRange.whereClause.replace(/date_placed/g, 'o.date_shipped');
        const [prevShippingRows] = await connection.execute(
          `SELECT
             COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
             COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
           FROM _order o
           WHERE ${prevShippingWhere}
             AND o.order_status IN (100, 92)`,
          previousRange.params
        );
        const prevShipping = prevShippingRows[0] || { ordersShipped: 0, piecesShipped: 0 };

        // Calculate previous period FTE and productivity.
        // NOTE(review): previousRange.start/.end are assumed to be exposed by
        // getTimeRangeConditions — falls back to the current period bounds
        // otherwise, which would skew prev FTE; confirm against timeUtils.
        const prevFte = calculateFTE(prevTotalHours, previousRange.start || periodStart, previousRange.end || periodEnd);
        const prevOrdersPerHour = prevProductiveHours > 0 ? parseInt(prevPicking.ordersPicked || 0) / prevProductiveHours : 0;
        const prevPiecesPerHour = prevProductiveHours > 0 ? parseInt(prevPicking.piecesPicked || 0) / prevProductiveHours : 0;

        previousTotals = {
          hours: prevTotalHours,
          productiveHours: prevProductiveHours,
          activeEmployees: prevActiveEmployees,
          fte: prevFte,
          ordersPicked: parseInt(prevPicking.ordersPicked || 0),
          piecesPicked: parseInt(prevPicking.piecesPicked || 0),
          pickingHours: prevPickingHours,
          ordersShipped: parseInt(prevShipping.ordersShipped || 0),
          piecesShipped: parseInt(prevShipping.piecesShipped || 0),
          ordersPerHour: prevOrdersPerHour,
          piecesPerHour: prevPiecesPerHour,
        };

        // Calculate productivity metrics for comparison
        const currentOrdersPerHour = totalProductiveHours > 0 ? totalOrdersPicked / totalProductiveHours : 0;
        const currentPiecesPerHour = totalProductiveHours > 0 ? totalPiecesPicked / totalProductiveHours : 0;

        comparison = {
          hours: calculateComparison(totalHours, prevTotalHours),
          productiveHours: calculateComparison(totalProductiveHours, prevProductiveHours),
          activeEmployees: calculateComparison(activeEmployees, prevActiveEmployees),
          fte: calculateComparison(fte, prevFte),
          ordersPicked: calculateComparison(totalOrdersPicked, parseInt(prevPicking.ordersPicked || 0)),
          piecesPicked: calculateComparison(totalPiecesPicked, parseInt(prevPicking.piecesPicked || 0)),
          ordersShipped: calculateComparison(parseInt(shipping.ordersShipped || 0), parseInt(prevShipping.ordersShipped || 0)),
          piecesShipped: calculateComparison(parseInt(shipping.piecesShipped || 0), parseInt(prevShipping.piecesShipped || 0)),
          ordersPerHour: calculateComparison(currentOrdersPerHour, prevOrdersPerHour),
          piecesPerHour: calculateComparison(currentPiecesPerHour, prevPiecesPerHour),
        };
      }

      // Calculate efficiency (picking time vs productive hours)
      const pickingEfficiency = totalProductiveHours > 0 ? (totalPickingHours / totalProductiveHours) * 100 : 0;

      const response = {
        dateRange,
        totals: {
          // Time metrics
          hours: totalHours,
          breakHours: totalBreakHours,
          productiveHours: totalProductiveHours,
          pickingHours: totalPickingHours,

          // Employee metrics
          activeEmployees,
          fte,
          weeksInPeriod,

          // Picking metrics
          ordersPicked: totalOrdersPicked,
          piecesPicked: totalPiecesPicked,
          ticketCount: totalTickets,

          // Shipping metrics
          ordersShipped: parseInt(shipping.ordersShipped || 0),
          piecesShipped: parseInt(shipping.piecesShipped || 0),

          // Calculated metrics - standardized to weekly
          hoursPerWeek: weeksInPeriod > 0 ? totalHours / weeksInPeriod : 0,
          hoursPerEmployeePerWeek: activeEmployees > 0 && weeksInPeriod > 0
            ? (totalHours / activeEmployees) / weeksInPeriod
            : 0,

          // Productivity metrics (uses productive hours - excludes breaks)
          ordersPerHour: totalProductiveHours > 0 ? totalOrdersPicked / totalProductiveHours : 0,
          piecesPerHour: totalProductiveHours > 0 ? totalPiecesPicked / totalProductiveHours : 0,

          // Picking speed from database (more accurate, only counts picking time)
          avgPickingSpeed,

          // Efficiency metrics
          pickingEfficiency,
        },
        previousTotals,
        comparison,
        byEmployee: {
          hours: enrichedEmployeeHours,
          picking: pickingByEmployee,
          shipping: shippingByEmployee,
        },
        trend,
      };

      return { response, release };
    };

    let result;
    try {
      result = await Promise.race([mainOperation(), timeoutPromise]);
    } catch (error) {
      // Extra logging only for the timeout case; all errors are rethrown to
      // the outer handler either way.
      if (error.message.includes('timeout')) {
        console.log(`[EMPLOYEE-METRICS] Request timed out in ${Date.now() - startTime}ms`);
        throw error;
      }
      throw error;
    }

    const { response, release } = result;

    // Return the connection to the pool (success path only).
    if (release) release();

    console.log(`[EMPLOYEE-METRICS] Request completed in ${Date.now() - startTime}ms`);
    res.json(response);

  } catch (error) {
    console.error('Error in /employee-metrics:', error);
    console.log(`[EMPLOYEE-METRICS] Request failed in ${Date.now() - startTime}ms`);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// Health check: verifies a pooled connection can run a trivial query and
// reports current pool status.
router.get('/health', async (req, res) => {
  try {
    const db = await getDbConnection();
    await db.connection.execute('SELECT 1 as test');
    db.release();

    res.json({
      status: 'healthy',
      timestamp: new Date().toISOString(),
      pool: getPoolStatus(),
    });
  } catch (err) {
    res.status(500).json({
      status: 'unhealthy',
      timestamp: new Date().toISOString(),
      error: err.message,
    });
  }
});
|
||||
|
||||
// Helper functions

/**
 * Diff a current metric against its previous-period value.
 *
 * @param {number} currentValue - Metric for the current period
 * @param {number} previousValue - Metric for the previous period
 * @returns {{absolute: ?number, percentage: ?number}} absolute delta and
 *   percent change; either is null when it cannot be computed (non-numeric
 *   input, or a previous value of zero for the percentage).
 */
function calculateComparison(currentValue, previousValue) {
  if (typeof previousValue !== 'number') {
    return { absolute: null, percentage: null };
  }

  const hasCurrent = typeof currentValue === 'number';
  const absolute = hasCurrent ? currentValue - previousValue : null;

  let percentage = null;
  if (absolute !== null && previousValue !== 0) {
    // Percent change relative to the magnitude of the previous value.
    percentage = (absolute / Math.abs(previousValue)) * 100;
  }

  return { absolute, percentage };
}
|
||||
|
||||
/**
 * Build the where-clause/params for the period immediately preceding the
 * requested one, or return null when no sensible previous period exists.
 *
 * Named ranges are mapped via getPreviousTimeRange (e.g. thisWeek -> lastWeek);
 * custom ranges get a window of equal length ending 1ms before the current
 * one starts.
 */
function getPreviousPeriodRange(timeRange, startDate, endDate) {
  // Named ranges: delegate to the name-to-name mapping.
  if (timeRange && timeRange !== 'custom') {
    const prevName = getPreviousTimeRange(timeRange);
    if (!prevName || prevName === timeRange) {
      return null;
    }
    return getTimeRangeConditions(prevName);
  }

  // Custom (or unspecified) ranges require both explicit dates.
  if (!startDate || !endDate) {
    return null;
  }

  const rangeStart = new Date(startDate);
  const rangeEnd = new Date(endDate);
  if (Number.isNaN(rangeStart.getTime()) || Number.isNaN(rangeEnd.getTime())) {
    return null;
  }

  const spanMs = rangeEnd.getTime() - rangeStart.getTime();
  if (!Number.isFinite(spanMs) || spanMs <= 0) {
    return null;
  }

  // Previous window: same length, ending 1ms before the current one starts.
  const prevEnd = new Date(rangeStart.getTime() - 1);
  const prevStart = new Date(prevEnd.getTime() - spanMs);

  return getTimeRangeConditions('custom', prevStart.toISOString(), prevEnd.toISOString());
}
|
||||
|
||||
/**
 * Map a named time range to the name of the period that precedes it.
 * Unknown values are returned unchanged — the caller treats an identical
 * result as "no previous period available".
 */
function getPreviousTimeRange(timeRange) {
  switch (timeRange) {
    case 'today':
      return 'yesterday';
    case 'yesterday':
      return 'twoDaysAgo';
    case 'thisWeek':
      return 'lastWeek';
    case 'thisMonth':
      return 'lastMonth';
    case 'last7days':
      return 'previous7days';
    case 'last30days':
      return 'previous30days';
    case 'last90days':
      return 'previous90days';
    default:
      return timeRange;
  }
}
|
||||
|
||||
// Expose the router so the app can mount it (per the error log above, under /employee-metrics).
module.exports = router;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,484 @@
|
||||
const express = require('express');
|
||||
const { DateTime } = require('luxon');
|
||||
|
||||
const router = express.Router();
|
||||
const { getDbConnection, getPoolStatus } = require('../db/connection');
|
||||
const {
|
||||
getTimeRangeConditions,
|
||||
} = require('../utils/timeUtils');
|
||||
|
||||
const TIMEZONE = 'America/New_York';
|
||||
|
||||
// Main operations metrics endpoint - focused on picking and shipping
//
// Query params: `timeRange` (named range) or `startDate`/`endDate` (custom),
// all forwarded to getTimeRangeConditions(). Responds with picking/shipping
// totals, per-employee breakdowns, a daily trend series, and a comparison
// against the previous period when one can be derived.
router.get('/', async (req, res) => {
  const startTime = Date.now();
  console.log(`[OPERATIONS-METRICS] Starting request for timeRange: ${req.query.timeRange}`);

  // Guard against runaway queries: reject after 30 seconds. The timer id is
  // kept so the outer finally can clear it (previously the pending timer was
  // never cleared and lingered for the full 30s after every request).
  let timeoutId;
  const timeoutPromise = new Promise((_, reject) => {
    timeoutId = setTimeout(() => reject(new Error('Request timeout after 30 seconds')), 30000);
  });

  try {
    const mainOperation = async () => {
      const { timeRange, startDate, endDate } = req.query;
      console.log(`[OPERATIONS-METRICS] Getting DB connection...`);
      const { connection, release } = await getDbConnection();
      console.log(`[OPERATIONS-METRICS] DB connection obtained in ${Date.now() - startTime}ms`);

      // BUGFIX: release the pooled connection on every exit path. Previously
      // release() only ran on the success path, so any failed query (or a
      // timeout) leaked the connection. On timeout the race winner rejects
      // while this function keeps running; it now releases in its own
      // finally once the in-flight queries settle.
      try {
        const { whereClause, params, dateRange } = getTimeRangeConditions(timeRange, startDate, endDate);

        // Query for picking tickets - using subquery to avoid duplication from bucket join
        // Ship-together orders: only count main orders (is_sub = 0 or NULL), not sub-orders
        const pickingWhere = whereClause.replace(/date_placed/g, 'pt.createddate');

        // First get picking ticket stats without the bucket join (to avoid duplication)
        const pickingStatsQuery = `
          SELECT
            pt.createdby as employeeId,
            e.firstname,
            e.lastname,
            COUNT(DISTINCT pt.pickingid) as ticketCount,
            SUM(pt.totalpieces_picked) as piecesPicked,
            SUM(TIMESTAMPDIFF(SECOND, pt.createddate, pt.closeddate)) as pickingTimeSeconds,
            AVG(NULLIF(pt.picking_speed, 0)) as avgPickingSpeed
          FROM picking_ticket pt
          LEFT JOIN employees e ON pt.createdby = e.employeeid
          WHERE ${pickingWhere}
            AND pt.closeddate IS NOT NULL
          GROUP BY pt.createdby, e.firstname, e.lastname
        `;

        // Separate query for order counts (needs bucket join for ship-together handling)
        const orderCountQuery = `
          SELECT
            pt.createdby as employeeId,
            COUNT(DISTINCT CASE WHEN ptb.is_sub = 0 OR ptb.is_sub IS NULL THEN ptb.orderid END) as ordersPicked
          FROM picking_ticket pt
          LEFT JOIN picking_ticket_buckets ptb ON pt.pickingid = ptb.pickingid
          WHERE ${pickingWhere}
            AND pt.closeddate IS NOT NULL
          GROUP BY pt.createdby
        `;

        const [[pickingStatsRows], [orderCountRows]] = await Promise.all([
          connection.execute(pickingStatsQuery, params),
          connection.execute(orderCountQuery, params)
        ]);

        // Merge the results: employeeId -> distinct orders picked
        const orderCountMap = new Map();
        orderCountRows.forEach(row => {
          orderCountMap.set(row.employeeId, parseInt(row.ordersPicked || 0));
        });

        // Aggregate picking totals
        let totalOrdersPicked = 0;
        let totalPiecesPicked = 0;
        let totalTickets = 0;
        let totalPickingTimeSeconds = 0;
        let pickingSpeedSum = 0;
        let pickingSpeedCount = 0;

        const pickingByEmployee = pickingStatsRows.map(row => {
          const ordersPicked = orderCountMap.get(row.employeeId) || 0;
          totalOrdersPicked += ordersPicked;
          totalPiecesPicked += parseInt(row.piecesPicked || 0);
          totalTickets += parseInt(row.ticketCount || 0);
          totalPickingTimeSeconds += parseInt(row.pickingTimeSeconds || 0);
          // Only employees with a real (non-zero) average contribute to the
          // overall average picking speed.
          if (row.avgPickingSpeed && row.avgPickingSpeed > 0) {
            pickingSpeedSum += parseFloat(row.avgPickingSpeed);
            pickingSpeedCount++;
          }

          const empPickingHours = parseInt(row.pickingTimeSeconds || 0) / 3600;

          return {
            employeeId: row.employeeId,
            name: `${row.firstname || ''} ${row.lastname || ''}`.trim() || `Employee ${row.employeeId}`,
            ticketCount: parseInt(row.ticketCount || 0),
            ordersPicked,
            piecesPicked: parseInt(row.piecesPicked || 0),
            pickingHours: empPickingHours,
            avgPickingSpeed: row.avgPickingSpeed ? parseFloat(row.avgPickingSpeed) : null,
          };
        });

        const totalPickingHours = totalPickingTimeSeconds / 3600;
        const avgPickingSpeed = pickingSpeedCount > 0 ? pickingSpeedSum / pickingSpeedCount : 0;

        // Query for shipped orders - totals
        // Ship-together orders: only count main orders (order_type != 8 for sub-orders)
        const shippingWhere = whereClause.replace(/date_placed/g, 'o.date_shipped');

        const shippingQuery = `
          SELECT
            COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
            COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
          FROM _order o
          WHERE ${shippingWhere}
            AND o.order_status IN (100, 92)
        `;

        const [shippingRows] = await connection.execute(shippingQuery, params);
        const shipping = shippingRows[0] || { ordersShipped: 0, piecesShipped: 0 };

        // Query for shipped orders by employee
        const shippingByEmployeeQuery = `
          SELECT
            e.employeeid,
            e.firstname,
            e.lastname,
            COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
            COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
          FROM _order o
          JOIN employees e ON o.stats_cid_shipped = e.cid
          WHERE ${shippingWhere}
            AND o.order_status IN (100, 92)
            AND e.hidden = 0
            AND e.disabled = 0
          GROUP BY e.employeeid, e.firstname, e.lastname
          ORDER BY ordersShipped DESC
        `;

        const [shippingByEmployeeRows] = await connection.execute(shippingByEmployeeQuery, params);
        const shippingByEmployee = shippingByEmployeeRows.map(row => ({
          employeeId: row.employeeid,
          name: `${row.firstname || ''} ${row.lastname || ''}`.trim() || `Employee ${row.employeeid}`,
          ordersShipped: parseInt(row.ordersShipped || 0),
          piecesShipped: parseInt(row.piecesShipped || 0),
        }));

        // Calculate period dates (prefer the explicit dateRange, fall back to
        // positional query params, else a trailing 30-day window).
        let periodStart, periodEnd;
        if (dateRange?.start) {
          periodStart = new Date(dateRange.start);
        } else if (params[0]) {
          periodStart = new Date(params[0]);
        } else {
          periodStart = new Date();
          periodStart.setDate(periodStart.getDate() - 30);
        }

        if (dateRange?.end) {
          periodEnd = new Date(dateRange.end);
        } else if (params[1]) {
          periodEnd = new Date(params[1]);
        } else {
          periodEnd = new Date();
        }

        // Calculate productivity (orders/pieces per picking hour)
        const ordersPerHour = totalPickingHours > 0 ? totalOrdersPicked / totalPickingHours : 0;
        const piecesPerHour = totalPickingHours > 0 ? totalPiecesPicked / totalPickingHours : 0;

        // Get daily trend data for picking
        // Use DATE_FORMAT to get date string in Eastern timezone
        // Business day starts at 1 AM, so subtract 1 hour before taking the date
        const pickingTrendWhere = whereClause.replace(/date_placed/g, 'pt.createddate');
        const pickingTrendQuery = `
          SELECT
            pt_agg.date,
            COALESCE(order_counts.ordersPicked, 0) as ordersPicked,
            pt_agg.piecesPicked
          FROM (
            SELECT
              DATE_FORMAT(DATE_SUB(pt.createddate, INTERVAL 1 HOUR), '%Y-%m-%d') as date,
              COALESCE(SUM(pt.totalpieces_picked), 0) as piecesPicked
            FROM picking_ticket pt
            WHERE ${pickingTrendWhere}
              AND pt.closeddate IS NOT NULL
            GROUP BY DATE_FORMAT(DATE_SUB(pt.createddate, INTERVAL 1 HOUR), '%Y-%m-%d')
          ) pt_agg
          LEFT JOIN (
            SELECT
              DATE_FORMAT(DATE_SUB(pt.createddate, INTERVAL 1 HOUR), '%Y-%m-%d') as date,
              COUNT(DISTINCT CASE WHEN ptb.is_sub = 0 OR ptb.is_sub IS NULL THEN ptb.orderid END) as ordersPicked
            FROM picking_ticket pt
            LEFT JOIN picking_ticket_buckets ptb ON pt.pickingid = ptb.pickingid
            WHERE ${pickingTrendWhere}
              AND pt.closeddate IS NOT NULL
            GROUP BY DATE_FORMAT(DATE_SUB(pt.createddate, INTERVAL 1 HOUR), '%Y-%m-%d')
          ) order_counts ON pt_agg.date = order_counts.date
          ORDER BY pt_agg.date
        `;

        // Get shipping trend data
        const shippingTrendWhere = whereClause.replace(/date_placed/g, 'o.date_shipped');
        const shippingTrendQuery = `
          SELECT
            DATE_FORMAT(DATE_SUB(o.date_shipped, INTERVAL 1 HOUR), '%Y-%m-%d') as date,
            COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
            COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
          FROM _order o
          WHERE ${shippingTrendWhere}
            AND o.order_status IN (100, 92)
          GROUP BY DATE_FORMAT(DATE_SUB(o.date_shipped, INTERVAL 1 HOUR), '%Y-%m-%d')
          ORDER BY date
        `;

        // The picking trend query interpolates the WHERE clause twice, so its
        // positional params must be supplied twice as well.
        const [[pickingTrendRows], [shippingTrendRows]] = await Promise.all([
          connection.execute(pickingTrendQuery, [...params, ...params]),
          connection.execute(shippingTrendQuery, params),
        ]);

        // Create maps for trend data keyed by yyyy-MM-dd string
        const pickingByDate = new Map();
        pickingTrendRows.forEach(row => {
          const date = String(row.date);
          pickingByDate.set(date, {
            ordersPicked: parseInt(row.ordersPicked || 0),
            piecesPicked: parseInt(row.piecesPicked || 0),
          });
        });

        const shippingByDate = new Map();
        shippingTrendRows.forEach(row => {
          const date = String(row.date);
          shippingByDate.set(date, {
            ordersShipped: parseInt(row.ordersShipped || 0),
            piecesShipped: parseInt(row.piecesShipped || 0),
          });
        });

        // Generate all dates in the period range for complete trend data
        // (days with no activity appear as explicit zero rows).
        const allDatesInRange = [];
        const startDt = DateTime.fromJSDate(periodStart).setZone(TIMEZONE).startOf('day');
        const endDt = DateTime.fromJSDate(periodEnd).setZone(TIMEZONE).startOf('day');

        let currentDt = startDt;
        while (currentDt <= endDt) {
          allDatesInRange.push(currentDt.toFormat('yyyy-MM-dd'));
          currentDt = currentDt.plus({ days: 1 });
        }

        // Build trend data for all dates in range
        const trend = allDatesInRange.map(date => {
          const picking = pickingByDate.get(date) || { ordersPicked: 0, piecesPicked: 0 };
          const shippingData = shippingByDate.get(date) || { ordersShipped: 0, piecesShipped: 0 };

          // Parse date string in Eastern timezone to get proper ISO timestamp
          const dateDt = DateTime.fromFormat(date, 'yyyy-MM-dd', { zone: TIMEZONE });

          return {
            date,
            timestamp: dateDt.toISO(),
            ordersPicked: picking.ordersPicked,
            piecesPicked: picking.piecesPicked,
            ordersShipped: shippingData.ordersShipped,
            piecesShipped: shippingData.piecesShipped,
          };
        });

        // Get previous period data for comparison
        const previousRange = getPreviousPeriodRange(timeRange, startDate, endDate);
        let comparison = null;
        let previousTotals = null;

        if (previousRange) {
          // Previous picking data
          const prevPickingWhere = previousRange.whereClause.replace(/date_placed/g, 'pt.createddate');

          const [[prevPickingStatsRows], [prevOrderCountRows]] = await Promise.all([
            connection.execute(
              `SELECT
                SUM(pt.totalpieces_picked) as piecesPicked,
                SUM(TIMESTAMPDIFF(SECOND, pt.createddate, pt.closeddate)) as pickingTimeSeconds
              FROM picking_ticket pt
              WHERE ${prevPickingWhere}
                AND pt.closeddate IS NOT NULL`,
              previousRange.params
            ),
            connection.execute(
              `SELECT
                COUNT(DISTINCT CASE WHEN ptb.is_sub = 0 OR ptb.is_sub IS NULL THEN ptb.orderid END) as ordersPicked
              FROM picking_ticket pt
              LEFT JOIN picking_ticket_buckets ptb ON pt.pickingid = ptb.pickingid
              WHERE ${prevPickingWhere}
                AND pt.closeddate IS NOT NULL`,
              previousRange.params
            )
          ]);

          const prevPickingStats = prevPickingStatsRows[0] || { piecesPicked: 0, pickingTimeSeconds: 0 };
          const prevOrderCount = prevOrderCountRows[0] || { ordersPicked: 0 };
          const prevPicking = {
            ordersPicked: parseInt(prevOrderCount.ordersPicked || 0),
            piecesPicked: parseInt(prevPickingStats.piecesPicked || 0),
            pickingTimeSeconds: parseInt(prevPickingStats.pickingTimeSeconds || 0)
          };
          const prevPickingHours = prevPicking.pickingTimeSeconds / 3600;

          // Previous shipping data
          const prevShippingWhere = previousRange.whereClause.replace(/date_placed/g, 'o.date_shipped');
          const [prevShippingRows] = await connection.execute(
            `SELECT
              COUNT(DISTINCT CASE WHEN o.order_type != 8 OR o.order_type IS NULL THEN o.order_id END) as ordersShipped,
              COALESCE(SUM(o.stats_prod_pieces), 0) as piecesShipped
            FROM _order o
            WHERE ${prevShippingWhere}
              AND o.order_status IN (100, 92)`,
            previousRange.params
          );
          const prevShipping = prevShippingRows[0] || { ordersShipped: 0, piecesShipped: 0 };

          // Calculate previous productivity
          const prevOrdersPerHour = prevPickingHours > 0 ? parseInt(prevPicking.ordersPicked || 0) / prevPickingHours : 0;
          const prevPiecesPerHour = prevPickingHours > 0 ? parseInt(prevPicking.piecesPicked || 0) / prevPickingHours : 0;

          previousTotals = {
            ordersPicked: parseInt(prevPicking.ordersPicked || 0),
            piecesPicked: parseInt(prevPicking.piecesPicked || 0),
            pickingHours: prevPickingHours,
            ordersShipped: parseInt(prevShipping.ordersShipped || 0),
            piecesShipped: parseInt(prevShipping.piecesShipped || 0),
            ordersPerHour: prevOrdersPerHour,
            piecesPerHour: prevPiecesPerHour,
          };

          comparison = {
            ordersPicked: calculateComparison(totalOrdersPicked, parseInt(prevPicking.ordersPicked || 0)),
            piecesPicked: calculateComparison(totalPiecesPicked, parseInt(prevPicking.piecesPicked || 0)),
            ordersShipped: calculateComparison(parseInt(shipping.ordersShipped || 0), parseInt(prevShipping.ordersShipped || 0)),
            piecesShipped: calculateComparison(parseInt(shipping.piecesShipped || 0), parseInt(prevShipping.piecesShipped || 0)),
            ordersPerHour: calculateComparison(ordersPerHour, prevOrdersPerHour),
            piecesPerHour: calculateComparison(piecesPerHour, prevPiecesPerHour),
          };
        }

        const response = {
          dateRange,
          totals: {
            // Picking metrics
            ordersPicked: totalOrdersPicked,
            piecesPicked: totalPiecesPicked,
            ticketCount: totalTickets,
            pickingHours: totalPickingHours,

            // Shipping metrics
            ordersShipped: parseInt(shipping.ordersShipped || 0),
            piecesShipped: parseInt(shipping.piecesShipped || 0),

            // Productivity metrics
            ordersPerHour,
            piecesPerHour,
            avgPickingSpeed,
          },
          previousTotals,
          comparison,
          byEmployee: {
            picking: pickingByEmployee,
            shipping: shippingByEmployee,
          },
          trend,
        };

        return { response };
      } finally {
        release();
      }
    };

    const { response } = await Promise.race([mainOperation(), timeoutPromise]);

    console.log(`[OPERATIONS-METRICS] Request completed in ${Date.now() - startTime}ms`);
    res.json(response);
  } catch (error) {
    if (error.message.includes('timeout')) {
      console.log(`[OPERATIONS-METRICS] Request timed out in ${Date.now() - startTime}ms`);
    }
    console.error('Error in /operations-metrics:', error);
    console.log(`[OPERATIONS-METRICS] Request failed in ${Date.now() - startTime}ms`);
    res.status(500).json({ error: error.message });
  } finally {
    clearTimeout(timeoutId);
  }
});
|
||||
|
||||
// Health check
// Verifies a pooled DB connection can run a trivial query and reports pool stats.
router.get('/health', async (req, res) => {
  try {
    const { connection, release } = await getDbConnection();
    try {
      await connection.execute('SELECT 1 as test');
    } finally {
      // BUGFIX: previously release() was skipped when the probe query threw,
      // leaking a pooled connection on every failed health check.
      release();
    }

    res.json({
      status: 'healthy',
      timestamp: new Date().toISOString(),
      pool: getPoolStatus(),
    });
  } catch (error) {
    res.status(500).json({
      status: 'unhealthy',
      timestamp: new Date().toISOString(),
      error: error.message,
    });
  }
});
|
||||
|
||||
// Helper functions

/**
 * Compare a current metric value against the previous period's value.
 *
 * @param {number} currentValue - value for the current period
 * @param {number} previousValue - value for the previous period
 * @returns {{ absolute: number|null, percentage: number|null }} the raw delta
 *   and the percent change relative to |previousValue|; either field is null
 *   when it cannot be computed (non-numeric input, or a zero baseline for
 *   the percentage).
 */
function calculateComparison(currentValue, previousValue) {
  if (typeof previousValue !== 'number') {
    return { absolute: null, percentage: null };
  }

  let absolute = null;
  if (typeof currentValue === 'number') {
    absolute = currentValue - previousValue;
  }

  let percentage = null;
  if (absolute !== null && previousValue !== 0) {
    percentage = (absolute / Math.abs(previousValue)) * 100;
  }

  return { absolute, percentage };
}
|
||||
|
||||
/**
 * Build the query conditions for the period immediately preceding the
 * requested one, or null when no sensible previous period exists.
 *
 * @param {string} timeRange - named range, 'custom', or falsy
 * @param {string} startDate - custom range start (ISO-parseable)
 * @param {string} endDate - custom range end (ISO-parseable)
 * @returns {object|null} result of getTimeRangeConditions for the prior window
 */
function getPreviousPeriodRange(timeRange, startDate, endDate) {
  // Named ranges map to a named "previous" range when one is defined.
  if (timeRange && timeRange !== 'custom') {
    const prevName = getPreviousTimeRange(timeRange);
    if (!prevName || prevName === timeRange) {
      return null;
    }
    return getTimeRangeConditions(prevName);
  }

  // Custom (or unspecified) ranges need both explicit endpoints.
  if (!startDate || !endDate) {
    return null;
  }

  const rangeStart = new Date(startDate);
  const rangeEnd = new Date(endDate);
  if (Number.isNaN(rangeStart.getTime()) || Number.isNaN(rangeEnd.getTime())) {
    return null;
  }

  const spanMs = rangeEnd.getTime() - rangeStart.getTime();
  if (!Number.isFinite(spanMs) || spanMs <= 0) {
    return null;
  }

  // Previous window: same length, ending 1 ms before the current one starts.
  const prevEnd = new Date(rangeStart.getTime() - 1);
  const prevStart = new Date(prevEnd.getTime() - spanMs);
  return getTimeRangeConditions('custom', prevStart.toISOString(), prevEnd.toISOString());
}
|
||||
|
||||
/**
 * Map a named time range to the name of the range immediately before it.
 * Returns the input unchanged when no previous range is defined; callers
 * treat "same value back" as "no previous period available".
 *
 * @param {string} timeRange - named time range from the request query
 * @returns {string} the previous range name, or the input itself
 */
function getPreviousTimeRange(timeRange) {
  const map = {
    today: 'yesterday',
    thisWeek: 'lastWeek',
    thisMonth: 'lastMonth',
    last7days: 'previous7days',
    last30days: 'previous30days',
    last90days: 'previous90days',
    yesterday: 'twoDaysAgo'
  };
  // BUGFIX: guard with Object.hasOwn so untrusted query values such as
  // "constructor" or "toString" cannot resolve through the object's
  // prototype chain (map['constructor'] is truthy on a plain object).
  return Object.hasOwn(map, timeRange) ? map[timeRange] : timeRange;
}
|
||||
|
||||
// Export the configured Express router.
module.exports = router;
|
||||
505
inventory-server/dashboard/acot-server/routes/payroll-metrics.js
Normal file
505
inventory-server/dashboard/acot-server/routes/payroll-metrics.js
Normal file
@@ -0,0 +1,505 @@
|
||||
const express = require('express');
const { DateTime } = require('luxon');

const router = express.Router();
const { getDbConnection, getPoolStatus } = require('../db/connection');

// IANA timezone used for all pay-period boundary math in this module.
const TIMEZONE = 'America/New_York';

// Punch types from the database (numeric PunchType values on timeclock rows).
const PUNCH_TYPES = {
  OUT: 0,          // clock out
  IN: 1,           // clock in
  BREAK_START: 2,  // break started
  BREAK_END: 3,    // break ended
};

// Standard hours for overtime calculation (40 hours per week)
const STANDARD_WEEKLY_HOURS = 40;

// Reference pay period start date (January 25, 2026 is a Sunday, first day of a pay period)
// Every pay period is derived as a whole 14-day offset from this anchor.
const PAY_PERIOD_REFERENCE = DateTime.fromObject(
  { year: 2026, month: 1, day: 25 },
  { zone: TIMEZONE }
);
|
||||
|
||||
/**
 * Calculate the pay period that contains a given date.
 *
 * Pay periods are 14 days long and start on a Sunday; each one is a whole
 * 14-day offset from PAY_PERIOD_REFERENCE (a negative period index handles
 * dates before the reference).
 *
 * @param {DateTime} date - The date to find the pay period for (a plain JS
 *   Date is also accepted and converted)
 * @returns {{ start: DateTime, end: DateTime, week1: { start: DateTime, end: DateTime }, week2: { start: DateTime, end: DateTime } }}
 */
function getPayPeriodForDate(date) {
  // Accept either a luxon DateTime or a plain JS Date.
  const target = DateTime.isDateTime(date) ? date : DateTime.fromJSDate(date, { zone: TIMEZONE });

  // Whole days between the target and the reference Sunday (negative when
  // the target precedes the reference).
  const dayOffset = Math.floor(target.diff(PAY_PERIOD_REFERENCE, 'days').days);

  // Index of the 14-day period that contains the target.
  const periodIndex = Math.floor(dayOffset / 14);

  const periodStart = PAY_PERIOD_REFERENCE.plus({ days: periodIndex * 14 }).startOf('day');
  const periodEnd = periodStart.plus({ days: 13 }).endOf('day');

  return {
    start: periodStart,
    end: periodEnd,
    // Week 1: Sunday through Saturday
    week1: {
      start: periodStart,
      end: periodStart.plus({ days: 6 }).endOf('day'),
    },
    // Week 2: Sunday through Saturday
    week2: {
      start: periodStart.plus({ days: 7 }).startOf('day'),
      end: periodEnd,
    },
  };
}
|
||||
|
||||
/**
 * Get the pay period containing the current moment (in the business timezone).
 * @returns {ReturnType<typeof getPayPeriodForDate>}
 */
function getCurrentPayPeriod() {
  const now = DateTime.now().setZone(TIMEZONE);
  return getPayPeriodForDate(now);
}
|
||||
|
||||
/**
 * Navigate to a previous or next pay period.
 *
 * @param {DateTime} currentStart - Current pay period start
 * @param {number} offset - Number of pay periods to move (negative for previous)
 * @returns {ReturnType<typeof getPayPeriodForDate>} the shifted pay period
 */
function navigatePayPeriod(currentStart, offset) {
  // Shift by whole 14-day periods, then resolve the canonical period
  // boundaries for the resulting date.
  return getPayPeriodForDate(currentStart.plus({ days: offset * 14 }));
}
|
||||
|
||||
/**
 * Calculate working hours from timeclock entries, broken down by week.
 *
 * Groups punches by employee, splits them into the pay period's two weeks by
 * timestamp, pairs them into worked/break intervals via
 * calculateHoursFromPunches, and computes per-week overtime (hours beyond
 * STANDARD_WEEKLY_HOURS in a single week).
 *
 * NOTE(review): punches are partitioned by week BEFORE pairing, so an IN/OUT
 * pair that straddles the Saturday→Sunday week boundary ends up as an
 * unmatched punch in each week and that interval is dropped from the totals —
 * confirm this is acceptable for overnight shifts.
 *
 * @param {Array} punches - Timeclock punch entries (EmployeeID, TimeStamp,
 *   PunchType, firstname, lastname)
 * @param {Object} payPeriod - Pay period with week boundaries (see
 *   getPayPeriodForDate)
 * @returns {{ byEmployee: Array, totals: Object, byWeek: Array }}
 */
function calculateHoursByWeek(punches, payPeriod) {
  // Group by employee
  const byEmployee = new Map();

  punches.forEach(punch => {
    if (!byEmployee.has(punch.EmployeeID)) {
      byEmployee.set(punch.EmployeeID, {
        employeeId: punch.EmployeeID,
        firstname: punch.firstname || '',
        lastname: punch.lastname || '',
        punches: [],
      });
    }
    byEmployee.get(punch.EmployeeID).punches.push(punch);
  });

  // Period-wide accumulators.
  const employeeResults = [];
  let totalHours = 0;
  let totalBreakHours = 0;
  let totalOvertimeHours = 0;
  let totalRegularHours = 0;
  let week1TotalHours = 0;
  let week1TotalOvertime = 0;
  let week2TotalHours = 0;
  let week2TotalOvertime = 0;

  byEmployee.forEach((employeeData) => {
    // Sort punches by timestamp so IN/OUT pairing sees them in order.
    employeeData.punches.sort((a, b) => new Date(a.TimeStamp) - new Date(b.TimeStamp));

    // Calculate hours for each week
    const week1Punches = employeeData.punches.filter(p => {
      const dt = DateTime.fromJSDate(new Date(p.TimeStamp), { zone: TIMEZONE });
      return dt >= payPeriod.week1.start && dt <= payPeriod.week1.end;
    });

    const week2Punches = employeeData.punches.filter(p => {
      const dt = DateTime.fromJSDate(new Date(p.TimeStamp), { zone: TIMEZONE });
      return dt >= payPeriod.week2.start && dt <= payPeriod.week2.end;
    });

    const week1Hours = calculateHoursFromPunches(week1Punches);
    const week2Hours = calculateHoursFromPunches(week2Punches);

    // Calculate overtime per week (anything over 40 hours)
    const week1Overtime = Math.max(0, week1Hours.hours - STANDARD_WEEKLY_HOURS);
    const week2Overtime = Math.max(0, week2Hours.hours - STANDARD_WEEKLY_HOURS);
    const week1Regular = week1Hours.hours - week1Overtime;
    const week2Regular = week2Hours.hours - week2Overtime;

    const employeeTotal = week1Hours.hours + week2Hours.hours;
    const employeeBreaks = week1Hours.breakHours + week2Hours.breakHours;
    const employeeOvertime = week1Overtime + week2Overtime;
    const employeeRegular = employeeTotal - employeeOvertime;

    totalHours += employeeTotal;
    totalBreakHours += employeeBreaks;
    totalOvertimeHours += employeeOvertime;
    totalRegularHours += employeeRegular;
    week1TotalHours += week1Hours.hours;
    week1TotalOvertime += week1Overtime;
    week2TotalHours += week2Hours.hours;
    week2TotalOvertime += week2Overtime;

    employeeResults.push({
      employeeId: employeeData.employeeId,
      name: `${employeeData.firstname} ${employeeData.lastname}`.trim() || `Employee ${employeeData.employeeId}`,
      week1Hours: week1Hours.hours,
      week1BreakHours: week1Hours.breakHours,
      week1Overtime,
      week1Regular,
      week2Hours: week2Hours.hours,
      week2BreakHours: week2Hours.breakHours,
      week2Overtime,
      week2Regular,
      totalHours: employeeTotal,
      totalBreakHours: employeeBreaks,
      overtimeHours: employeeOvertime,
      regularHours: employeeRegular,
    });
  });

  // Sort by total hours descending
  employeeResults.sort((a, b) => b.totalHours - a.totalHours);

  return {
    byEmployee: employeeResults,
    totals: {
      hours: totalHours,
      breakHours: totalBreakHours,
      overtimeHours: totalOvertimeHours,
      regularHours: totalRegularHours,
      // Employees with at least one paired IN/OUT interval in the period.
      activeEmployees: employeeResults.filter(e => e.totalHours > 0).length,
    },
    byWeek: [
      {
        week: 1,
        start: payPeriod.week1.start.toISODate(),
        end: payPeriod.week1.end.toISODate(),
        hours: week1TotalHours,
        overtime: week1TotalOvertime,
        regular: week1TotalHours - week1TotalOvertime,
      },
      {
        week: 2,
        start: payPeriod.week2.start.toISODate(),
        end: payPeriod.week2.end.toISODate(),
        hours: week2TotalHours,
        overtime: week2TotalOvertime,
        regular: week2TotalHours - week2TotalOvertime,
      },
    ],
  };
}
|
||||
|
||||
/**
 * Calculate hours from a set of punches.
 *
 * Walks the punches in the order given and pairs IN→OUT punches into worked
 * time and BREAK_START→BREAK_END punches into break time. Unpaired punches
 * (an OUT with no open IN, or a dangling IN/BREAK_START at the end) add
 * nothing to the totals; a repeated IN simply restarts the open interval.
 *
 * @param {Array} punches - punch rows with TimeStamp and PunchType
 * @returns {{ hours: number, breakHours: number }}
 */
function calculateHoursFromPunches(punches) {
  const MS_PER_HOUR = 1000 * 60 * 60;
  let workedHours = 0;
  let breakTotal = 0;
  let shiftOpenedAt = null;
  let breakOpenedAt = null;

  for (const punch of punches) {
    const at = new Date(punch.TimeStamp);

    if (punch.PunchType === PUNCH_TYPES.IN) {
      shiftOpenedAt = at;
    } else if (punch.PunchType === PUNCH_TYPES.OUT) {
      if (shiftOpenedAt) {
        workedHours += (at - shiftOpenedAt) / MS_PER_HOUR;
        shiftOpenedAt = null;
      }
    } else if (punch.PunchType === PUNCH_TYPES.BREAK_START) {
      breakOpenedAt = at;
    } else if (punch.PunchType === PUNCH_TYPES.BREAK_END) {
      if (breakOpenedAt) {
        breakTotal += (at - breakOpenedAt) / MS_PER_HOUR;
        breakOpenedAt = null;
      }
    }
    // Unknown punch types are ignored (same as the original switch default).
  }

  return { hours: workedHours, breakHours: breakTotal };
}
|
||||
|
||||
/**
 * Calculate FTE for a pay period (1.0 FTE = 80 hours over the 2-week period).
 *
 * @param {number} totalHours - Total hours worked
 * @param {number} elapsedFraction - Fraction of the period elapsed (0-1). Defaults to 1 for complete periods.
 * @returns {number} FTE count; 0 when the prorated denominator is not positive
 */
function calculateFTE(totalHours, elapsedFraction = 1) {
  // 80 hours for a full 2-week period, scaled down for in-progress periods.
  const proratedFullTimeHours = STANDARD_WEEKLY_HOURS * 2 * elapsedFraction;
  if (!(proratedFullTimeHours > 0)) {
    return 0;
  }
  return totalHours / proratedFullTimeHours;
}
|
||||
|
||||
// Main payroll metrics endpoint
//
// Query params:
//   payPeriodStart - ISO date inside the pay period to report on (defaults to now)
//   navigate       - integer number of pay periods to shift by (e.g. -1)
// Responds with hour totals, per-employee and per-week breakdowns, FTE, and a
// comparison against the immediately preceding pay period.
router.get('/', async (req, res) => {
  const startTime = Date.now();
  console.log(`[PAYROLL-METRICS] Starting request`);

  // Guard against runaway queries; the timer is cleared in the outer finally
  // (previously it was never cleared).
  let timeoutId;
  const timeoutPromise = new Promise((_, reject) => {
    timeoutId = setTimeout(() => reject(new Error('Request timeout after 30 seconds')), 30000);
  });

  try {
    const mainOperation = async () => {
      const { payPeriodStart, navigate } = req.query;

      let payPeriod;

      if (payPeriodStart) {
        // Parse the provided start date
        const startDate = DateTime.fromISO(payPeriodStart, { zone: TIMEZONE });
        if (!startDate.isValid) {
          // BUGFIX: this used to `return res.status(400).json(...)`, which made
          // the code after the race destructure the Response object and call
          // res.json(undefined) on an already-sent response. Throw a tagged
          // error instead and let the outer catch map it to a 400.
          const err = new Error('Invalid payPeriodStart date format');
          err.statusCode = 400;
          throw err;
        }
        payPeriod = getPayPeriodForDate(startDate);
      } else {
        // Default to current pay period
        payPeriod = getCurrentPayPeriod();
      }

      // Handle navigation if requested
      if (navigate) {
        const offset = parseInt(navigate, 10);
        if (!isNaN(offset)) {
          payPeriod = navigatePayPeriod(payPeriod.start, offset);
        }
      }

      console.log(`[PAYROLL-METRICS] Getting DB connection...`);
      const { connection, release } = await getDbConnection();
      console.log(`[PAYROLL-METRICS] DB connection obtained in ${Date.now() - startTime}ms`);

      // BUGFIX: release the pooled connection on every exit path (previously
      // a query error leaked the connection).
      try {
        // Build query for the pay period
        const periodStart = payPeriod.start.toJSDate();
        const periodEnd = payPeriod.end.toJSDate();

        const timeclockQuery = `
          SELECT
            tc.EmployeeID,
            tc.TimeStamp,
            tc.PunchType,
            e.firstname,
            e.lastname
          FROM timeclock tc
          LEFT JOIN employees e ON tc.EmployeeID = e.employeeid
          WHERE tc.TimeStamp >= ? AND tc.TimeStamp <= ?
            AND e.hidden = 0
            AND e.disabled = 0
          ORDER BY tc.EmployeeID, tc.TimeStamp
        `;

        const [timeclockRows] = await connection.execute(timeclockQuery, [periodStart, periodEnd]);

        // Calculate hours with week breakdown
        const hoursData = calculateHoursByWeek(timeclockRows, payPeriod);

        // Calculate FTE — prorate for in-progress periods so the value reflects
        // the pace employees are on rather than raw hours / 80
        let elapsedFraction = 1;
        if (isCurrentPayPeriod(payPeriod)) {
          const now = DateTime.now().setZone(TIMEZONE);
          const elapsedDays = Math.max(1, Math.ceil(now.diff(payPeriod.start, 'days').days));
          elapsedFraction = Math.min(1, elapsedDays / 14);
        }
        const fte = calculateFTE(hoursData.totals.hours, elapsedFraction);
        const activeEmployees = hoursData.totals.activeEmployees;
        const avgHoursPerEmployee = activeEmployees > 0 ? hoursData.totals.hours / activeEmployees : 0;

        // Get previous pay period data for comparison
        const prevPayPeriod = navigatePayPeriod(payPeriod.start, -1);
        const [prevTimeclockRows] = await connection.execute(timeclockQuery, [
          prevPayPeriod.start.toJSDate(),
          prevPayPeriod.end.toJSDate(),
        ]);

        const prevHoursData = calculateHoursByWeek(prevTimeclockRows, prevPayPeriod);
        const prevFte = calculateFTE(prevHoursData.totals.hours);

        // Calculate comparisons
        const comparison = {
          hours: calculateComparison(hoursData.totals.hours, prevHoursData.totals.hours),
          overtimeHours: calculateComparison(hoursData.totals.overtimeHours, prevHoursData.totals.overtimeHours),
          fte: calculateComparison(fte, prevFte),
          activeEmployees: calculateComparison(hoursData.totals.activeEmployees, prevHoursData.totals.activeEmployees),
        };

        const response = {
          payPeriod: {
            start: payPeriod.start.toISODate(),
            end: payPeriod.end.toISODate(),
            label: formatPayPeriodLabel(payPeriod),
            week1: {
              start: payPeriod.week1.start.toISODate(),
              end: payPeriod.week1.end.toISODate(),
              label: formatWeekLabel(payPeriod.week1),
            },
            week2: {
              start: payPeriod.week2.start.toISODate(),
              end: payPeriod.week2.end.toISODate(),
              label: formatWeekLabel(payPeriod.week2),
            },
            isCurrent: isCurrentPayPeriod(payPeriod),
          },
          totals: {
            hours: hoursData.totals.hours,
            breakHours: hoursData.totals.breakHours,
            overtimeHours: hoursData.totals.overtimeHours,
            regularHours: hoursData.totals.regularHours,
            activeEmployees,
            fte,
            avgHoursPerEmployee,
          },
          previousTotals: {
            hours: prevHoursData.totals.hours,
            overtimeHours: prevHoursData.totals.overtimeHours,
            activeEmployees: prevHoursData.totals.activeEmployees,
            fte: prevFte,
          },
          comparison,
          byEmployee: hoursData.byEmployee,
          byWeek: hoursData.byWeek,
        };

        return { response };
      } finally {
        release();
      }
    };

    const { response } = await Promise.race([mainOperation(), timeoutPromise]);

    console.log(`[PAYROLL-METRICS] Request completed in ${Date.now() - startTime}ms`);
    res.json(response);
  } catch (error) {
    if (error.message.includes('timeout')) {
      console.log(`[PAYROLL-METRICS] Request timed out in ${Date.now() - startTime}ms`);
    }
    console.error('Error in /payroll-metrics:', error);
    console.log(`[PAYROLL-METRICS] Request failed in ${Date.now() - startTime}ms`);
    // statusCode is set for expected client errors (e.g. invalid date → 400).
    res.status(error.statusCode || 500).json({ error: error.message });
  } finally {
    clearTimeout(timeoutId);
  }
});
|
||||
|
||||
// Get pay period info endpoint (for navigation without full data)
router.get('/period-info', async (req, res) => {
  try {
    const { payPeriodStart, navigate } = req.query;

    // Resolve the requested pay period: explicit start date, else "now".
    let payPeriod;
    if (payPeriodStart) {
      const parsed = DateTime.fromISO(payPeriodStart, { zone: TIMEZONE });
      if (!parsed.isValid) {
        return res.status(400).json({ error: 'Invalid payPeriodStart date format' });
      }
      payPeriod = getPayPeriodForDate(parsed);
    } else {
      payPeriod = getCurrentPayPeriod();
    }

    // Optionally shift by a whole number of pay periods.
    const offset = navigate ? parseInt(navigate, 10) : NaN;
    if (!Number.isNaN(offset)) {
      payPeriod = navigatePayPeriod(payPeriod.start, offset);
    }

    // Serialize one week's boundaries plus its display label.
    const describeWeek = (week) => ({
      start: week.start.toISODate(),
      end: week.end.toISODate(),
      label: formatWeekLabel(week),
    });

    res.json({
      payPeriod: {
        start: payPeriod.start.toISODate(),
        end: payPeriod.end.toISODate(),
        label: formatPayPeriodLabel(payPeriod),
        week1: describeWeek(payPeriod.week1),
        week2: describeWeek(payPeriod.week2),
        isCurrent: isCurrentPayPeriod(payPeriod),
      },
    });
  } catch (error) {
    console.error('Error in /payroll-metrics/period-info:', error);
    res.status(500).json({ error: error.message });
  }
});
|
||||
|
||||
// Health check: verifies a pooled DB connection can be checked out and can
// run a trivial query, and reports current pool status.
router.get('/health', async (req, res) => {
  try {
    const { connection, release } = await getDbConnection();
    try {
      await connection.execute('SELECT 1 as test');
    } finally {
      // Always return the connection to the pool, even if the probe query
      // fails — otherwise a failing DB leaks one connection per health check.
      release();
    }

    res.json({
      status: 'healthy',
      timestamp: new Date().toISOString(),
      pool: getPoolStatus(),
    });
  } catch (error) {
    res.status(500).json({
      status: 'unhealthy',
      timestamp: new Date().toISOString(),
      error: error.message,
    });
  }
});
|
||||
|
||||
// Helper functions
|
||||
/**
 * Compute period-over-period deltas between two metric values.
 *
 * @param {number|*} currentValue - Current period value (non-numbers yield null absolute).
 * @param {number|*} previousValue - Prior period value; non-numbers disable comparison.
 * @returns {{absolute: number|null, percentage: number|null}} Absolute delta and
 *   percent change relative to |previousValue|; percentage is null when the
 *   previous value is 0 or either value is unavailable.
 */
function calculateComparison(currentValue, previousValue) {
  if (typeof previousValue !== 'number') {
    return { absolute: null, percentage: null };
  }

  const hasCurrent = typeof currentValue === 'number';
  const absolute = hasCurrent ? currentValue - previousValue : null;

  let percentage = null;
  if (absolute !== null && previousValue !== 0) {
    percentage = (absolute / Math.abs(previousValue)) * 100;
  }

  return { absolute, percentage };
}
|
||||
|
||||
// Human-readable label for a full pay period, e.g. "Jan 1 – Jan 14, 2025".
// Only the end date carries the year.
function formatPayPeriodLabel(payPeriod) {
  const { start, end } = payPeriod;
  return `${start.toFormat('MMM d')} – ${end.toFormat('MMM d, yyyy')}`;
}
|
||||
|
||||
// Human-readable label for a single week, e.g. "Jan 1 – Jan 7" (no year).
function formatWeekLabel(week) {
  return `${week.start.toFormat('MMM d')} – ${week.end.toFormat('MMM d')}`;
}
|
||||
|
||||
// Returns true when "now" (evaluated in the configured business TIMEZONE)
// falls inside the given pay period, inclusive of both boundaries.
// Relies on Luxon DateTime comparison operators (valueOf-based ordering).
function isCurrentPayPeriod(payPeriod) {
  const now = DateTime.now().setZone(TIMEZONE);
  return now >= payPeriod.start && now <= payPeriod.end;
}
|
||||
|
||||
// Expose the payroll-metrics router for mounting in the main app.
module.exports = router;
|
||||
@@ -49,6 +49,9 @@ app.get('/health', (req, res) => {
|
||||
app.use('/api/acot/test', require('./routes/test'));
|
||||
app.use('/api/acot/events', require('./routes/events'));
|
||||
app.use('/api/acot/discounts', require('./routes/discounts'));
|
||||
app.use('/api/acot/employee-metrics', require('./routes/employee-metrics'));
|
||||
app.use('/api/acot/payroll-metrics', require('./routes/payroll-metrics'));
|
||||
app.use('/api/acot/operations-metrics', require('./routes/operations-metrics'));
|
||||
|
||||
// Error handling middleware
|
||||
app.use((err, req, res, next) => {
|
||||
|
||||
@@ -175,13 +175,13 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"form-data": "^4.0.4",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -230,9 +230,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -596,6 +596,21 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-set-tostringtag": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
|
||||
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.6",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"hasown": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-html": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||
@@ -721,13 +736,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
@@ -868,6 +885,21 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-tostringtag": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
|
||||
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/hasown": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
import winston from 'winston';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
export function createLogger(service) {
|
||||
// Create logs directory relative to the project root (two levels up from utils)
|
||||
const logsDir = path.join(__dirname, '../../logs');
|
||||
|
||||
return winston.createLogger({
|
||||
level: process.env.LOG_LEVEL || 'info',
|
||||
format: winston.format.combine(
|
||||
@@ -19,11 +26,11 @@ export function createLogger(service) {
|
||||
}),
|
||||
// Write all logs to service-specific files
|
||||
new winston.transports.File({
|
||||
filename: path.join('logs', `${service}-error.log`),
|
||||
filename: path.join(logsDir, `${service}-error.log`),
|
||||
level: 'error'
|
||||
}),
|
||||
new winston.transports.File({
|
||||
filename: path.join('logs', `${service}-combined.log`)
|
||||
filename: path.join(logsDir, `${service}-combined.log`)
|
||||
})
|
||||
]
|
||||
});
|
||||
|
||||
@@ -33,7 +33,7 @@ const corsOptions = {
|
||||
origin: function(origin, callback) {
|
||||
const allowedOrigins = [
|
||||
'http://localhost:3000',
|
||||
'https://dashboard.kent.pw'
|
||||
'https://tools.acherryontop.com'
|
||||
];
|
||||
|
||||
console.log('CORS check for origin:', origin);
|
||||
|
||||
@@ -350,16 +350,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/express-session": {
|
||||
"version": "1.18.1",
|
||||
"resolved": "https://registry.npmjs.org/express-session/-/express-session-1.18.1.tgz",
|
||||
"integrity": "sha512-a5mtTqEaZvBCL9A9aqkrtfz+3SMDhOVUnjafjo+s7A9Txkq+SVX2DLvSp1Zrv4uCXa3lMSK3viWnh9Gg07PBUA==",
|
||||
"version": "1.18.2",
|
||||
"resolved": "https://registry.npmjs.org/express-session/-/express-session-1.18.2.tgz",
|
||||
"integrity": "sha512-SZjssGQC7TzTs9rpPDuUrR23GNZ9+2+IkA/+IJWmvQilTr5OSliEHGF+D9scbIpdC6yGtTI0/VhaHoVes2AN/A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cookie": "0.7.2",
|
||||
"cookie-signature": "1.0.7",
|
||||
"debug": "2.6.9",
|
||||
"depd": "~2.0.0",
|
||||
"on-headers": "~1.0.2",
|
||||
"on-headers": "~1.1.0",
|
||||
"parseurl": "~1.3.3",
|
||||
"safe-buffer": "5.2.1",
|
||||
"uid-safe": "~2.1.5"
|
||||
@@ -753,9 +753,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/on-headers": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
|
||||
"integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz",
|
||||
"integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
|
||||
@@ -257,21 +257,6 @@
|
||||
"form-data": "^2.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/request/node_modules/form-data": {
|
||||
"version": "2.5.2",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.2.tgz",
|
||||
"integrity": "sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.6",
|
||||
"mime-types": "^2.1.12",
|
||||
"safe-buffer": "^5.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.12"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/tough-cookie": {
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
|
||||
@@ -466,9 +451,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -844,6 +829,21 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-set-tostringtag": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
|
||||
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.6",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"hasown": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escalade": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
|
||||
@@ -972,6 +972,23 @@
|
||||
"integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "2.5.5",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
||||
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.35",
|
||||
"safe-buffer": "^5.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.12"
|
||||
}
|
||||
},
|
||||
"node_modules/forwarded": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
||||
@@ -1185,6 +1202,21 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-tostringtag": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
|
||||
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/hasown": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||
|
||||
@@ -101,13 +101,13 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"form-data": "^4.0.4",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -351,6 +351,21 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-set-tostringtag": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
|
||||
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.6",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"hasown": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-html": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||
@@ -451,13 +466,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
@@ -548,6 +565,21 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-tostringtag": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
|
||||
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/hasown": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
"ioredis": "^5.4.1",
|
||||
"luxon": "^3.5.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"pg": "^8.18.0",
|
||||
"recharts": "^2.15.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -23,13 +24,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/runtime": {
|
||||
"version": "7.26.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz",
|
||||
"integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==",
|
||||
"version": "7.28.4",
|
||||
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz",
|
||||
"integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"regenerator-runtime": "^0.14.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
@@ -181,9 +179,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -1382,6 +1380,95 @@
|
||||
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pg": {
|
||||
"version": "8.18.0",
|
||||
"resolved": "https://registry.npmjs.org/pg/-/pg-8.18.0.tgz",
|
||||
"integrity": "sha512-xqrUDL1b9MbkydY/s+VZ6v+xiMUmOUk7SS9d/1kpyQxoJ6U9AO1oIJyUWVZojbfe5Cc/oluutcgFG4L9RDP1iQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pg-connection-string": "^2.11.0",
|
||||
"pg-pool": "^3.11.0",
|
||||
"pg-protocol": "^1.11.0",
|
||||
"pg-types": "2.2.0",
|
||||
"pgpass": "1.0.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 16.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"pg-cloudflare": "^1.3.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"pg-native": ">=3.0.1"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"pg-native": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/pg-cloudflare": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz",
|
||||
"integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==",
|
||||
"license": "MIT",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/pg-connection-string": {
|
||||
"version": "2.11.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.11.0.tgz",
|
||||
"integrity": "sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pg-int8": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
|
||||
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">=4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pg-pool": {
|
||||
"version": "3.11.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.11.0.tgz",
|
||||
"integrity": "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"pg": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pg-protocol": {
|
||||
"version": "1.11.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.11.0.tgz",
|
||||
"integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pg-types": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
|
||||
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pg-int8": "1.0.1",
|
||||
"postgres-array": "~2.0.0",
|
||||
"postgres-bytea": "~1.0.0",
|
||||
"postgres-date": "~1.0.4",
|
||||
"postgres-interval": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/pgpass": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
|
||||
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"split2": "^4.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/picomatch": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
|
||||
@@ -1395,6 +1482,45 @@
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-array": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
|
||||
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-bytea": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz",
|
||||
"integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-date": {
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
|
||||
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-interval": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
|
||||
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"xtend": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/prop-types": {
|
||||
"version": "15.8.1",
|
||||
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
|
||||
@@ -1601,12 +1727,6 @@
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/regenerator-runtime": {
|
||||
"version": "0.14.1",
|
||||
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz",
|
||||
"integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
@@ -1818,6 +1938,15 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/split2": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
|
||||
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">= 10.x"
|
||||
}
|
||||
},
|
||||
"node_modules/standard-as-callback": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
|
||||
@@ -1961,6 +2090,15 @@
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/xtend": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
|
||||
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.4"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
"ioredis": "^5.4.1",
|
||||
"luxon": "^3.5.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"pg": "^8.18.0",
|
||||
"recharts": "^2.15.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
-- Stores individual product links found in Klaviyo campaign emails
CREATE TABLE IF NOT EXISTS klaviyo_campaign_products (
  id SERIAL PRIMARY KEY,
  campaign_id TEXT NOT NULL,              -- Klaviyo campaign id (string API id)
  campaign_name TEXT,
  sent_at TIMESTAMPTZ,                    -- campaign send time, denormalized for querying
  pid BIGINT NOT NULL,                    -- site product id parsed from /shop/{id} links
  product_url TEXT,
  created_at TIMESTAMPTZ DEFAULT NOW(),
  UNIQUE(campaign_id, pid)                -- one row per product per campaign
);

CREATE INDEX IF NOT EXISTS idx_kcp_campaign_id ON klaviyo_campaign_products(campaign_id);
CREATE INDEX IF NOT EXISTS idx_kcp_pid ON klaviyo_campaign_products(pid);
CREATE INDEX IF NOT EXISTS idx_kcp_sent_at ON klaviyo_campaign_products(sent_at);

-- Stores non-product shop links (categories, filters, etc.) found in campaigns
CREATE TABLE IF NOT EXISTS klaviyo_campaign_links (
  id SERIAL PRIMARY KEY,
  campaign_id TEXT NOT NULL,
  campaign_name TEXT,
  sent_at TIMESTAMPTZ,
  link_url TEXT NOT NULL,
  link_type TEXT, -- 'category', 'brand', 'filter', 'clearance', 'deals', 'other'
  created_at TIMESTAMPTZ DEFAULT NOW(),
  UNIQUE(campaign_id, link_url)           -- dedupe repeated links within one campaign
);

CREATE INDEX IF NOT EXISTS idx_kcl_campaign_id ON klaviyo_campaign_links(campaign_id);
CREATE INDEX IF NOT EXISTS idx_kcl_sent_at ON klaviyo_campaign_links(sent_at);
|
||||
@@ -0,0 +1,279 @@
|
||||
/**
|
||||
* Extract products featured in Klaviyo campaign emails and store in DB.
|
||||
*
|
||||
* - Fetches recent sent campaigns from Klaviyo API
|
||||
* - Gets template HTML for each campaign message
|
||||
* - Parses out product links (/shop/{id}) and other shop links
|
||||
* - Inserts into klaviyo_campaign_products and klaviyo_campaign_links tables
|
||||
*
|
||||
* Usage: node scripts/poc-campaign-products.js [limit] [offset]
|
||||
* limit: number of sent campaigns to process (default: 10)
|
||||
* offset: number of sent campaigns to skip before processing (default: 0)
|
||||
*
|
||||
* Requires DB_* env vars (from inventory-server .env) and KLAVIYO_API_KEY.
|
||||
*/
|
||||
|
||||
import fetch from 'node-fetch';
|
||||
import pg from 'pg';
|
||||
import dotenv from 'dotenv';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// ES modules have no __dirname builtin; derive it from import.meta.url.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Load klaviyo .env for API key
dotenv.config({ path: path.resolve(__dirname, '../.env') });
// Also load the main inventory-server .env for DB credentials
// (hard-coded deployment path; only loaded if present so the script
// still runs in other environments).
const mainEnvPath = '/var/www/html/inventory/.env';
if (fs.existsSync(mainEnvPath)) {
  dotenv.config({ path: mainEnvPath });
}

// Klaviyo credentials and API version pinning. The 'revision' header selects
// the Klaviyo API version; override with KLAVIYO_API_REVISION if needed.
const API_KEY = process.env.KLAVIYO_API_KEY;
const REVISION = process.env.KLAVIYO_API_REVISION || '2026-01-15';
const BASE_URL = 'https://a.klaviyo.com/api';

// Fail fast when the API key is missing — every request below requires it.
if (!API_KEY) {
  console.error('KLAVIYO_API_KEY not set in .env');
  process.exit(1);
}

// ── Klaviyo API helpers ──────────────────────────────────────────────

// Shared headers for every Klaviyo request.
const headers = {
  'Accept': 'application/json',
  'Content-Type': 'application/json',
  'Authorization': `Klaviyo-API-Key ${API_KEY}`,
  'revision': REVISION,
};
|
||||
|
||||
// Perform a GET against the Klaviyo API, encoding `params` into the query
// string, and return the parsed JSON body.
async function klaviyoGet(endpoint, params = {}) {
  const url = new URL(`${BASE_URL}${endpoint}`);
  Object.entries(params).forEach(([key, value]) => {
    url.searchParams.append(key, value);
  });
  return klaviyoFetch(url.toString());
}
|
||||
|
||||
// Fetch an absolute Klaviyo URL with the shared auth headers.
// Resolves to the parsed JSON body; throws (with status and response text)
// on any non-2xx response.
async function klaviyoFetch(url) {
  const res = await fetch(url, { headers });
  if (res.ok) {
    return res.json();
  }
  const body = await res.text();
  throw new Error(`Klaviyo ${res.status} on ${url}: ${body}`);
}
|
||||
|
||||
/**
 * Fetch recent "Sent" email campaigns from the Klaviyo campaigns API,
 * paging until `limit` campaigns have been collected (after skipping the
 * first `offset` sent campaigns).
 *
 * @param {number} limit  - number of sent campaigns to return
 * @param {number} offset - number of sent campaigns to skip first
 * @returns {Promise<{campaigns: object[], messageMap: object}>}
 *   campaigns: campaign resources, newest first (sorted by -scheduled_at);
 *   messageMap: campaign-message id -> included campaign-message resource
 *   (from ?include=campaign-messages; used later to fetch templates).
 */
async function getRecentCampaigns(limit, offset = 0) {
  const campaigns = [];
  const messageMap = {};
  let skipped = 0;

  let data = await klaviyoGet('/campaigns', {
    'filter': 'equals(messages.channel,"email")',
    'sort': '-scheduled_at',
    'include': 'campaign-messages',
  });

  while (true) {
    // Collect "Sent" campaigns from this page, honoring the offset first.
    // Non-sent campaigns (drafts, scheduled) are ignored entirely.
    for (const c of (data.data || [])) {
      if (c.attributes?.status === 'Sent') {
        if (skipped < offset) {
          skipped++;
          continue;
        }
        campaigns.push(c);
        if (campaigns.length >= limit) break;
      }
    }

    // Index this page's included campaign-message resources by id — done
    // after the campaign loop so messages for the final page are captured too.
    for (const inc of (data.included || [])) {
      if (inc.type === 'campaign-message') {
        messageMap[inc.id] = inc;
      }
    }

    const nextUrl = data.links?.next;
    if (campaigns.length >= limit || !nextUrl) break;

    const progress = skipped < offset
      ? `Skipped ${skipped}/${offset}...`
      : `Fetched ${campaigns.length}/${limit} sent campaigns, loading next page...`;
    console.log(`  ${progress}`);
    // Small delay between pages to stay under Klaviyo rate limits.
    await new Promise(r => setTimeout(r, 200));
    data = await klaviyoFetch(nextUrl);
  }

  // slice() guards the edge where the inner break left one extra push.
  return { campaigns: campaigns.slice(0, limit), messageMap };
}
|
||||
|
||||
// Fetch the rendered template for one campaign message; returns its id,
// name, and HTML body ('' when the template has no HTML).
async function getTemplateHtml(messageId) {
  const data = await klaviyoGet(`/campaign-messages/${messageId}/template`, {
    'fields[template]': 'html,name',
  });
  const template = data.data;
  return {
    templateId: template?.id,
    templateName: template?.attributes?.name,
    html: template?.attributes?.html || '',
  };
}
|
||||
|
||||
// ── HTML parsing ─────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Extract product links and non-product shop links from campaign HTML.
 *
 * @param {string} html - Raw template HTML for one campaign message.
 * @returns {{products: Array<{siteProductId: string, url: string}>, categoryLinks: string[]}}
 *   products: unique /shop/{id} product links (first occurrence wins; the
 *   captured url excludes any trailing query string);
 *   categoryLinks: unique non-product shop URLs (categories, brands, filters...).
 */
function parseProductsFromHtml(html) {
  const seen = new Set();
  const products = [];

  // Product links look like .../shop/{numericId}; the trailing [^"]* lets a
  // query string follow without being captured in the URL.
  const linkRegex = /href="([^"]*acherryontop\.com\/shop\/(\d+))[^"]*"/gi;
  let match;
  while ((match = linkRegex.exec(html)) !== null) {
    const productId = match[2];
    if (!seen.has(productId)) {
      seen.add(productId);
      products.push({
        siteProductId: productId,
        url: match[1],
      });
    }
  }

  // Remaining shop links (categories, brands, filters, ...). Skip product
  // pages — including ones followed by a query string or fragment, which the
  // old `/\/shop\/\d+$/` test missed and misfiled as category links.
  const categorySet = new Set();
  const catRegex = /href="([^"]*acherryontop\.com\/shop\/[^"]+)"/gi;
  while ((match = catRegex.exec(html)) !== null) {
    const url = match[1];
    if (/\/shop\/\d+([?#]|$)/.test(url)) continue; // product page, handled above
    categorySet.add(url); // Set dedup replaces the O(n²) Array.includes scan
  }

  return { products, categoryLinks: [...categorySet] };
}
|
||||
|
||||
/**
 * Map a shop URL to a coarse link type used for reporting.
 * Rules are evaluated top to bottom; the first match wins, 'other' is the
 * fallback for unrecognized shop URLs.
 *
 * @param {string} url - Absolute or relative shop URL.
 * @returns {string} One of 'filter' | 'brand' | 'clearance' | 'deals' | 'category' | 'other'.
 */
function classifyLink(url) {
  const rules = [
    [/\/shop\/(new|pre-order|backinstock)/, 'filter'],
    [/\/shop\/company\//, 'brand'],
    [/\/shop\/clearance/, 'clearance'],
    [/\/shop\/daily_deals/, 'deals'],
    [/\/shop\/category\//, 'category'],
  ];
  for (const [pattern, type] of rules) {
    if (pattern.test(url)) return type;
  }
  return 'other';
}
|
||||
|
||||
// ── Database ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build a PostgreSQL connection pool from DB_* environment variables.
 * DB_PORT defaults to 5432; DB_SSL === 'true' enables TLS without
 * certificate verification (rejectUnauthorized: false).
 *
 * @returns {pg.Pool} A new, unconnected pool; caller owns pool.end().
 */
function createPool() {
  const useSsl = process.env.DB_SSL === 'true';
  const config = {
    host: process.env.DB_HOST,
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME,
    port: process.env.DB_PORT || 5432,
    ssl: useSsl ? { rejectUnauthorized: false } : false,
  };
  return new pg.Pool(config);
}
|
||||
|
||||
/**
 * Upsert a campaign's product links into klaviyo_campaign_products.
 * Duplicate (campaign_id, pid) pairs are skipped via ON CONFLICT DO NOTHING.
 *
 * @param {pg.Pool} pool - Open connection pool.
 * @param {string} campaignId - Klaviyo campaign id.
 * @param {string} campaignName - Human-readable campaign name.
 * @param {string|null} sentAt - ISO send time (may be null).
 * @param {Array<{siteProductId: string, url: string}>} products - Parsed product links.
 * @returns {Promise<number>} Number of rows actually inserted (duplicates excluded).
 */
async function insertProducts(pool, campaignId, campaignName, sentAt, products) {
  if (products.length === 0) return 0;

  let inserted = 0;
  for (const p of products) {
    try {
      const result = await pool.query(
        `INSERT INTO klaviyo_campaign_products
           (campaign_id, campaign_name, sent_at, pid, product_url)
         VALUES ($1, $2, $3, $4, $5)
         ON CONFLICT (campaign_id, pid) DO NOTHING`,
        [campaignId, campaignName, sentAt, parseInt(p.siteProductId, 10), p.url]
      );
      // Fix: count only rows actually written. ON CONFLICT DO NOTHING yields
      // rowCount 0 for duplicates, so the old unconditional inserted++ made
      // the "(N new)" log in main() report attempts, not new rows.
      inserted += result.rowCount;
    } catch (err) {
      // Best-effort: log and continue with the remaining products.
      console.error(`  Error inserting product ${p.siteProductId}: ${err.message}`);
    }
  }
  return inserted;
}
|
||||
|
||||
/**
 * Upsert a campaign's category/collection links into klaviyo_campaign_links,
 * tagging each with classifyLink(). Duplicate (campaign_id, link_url) pairs
 * are skipped via ON CONFLICT DO NOTHING.
 *
 * @param {pg.Pool} pool - Open connection pool.
 * @param {string} campaignId - Klaviyo campaign id.
 * @param {string} campaignName - Human-readable campaign name.
 * @param {string|null} sentAt - ISO send time (may be null).
 * @param {string[]} links - Deduplicated link URLs.
 * @returns {Promise<number>} Number of rows actually inserted (duplicates excluded).
 */
async function insertLinks(pool, campaignId, campaignName, sentAt, links) {
  if (links.length === 0) return 0;

  let inserted = 0;
  for (const url of links) {
    try {
      const result = await pool.query(
        `INSERT INTO klaviyo_campaign_links
           (campaign_id, campaign_name, sent_at, link_url, link_type)
         VALUES ($1, $2, $3, $4, $5)
         ON CONFLICT (campaign_id, link_url) DO NOTHING`,
        [campaignId, campaignName, sentAt, url, classifyLink(url)]
      );
      // Fix: rowCount is 0 when ON CONFLICT skipped the row; the previous
      // unconditional inserted++ over-reported "new" links.
      inserted += result.rowCount;
    } catch (err) {
      // Best-effort: log and continue with the remaining links.
      console.error(`  Error inserting link: ${err.message}`);
    }
  }
  return inserted;
}
|
||||
|
||||
// ── Main ─────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * CLI entry point.
 *
 * Usage: node <script> [limit] [offset]
 *   limit  - max number of sent campaigns to process (default 10)
 *   offset - number of sent campaigns to skip first (default 0)
 *
 * For each message of each sent campaign: fetches the rendered template HTML,
 * extracts product/category links, and upserts them into Postgres. Progress
 * and per-campaign stats go to stdout; the pool is always closed on exit.
 */
async function main() {
  const limit = parseInt(process.argv[2]) || 10;
  const offset = parseInt(process.argv[3]) || 0;
  const pool = createPool();

  try {
    // Fetch campaigns
    console.log(`Fetching up to ${limit} recent campaigns (offset: ${offset})...\n`);
    const { campaigns, messageMap } = await getRecentCampaigns(limit, offset);
    console.log(`Found ${campaigns.length} sent campaigns.\n`);

    // Running totals of rows actually inserted across all campaigns.
    let totalProducts = 0;
    let totalLinks = 0;

    for (const campaign of campaigns) {
      const name = campaign.attributes?.name || 'Unnamed';
      const sentAt = campaign.attributes?.send_time;

      console.log(`━━━ ${name} (${sentAt?.slice(0, 10) || 'no date'}) ━━━`);

      // A campaign can carry several messages; process each one.
      const msgIds = (campaign.relationships?.['campaign-messages']?.data || [])
        .map(r => r.id);

      if (msgIds.length === 0) {
        console.log('  No messages.\n');
        continue;
      }

      for (const msgId of msgIds) {
        const msg = messageMap[msgId];
        const subject = msg?.attributes?.definition?.content?.subject;
        if (subject) console.log(`  Subject: ${subject}`);

        try {
          const template = await getTemplateHtml(msgId);
          const { products, categoryLinks } = parseProductsFromHtml(template.html);

          const pInserted = await insertProducts(pool, campaign.id, name, sentAt, products);
          const lInserted = await insertLinks(pool, campaign.id, name, sentAt, categoryLinks);

          console.log(`  ${products.length} products (${pInserted} new), ${categoryLinks.length} links (${lInserted} new)`);
          totalProducts += pInserted;
          totalLinks += lInserted;

          // 200ms pause between template fetches — presumably API rate
          // limiting; confirm against Klaviyo's documented limits.
          await new Promise(r => setTimeout(r, 200));
        } catch (err) {
          // One failing message must not abort the whole run.
          console.log(`  Error: ${err.message}`);
        }
      }
      console.log('');
    }

    console.log(`Done. Inserted ${totalProducts} product rows, ${totalLinks} link rows.`);
  } finally {
    // Always release DB connections, even when an error propagates.
    await pool.end();
  }
}
|
||||
|
||||
// Entry point: surface any unhandled error and exit non-zero so schedulers
// (cron, CI) can detect a failed run.
main().catch(err => {
  console.error('Fatal error:', err);
  process.exit(1);
});
|
||||
@@ -41,13 +41,13 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"form-data": "^4.0.4",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -282,6 +282,21 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-set-tostringtag": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
|
||||
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.6",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"hasown": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-html": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||
@@ -382,13 +397,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
@@ -470,6 +487,21 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-tostringtag": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
|
||||
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/hasown": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||
|
||||
@@ -117,13 +117,13 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"form-data": "^4.0.4",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -172,9 +172,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -443,6 +443,21 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/es-set-tostringtag": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
|
||||
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.6",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"hasown": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-html": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
|
||||
@@ -556,13 +571,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
@@ -691,6 +708,21 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-tostringtag": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
|
||||
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/hasown": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
|
||||
|
||||
17
inventory-server/db/daily-deals-schema.sql
Normal file
17
inventory-server/db/daily-deals-schema.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
-- Daily Deals schema for local PostgreSQL
-- Synced from production MySQL product_daily_deals + product_current_prices

CREATE TABLE IF NOT EXISTS product_daily_deals (
    deal_id serial PRIMARY KEY,
    deal_date date NOT NULL,
    pid bigint NOT NULL,
    price_id bigint NOT NULL,
    -- Denormalized from product_current_prices so we don't need to sync that whole table
    deal_price numeric(10,3),
    created_at timestamptz DEFAULT NOW(),
    -- Deals are removed automatically when the product row is deleted.
    CONSTRAINT fk_daily_deals_pid FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE
);

-- Lookup indexes; at most one deal per (date, product) is enforced below.
CREATE INDEX IF NOT EXISTS idx_daily_deals_date ON product_daily_deals(deal_date);
-- NOTE(review): idx_daily_deals_date is likely redundant — idx_daily_deals_unique
-- leads with deal_date and can serve date-only scans; confirm before dropping.
CREATE INDEX IF NOT EXISTS idx_daily_deals_pid ON product_daily_deals(pid);
CREATE UNIQUE INDEX IF NOT EXISTS idx_daily_deals_unique ON product_daily_deals(deal_date, pid);
|
||||
234
inventory-server/db/functions.sql
Normal file
234
inventory-server/db/functions.sql
Normal file
@@ -0,0 +1,234 @@
|
||||
-- Custom PostgreSQL functions used by the metrics pipeline
|
||||
-- These must exist in the database before running calculate-metrics-new.js
|
||||
--
|
||||
-- To install/update: psql -d inventory_db -f functions.sql
|
||||
-- All functions use CREATE OR REPLACE so they are safe to re-run.
|
||||
|
||||
-- =============================================================================
|
||||
-- safe_divide: Division helper that returns a default value instead of erroring
|
||||
-- on NULL or zero denominators.
|
||||
-- =============================================================================
|
||||
CREATE OR REPLACE FUNCTION public.safe_divide(
    numerator numeric,
    denominator numeric,
    default_value numeric DEFAULT NULL::numeric
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Single CASE expression: fall back to default_value whenever the
    -- denominator is NULL or zero, otherwise divide normally.
    RETURN CASE
        WHEN denominator IS NULL OR denominator = 0 THEN default_value
        ELSE numerator / denominator
    END;
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- std_numeric: Standardized rounding helper for consistent numeric precision.
|
||||
-- =============================================================================
|
||||
CREATE OR REPLACE FUNCTION public.std_numeric(
    value numeric,
    precision_digits integer DEFAULT 2
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- ROUND propagates NULL inputs, so no explicit NULL branch is needed;
    -- behavior is identical to an IF value IS NULL guard.
    RETURN ROUND(value, precision_digits);
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- calculate_sales_velocity: Daily sales velocity adjusted for stockout days.
|
||||
-- Ensures at least 14-day denominator for products with sales to avoid
|
||||
-- inflated velocity from short windows.
|
||||
-- =============================================================================
|
||||
-- =============================================================================
-- calculate_sales_velocity: Daily sales velocity adjusted for stockout days.
-- Ensures at least 14-day denominator for products with sales to avoid
-- inflated velocity from short windows.
--
-- Returns units/day. The denominator is floored at 14.0 (with sales) or
-- 30.0 (without), so GREATEST(...) is always >= 14 and the NULLIF(..., 0)
-- guard can never actually fire — it is purely defensive.
-- =============================================================================
CREATE OR REPLACE FUNCTION public.calculate_sales_velocity(
    sales_30d integer,
    stockout_days_30d integer
)
RETURNS numeric
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Effective selling days = 30 minus out-of-stock days, with the floor
    -- described above; 30.0 literal forces numeric (non-integer) division.
    RETURN sales_30d /
           NULLIF(
               GREATEST(
                   30.0 - stockout_days_30d,
                   CASE
                       WHEN sales_30d > 0 THEN 14.0 -- If we have sales, ensure at least 14 days denominator
                       ELSE 30.0 -- If no sales, use full period
                   END
               ),
               0
           );
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- get_weighted_avg_cost: Weighted average cost from receivings up to a given date.
|
||||
-- Uses all non-canceled receivings (no row limit) weighted by quantity.
|
||||
-- =============================================================================
|
||||
CREATE OR REPLACE FUNCTION public.get_weighted_avg_cost(
    p_pid bigint,
    p_date date
)
RETURNS numeric
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
    weighted_cost NUMERIC;
BEGIN
    -- Quantity-weighted mean unit cost over all non-canceled receivings on or
    -- before p_date. Returns NULL when there are no matching rows or the total
    -- received quantity is zero (CASE guards the division).
    SELECT
        CASE
            WHEN SUM(qty_each) > 0 THEN SUM(cost_each * qty_each) / SUM(qty_each)
            ELSE NULL
        END INTO weighted_cost
    FROM receivings
    WHERE pid = p_pid
      AND received_date <= p_date
      AND status != 'canceled';

    RETURN weighted_cost;
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- classify_demand_pattern: Classifies demand based on average demand and
|
||||
-- coefficient of variation (CV). Standard inventory classification:
|
||||
-- zero: no demand
|
||||
-- stable: CV <= 0.2 (predictable, easy to forecast)
|
||||
-- variable: CV <= 0.5 (some variability, still forecastable)
|
||||
-- sporadic: low volume + high CV (intermittent demand)
|
||||
-- lumpy: high volume + high CV (unpredictable bursts)
|
||||
-- =============================================================================
|
||||
CREATE OR REPLACE FUNCTION public.classify_demand_pattern(
    avg_demand numeric,
    cv numeric
)
RETURNS character varying
LANGUAGE plpgsql
IMMUTABLE
AS $function$
BEGIN
    -- Same classification as before, expressed as one CASE; branch order
    -- matters and is evaluated top to bottom:
    --   NULL inputs -> NULL, no demand -> 'zero', low CV -> 'stable',
    --   medium CV -> 'variable', then low-volume -> 'sporadic', else 'lumpy'.
    RETURN CASE
        WHEN avg_demand IS NULL OR cv IS NULL THEN NULL
        WHEN avg_demand = 0 THEN 'zero'
        WHEN cv <= 0.2 THEN 'stable'
        WHEN cv <= 0.5 THEN 'variable'
        WHEN avg_demand < 1.0 THEN 'sporadic'
        ELSE 'lumpy'
    END;
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- detect_seasonal_pattern: Detects seasonality by comparing monthly average
|
||||
-- sales across the last 12 months. Uses coefficient of variation across months
|
||||
-- and peak-to-average ratio to classify patterns.
|
||||
--
|
||||
-- Returns:
|
||||
-- seasonal_pattern: 'none', 'moderate', or 'strong'
|
||||
-- seasonality_index: peak month avg / overall avg * 100 (100 = no seasonality)
|
||||
-- peak_season: name of peak month (e.g. 'January'), or NULL if none
|
||||
-- =============================================================================
|
||||
CREATE OR REPLACE FUNCTION public.detect_seasonal_pattern(p_pid bigint)
RETURNS TABLE(seasonal_pattern character varying, seasonality_index numeric, peak_season character varying)
LANGUAGE plpgsql
STABLE
AS $function$
DECLARE
    v_monthly_cv NUMERIC;        -- coefficient of variation across monthly averages
    v_max_month_avg NUMERIC;     -- highest monthly average (the peak month)
    v_overall_avg NUMERIC;       -- mean of the monthly averages
    v_monthly_stddev NUMERIC;    -- stddev of the monthly averages
    v_peak_month_num INT;        -- 1-12 month number of the peak month
    v_data_months INT;           -- how many distinct months had snapshots
    v_seasonality_index NUMERIC;
    v_seasonal_pattern VARCHAR;
    v_peak_season VARCHAR;
BEGIN
    -- Gather monthly average sales and peak month in a single query
    -- (ARRAY_AGG ordered by month_avg DESC puts the peak month first).
    SELECT
        COUNT(*),
        AVG(month_avg),
        STDDEV(month_avg),
        MAX(month_avg),
        (ARRAY_AGG(mo ORDER BY month_avg DESC))[1]::INT
    INTO v_data_months, v_overall_avg, v_monthly_stddev, v_max_month_avg, v_peak_month_num
    FROM (
        SELECT EXTRACT(MONTH FROM snapshot_date) AS mo, AVG(units_sold) AS month_avg
        FROM daily_product_snapshots
        WHERE pid = p_pid AND snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
        GROUP BY EXTRACT(MONTH FROM snapshot_date)
    ) monthly;

    -- Need at least 3 months of data for meaningful seasonality detection
    IF v_data_months < 3 OR v_overall_avg IS NULL OR v_overall_avg = 0 THEN
        RETURN QUERY SELECT 'none'::VARCHAR, 100::NUMERIC, NULL::VARCHAR;
        RETURN;
    END IF;

    -- CV of monthly averages
    -- NOTE(review): STDDEV is the sample stddev; with only a few months this
    -- inflates CV somewhat — confirm that is the intended sensitivity.
    v_monthly_cv := v_monthly_stddev / v_overall_avg;

    -- Seasonality index: peak month avg / overall avg * 100
    v_seasonality_index := ROUND((v_max_month_avg / v_overall_avg * 100)::NUMERIC, 2);

    -- Classify: 'strong' needs both high spread and a pronounced peak;
    -- 'moderate' uses looser thresholds; otherwise report no seasonality
    -- and reset the index to the neutral value 100.
    IF v_monthly_cv > 0.5 AND v_seasonality_index > 150 THEN
        v_seasonal_pattern := 'strong';
        v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
    ELSIF v_monthly_cv > 0.3 AND v_seasonality_index > 120 THEN
        v_seasonal_pattern := 'moderate';
        v_peak_season := TRIM(TO_CHAR(TO_DATE(v_peak_month_num::TEXT, 'MM'), 'Month'));
    ELSE
        v_seasonal_pattern := 'none';
        v_peak_season := NULL;
        v_seasonality_index := 100;
    END IF;

    RETURN QUERY SELECT v_seasonal_pattern, v_seasonality_index, v_peak_season;
END;
$function$;
|
||||
|
||||
-- =============================================================================
|
||||
-- category_hierarchy: Materialized view providing a recursive category tree
|
||||
-- with ancestor paths for efficient rollup queries.
|
||||
--
|
||||
-- Refresh after category changes: REFRESH MATERIALIZED VIEW category_hierarchy;
|
||||
-- =============================================================================
|
||||
-- DROP MATERIALIZED VIEW IF EXISTS category_hierarchy;
|
||||
-- CREATE MATERIALIZED VIEW category_hierarchy AS
|
||||
-- WITH RECURSIVE cat_tree AS (
|
||||
-- SELECT cat_id, name, type, parent_id,
|
||||
-- cat_id AS root_id, 0 AS level, ARRAY[cat_id] AS path
|
||||
-- FROM categories
|
||||
-- WHERE parent_id IS NULL
|
||||
-- UNION ALL
|
||||
-- SELECT c.cat_id, c.name, c.type, c.parent_id,
|
||||
-- ct.root_id, ct.level + 1, ct.path || c.cat_id
|
||||
-- FROM categories c
|
||||
-- JOIN cat_tree ct ON c.parent_id = ct.cat_id
|
||||
-- )
|
||||
-- SELECT cat_id, name, type, parent_id, root_id, level, path,
|
||||
-- (SELECT array_agg(unnest ORDER BY unnest DESC)
|
||||
-- FROM unnest(cat_tree.path) unnest
|
||||
-- WHERE unnest <> cat_tree.cat_id) AS ancestor_ids
|
||||
-- FROM cat_tree;
|
||||
--
|
||||
-- CREATE UNIQUE INDEX ON category_hierarchy (cat_id);
|
||||
@@ -80,7 +80,6 @@ CREATE TABLE public.product_metrics (
|
||||
current_price NUMERIC(10, 2),
|
||||
current_regular_price NUMERIC(10, 2),
|
||||
current_cost_price NUMERIC(10, 4), -- Increased precision for cost
|
||||
current_landing_cost_price NUMERIC(10, 4), -- Increased precision for cost
|
||||
current_stock INT NOT NULL DEFAULT 0,
|
||||
current_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
|
||||
current_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
|
||||
@@ -156,9 +155,9 @@ CREATE TABLE public.product_metrics (
|
||||
days_of_stock_closing_stock NUMERIC(10, 2), -- lead_time_closing_stock - days_of_stock_forecast_units
|
||||
replenishment_needed_raw NUMERIC(10, 2), -- planning_period_forecast_units + config_safety_stock - current_stock - on_order_qty
|
||||
replenishment_units INT, -- CEILING(GREATEST(0, replenishment_needed_raw))
|
||||
replenishment_cost NUMERIC(14, 4), -- replenishment_units * COALESCE(current_landing_cost_price, current_cost_price)
|
||||
replenishment_cost NUMERIC(14, 4), -- replenishment_units * current_cost_price
|
||||
replenishment_retail NUMERIC(14, 4), -- replenishment_units * current_price
|
||||
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - COALESCE(current_landing_cost_price, current_cost_price))
|
||||
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - current_cost_price)
|
||||
to_order_units INT, -- Apply MOQ/UOM logic to replenishment_units
|
||||
forecast_lost_sales_units NUMERIC(10, 2), -- GREATEST(0, -lead_time_closing_stock)
|
||||
forecast_lost_revenue NUMERIC(14, 4), -- forecast_lost_sales_units * current_price
|
||||
@@ -167,7 +166,7 @@ CREATE TABLE public.product_metrics (
|
||||
sells_out_in_days NUMERIC(10, 1), -- (current_stock + on_order_qty) / sales_velocity_daily
|
||||
replenish_date DATE, -- Calc based on when stock hits safety stock minus lead time
|
||||
overstocked_units INT, -- GREATEST(0, current_stock - config_safety_stock - planning_period_forecast_units)
|
||||
overstocked_cost NUMERIC(14, 4), -- overstocked_units * COALESCE(current_landing_cost_price, current_cost_price)
|
||||
overstocked_cost NUMERIC(14, 4), -- overstocked_units * current_cost_price
|
||||
overstocked_retail NUMERIC(14, 4), -- overstocked_units * current_price
|
||||
is_old_stock BOOLEAN, -- Based on age, last sold, last received, on_order status
|
||||
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
-- Migration: Add date_online and shop_score columns to products table
-- These fields are imported from production to improve newsletter recommendation accuracy:
--   date_online = products.date_ol in production (date product went live on the shop)
--   shop_score = products.score in production (sales-based popularity score)
--
-- After running this migration, do a full (non-incremental) import to backfill:
--   INCREMENTAL_UPDATE=false node scripts/import-from-prod.js

-- Add date_online column (production: products.date_ol)
ALTER TABLE products ADD COLUMN IF NOT EXISTS date_online TIMESTAMP WITH TIME ZONE;

-- Add shop_score column (production: products.score)
-- Using NUMERIC(10,2) to preserve the decimal precision from production
ALTER TABLE products ADD COLUMN IF NOT EXISTS shop_score NUMERIC(10, 2) DEFAULT 0;

-- If shop_score was previously created as INTEGER, convert it
-- NOTE(review): this ALTER ... TYPE runs unconditionally and may rewrite the
-- table even when the column already matches — confirm that is acceptable on
-- large deployments.
ALTER TABLE products ALTER COLUMN shop_score TYPE NUMERIC(10, 2);

-- Index on date_online for the newsletter "new products" filter
CREATE INDEX IF NOT EXISTS idx_products_date_online ON products(date_online);
|
||||
@@ -21,6 +21,7 @@ CREATE TABLE products (
|
||||
description TEXT,
|
||||
sku TEXT NOT NULL,
|
||||
created_at TIMESTAMP WITH TIME ZONE,
|
||||
date_online TIMESTAMP WITH TIME ZONE,
|
||||
first_received TIMESTAMP WITH TIME ZONE,
|
||||
stock_quantity INTEGER DEFAULT 0,
|
||||
preorder_count INTEGER DEFAULT 0,
|
||||
@@ -28,7 +29,6 @@ CREATE TABLE products (
|
||||
price NUMERIC(14, 4) NOT NULL,
|
||||
regular_price NUMERIC(14, 4) NOT NULL,
|
||||
cost_price NUMERIC(14, 4),
|
||||
landing_cost_price NUMERIC(14, 4),
|
||||
barcode TEXT,
|
||||
harmonized_tariff_code TEXT,
|
||||
updated_at TIMESTAMP WITH TIME ZONE,
|
||||
@@ -63,6 +63,7 @@ CREATE TABLE products (
|
||||
baskets INTEGER DEFAULT 0,
|
||||
notifies INTEGER DEFAULT 0,
|
||||
date_last_sold DATE,
|
||||
shop_score NUMERIC(10, 2) DEFAULT 0,
|
||||
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (pid)
|
||||
);
|
||||
|
||||
29
inventory-server/migrations/001_create_import_sessions.sql
Normal file
29
inventory-server/migrations/001_create_import_sessions.sql
Normal file
@@ -0,0 +1,29 @@
|
||||
-- Migration: Create import_sessions table
-- Run this against your PostgreSQL database

CREATE TABLE IF NOT EXISTS import_sessions (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL,
    name VARCHAR(255), -- NULL for unnamed/autosave sessions
    current_step VARCHAR(50) NOT NULL, -- 'validation' | 'imageUpload'
    data JSONB NOT NULL, -- Product rows
    product_images JSONB, -- Image assignments
    global_selections JSONB, -- Supplier, company, line, subline
    validation_state JSONB, -- Errors, UPC status, generated item numbers
    -- NOTE(review): TIMESTAMP here is without time zone — confirm the
    -- application's timezone convention before relying on these values.
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

-- Ensure only one unnamed session per user (autosave slot)
-- (partial unique index: applies only to rows where name IS NULL)
CREATE UNIQUE INDEX IF NOT EXISTS idx_unnamed_session_per_user
ON import_sessions (user_id)
WHERE name IS NULL;

-- Index for fast user lookups
CREATE INDEX IF NOT EXISTS idx_import_sessions_user_id
ON import_sessions (user_id);

-- Add comment for documentation
COMMENT ON TABLE import_sessions IS 'Stores in-progress product import sessions for users';
COMMENT ON COLUMN import_sessions.name IS 'Session name - NULL indicates the single unnamed/autosave session per user';
COMMENT ON COLUMN import_sessions.current_step IS 'Which step the user was on: validation or imageUpload';
||||
@@ -0,0 +1,57 @@
|
||||
-- Migration: Make AI prompts extensible with is_singleton column
-- Date: 2024-01-19
-- Description: Removes hardcoded prompt_type CHECK constraint, adds is_singleton column
-- for dynamic uniqueness enforcement, and creates appropriate indexes.
-- NOTE(review): statements are not wrapped in BEGIN/COMMIT here — confirm the
-- deployment tooling runs migrations inside a single transaction.

-- 1. Drop the old CHECK constraints on prompt_type (allows any string value now)
ALTER TABLE ai_prompts DROP CONSTRAINT IF EXISTS ai_prompts_prompt_type_check;
ALTER TABLE ai_prompts DROP CONSTRAINT IF EXISTS company_required_for_specific;

-- 2. Add is_singleton column (defaults to true for backwards compatibility)
ALTER TABLE ai_prompts ADD COLUMN IF NOT EXISTS is_singleton BOOLEAN NOT NULL DEFAULT true;

-- 3. Drop ALL old unique constraints and indexes (cleanup)
-- Some were created as CONSTRAINTS (via ADD CONSTRAINT), others as standalone indexes
-- Must drop constraints first, then remaining standalone indexes

-- Drop constraints (these also remove their backing indexes)
ALTER TABLE ai_prompts DROP CONSTRAINT IF EXISTS unique_company_prompt;
ALTER TABLE ai_prompts DROP CONSTRAINT IF EXISTS idx_unique_general_prompt;
ALTER TABLE ai_prompts DROP CONSTRAINT IF EXISTS idx_unique_system_prompt;

-- Drop standalone indexes (IF EXISTS handles cases where they don't exist)
DROP INDEX IF EXISTS idx_unique_general_prompt;
DROP INDEX IF EXISTS idx_unique_system_prompt;
DROP INDEX IF EXISTS idx_unique_name_validation_system;
DROP INDEX IF EXISTS idx_unique_name_validation_general;
DROP INDEX IF EXISTS idx_unique_description_validation_system;
DROP INDEX IF EXISTS idx_unique_description_validation_general;
DROP INDEX IF EXISTS idx_unique_sanity_check_system;
DROP INDEX IF EXISTS idx_unique_sanity_check_general;
DROP INDEX IF EXISTS idx_unique_bulk_validation_system;
DROP INDEX IF EXISTS idx_unique_bulk_validation_general;
DROP INDEX IF EXISTS idx_unique_name_validation_company;
DROP INDEX IF EXISTS idx_unique_description_validation_company;
DROP INDEX IF EXISTS idx_unique_bulk_validation_company;

-- 4. Create new partial unique indexes based on is_singleton
-- For singleton types WITHOUT company (only one per prompt_type)
CREATE UNIQUE INDEX IF NOT EXISTS idx_singleton_no_company
ON ai_prompts (prompt_type)
WHERE is_singleton = true AND company IS NULL;

-- For singleton types WITH company (only one per prompt_type + company combination)
CREATE UNIQUE INDEX IF NOT EXISTS idx_singleton_with_company
ON ai_prompts (prompt_type, company)
WHERE is_singleton = true AND company IS NOT NULL;

-- 5. Add index for fast lookups by type
CREATE INDEX IF NOT EXISTS idx_prompt_type ON ai_prompts (prompt_type);

-- NOTE: After running this migration, you should:
-- 1. Delete existing prompts with old types (general, system, company_specific)
-- 2. Create new prompts with the new type naming convention:
--    - name_validation_system, name_validation_general, name_validation_company_specific
--    - description_validation_system, description_validation_general, description_validation_company_specific
--    - sanity_check_system, sanity_check_general
--    - bulk_validation_system, bulk_validation_general, bulk_validation_company_specific
|
||||
0
inventory-server/old/psql-csv-import.sh
Executable file → Normal file
0
inventory-server/old/psql-csv-import.sh
Executable file → Normal file
610
inventory-server/package-lock.json
generated
Executable file → Normal file
610
inventory-server/package-lock.json
generated
Executable file → Normal file
@@ -20,9 +20,10 @@
|
||||
"express": "^4.18.2",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"mysql2": "^3.12.0",
|
||||
"openai": "^4.85.3",
|
||||
"openai": "^6.0.0",
|
||||
"pg": "^8.14.1",
|
||||
"pm2": "^5.3.0",
|
||||
"sharp": "^0.33.5",
|
||||
"ssh2": "^1.16.0",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
@@ -30,6 +31,384 @@
|
||||
"nodemon": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@emnapi/runtime": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.6.0.tgz",
|
||||
"integrity": "sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA==",
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"tslib": "^2.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@emnapi/runtime/node_modules/tslib": {
|
||||
"version": "2.8.1",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
|
||||
"license": "0BSD",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/@img/sharp-darwin-arm64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz",
|
||||
"integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-darwin-arm64": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-darwin-x64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz",
|
||||
"integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-darwin-x64": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-darwin-arm64": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz",
|
||||
"integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-darwin-x64": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz",
|
||||
"integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-arm": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz",
|
||||
"integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-arm64": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz",
|
||||
"integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-s390x": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz",
|
||||
"integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-x64": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz",
|
||||
"integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linuxmusl-arm64": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz",
|
||||
"integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linuxmusl-x64": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz",
|
||||
"integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-arm": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz",
|
||||
"integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-arm": "1.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-arm64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz",
|
||||
"integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-arm64": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-s390x": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz",
|
||||
"integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-s390x": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-x64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz",
|
||||
"integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-x64": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linuxmusl-arm64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz",
|
||||
"integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linuxmusl-x64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz",
|
||||
"integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linuxmusl-x64": "1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-wasm32": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz",
|
||||
"integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==",
|
||||
"cpu": [
|
||||
"wasm32"
|
||||
],
|
||||
"license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"@emnapi/runtime": "^1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-win32-ia32": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz",
|
||||
"integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"license": "Apache-2.0 AND LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-win32-x64": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz",
|
||||
"integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0 AND LGPL-3.0-or-later",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@mapbox/node-pre-gyp": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
|
||||
@@ -399,43 +778,12 @@
|
||||
"integrity": "sha512-R/BHQFripuhW6XPXy05hIvXJQdQ4540KnTvEFHSLjXfHYM41liOLKgIJEyYYiQe796xpaMHfe4Uj/p7Uvng2vA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "18.19.76",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.76.tgz",
|
||||
"integrity": "sha512-yvR7Q9LdPz2vGpmpJX5LolrgRdWvB67MJKDPSgIIzpFbaf9a1j/f5DnLp5VDyHGMR0QZHlTr1afsD87QCXFHKw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"undici-types": "~5.26.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node-fetch": {
|
||||
"version": "2.6.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz",
|
||||
"integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"form-data": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/abbrev": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
|
||||
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/abort-controller": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"event-target-shim": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.5"
|
||||
}
|
||||
},
|
||||
"node_modules/accepts": {
|
||||
"version": "1.3.8",
|
||||
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
|
||||
@@ -458,18 +806,6 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/agentkeepalive": {
|
||||
"version": "4.6.0",
|
||||
"resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.6.0.tgz",
|
||||
"integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"humanize-ms": "^1.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/amp": {
|
||||
"version": "0.3.1",
|
||||
"resolved": "https://registry.npmjs.org/amp/-/amp-0.3.1.tgz",
|
||||
@@ -632,13 +968,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.8.1.tgz",
|
||||
"integrity": "sha512-NN+fvwH/kV01dYUQ3PTOZns4LWtWhOFCAhQ/pHb88WQ1hNe5V/dvFwc4VJcDL11LT9xSX0QtsR8sWUuyOuOq7g==",
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"form-data": "^4.0.4",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -735,9 +1071,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
@@ -883,6 +1219,19 @@
|
||||
"node": ">=8.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/color": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
|
||||
"integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1",
|
||||
"color-string": "^1.9.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
@@ -901,6 +1250,16 @@
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/color-string": {
|
||||
"version": "1.9.1",
|
||||
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
|
||||
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-name": "^1.0.0",
|
||||
"simple-swizzle": "^0.2.2"
|
||||
}
|
||||
},
|
||||
"node_modules/color-support": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
|
||||
@@ -1327,15 +1686,6 @@
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/event-target-shim": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
||||
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/eventemitter2": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-5.0.1.tgz",
|
||||
@@ -1460,39 +1810,21 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
|
||||
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/form-data-encoder": {
|
||||
"version": "1.7.2",
|
||||
"resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz",
|
||||
"integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/formdata-node": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz",
|
||||
"integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"node-domexception": "1.0.0",
|
||||
"web-streams-polyfill": "4.0.0-beta.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12.20"
|
||||
}
|
||||
},
|
||||
"node_modules/forwarded": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
|
||||
@@ -1853,15 +2185,6 @@
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/humanize-ms": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
|
||||
"integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
@@ -1932,6 +2255,12 @@
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/is-arrayish": {
|
||||
"version": "0.3.4",
|
||||
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz",
|
||||
"integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-binary-path": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
||||
@@ -2407,25 +2736,6 @@
|
||||
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/node-domexception": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
|
||||
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/jimmywarting"
|
||||
},
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://paypal.me/jimmywarting"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/node-fetch": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||
@@ -2622,25 +2932,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/openai": {
|
||||
"version": "4.85.3",
|
||||
"resolved": "https://registry.npmjs.org/openai/-/openai-4.85.3.tgz",
|
||||
"integrity": "sha512-KTMXAK6FPd2IvsPtglMt0J1GyVrjMxCYzu/mVbCPabzzquSJoZlYpHtE0p0ScZPyt11XTc757xSO4j39j5g+Xw==",
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/openai/-/openai-6.0.0.tgz",
|
||||
"integrity": "sha512-J7LEmTn3WLZnbyEmMYcMPyT5A0fGzhPwSvVUcNRKy6j2hJIbqSFrJERnUHYNkcoCCalRumypnj9AVoe5bVHd3Q==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^18.11.18",
|
||||
"@types/node-fetch": "^2.6.4",
|
||||
"abort-controller": "^3.0.0",
|
||||
"agentkeepalive": "^4.2.1",
|
||||
"form-data-encoder": "1.7.2",
|
||||
"formdata-node": "^4.3.2",
|
||||
"node-fetch": "^2.6.7"
|
||||
},
|
||||
"bin": {
|
||||
"openai": "bin/cli"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"ws": "^8.18.0",
|
||||
"zod": "^3.23.8"
|
||||
"zod": "^3.25 || ^4.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"ws": {
|
||||
@@ -3494,6 +3795,45 @@
|
||||
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/sharp": {
|
||||
"version": "0.33.5",
|
||||
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz",
|
||||
"integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==",
|
||||
"hasInstallScript": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"color": "^4.2.3",
|
||||
"detect-libc": "^2.0.3",
|
||||
"semver": "^7.6.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-darwin-arm64": "0.33.5",
|
||||
"@img/sharp-darwin-x64": "0.33.5",
|
||||
"@img/sharp-libvips-darwin-arm64": "1.0.4",
|
||||
"@img/sharp-libvips-darwin-x64": "1.0.4",
|
||||
"@img/sharp-libvips-linux-arm": "1.0.5",
|
||||
"@img/sharp-libvips-linux-arm64": "1.0.4",
|
||||
"@img/sharp-libvips-linux-s390x": "1.0.4",
|
||||
"@img/sharp-libvips-linux-x64": "1.0.4",
|
||||
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4",
|
||||
"@img/sharp-libvips-linuxmusl-x64": "1.0.4",
|
||||
"@img/sharp-linux-arm": "0.33.5",
|
||||
"@img/sharp-linux-arm64": "0.33.5",
|
||||
"@img/sharp-linux-s390x": "0.33.5",
|
||||
"@img/sharp-linux-x64": "0.33.5",
|
||||
"@img/sharp-linuxmusl-arm64": "0.33.5",
|
||||
"@img/sharp-linuxmusl-x64": "0.33.5",
|
||||
"@img/sharp-wasm32": "0.33.5",
|
||||
"@img/sharp-win32-ia32": "0.33.5",
|
||||
"@img/sharp-win32-x64": "0.33.5"
|
||||
}
|
||||
},
|
||||
"node_modules/shimmer": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz",
|
||||
@@ -3578,6 +3918,15 @@
|
||||
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/simple-swizzle": {
|
||||
"version": "0.2.4",
|
||||
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.4.tgz",
|
||||
"integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-arrayish": "^0.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/simple-update-notifier": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
|
||||
@@ -3941,12 +4290,6 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "5.26.5",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
|
||||
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/unpipe": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
|
||||
@@ -4017,15 +4360,6 @@
|
||||
"lodash": "^4.17.14"
|
||||
}
|
||||
},
|
||||
"node_modules/web-streams-polyfill": {
|
||||
"version": "4.0.0-beta.3",
|
||||
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz",
|
||||
"integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/webidl-conversions": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
||||
|
||||
3
inventory-server/package.json
Executable file → Normal file
3
inventory-server/package.json
Executable file → Normal file
@@ -29,9 +29,10 @@
|
||||
"express": "^4.18.2",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"mysql2": "^3.12.0",
|
||||
"openai": "^4.85.3",
|
||||
"openai": "^6.0.0",
|
||||
"pg": "^8.14.1",
|
||||
"pm2": "^5.3.0",
|
||||
"sharp": "^0.33.5",
|
||||
"ssh2": "^1.16.0",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
|
||||
@@ -11,6 +11,7 @@ const RUN_PERIODIC_METRICS = true;
|
||||
const RUN_BRAND_METRICS = true;
|
||||
const RUN_VENDOR_METRICS = true;
|
||||
const RUN_CATEGORY_METRICS = true;
|
||||
const RUN_LIFECYCLE_FORECASTS = true;
|
||||
|
||||
// Maximum execution time for the entire sequence (e.g., 90 minutes)
|
||||
const MAX_EXECUTION_TIME_TOTAL = 90 * 60 * 1000;
|
||||
@@ -592,6 +593,13 @@ async function runAllCalculations() {
|
||||
historyType: 'product_metrics',
|
||||
statusModule: 'product_metrics'
|
||||
},
|
||||
{
|
||||
run: RUN_LIFECYCLE_FORECASTS,
|
||||
name: 'Lifecycle Forecast Update',
|
||||
sqlFile: 'metrics-new/update_lifecycle_forecasts.sql',
|
||||
historyType: 'lifecycle_forecasts',
|
||||
statusModule: 'lifecycle_forecasts'
|
||||
},
|
||||
{
|
||||
run: RUN_PERIODIC_METRICS,
|
||||
name: 'Periodic Metrics Update',
|
||||
|
||||
283
inventory-server/scripts/embedding-poc.js
Normal file
283
inventory-server/scripts/embedding-poc.js
Normal file
@@ -0,0 +1,283 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Embedding Proof-of-Concept Script
|
||||
*
|
||||
* Demonstrates how category embeddings work for product matching.
|
||||
* Uses OpenAI text-embedding-3-small model.
|
||||
*
|
||||
* Usage: node scripts/embedding-poc.js
|
||||
*/
|
||||
|
||||
const path = require('path');
|
||||
require('dotenv').config({ path: path.join(__dirname, '../.env') });
|
||||
|
||||
const { getDbConnection, closeAllConnections } = require('../src/utils/dbConnection');
|
||||
|
||||
// ============================================================================
|
||||
// Configuration
|
||||
// ============================================================================
|
||||
|
||||
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
||||
const EMBEDDING_MODEL = 'text-embedding-3-small';
|
||||
const EMBEDDING_DIMENSIONS = 1536;
|
||||
|
||||
// Sample products to test (you can modify these)
|
||||
const TEST_PRODUCTS = [
|
||||
{
|
||||
name: "Cosmos Infinity Chipboard - Stamperia",
|
||||
description: "Laser-cut chipboard shapes featuring celestial designs for mixed media projects"
|
||||
},
|
||||
{
|
||||
name: "Distress Oxide Ink Pad - Mermaid Lagoon",
|
||||
description: "Water-reactive dye ink that creates an oxidized effect"
|
||||
},
|
||||
{
|
||||
name: "Hedwig Puffy Stickers - Paper House Productions",
|
||||
description: "3D puffy stickers featuring Harry Potter's owl Hedwig"
|
||||
},
|
||||
{
|
||||
name: "Black Velvet Watercolor Brush Size 6",
|
||||
description: "Round brush for watercolor painting with synthetic bristles"
|
||||
},
|
||||
{
|
||||
name: "Floral Washi Tape Set",
|
||||
description: "Decorative paper tape with flower patterns, pack of 6 rolls"
|
||||
}
|
||||
];
|
||||
|
||||
// ============================================================================
|
||||
// OpenAI Embedding Functions
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Request embeddings for a batch of texts from the OpenAI embeddings API.
 *
 * @param {string[]} texts - Input strings; each is truncated to 8000 chars.
 * @returns {Promise<{embeddings: number[][], usage: object, model: string}>}
 *   Embeddings in the same order as `texts`, plus the API's usage/model info.
 * @throws {Error} When the API responds with a non-2xx status.
 */
async function getEmbeddings(texts) {
  const response = await fetch('https://api.openai.com/v1/embeddings', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${OPENAI_API_KEY}`
    },
    body: JSON.stringify({
      input: texts.map(t => t.substring(0, 8000)), // Max 8k chars per text
      model: EMBEDDING_MODEL,
      dimensions: EMBEDDING_DIMENSIONS
    })
  });

  if (!response.ok) {
    // Error bodies are usually JSON, but a gateway/proxy may return HTML or
    // plain text; don't let a JSON parse failure mask the real HTTP status.
    let detail = `${response.status}`;
    try {
      const error = await response.json();
      detail = error.error?.message || detail;
    } catch {
      // fall through with the status-only message
    }
    throw new Error(`OpenAI API error: ${detail}`);
  }

  const data = await response.json();

  // Sort by index to ensure order matches input
  const sorted = data.data.sort((a, b) => a.index - b.index);

  return {
    embeddings: sorted.map(item => item.embedding),
    usage: data.usage,
    model: data.model
  };
}
|
||||
|
||||
// ============================================================================
|
||||
// Vector Math
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Cosine similarity between two equal-length numeric vectors.
 * Result lies in [-1, 1]; yields NaN when either vector is all zeros.
 */
function cosineSimilarity(a, b) {
  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;

  for (let i = 0; i < a.length; i += 1) {
    const x = a[i];
    const y = b[i];
    dot += x * y;
    sumSqA += x * x;
    sumSqB += y * y;
  }

  return dot / (Math.sqrt(sumSqA) * Math.sqrt(sumSqB));
}
|
||||
|
||||
/**
 * Rank candidate categories by cosine similarity to a query embedding.
 *
 * @param {number[]} queryEmbedding - Embedding of the query (product) text.
 * @param {Array<{embedding: number[]}>} categoryEmbeddings - Candidates; each
 *   must carry an `embedding` array of the same dimensionality.
 * @param {number} [topK=10] - How many of the best matches to return.
 * @returns {Array} Top-K candidates as shallow copies, each annotated with a
 *   `similarity` score, sorted from most to least similar. Input is not mutated.
 */
function findTopMatches(queryEmbedding, categoryEmbeddings, topK = 10) {
  const ranked = categoryEmbeddings
    .map(cat => ({
      ...cat,
      similarity: cosineSimilarity(queryEmbedding, cat.embedding),
    }))
    .sort((x, y) => y.similarity - x.similarity);

  return ranked.slice(0, topK);
}
|
||||
|
||||
// ============================================================================
|
||||
// Database Functions
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Load hierarchical product categories (types 10-13) and build a readable
 * full path for each by walking up `master_cat_id` links.
 *
 * @param {object} connection - mysql2-style connection; `query` resolves to
 *   `[rows]` where each row has cat_id, name, master_cat_id, type.
 * @returns {Promise<Array<{id, name, type, fullPath, embeddingText}>>}
 */
async function fetchCategories(connection) {
  console.log('\n📂 Fetching categories from database...');

  // Fetch hierarchical categories (types 10-13)
  const [rows] = await connection.query(`
    SELECT
      cat_id,
      name,
      master_cat_id,
      type
    FROM product_categories
    WHERE type IN (10, 11, 12, 13)
    ORDER BY type, name
  `);

  console.log(` Found ${rows.length} category records`);

  // Build category paths
  const byId = new Map(rows.map(r => [r.cat_id, r]));
  const categories = [];

  for (const row of rows) {
    const path = [];
    // Guard against circular master_cat_id chains in corrupt data — without
    // this, a cycle would loop forever.
    const seen = new Set();
    let current = row;

    // Walk up the tree to build full path
    while (current && !seen.has(current.cat_id)) {
      seen.add(current.cat_id);
      path.unshift(current.name);
      // Parents outside types 10-13 are not in byId; the walk simply stops.
      current = current.master_cat_id ? byId.get(current.master_cat_id) : null;
    }

    categories.push({
      id: row.cat_id,
      name: row.name,
      type: row.type,
      fullPath: path.join(' > '),
      embeddingText: path.join(' ') // For embedding generation
    });
  }

  // Count by level
  const levels = {
    10: categories.filter(c => c.type === 10).length,
    11: categories.filter(c => c.type === 11).length,
    12: categories.filter(c => c.type === 12).length,
    13: categories.filter(c => c.type === 13).length,
  };

  console.log(` Level breakdown: ${levels[10]} top-level, ${levels[11]} L2, ${levels[12]} L3, ${levels[13]} L4`);

  return categories;
}
|
||||
|
||||
// ============================================================================
|
||||
// Main Script
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Embedding proof-of-concept driver.
 *
 * Flow: connect to the DB over an SSH tunnel, load the category hierarchy,
 * embed every category path via the OpenAI API (batched), then embed each
 * TEST_PRODUCT and print its top-10 nearest categories by cosine similarity.
 * Exits non-zero on missing API key or any runtime failure; always closes
 * DB connections on the way out.
 */
async function main() {
  console.log('═══════════════════════════════════════════════════════════════');
  console.log(' EMBEDDING PROOF-OF-CONCEPT');
  console.log(' Model: ' + EMBEDDING_MODEL);
  console.log('═══════════════════════════════════════════════════════════════');

  if (!OPENAI_API_KEY) {
    console.error('❌ OPENAI_API_KEY not found in environment');
    process.exit(1);
  }

  let connection;

  try {
    // Step 1: Connect to database
    console.log('\n🔌 Connecting to database via SSH tunnel...');
    const { connection: conn } = await getDbConnection();
    connection = conn;
    console.log(' ✅ Connected');

    // Step 2: Fetch categories
    const categories = await fetchCategories(connection);

    // Step 3: Generate embeddings for categories
    // NOTE(review): $0.00002 per category is a hard-coded pricing assumption —
    // confirm against current OpenAI pricing for the configured model.
    console.log('\n🧮 Generating embeddings for categories...');
    console.log(' This will cost approximately $' + (categories.length * 0.00002).toFixed(4));

    const startTime = Date.now();

    // Process in batches of 100 (OpenAI limit is 2048)
    const BATCH_SIZE = 100;
    let totalTokens = 0;

    for (let i = 0; i < categories.length; i += BATCH_SIZE) {
      const batch = categories.slice(i, i + BATCH_SIZE);
      const texts = batch.map(c => c.embeddingText);

      const result = await getEmbeddings(texts);

      // Attach embeddings to categories (getEmbeddings preserves input order)
      for (let j = 0; j < batch.length; j++) {
        batch[j].embedding = result.embeddings[j];
      }

      totalTokens += result.usage.total_tokens;
      console.log(` Batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(categories.length / BATCH_SIZE)}: ${batch.length} categories embedded`);
    }

    const embeddingTime = Date.now() - startTime;
    console.log(` ✅ Generated ${categories.length} embeddings in ${embeddingTime}ms`);
    console.log(` 📊 Total tokens used: ${totalTokens} (~$${(totalTokens * 0.00002).toFixed(4)})`);

    // Step 4: Test with sample products
    console.log('\n═══════════════════════════════════════════════════════════════');
    console.log(' TESTING WITH SAMPLE PRODUCTS');
    console.log('═══════════════════════════════════════════════════════════════');

    for (const product of TEST_PRODUCTS) {
      console.log('\n┌─────────────────────────────────────────────────────────────');
      console.log(`│ Product: "${product.name}"`);
      console.log(`│ Description: "${product.description.substring(0, 60)}..."`);
      console.log('├─────────────────────────────────────────────────────────────');

      // Generate embedding for product (name + description as one string)
      const productText = `${product.name} ${product.description}`;
      const { embeddings: [productEmbedding] } = await getEmbeddings([productText]);

      // Find top matches
      const matches = findTopMatches(productEmbedding, categories, 10);

      console.log('│ Top 10 Category Matches:');
      matches.forEach((match, i) => {
        const similarity = (match.similarity * 100).toFixed(1);
        // 20-char bar scaled so 100% similarity fills it completely
        const bar = '█'.repeat(Math.round(match.similarity * 20));
        const marker = i < 3 ? ' ✅' : '';
        console.log(`│ ${(i + 1).toString().padStart(2)}. [${similarity.padStart(5)}%] ${bar.padEnd(20)} ${match.fullPath}${marker}`);
      });
      console.log('└─────────────────────────────────────────────────────────────');
    }

    // Step 5: Summary
    console.log('\n═══════════════════════════════════════════════════════════════');
    console.log(' SUMMARY');
    console.log('═══════════════════════════════════════════════════════════════');
    console.log(` Categories embedded: ${categories.length}`);
    console.log(` Embedding time: ${embeddingTime}ms (one-time cost)`);
    // NOTE(review): startTime was set before the category-embedding phase, so
    // this "per-product lookup" figure includes the one-time embedding cost —
    // it overstates per-product latency. Consider timing the product loop only.
    console.log(` Per-product lookup: ~${(Date.now() - startTime) / TEST_PRODUCTS.length}ms`);
    console.log(` Vector dimensions: ${EMBEDDING_DIMENSIONS}`);
    // 4 bytes per float32 component
    console.log(` Memory usage: ~${(categories.length * EMBEDDING_DIMENSIONS * 4 / 1024 / 1024).toFixed(2)} MB (in-memory vectors)`);
    console.log('');
    console.log(' 💡 In production:');
    console.log(' - Category embeddings are computed once and cached');
    console.log(' - Only product embedding is computed per-request (~$0.00002)');
    console.log(' - Vector search is instant (in-memory cosine similarity)');
    console.log(' - Top 10 results go to AI for final selection (~$0.0001)');
    console.log('═══════════════════════════════════════════════════════════════\n');

  } catch (error) {
    console.error('\n❌ Error:', error.message);
    if (error.stack) {
      console.error(error.stack);
    }
    process.exit(1);
  } finally {
    // Runs even on process.exit paths above? No — exit() is immediate; this
    // cleanup only applies to the try/catch flow reached without exiting.
    await closeAllConnections();
    console.log('🔌 Database connections closed');
  }
}

// Run the script
main();
|
||||
Binary file not shown.
1619
inventory-server/scripts/forecast/forecast_engine.py
Normal file
1619
inventory-server/scripts/forecast/forecast_engine.py
Normal file
File diff suppressed because it is too large
Load Diff
5
inventory-server/scripts/forecast/requirements.txt
Normal file
5
inventory-server/scripts/forecast/requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
numpy>=1.24
|
||||
scipy>=1.10
|
||||
pandas>=2.0
|
||||
psycopg2-binary>=2.9
|
||||
statsmodels>=0.14
|
||||
128
inventory-server/scripts/forecast/run_forecast.js
Normal file
128
inventory-server/scripts/forecast/run_forecast.js
Normal file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env node
/**
 * Forecast Pipeline Orchestrator
 *
 * Spawns the Python forecast engine with database credentials from the
 * environment. Can be run manually, via cron, or integrated into the
 * existing metrics pipeline.
 *
 * Usage:
 *   node run_forecast.js
 *
 * Environment:
 *   Reads DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_PORT from
 *   /var/www/html/inventory/.env (or current process env).
 */

const { spawn } = require('child_process');
const path = require('path');
const fs = require('fs');

// Load .env file if it exists (production path first, then repo-relative).
// Only the FIRST existing file is loaded (break below).
const envPaths = [
  '/var/www/html/inventory/.env',
  path.join(__dirname, '../../.env'),
];

for (const envPath of envPaths) {
  if (fs.existsSync(envPath)) {
    const envContent = fs.readFileSync(envPath, 'utf-8');
    for (const line of envContent.split('\n')) {
      const trimmed = line.trim();
      // Skip blanks and comment lines
      if (!trimmed || trimmed.startsWith('#')) continue;
      const eqIndex = trimmed.indexOf('=');
      if (eqIndex === -1) continue;
      const key = trimmed.slice(0, eqIndex);
      const value = trimmed.slice(eqIndex + 1);
      // NOTE(review): unlike dotenv, values are NOT unquoted — a line like
      // KEY="x" keeps the quotes in process.env.KEY. Confirm the .env files
      // use bare values.
      // Existing process.env values always win over file values.
      if (!process.env[key]) {
        process.env[key] = value;
      }
    }
    console.log(`Loaded env from ${envPath}`);
    break;
  }
}

// Verify required env vars; fail fast before spawning Python.
const required = ['DB_HOST', 'DB_USER', 'DB_PASSWORD', 'DB_NAME'];
const missing = required.filter(k => !process.env[k]);
if (missing.length > 0) {
  console.error(`Missing required environment variables: ${missing.join(', ')}`);
  process.exit(1);
}

// Paths to the Python engine and its virtualenv, all relative to this script.
const SCRIPT_DIR = __dirname;
const PYTHON_SCRIPT = path.join(SCRIPT_DIR, 'forecast_engine.py');
const VENV_DIR = path.join(SCRIPT_DIR, 'venv');
const REQUIREMENTS = path.join(SCRIPT_DIR, 'requirements.txt');
|
||||
|
||||
/**
 * Pick the Python interpreter to run the engine with.
 * Prefers the project virtualenv's binary; falls back to system `python3`
 * when the venv has not been created yet.
 */
function getPythonBin() {
  const venvPython = path.join(VENV_DIR, 'bin', 'python');
  return fs.existsSync(venvPython) ? venvPython : 'python3';
}
|
||||
|
||||
/**
 * Make sure the virtualenv exists and its packages are installed.
 * Creating the venv happens only once; the pip install is re-run every time
 * because it is idempotent and fast when everything is already present.
 */
async function ensureDependencies() {
  const venvPython = path.join(VENV_DIR, 'bin', 'python');

  if (!fs.existsSync(venvPython)) {
    console.log('Creating virtual environment...');
    await runCommand('python3', ['-m', 'venv', VENV_DIR]);
  }

  console.log('Checking dependencies...');
  await runCommand(venvPython, ['-m', 'pip', 'install', '--quiet', '-r', REQUIREMENTS]);
}
|
||||
|
||||
/**
 * Spawn a child process with inherited stdio and wait for it to finish.
 *
 * @param {string} cmd - Executable to run.
 * @param {string[]} args - Arguments for the executable.
 * @param {object} [options] - Extra spawn options (merged over stdio default).
 * @returns {Promise<void>} Resolves on exit code 0; rejects if the process
 *   cannot be started or exits with a non-zero code.
 */
function runCommand(cmd, args, options = {}) {
  return new Promise((resolve, reject) => {
    const child = spawn(cmd, args, { stdio: 'inherit', ...options });

    child.on('error', reject);
    child.on('close', (code) => {
      if (code === 0) {
        resolve();
      } else {
        reject(new Error(`${cmd} exited with code ${code}`));
      }
    });
  });
}
|
||||
|
||||
/**
 * Orchestrate one forecast run: ensure the Python env is ready, spawn the
 * forecast engine with the current environment, and log timing. Exits the
 * process with code 1 on any failure so cron/monitoring can detect it.
 */
async function main() {
  const startTime = Date.now();
  console.log('='.repeat(60));
  console.log(`Forecast Pipeline - ${new Date().toISOString()}`);
  console.log('='.repeat(60));

  try {
    await ensureDependencies();

    const pythonBin = getPythonBin();
    console.log(`Using Python: ${pythonBin}`);
    console.log(`Running: ${PYTHON_SCRIPT}`);
    console.log('');

    // Pass the full env through so DB_* vars reach the Python engine.
    await runCommand(pythonBin, [PYTHON_SCRIPT], {
      env: {
        ...process.env,
        PYTHONUNBUFFERED: '1', // Real-time output
      },
    });

    const duration = ((Date.now() - startTime) / 1000).toFixed(1);
    console.log('');
    console.log('='.repeat(60));
    console.log(`Forecast pipeline completed in ${duration}s`);
    console.log('='.repeat(60));
  } catch (err) {
    const duration = ((Date.now() - startTime) / 1000).toFixed(1);
    console.error(`Forecast pipeline FAILED after ${duration}s:`, err.message);
    process.exit(1);
  }
}

main();
|
||||
51
inventory-server/scripts/forecast/sql/create_tables.sql
Normal file
51
inventory-server/scripts/forecast/sql/create_tables.sql
Normal file
@@ -0,0 +1,51 @@
|
||||
-- Forecasting Pipeline Tables
-- Run once to create the schema. Safe to re-run (IF NOT EXISTS).

-- Precomputed reference decay curves per brand (or brand x category at any hierarchy level).
-- Each row parameterizes sales(t) = A * exp(-λt) + C for one (brand, category) scope;
-- NULL category columns denote the brand-wide fallback curve.
CREATE TABLE IF NOT EXISTS brand_lifecycle_curves (
  id SERIAL PRIMARY KEY,
  brand TEXT NOT NULL,
  root_category TEXT, -- NULL = brand-level fallback curve, else category name
  cat_id BIGINT, -- NULL = brand-only; else category_hierarchy.cat_id for precise matching
  category_level SMALLINT, -- NULL = brand-only; 0-3 = hierarchy depth
  amplitude NUMERIC(10,4), -- A in: sales(t) = A * exp(-λt) + C
  decay_rate NUMERIC(10,6), -- λ (higher = faster decay)
  baseline NUMERIC(10,4), -- C (long-tail steady-state daily sales)
  r_squared NUMERIC(6,4), -- goodness of fit
  sample_size INT, -- number of products that informed this curve
  median_first_week_sales NUMERIC(10,2), -- for scaling new launches
  median_preorder_sales NUMERIC(10,2), -- for scaling pre-order products
  median_preorder_days NUMERIC(10,2), -- median pre-order accumulation window (days)
  computed_at TIMESTAMP DEFAULT NOW(),
  -- One curve per (brand, category) pair; NULL cat_id rows are the brand fallback.
  -- NOTE(review): in PostgreSQL a UNIQUE constraint allows multiple NULL cat_id
  -- rows per brand — confirm the fallback row is meant to be unique too.
  UNIQUE(brand, cat_id)
);

-- Per-product daily forecasts (next 90 days, regenerated each run)
CREATE TABLE IF NOT EXISTS product_forecasts (
  pid BIGINT NOT NULL,
  forecast_date DATE NOT NULL,
  forecast_units NUMERIC(10,2),
  forecast_revenue NUMERIC(14,4),
  lifecycle_phase TEXT, -- preorder, launch, decay, mature, slow_mover, dormant
  forecast_method TEXT, -- lifecycle_curve, exp_smoothing, velocity, zero
  confidence_lower NUMERIC(10,2),
  confidence_upper NUMERIC(10,2),
  generated_at TIMESTAMP DEFAULT NOW(),
  PRIMARY KEY (pid, forecast_date)
);

-- Secondary access paths: by date (daily rollups) and by phase (dashboards).
CREATE INDEX IF NOT EXISTS idx_pf_date ON product_forecasts(forecast_date);
CREATE INDEX IF NOT EXISTS idx_pf_phase ON product_forecasts(lifecycle_phase);

-- Forecast run history (for monitoring)
CREATE TABLE IF NOT EXISTS forecast_runs (
  id SERIAL PRIMARY KEY,
  started_at TIMESTAMP NOT NULL,
  finished_at TIMESTAMP,
  status TEXT DEFAULT 'running', -- running, completed, failed
  products_forecast INT,
  phase_counts JSONB, -- {"launch": 50, "decay": 200, ...}
  curve_count INT, -- brand curves computed
  error_message TEXT,
  duration_seconds NUMERIC(10,2)
);
|
||||
@@ -6,6 +6,8 @@ const importCategories = require('./import/categories');
|
||||
const { importProducts } = require('./import/products');
|
||||
const importOrders = require('./import/orders');
|
||||
const importPurchaseOrders = require('./import/purchase-orders');
|
||||
const importDailyDeals = require('./import/daily-deals');
|
||||
const importStockSnapshots = require('./import/stock-snapshots');
|
||||
|
||||
dotenv.config({ path: path.join(__dirname, "../.env") });
|
||||
|
||||
@@ -14,6 +16,8 @@ const IMPORT_CATEGORIES = true;
|
||||
const IMPORT_PRODUCTS = true;
|
||||
const IMPORT_ORDERS = true;
|
||||
const IMPORT_PURCHASE_ORDERS = true;
|
||||
const IMPORT_DAILY_DEALS = true;
|
||||
const IMPORT_STOCK_SNAPSHOTS = true;
|
||||
|
||||
// Add flag for incremental updates
|
||||
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
|
||||
@@ -36,7 +40,7 @@ const sshConfig = {
|
||||
password: process.env.PROD_DB_PASSWORD,
|
||||
database: process.env.PROD_DB_NAME,
|
||||
port: process.env.PROD_DB_PORT || 3306,
|
||||
timezone: '-05:00', // Production DB always stores times in EST (UTC-5) regardless of DST
|
||||
timezone: '-05:00', // mysql2 driver timezone — corrected at runtime via adjustDateForMySQL() in utils.js
|
||||
},
|
||||
localDbConfig: {
|
||||
// PostgreSQL config for local
|
||||
@@ -78,7 +82,9 @@ async function main() {
|
||||
IMPORT_CATEGORIES,
|
||||
IMPORT_PRODUCTS,
|
||||
IMPORT_ORDERS,
|
||||
IMPORT_PURCHASE_ORDERS
|
||||
IMPORT_PURCHASE_ORDERS,
|
||||
IMPORT_DAILY_DEALS,
|
||||
IMPORT_STOCK_SNAPSHOTS
|
||||
].filter(Boolean).length;
|
||||
|
||||
try {
|
||||
@@ -126,10 +132,12 @@ async function main() {
|
||||
'categories_enabled', $2::boolean,
|
||||
'products_enabled', $3::boolean,
|
||||
'orders_enabled', $4::boolean,
|
||||
'purchase_orders_enabled', $5::boolean
|
||||
'purchase_orders_enabled', $5::boolean,
|
||||
'daily_deals_enabled', $6::boolean,
|
||||
'stock_snapshots_enabled', $7::boolean
|
||||
)
|
||||
) RETURNING id
|
||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]);
|
||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_DAILY_DEALS, IMPORT_STOCK_SNAPSHOTS]);
|
||||
importHistoryId = historyResult.rows[0].id;
|
||||
} catch (error) {
|
||||
console.error("Error creating import history record:", error);
|
||||
@@ -146,7 +154,9 @@ async function main() {
|
||||
categories: null,
|
||||
products: null,
|
||||
orders: null,
|
||||
purchaseOrders: null
|
||||
purchaseOrders: null,
|
||||
dailyDeals: null,
|
||||
stockSnapshots: null
|
||||
};
|
||||
|
||||
let totalRecordsAdded = 0;
|
||||
@@ -224,6 +234,61 @@ async function main() {
|
||||
}
|
||||
}
|
||||
|
||||
if (IMPORT_DAILY_DEALS) {
|
||||
try {
|
||||
const stepStart = Date.now();
|
||||
results.dailyDeals = await importDailyDeals(prodConnection, localConnection);
|
||||
stepTimings.dailyDeals = Math.round((Date.now() - stepStart) / 1000);
|
||||
|
||||
if (isImportCancelled) throw new Error("Import cancelled");
|
||||
completedSteps++;
|
||||
console.log('Daily deals import result:', results.dailyDeals);
|
||||
|
||||
if (results.dailyDeals?.status === 'error') {
|
||||
console.error('Daily deals import had an error:', results.dailyDeals.error);
|
||||
} else {
|
||||
totalRecordsAdded += parseInt(results.dailyDeals?.recordsAdded || 0);
|
||||
totalRecordsUpdated += parseInt(results.dailyDeals?.recordsUpdated || 0);
|
||||
totalRecordsDeleted += parseInt(results.dailyDeals?.recordsDeleted || 0);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during daily deals import:', error);
|
||||
results.dailyDeals = {
|
||||
status: 'error',
|
||||
error: error.message,
|
||||
recordsAdded: 0,
|
||||
recordsUpdated: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (IMPORT_STOCK_SNAPSHOTS) {
|
||||
try {
|
||||
const stepStart = Date.now();
|
||||
results.stockSnapshots = await importStockSnapshots(prodConnection, localConnection, INCREMENTAL_UPDATE);
|
||||
stepTimings.stockSnapshots = Math.round((Date.now() - stepStart) / 1000);
|
||||
|
||||
if (isImportCancelled) throw new Error("Import cancelled");
|
||||
completedSteps++;
|
||||
console.log('Stock snapshots import result:', results.stockSnapshots);
|
||||
|
||||
if (results.stockSnapshots?.status === 'error') {
|
||||
console.error('Stock snapshots import had an error:', results.stockSnapshots.error);
|
||||
} else {
|
||||
totalRecordsAdded += parseInt(results.stockSnapshots?.recordsAdded || 0);
|
||||
totalRecordsUpdated += parseInt(results.stockSnapshots?.recordsUpdated || 0);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during stock snapshots import:', error);
|
||||
results.stockSnapshots = {
|
||||
status: 'error',
|
||||
error: error.message,
|
||||
recordsAdded: 0,
|
||||
recordsUpdated: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const endTime = Date.now();
|
||||
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
|
||||
|
||||
@@ -241,15 +306,19 @@ async function main() {
|
||||
'products_enabled', $5::boolean,
|
||||
'orders_enabled', $6::boolean,
|
||||
'purchase_orders_enabled', $7::boolean,
|
||||
'categories_result', COALESCE($8::jsonb, 'null'::jsonb),
|
||||
'products_result', COALESCE($9::jsonb, 'null'::jsonb),
|
||||
'orders_result', COALESCE($10::jsonb, 'null'::jsonb),
|
||||
'purchase_orders_result', COALESCE($11::jsonb, 'null'::jsonb),
|
||||
'total_deleted', $12::integer,
|
||||
'total_skipped', $13::integer,
|
||||
'step_timings', $14::jsonb
|
||||
'daily_deals_enabled', $8::boolean,
|
||||
'categories_result', COALESCE($9::jsonb, 'null'::jsonb),
|
||||
'products_result', COALESCE($10::jsonb, 'null'::jsonb),
|
||||
'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
|
||||
'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
|
||||
'daily_deals_result', COALESCE($13::jsonb, 'null'::jsonb),
|
||||
'stock_snapshots_enabled', $14::boolean,
|
||||
'stock_snapshots_result', COALESCE($15::jsonb, 'null'::jsonb),
|
||||
'total_deleted', $16::integer,
|
||||
'total_skipped', $17::integer,
|
||||
'step_timings', $18::jsonb
|
||||
)
|
||||
WHERE id = $15
|
||||
WHERE id = $19
|
||||
`, [
|
||||
totalElapsedSeconds,
|
||||
parseInt(totalRecordsAdded),
|
||||
@@ -258,10 +327,14 @@ async function main() {
|
||||
IMPORT_PRODUCTS,
|
||||
IMPORT_ORDERS,
|
||||
IMPORT_PURCHASE_ORDERS,
|
||||
IMPORT_DAILY_DEALS,
|
||||
JSON.stringify(results.categories),
|
||||
JSON.stringify(results.products),
|
||||
JSON.stringify(results.orders),
|
||||
JSON.stringify(results.purchaseOrders),
|
||||
JSON.stringify(results.dailyDeals),
|
||||
IMPORT_STOCK_SNAPSHOTS,
|
||||
JSON.stringify(results.stockSnapshots),
|
||||
totalRecordsDeleted,
|
||||
totalRecordsSkipped,
|
||||
JSON.stringify(stepTimings),
|
||||
|
||||
167
inventory-server/scripts/import/daily-deals.js
Normal file
167
inventory-server/scripts/import/daily-deals.js
Normal file
@@ -0,0 +1,167 @@
|
||||
const { outputProgress, formatElapsedTime } = require('../metrics-new/utils/progress');
|
||||
|
||||
/**
|
||||
* Import daily deals from production MySQL to local PostgreSQL.
|
||||
*
|
||||
* Production has two tables:
|
||||
* - product_daily_deals (deal_id, deal_date, pid, price_id)
|
||||
* - product_current_prices (price_id, pid, price_each, active, ...)
|
||||
*
|
||||
* We join them in the prod query to denormalize the deal price, avoiding
|
||||
* the need to sync the full product_current_prices table.
|
||||
*
|
||||
* On each sync:
|
||||
* 1. Fetch deals from the last 7 days (plus today) from production
|
||||
* 2. Upsert into local table
|
||||
* 3. Hard delete local deals older than 7 days past their deal_date
|
||||
*/
|
||||
/**
 * Import recent daily deals from production MySQL into local PostgreSQL.
 *
 * Runs the whole sync inside a single local transaction: fetch, upsert,
 * expire old rows, bump sync_status, COMMIT. On any failure the transaction
 * is rolled back and the error re-thrown so the caller records the failure.
 *
 * @param {object} prodConnection - mysql2 connection (query → [rows]).
 * @param {object} localConnection - pg client/pool (query → { rows, rowCount }).
 * @returns {Promise<{status, recordsAdded, recordsUpdated, recordsDeleted, totalRecords}>}
 */
async function importDailyDeals(prodConnection, localConnection) {
  outputProgress({
    operation: "Starting daily deals import",
    status: "running",
  });

  const startTime = Date.now();

  try {
    await localConnection.query('BEGIN');

    // Fetch recent daily deals from production (MySQL 5.7, no CTEs)
    // Join product_current_prices to get the actual deal price
    // Only grab last 7 days + today + tomorrow (for pre-scheduled deals)
    const [deals] = await prodConnection.query(`
      SELECT
        pdd.deal_id,
        pdd.deal_date,
        pdd.pid,
        pdd.price_id,
        pcp.price_each as deal_price
      FROM product_daily_deals pdd
      LEFT JOIN product_current_prices pcp ON pcp.price_id = pdd.price_id
      WHERE pdd.deal_date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
        AND pdd.deal_date <= DATE_ADD(CURDATE(), INTERVAL 1 DAY)
      ORDER BY pdd.deal_date DESC, pdd.pid
    `);

    outputProgress({
      status: "running",
      operation: "Daily deals import",
      message: `Fetched ${deals.length} deals from production`,
      elapsed: formatElapsedTime(startTime),
    });

    let totalInserted = 0;
    let totalUpdated = 0;

    if (deals.length > 0) {
      // Batch upsert — filter to only PIDs that exist locally
      // (avoids FK violations for products not yet synced).
      const pids = [...new Set(deals.map(d => d.pid))];
      const existingResult = await localConnection.query(
        `SELECT pid FROM products WHERE pid = ANY($1)`,
        [pids]
      );
      // Normalize driver result shape (some wrappers return [result]).
      const existingPids = new Set(
        (Array.isArray(existingResult) ? existingResult[0] : existingResult)
          .rows.map(r => Number(r.pid))
      );

      const validDeals = deals.filter(d => existingPids.has(Number(d.pid)));

      if (validDeals.length > 0) {
        // Build batch upsert: 4 positional params per deal row.
        const values = validDeals.flatMap(d => [
          d.deal_date,
          d.pid,
          d.price_id,
          d.deal_price ?? null,
        ]);

        const placeholders = validDeals
          .map((_, i) => `($${i * 4 + 1}, $${i * 4 + 2}, $${i * 4 + 3}, $${i * 4 + 4})`)
          .join(',');

        // The DO UPDATE ... WHERE clause skips no-op updates so unchanged rows
        // aren't rewritten. xmax = 0 distinguishes fresh inserts from updates
        // (PostgreSQL system column; 0 means the row version was not superseded).
        const upsertQuery = `
          WITH upserted AS (
            INSERT INTO product_daily_deals (deal_date, pid, price_id, deal_price)
            VALUES ${placeholders}
            ON CONFLICT (deal_date, pid) DO UPDATE SET
              price_id = EXCLUDED.price_id,
              deal_price = EXCLUDED.deal_price
            WHERE
              product_daily_deals.price_id IS DISTINCT FROM EXCLUDED.price_id OR
              product_daily_deals.deal_price IS DISTINCT FROM EXCLUDED.deal_price
            RETURNING
              CASE WHEN xmax = 0 THEN true ELSE false END as is_insert
          )
          SELECT
            COUNT(*) FILTER (WHERE is_insert) as inserted,
            COUNT(*) FILTER (WHERE NOT is_insert) as updated
          FROM upserted
        `;

        const result = await localConnection.query(upsertQuery, values);
        const queryResult = Array.isArray(result) ? result[0] : result;
        totalInserted = parseInt(queryResult.rows[0].inserted) || 0;
        totalUpdated = parseInt(queryResult.rows[0].updated) || 0;
      }

      const skipped = deals.length - validDeals.length;
      if (skipped > 0) {
        console.log(`Skipped ${skipped} deals (PIDs not in local products table)`);
      }
    }

    // Hard delete deals older than 7 days past their deal_date
    const deleteResult = await localConnection.query(`
      DELETE FROM product_daily_deals
      WHERE deal_date < CURRENT_DATE - INTERVAL '7 days'
    `);
    // rowCount lives on the result object (pg) or on result[0] for wrappers.
    const deletedCount = deleteResult.rowCount ??
      (Array.isArray(deleteResult) ? deleteResult[0]?.rowCount : 0) ?? 0;

    // Update sync status
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('product_daily_deals', NOW())
      ON CONFLICT (table_name) DO UPDATE SET
        last_sync_timestamp = NOW()
    `);

    await localConnection.query('COMMIT');

    outputProgress({
      status: "complete",
      operation: "Daily deals import completed",
      message: `Inserted ${totalInserted}, updated ${totalUpdated}, deleted ${deletedCount} expired`,
      current: totalInserted + totalUpdated,
      total: totalInserted + totalUpdated,
      duration: formatElapsedTime(startTime),
    });

    return {
      status: "complete",
      recordsAdded: totalInserted,
      recordsUpdated: totalUpdated,
      recordsDeleted: deletedCount,
      totalRecords: totalInserted + totalUpdated,
    };
  } catch (error) {
    console.error("Error importing daily deals:", error);

    // Best-effort rollback; the original error is what we re-throw.
    try {
      await localConnection.query('ROLLBACK');
    } catch (rollbackError) {
      console.error("Error during rollback:", rollbackError);
    }

    outputProgress({
      status: "error",
      operation: "Daily deals import failed",
      error: error.message,
    });

    throw error;
  }
}

module.exports = importDailyDeals;
|
||||
@@ -17,6 +17,33 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
const startTime = Date.now();
|
||||
const skippedOrders = new Set();
|
||||
const missingProducts = new Set();
|
||||
|
||||
// Map order status codes to text values (consistent with PO status mapping in purchase-orders.js)
|
||||
const orderStatusMap = {
|
||||
0: 'created',
|
||||
10: 'unfinished',
|
||||
15: 'canceled',
|
||||
16: 'combined',
|
||||
20: 'placed',
|
||||
22: 'placed_incomplete',
|
||||
30: 'canceled',
|
||||
40: 'awaiting_payment',
|
||||
50: 'awaiting_products',
|
||||
55: 'shipping_later',
|
||||
56: 'shipping_together',
|
||||
60: 'ready',
|
||||
61: 'flagged',
|
||||
62: 'fix_before_pick',
|
||||
65: 'manual_picking',
|
||||
70: 'in_pt',
|
||||
80: 'picked',
|
||||
90: 'awaiting_shipment',
|
||||
91: 'remote_wait',
|
||||
92: 'awaiting_pickup',
|
||||
93: 'fix_before_ship',
|
||||
95: 'shipped_confirmed',
|
||||
100: 'shipped'
|
||||
};
|
||||
let recordsAdded = 0;
|
||||
let recordsUpdated = 0;
|
||||
let processedCount = 0;
|
||||
@@ -31,8 +58,12 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
|
||||
);
|
||||
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
|
||||
// Adjust for mysql2 driver timezone vs MySQL server timezone mismatch
|
||||
const mysqlSyncTime = prodConnection.adjustDateForMySQL
|
||||
? prodConnection.adjustDateForMySQL(lastSyncTime)
|
||||
: lastSyncTime;
|
||||
|
||||
console.log('Orders: Using last sync time:', lastSyncTime);
|
||||
console.log('Orders: Using last sync time:', lastSyncTime, '(adjusted:', mysqlSyncTime, ')');
|
||||
|
||||
// First get count of order items - Keep MySQL compatible for production
|
||||
const [[{ total }]] = await prodConnection.query(`
|
||||
@@ -44,23 +75,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
AND o.date_placed IS NOT NULL
|
||||
${incrementalUpdate ? `
|
||||
AND (
|
||||
o.stamp > ?
|
||||
o.stamp > ?
|
||||
OR oi.stamp > ?
|
||||
OR EXISTS (
|
||||
SELECT 1 FROM order_discount_items odi
|
||||
WHERE odi.order_id = o.order_id
|
||||
AND odi.pid = oi.prod_pid
|
||||
)
|
||||
OR EXISTS (
|
||||
SELECT 1 FROM order_tax_info oti
|
||||
SELECT 1 FROM order_tax_info oti
|
||||
JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
|
||||
WHERE oti.order_id = o.order_id
|
||||
WHERE oti.order_id = o.order_id
|
||||
AND otip.pid = oi.prod_pid
|
||||
AND oti.stamp > ?
|
||||
)
|
||||
)
|
||||
` : ''}
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
|
||||
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
|
||||
|
||||
totalOrderItems = total;
|
||||
console.log('Orders: Found changes:', totalOrderItems);
|
||||
@@ -83,23 +109,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
AND o.date_placed IS NOT NULL
|
||||
${incrementalUpdate ? `
|
||||
AND (
|
||||
o.stamp > ?
|
||||
o.stamp > ?
|
||||
OR oi.stamp > ?
|
||||
OR EXISTS (
|
||||
SELECT 1 FROM order_discount_items odi
|
||||
WHERE odi.order_id = o.order_id
|
||||
AND odi.pid = oi.prod_pid
|
||||
)
|
||||
OR EXISTS (
|
||||
SELECT 1 FROM order_tax_info oti
|
||||
SELECT 1 FROM order_tax_info oti
|
||||
JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
|
||||
WHERE oti.order_id = o.order_id
|
||||
WHERE oti.order_id = o.order_id
|
||||
AND otip.pid = oi.prod_pid
|
||||
AND oti.stamp > ?
|
||||
)
|
||||
)
|
||||
` : ''}
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
|
||||
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
|
||||
|
||||
console.log('Orders: Found', orderItems.length, 'order items to process');
|
||||
|
||||
@@ -284,7 +305,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
new Date(order.date), // Convert to TIMESTAMP WITH TIME ZONE
|
||||
order.customer,
|
||||
toTitleCase(order.customer_name) || '',
|
||||
order.status.toString(), // Convert status to TEXT
|
||||
orderStatusMap[order.status] || order.status.toString(), // Map numeric status to text
|
||||
order.canceled,
|
||||
order.summary_discount || 0,
|
||||
order.summary_subtotal || 0,
|
||||
@@ -513,11 +534,12 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
}
|
||||
};
|
||||
|
||||
// Process all data types SEQUENTIALLY for each batch - not in parallel
|
||||
// Process all data types for each batch
|
||||
// Note: these run sequentially because they share a single PG connection
|
||||
// and each manages its own transaction
|
||||
for (let i = 0; i < orderIds.length; i += METADATA_BATCH_SIZE) {
|
||||
const batchIds = orderIds.slice(i, i + METADATA_BATCH_SIZE);
|
||||
|
||||
// Run these sequentially instead of in parallel to avoid transaction conflicts
|
||||
|
||||
await processMetadataBatch(batchIds);
|
||||
await processDiscountsBatch(batchIds);
|
||||
await processTaxesBatch(batchIds);
|
||||
@@ -536,16 +558,36 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
});
|
||||
}
|
||||
|
||||
// Pre-check all products at once
|
||||
// Pre-check all products and preload cost_price into a temp table
|
||||
// This avoids joining public.products in every sub-batch query (was causing 2x slowdown)
|
||||
const allOrderPids = [...new Set(orderItems.map(item => item.prod_pid))];
|
||||
console.log('Orders: Checking', allOrderPids.length, 'unique products');
|
||||
|
||||
|
||||
const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
|
||||
"SELECT pid FROM products WHERE pid = ANY($1::bigint[])",
|
||||
"SELECT pid, cost_price FROM products WHERE pid = ANY($1::bigint[])",
|
||||
[allOrderPids]
|
||||
) : [[]];
|
||||
|
||||
) : [{ rows: [] }];
|
||||
|
||||
const existingPids = new Set(existingProducts.rows.map(p => p.pid));
|
||||
|
||||
// Create temp table with product cost_price for fast lookup in sub-batch queries
|
||||
await localConnection.query(`
|
||||
DROP TABLE IF EXISTS temp_product_costs;
|
||||
CREATE TEMP TABLE temp_product_costs (
|
||||
pid BIGINT PRIMARY KEY,
|
||||
cost_price NUMERIC(14, 4)
|
||||
)
|
||||
`);
|
||||
if (existingProducts.rows.length > 0) {
|
||||
const costPids = existingProducts.rows.filter(p => p.cost_price != null).map(p => p.pid);
|
||||
const costPrices = existingProducts.rows.filter(p => p.cost_price != null).map(p => p.cost_price);
|
||||
if (costPids.length > 0) {
|
||||
await localConnection.query(`
|
||||
INSERT INTO temp_product_costs (pid, cost_price)
|
||||
SELECT * FROM UNNEST($1::bigint[], $2::numeric[])
|
||||
`, [costPids, costPrices]);
|
||||
}
|
||||
}
|
||||
|
||||
// Process in smaller batches
|
||||
for (let i = 0; i < orderIds.length; i += 2000) { // Increased from 1000 to 2000
|
||||
@@ -570,14 +612,15 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
ELSE 0
|
||||
END) as promo_discount_sum,
|
||||
COALESCE(ot.tax, 0) as total_tax,
|
||||
COALESCE(oc.costeach, oi.price * 0.5) as costeach
|
||||
COALESCE(oc.costeach, pc.cost_price, oi.price * 0.5) as costeach
|
||||
FROM temp_order_items oi
|
||||
LEFT JOIN temp_item_discounts id ON oi.order_id = id.order_id AND oi.pid = id.pid
|
||||
LEFT JOIN temp_main_discounts md ON id.order_id = md.order_id AND id.discount_id = md.discount_id
|
||||
LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
|
||||
LEFT JOIN temp_order_costs oc ON oi.order_id = oc.order_id AND oi.pid = oc.pid
|
||||
LEFT JOIN temp_product_costs pc ON oi.pid = pc.pid
|
||||
WHERE oi.order_id = ANY($1)
|
||||
GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach
|
||||
GROUP BY oi.order_id, oi.pid, ot.tax, oc.costeach, pc.cost_price
|
||||
)
|
||||
SELECT
|
||||
oi.order_id as order_number,
|
||||
@@ -587,17 +630,14 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
oi.price,
|
||||
oi.quantity,
|
||||
(
|
||||
-- Part 1: Sale Savings for the Line
|
||||
(oi.base_discount * oi.quantity)
|
||||
+
|
||||
-- Part 2: Prorated Points Discount (if applicable)
|
||||
-- Prorated Points Discount (e.g. loyalty points applied at order level)
|
||||
CASE
|
||||
WHEN om.summary_discount_subtotal > 0 AND om.summary_subtotal > 0 THEN
|
||||
COALESCE(ROUND((om.summary_discount_subtotal * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 4), 0)
|
||||
ELSE 0
|
||||
END
|
||||
+
|
||||
-- Part 3: Specific Item-Level Discount (only if parent discount affected subtotal)
|
||||
-- Specific Item-Level Promo Discount (coupon codes, etc.)
|
||||
COALESCE(ot.promo_discount_sum, 0)
|
||||
)::NUMERIC(14, 4) as discount,
|
||||
COALESCE(ot.total_tax, 0)::NUMERIC(14, 4) as tax,
|
||||
@@ -607,10 +647,11 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
om.customer_name,
|
||||
om.status,
|
||||
om.canceled,
|
||||
COALESCE(ot.costeach, oi.price * 0.5)::NUMERIC(14, 4) as costeach
|
||||
COALESCE(ot.costeach, pc.cost_price, oi.price * 0.5)::NUMERIC(14, 4) as costeach
|
||||
FROM temp_order_items oi
|
||||
JOIN temp_order_meta om ON oi.order_id = om.order_id
|
||||
LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
|
||||
LEFT JOIN temp_product_costs pc ON oi.pid = pc.pid
|
||||
WHERE oi.order_id = ANY($1)
|
||||
ORDER BY oi.order_id, oi.pid
|
||||
`, [subBatchIds]);
|
||||
@@ -654,7 +695,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
o.shipping,
|
||||
o.customer,
|
||||
o.customer_name,
|
||||
o.status.toString(), // Convert status to TEXT
|
||||
o.status, // Already mapped to text via orderStatusMap
|
||||
o.canceled,
|
||||
o.costeach
|
||||
]);
|
||||
@@ -744,6 +785,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
|
||||
DROP TABLE IF EXISTS temp_order_costs;
|
||||
DROP TABLE IF EXISTS temp_main_discounts;
|
||||
DROP TABLE IF EXISTS temp_item_discounts;
|
||||
DROP TABLE IF EXISTS temp_product_costs;
|
||||
`);
|
||||
|
||||
// Commit final transaction
|
||||
|
||||
@@ -75,8 +75,8 @@ async function setupTemporaryTables(connection) {
|
||||
artist TEXT,
|
||||
categories TEXT,
|
||||
created_at TIMESTAMP WITH TIME ZONE,
|
||||
date_online TIMESTAMP WITH TIME ZONE,
|
||||
first_received TIMESTAMP WITH TIME ZONE,
|
||||
landing_cost_price NUMERIC(14, 4),
|
||||
barcode TEXT,
|
||||
harmonized_tariff_code TEXT,
|
||||
updated_at TIMESTAMP WITH TIME ZONE,
|
||||
@@ -98,6 +98,7 @@ async function setupTemporaryTables(connection) {
|
||||
baskets INTEGER,
|
||||
notifies INTEGER,
|
||||
date_last_sold TIMESTAMP WITH TIME ZONE,
|
||||
shop_score NUMERIC(10, 2) DEFAULT 0,
|
||||
primary_iid INTEGER,
|
||||
image TEXT,
|
||||
image_175 TEXT,
|
||||
@@ -137,13 +138,14 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
p.notes AS description,
|
||||
p.itemnumber AS sku,
|
||||
p.date_created,
|
||||
p.date_ol,
|
||||
p.datein AS first_received,
|
||||
p.location,
|
||||
p.upc AS barcode,
|
||||
p.harmonized_tariff_code,
|
||||
p.stamp AS updated_at,
|
||||
CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible,
|
||||
CASE
|
||||
CASE
|
||||
WHEN p.reorder < 0 THEN 0
|
||||
WHEN p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR) THEN 1
|
||||
WHEN COALESCE(pnb.inventory, 0) > 0 THEN 1
|
||||
@@ -160,20 +162,19 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
COALESCE(pnb.inventory, 0) as notions_inv_count,
|
||||
COALESCE(pcp.price_each, 0) as price,
|
||||
COALESCE(p.sellingprice, 0) AS regular_price,
|
||||
CASE
|
||||
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
|
||||
CASE
|
||||
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
|
||||
THEN (
|
||||
SELECT ROUND(SUM(costeach * count) / SUM(count), 5)
|
||||
FROM product_inventory
|
||||
SELECT ROUND(SUM(costeach * count) / SUM(count), 5)
|
||||
FROM product_inventory
|
||||
WHERE pid = p.pid AND count > 0
|
||||
)
|
||||
ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
|
||||
END AS cost_price,
|
||||
NULL as landing_cost_price,
|
||||
s.companyname AS vendor,
|
||||
CASE
|
||||
WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber
|
||||
ELSE sid.supplier_itemnumber
|
||||
CASE
|
||||
WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber
|
||||
ELSE sid.supplier_itemnumber
|
||||
END AS vendor_reference,
|
||||
sid.notions_itemnumber AS notions_reference,
|
||||
CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
|
||||
@@ -181,7 +182,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
pc2.name AS line,
|
||||
pc3.name AS subline,
|
||||
pc4.name AS artist,
|
||||
COALESCE(CASE
|
||||
COALESCE(CASE
|
||||
WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
|
||||
ELSE sid.supplier_qty_per_unit
|
||||
END, sid.notions_qty_per_unit) AS moq,
|
||||
@@ -194,17 +195,18 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
p.country_of_origin,
|
||||
(SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets,
|
||||
(SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies,
|
||||
(SELECT COALESCE(SUM(oi.qty_ordered), 0)
|
||||
FROM order_items oi
|
||||
JOIN _order o ON oi.order_id = o.order_id
|
||||
(SELECT COALESCE(SUM(oi.qty_ordered), 0)
|
||||
FROM order_items oi
|
||||
JOIN _order o ON oi.order_id = o.order_id
|
||||
WHERE oi.prod_pid = p.pid AND o.order_status >= 20) AS total_sold,
|
||||
pls.date_sold as date_last_sold,
|
||||
COALESCE(p.score, 0) as shop_score,
|
||||
(SELECT iid FROM product_images WHERE pid = p.pid AND \`order\` = 255 LIMIT 1) AS primary_iid,
|
||||
GROUP_CONCAT(DISTINCT CASE
|
||||
WHEN pc.cat_id IS NOT NULL
|
||||
GROUP_CONCAT(DISTINCT CASE
|
||||
WHEN pc.cat_id IS NOT NULL
|
||||
AND pc.type IN (10, 20, 11, 21, 12, 13)
|
||||
AND pci.cat_id NOT IN (16, 17)
|
||||
THEN pci.cat_id
|
||||
THEN pci.cat_id
|
||||
END) as category_ids
|
||||
FROM products p
|
||||
LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
|
||||
@@ -238,8 +240,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
const batch = prodData.slice(i, i + BATCH_SIZE);
|
||||
|
||||
const placeholders = batch.map((_, idx) => {
|
||||
const base = idx * 48; // 48 columns
|
||||
return `(${Array.from({ length: 48 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
|
||||
const base = idx * 49; // 49 columns
|
||||
return `(${Array.from({ length: 49 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
|
||||
}).join(',');
|
||||
|
||||
const values = batch.flatMap(row => {
|
||||
@@ -264,8 +266,8 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
row.artist,
|
||||
row.category_ids,
|
||||
validateDate(row.date_created),
|
||||
validateDate(row.date_ol),
|
||||
validateDate(row.first_received),
|
||||
row.landing_cost_price,
|
||||
row.barcode,
|
||||
row.harmonized_tariff_code,
|
||||
validateDate(row.updated_at),
|
||||
@@ -287,6 +289,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
row.baskets,
|
||||
row.notifies,
|
||||
validateDate(row.date_last_sold),
|
||||
Number(row.shop_score) || 0,
|
||||
row.primary_iid,
|
||||
imageUrls.image,
|
||||
imageUrls.image_175,
|
||||
@@ -301,11 +304,11 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
|
||||
INSERT INTO products (
|
||||
pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
|
||||
price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
|
||||
brand, line, subline, artist, categories, created_at, first_received,
|
||||
landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
|
||||
brand, line, subline, artist, categories, created_at, date_online, first_received,
|
||||
barcode, harmonized_tariff_code, updated_at, visible,
|
||||
managing_stock, replenishable, permalink, moq, uom, rating, reviews,
|
||||
weight, length, width, height, country_of_origin, location, total_sold,
|
||||
baskets, notifies, date_last_sold, primary_iid, image, image_175, image_full, options, tags
|
||||
baskets, notifies, date_last_sold, shop_score, primary_iid, image, image_175, image_full, options, tags
|
||||
)
|
||||
VALUES ${placeholders}
|
||||
ON CONFLICT (pid) DO NOTHING
|
||||
@@ -343,13 +346,14 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
p.notes AS description,
|
||||
p.itemnumber AS sku,
|
||||
p.date_created,
|
||||
p.date_ol,
|
||||
p.datein AS first_received,
|
||||
p.location,
|
||||
p.upc AS barcode,
|
||||
p.harmonized_tariff_code,
|
||||
p.stamp AS updated_at,
|
||||
CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible,
|
||||
CASE
|
||||
CASE
|
||||
WHEN p.reorder < 0 THEN 0
|
||||
WHEN p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR) THEN 1
|
||||
WHEN COALESCE(pnb.inventory, 0) > 0 THEN 1
|
||||
@@ -366,20 +370,19 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
COALESCE(pnb.inventory, 0) as notions_inv_count,
|
||||
COALESCE(pcp.price_each, 0) as price,
|
||||
COALESCE(p.sellingprice, 0) AS regular_price,
|
||||
CASE
|
||||
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
|
||||
CASE
|
||||
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
|
||||
THEN (
|
||||
SELECT ROUND(SUM(costeach * count) / SUM(count), 5)
|
||||
FROM product_inventory
|
||||
SELECT ROUND(SUM(costeach * count) / SUM(count), 5)
|
||||
FROM product_inventory
|
||||
WHERE pid = p.pid AND count > 0
|
||||
)
|
||||
ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
|
||||
END AS cost_price,
|
||||
NULL as landing_cost_price,
|
||||
s.companyname AS vendor,
|
||||
CASE
|
||||
WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber
|
||||
ELSE sid.supplier_itemnumber
|
||||
CASE
|
||||
WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber
|
||||
ELSE sid.supplier_itemnumber
|
||||
END AS vendor_reference,
|
||||
sid.notions_itemnumber AS notions_reference,
|
||||
CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
|
||||
@@ -387,7 +390,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
pc2.name AS line,
|
||||
pc3.name AS subline,
|
||||
pc4.name AS artist,
|
||||
COALESCE(CASE
|
||||
COALESCE(CASE
|
||||
WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
|
||||
ELSE sid.supplier_qty_per_unit
|
||||
END, sid.notions_qty_per_unit) AS moq,
|
||||
@@ -400,17 +403,18 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
p.country_of_origin,
|
||||
(SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets,
|
||||
(SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies,
|
||||
(SELECT COALESCE(SUM(oi.qty_ordered), 0)
|
||||
FROM order_items oi
|
||||
JOIN _order o ON oi.order_id = o.order_id
|
||||
(SELECT COALESCE(SUM(oi.qty_ordered), 0)
|
||||
FROM order_items oi
|
||||
JOIN _order o ON oi.order_id = o.order_id
|
||||
WHERE oi.prod_pid = p.pid AND o.order_status >= 20) AS total_sold,
|
||||
pls.date_sold as date_last_sold,
|
||||
COALESCE(p.score, 0) as shop_score,
|
||||
(SELECT iid FROM product_images WHERE pid = p.pid AND \`order\` = 255 LIMIT 1) AS primary_iid,
|
||||
GROUP_CONCAT(DISTINCT CASE
|
||||
WHEN pc.cat_id IS NOT NULL
|
||||
GROUP_CONCAT(DISTINCT CASE
|
||||
WHEN pc.cat_id IS NOT NULL
|
||||
AND pc.type IN (10, 20, 11, 21, 12, 13)
|
||||
AND pci.cat_id NOT IN (16, 17)
|
||||
THEN pci.cat_id
|
||||
THEN pci.cat_id
|
||||
END) as category_ids
|
||||
FROM products p
|
||||
LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
|
||||
@@ -427,16 +431,15 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
|
||||
LEFT JOIN product_last_sold pls ON p.pid = pls.pid
|
||||
WHERE ${incrementalUpdate ? `
|
||||
p.date_created >= DATE(?) OR
|
||||
p.stamp > ? OR
|
||||
ci.stamp > ? OR
|
||||
pcp.date_deactive > ? OR
|
||||
pcp.date_active > ? OR
|
||||
pnb.date_updated > ?
|
||||
-- Add condition for product_images changes if needed for incremental updates
|
||||
-- OR EXISTS (SELECT 1 FROM product_images pi WHERE pi.pid = p.pid AND pi.stamp > ?)
|
||||
` : 'TRUE'}
|
||||
GROUP BY p.pid
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime /*, lastSyncTime */] : []);
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
|
||||
|
||||
outputProgress({
|
||||
status: "running",
|
||||
@@ -450,8 +453,8 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
|
||||
await withRetry(async () => {
|
||||
const placeholders = batch.map((_, idx) => {
|
||||
const base = idx * 48; // 48 columns
|
||||
return `(${Array.from({ length: 48 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
|
||||
const base = idx * 49; // 49 columns
|
||||
return `(${Array.from({ length: 49 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
|
||||
}).join(',');
|
||||
|
||||
const values = batch.flatMap(row => {
|
||||
@@ -476,8 +479,8 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
row.artist,
|
||||
row.category_ids,
|
||||
validateDate(row.date_created),
|
||||
validateDate(row.date_ol),
|
||||
validateDate(row.first_received),
|
||||
row.landing_cost_price,
|
||||
row.barcode,
|
||||
row.harmonized_tariff_code,
|
||||
validateDate(row.updated_at),
|
||||
@@ -499,6 +502,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
row.baskets,
|
||||
row.notifies,
|
||||
validateDate(row.date_last_sold),
|
||||
Number(row.shop_score) || 0,
|
||||
row.primary_iid,
|
||||
imageUrls.image,
|
||||
imageUrls.image_175,
|
||||
@@ -512,11 +516,11 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
INSERT INTO temp_products (
|
||||
pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
|
||||
price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
|
||||
brand, line, subline, artist, categories, created_at, first_received,
|
||||
landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
|
||||
brand, line, subline, artist, categories, created_at, date_online, first_received,
|
||||
barcode, harmonized_tariff_code, updated_at, visible,
|
||||
managing_stock, replenishable, permalink, moq, uom, rating, reviews,
|
||||
weight, length, width, height, country_of_origin, location, total_sold,
|
||||
baskets, notifies, date_last_sold, primary_iid, image, image_175, image_full, options, tags
|
||||
baskets, notifies, date_last_sold, shop_score, primary_iid, image, image_175, image_full, options, tags
|
||||
) VALUES ${placeholders}
|
||||
ON CONFLICT (pid) DO UPDATE SET
|
||||
title = EXCLUDED.title,
|
||||
@@ -536,8 +540,8 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
subline = EXCLUDED.subline,
|
||||
artist = EXCLUDED.artist,
|
||||
created_at = EXCLUDED.created_at,
|
||||
date_online = EXCLUDED.date_online,
|
||||
first_received = EXCLUDED.first_received,
|
||||
landing_cost_price = EXCLUDED.landing_cost_price,
|
||||
barcode = EXCLUDED.barcode,
|
||||
harmonized_tariff_code = EXCLUDED.harmonized_tariff_code,
|
||||
updated_at = EXCLUDED.updated_at,
|
||||
@@ -559,13 +563,14 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
baskets = EXCLUDED.baskets,
|
||||
notifies = EXCLUDED.notifies,
|
||||
date_last_sold = EXCLUDED.date_last_sold,
|
||||
shop_score = EXCLUDED.shop_score,
|
||||
primary_iid = EXCLUDED.primary_iid,
|
||||
image = EXCLUDED.image,
|
||||
image_175 = EXCLUDED.image_175,
|
||||
image_full = EXCLUDED.image_full,
|
||||
options = EXCLUDED.options,
|
||||
tags = EXCLUDED.tags
|
||||
RETURNING
|
||||
RETURNING
|
||||
xmax = 0 as inserted
|
||||
`, values);
|
||||
}, `Error inserting batch ${i} to ${i + batch.length}`);
|
||||
@@ -615,8 +620,8 @@ async function materializeCalculations(prodConnection, localConnection, incremen
|
||||
AND t.barcode IS NOT DISTINCT FROM p.barcode
|
||||
AND t.updated_at IS NOT DISTINCT FROM p.updated_at
|
||||
AND t.total_sold IS NOT DISTINCT FROM p.total_sold
|
||||
-- Check key fields that are likely to change
|
||||
-- We don't need to check every single field, just the important ones
|
||||
AND t.date_online IS NOT DISTINCT FROM p.date_online
|
||||
AND t.shop_score IS NOT DISTINCT FROM p.shop_score
|
||||
`);
|
||||
|
||||
// Get count of products that need updating
|
||||
@@ -664,8 +669,13 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
// Setup temporary tables
|
||||
await setupTemporaryTables(localConnection);
|
||||
|
||||
// Adjust sync time for mysql2 driver timezone vs MySQL server timezone mismatch
|
||||
const mysqlSyncTime = prodConnection.adjustDateForMySQL
|
||||
? prodConnection.adjustDateForMySQL(lastSyncTime)
|
||||
: lastSyncTime;
|
||||
|
||||
// Materialize calculations into temp table
|
||||
const materializeResult = await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime, startTime);
|
||||
const materializeResult = await materializeCalculations(prodConnection, localConnection, incrementalUpdate, mysqlSyncTime, startTime);
|
||||
|
||||
// Get the list of products that need updating
|
||||
const [products] = await localConnection.query(`
|
||||
@@ -689,8 +699,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
t.artist,
|
||||
t.categories,
|
||||
t.created_at,
|
||||
t.date_online,
|
||||
t.first_received,
|
||||
t.landing_cost_price,
|
||||
t.barcode,
|
||||
t.harmonized_tariff_code,
|
||||
t.updated_at,
|
||||
@@ -711,6 +721,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
t.baskets,
|
||||
t.notifies,
|
||||
t.date_last_sold,
|
||||
t.shop_score,
|
||||
t.primary_iid,
|
||||
t.image,
|
||||
t.image_175,
|
||||
@@ -729,8 +740,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
const batch = products.rows.slice(i, i + BATCH_SIZE);
|
||||
|
||||
const placeholders = batch.map((_, idx) => {
|
||||
const base = idx * 47; // 47 columns
|
||||
return `(${Array.from({ length: 47 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
|
||||
const base = idx * 48; // 48 columns (no primary_iid in this INSERT)
|
||||
return `(${Array.from({ length: 48 }, (_, i) => `$${base + i + 1}`).join(', ')})`;
|
||||
}).join(',');
|
||||
|
||||
const values = batch.flatMap(row => {
|
||||
@@ -755,8 +766,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
row.artist,
|
||||
row.categories,
|
||||
validateDate(row.created_at),
|
||||
validateDate(row.date_online),
|
||||
validateDate(row.first_received),
|
||||
row.landing_cost_price,
|
||||
row.barcode,
|
||||
row.harmonized_tariff_code,
|
||||
validateDate(row.updated_at),
|
||||
@@ -778,6 +789,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
row.baskets,
|
||||
row.notifies,
|
||||
validateDate(row.date_last_sold),
|
||||
Number(row.shop_score) || 0,
|
||||
imageUrls.image,
|
||||
imageUrls.image_175,
|
||||
imageUrls.image_full,
|
||||
@@ -791,11 +803,11 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
INSERT INTO products (
|
||||
pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
|
||||
price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
|
||||
brand, line, subline, artist, categories, created_at, first_received,
|
||||
landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
|
||||
brand, line, subline, artist, categories, created_at, date_online, first_received,
|
||||
barcode, harmonized_tariff_code, updated_at, visible,
|
||||
managing_stock, replenishable, permalink, moq, uom, rating, reviews,
|
||||
weight, length, width, height, country_of_origin, location, total_sold,
|
||||
baskets, notifies, date_last_sold, image, image_175, image_full, options, tags
|
||||
baskets, notifies, date_last_sold, shop_score, image, image_175, image_full, options, tags
|
||||
)
|
||||
VALUES ${placeholders}
|
||||
ON CONFLICT (pid) DO UPDATE SET
|
||||
@@ -816,8 +828,8 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
subline = EXCLUDED.subline,
|
||||
artist = EXCLUDED.artist,
|
||||
created_at = EXCLUDED.created_at,
|
||||
date_online = EXCLUDED.date_online,
|
||||
first_received = EXCLUDED.first_received,
|
||||
landing_cost_price = EXCLUDED.landing_cost_price,
|
||||
barcode = EXCLUDED.barcode,
|
||||
harmonized_tariff_code = EXCLUDED.harmonized_tariff_code,
|
||||
updated_at = EXCLUDED.updated_at,
|
||||
@@ -839,15 +851,16 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
|
||||
baskets = EXCLUDED.baskets,
|
||||
notifies = EXCLUDED.notifies,
|
||||
date_last_sold = EXCLUDED.date_last_sold,
|
||||
shop_score = EXCLUDED.shop_score,
|
||||
image = EXCLUDED.image,
|
||||
image_175 = EXCLUDED.image_175,
|
||||
image_full = EXCLUDED.image_full,
|
||||
options = EXCLUDED.options,
|
||||
tags = EXCLUDED.tags
|
||||
RETURNING
|
||||
RETURNING
|
||||
xmax = 0 as inserted
|
||||
)
|
||||
SELECT
|
||||
SELECT
|
||||
COUNT(*) FILTER (WHERE inserted) as inserted,
|
||||
COUNT(*) FILTER (WHERE NOT inserted) as updated
|
||||
FROM upserted
|
||||
|
||||
@@ -65,8 +65,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
||||
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
|
||||
);
|
||||
const lastSyncTime = syncInfo?.rows?.[0]?.last_sync_timestamp || '1970-01-01';
|
||||
// Adjust for mysql2 driver timezone vs MySQL server timezone mismatch
|
||||
const mysqlSyncTime = prodConnection.adjustDateForMySQL
|
||||
? prodConnection.adjustDateForMySQL(lastSyncTime)
|
||||
: lastSyncTime;
|
||||
|
||||
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
|
||||
console.log('Purchase Orders: Using last sync time:', lastSyncTime, '(adjusted:', mysqlSyncTime, ')');
|
||||
|
||||
// Create temp tables for processing
|
||||
await localConnection.query(`
|
||||
@@ -254,7 +258,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
||||
OR p.date_estin > ?
|
||||
)
|
||||
` : ''}
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
|
||||
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
|
||||
|
||||
const totalPOs = poCount[0].total;
|
||||
console.log(`Found ${totalPOs} relevant purchase orders`);
|
||||
@@ -291,7 +295,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
||||
` : ''}
|
||||
ORDER BY p.po_id
|
||||
LIMIT ${PO_BATCH_SIZE} OFFSET ${offset}
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
|
||||
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime, mysqlSyncTime] : []);
|
||||
|
||||
if (poList.length === 0) {
|
||||
allPOsProcessed = true;
|
||||
@@ -426,7 +430,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
||||
OR r.date_created > ?
|
||||
)
|
||||
` : ''}
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime] : []);
|
||||
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime] : []);
|
||||
|
||||
const totalReceivings = receivingCount[0].total;
|
||||
console.log(`Found ${totalReceivings} relevant receivings`);
|
||||
@@ -463,7 +467,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
|
||||
` : ''}
|
||||
ORDER BY r.receiving_id
|
||||
LIMIT ${PO_BATCH_SIZE} OFFSET ${offset}
|
||||
`, incrementalUpdate ? [lastSyncTime, lastSyncTime] : []);
|
||||
`, incrementalUpdate ? [mysqlSyncTime, mysqlSyncTime] : []);
|
||||
|
||||
if (receivingList.length === 0) {
|
||||
allReceivingsProcessed = true;
|
||||
|
||||
188
inventory-server/scripts/import/stock-snapshots.js
Normal file
188
inventory-server/scripts/import/stock-snapshots.js
Normal file
@@ -0,0 +1,188 @@
|
||||
const { outputProgress, formatElapsedTime, calculateRate } = require('../metrics-new/utils/progress');
|
||||
|
||||
const BATCH_SIZE = 5000;
|
||||
|
||||
/**
|
||||
* Imports daily stock snapshots from MySQL's snap_product_value table to PostgreSQL.
|
||||
* This provides historical end-of-day stock quantities per product, dating back to 2012.
|
||||
*
|
||||
* MySQL source table: snap_product_value (date, pid, count, pending, value)
|
||||
* - date: snapshot date (typically yesterday's date, recorded daily by cron)
|
||||
* - pid: product ID
|
||||
* - count: end-of-day stock quantity (sum of product_inventory.count)
|
||||
* - pending: pending/on-order quantity
|
||||
* - value: total inventory value at cost (sum of costeach * count)
|
||||
*
|
||||
* PostgreSQL target table: stock_snapshots (snapshot_date, pid, stock_quantity, pending_quantity, stock_value)
|
||||
*
|
||||
* @param {object} prodConnection - MySQL connection to production DB
|
||||
* @param {object} localConnection - PostgreSQL connection wrapper
|
||||
* @param {boolean} incrementalUpdate - If true, only fetch new snapshots since last import
|
||||
* @returns {object} Import statistics
|
||||
*/
|
||||
async function importStockSnapshots(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();

  outputProgress({
    status: 'running',
    operation: 'Stock snapshots import',
    message: 'Starting stock snapshots import...',
    current: 0,
    total: 0,
    elapsed: formatElapsedTime(startTime)
  });

  // Ensure target table exists. (snapshot_date, pid) is the natural key;
  // the composite PK also backs the ON CONFLICT upsert below.
  await localConnection.query(`
    CREATE TABLE IF NOT EXISTS stock_snapshots (
      snapshot_date DATE NOT NULL,
      pid BIGINT NOT NULL,
      stock_quantity INT NOT NULL DEFAULT 0,
      pending_quantity INT NOT NULL DEFAULT 0,
      stock_value NUMERIC(14, 4) NOT NULL DEFAULT 0,
      PRIMARY KEY (snapshot_date, pid)
    )
  `);

  // Create index for efficient lookups by pid
  await localConnection.query(`
    CREATE INDEX IF NOT EXISTS idx_stock_snapshots_pid ON stock_snapshots (pid)
  `);

  // Determine the start date for the import
  let startDate = '2020-01-01'; // Default: match the orders/snapshots date range
  if (incrementalUpdate) {
    const [result] = await localConnection.query(`
      SELECT MAX(snapshot_date)::text AS max_date FROM stock_snapshots
    `);
    if (result.rows[0]?.max_date) {
      // All source queries below use a strict `date > ?` comparison, so
      // resuming from the last imported date effectively starts the import
      // at the day after it.
      startDate = result.rows[0].max_date;
    }
  }

  outputProgress({
    status: 'running',
    operation: 'Stock snapshots import',
    message: `Fetching stock snapshots from MySQL since ${startDate}...`,
    current: 0,
    total: 0,
    elapsed: formatElapsedTime(startTime)
  });

  // Count total rows to import. Coerce explicitly: depending on driver
  // configuration, aggregate results may arrive as strings — TODO confirm
  // against the prodConnection wrapper's typing behavior.
  const [countResult] = await prodConnection.query(
    `SELECT COUNT(*) AS total FROM snap_product_value WHERE date > ?`,
    [startDate]
  );
  const totalRows = Number(countResult[0].total);

  if (totalRows === 0) {
    outputProgress({
      status: 'complete',
      operation: 'Stock snapshots import',
      message: 'No new stock snapshots to import',
      current: 0,
      total: 0,
      elapsed: formatElapsedTime(startTime)
    });
    return { recordsAdded: 0, recordsUpdated: 0, status: 'complete' };
  }

  outputProgress({
    status: 'running',
    operation: 'Stock snapshots import',
    message: `Found ${totalRows.toLocaleString()} stock snapshot rows to import`,
    current: 0,
    total: totalRows,
    elapsed: formatElapsedTime(startTime)
  });

  // Process in batches using date-based pagination (more efficient than OFFSET)
  let processedRows = 0;
  let recordsAdded = 0;
  let currentDate = startDate; // exclusive lower bound for the next date batch

  while (processedRows < totalRows) {
    // Fetch the next batch of distinct snapshot dates
    const [dateBatch] = await prodConnection.query(
      `SELECT DISTINCT date FROM snap_product_value
       WHERE date > ? ORDER BY date LIMIT 10`,
      [currentDate]
    );

    if (dateBatch.length === 0) break;

    const lastDate = dateBatch[dateBatch.length - 1].date;

    // Fetch all rows for these dates
    const [rows] = await prodConnection.query(
      `SELECT date, pid, count AS stock_quantity, pending AS pending_quantity, value AS stock_value
       FROM snap_product_value
       WHERE date > ? AND date <= ?
       ORDER BY date, pid`,
      [currentDate, lastDate]
    );

    if (rows.length === 0) break;

    // Batch insert into PostgreSQL using UNNEST for efficiency
    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
      const batch = rows.slice(i, i + BATCH_SIZE);

      // Column-wise arrays for the UNNEST upsert. Renamed from `dates`
      // to avoid shadowing confusion with the date-batch variables above.
      const snapshotDates = batch.map(r => r.date);
      const pids = batch.map(r => r.pid);
      const quantities = batch.map(r => r.stock_quantity);
      const pending = batch.map(r => r.pending_quantity);
      const values = batch.map(r => r.stock_value);

      try {
        await localConnection.query(`
          INSERT INTO stock_snapshots (snapshot_date, pid, stock_quantity, pending_quantity, stock_value)
          SELECT * FROM UNNEST(
            $1::date[], $2::bigint[], $3::int[], $4::int[], $5::numeric[]
          )
          ON CONFLICT (snapshot_date, pid) DO UPDATE SET
            stock_quantity = EXCLUDED.stock_quantity,
            pending_quantity = EXCLUDED.pending_quantity,
            stock_value = EXCLUDED.stock_value
        `, [snapshotDates, pids, quantities, pending, values]);

        // NOTE: counts attempted rows; upsert conflicts are updates but are
        // still tallied under recordsAdded (matches prior behavior).
        recordsAdded += batch.length;
      } catch (err) {
        // Best-effort: log and continue with the next batch. Bug fix: report
        // lastDate (the end of the range actually being inserted) — the old
        // message used currentDate, which at this point still holds the
        // PREVIOUS batch's boundary.
        console.error(`Error inserting batch at offset ${i} (date range ending ${lastDate}):`, err.message);
      }
    }

    processedRows += rows.length;
    currentDate = lastDate;

    outputProgress({
      status: 'running',
      operation: 'Stock snapshots import',
      message: `Imported ${processedRows.toLocaleString()} / ${totalRows.toLocaleString()} rows (through ${currentDate})`,
      current: processedRows,
      total: totalRows,
      elapsed: formatElapsedTime(startTime),
      rate: calculateRate(processedRows, startTime)
    });
  }

  outputProgress({
    status: 'complete',
    operation: 'Stock snapshots import',
    message: `Stock snapshots import complete: ${recordsAdded.toLocaleString()} rows`,
    current: processedRows,
    total: totalRows,
    elapsed: formatElapsedTime(startTime)
  });

  return {
    recordsAdded,
    recordsUpdated: 0,
    status: 'complete'
  };
}
|
||||
|
||||
module.exports = importStockSnapshots;
|
||||
@@ -48,6 +48,37 @@ async function setupConnections(sshConfig) {
|
||||
stream: tunnel.stream,
|
||||
});
|
||||
|
||||
// Detect MySQL server timezone and calculate correction for the driver timezone mismatch.
|
||||
// The mysql2 driver is configured with timezone: '-05:00' (EST), but the MySQL server
|
||||
// may be in a different timezone (e.g., America/Chicago = CST/CDT). When the driver
|
||||
// formats a JS Date as EST and MySQL interprets it in its own timezone, DATETIME
|
||||
// comparisons can be off. This correction adjusts Date objects before they're passed
|
||||
// to MySQL queries so the formatted string matches the server's local time.
|
||||
const [[{ utcDiffSec }]] = await prodConnection.query(
|
||||
"SELECT TIMESTAMPDIFF(SECOND, NOW(), UTC_TIMESTAMP()) as utcDiffSec"
|
||||
);
|
||||
const mysqlOffsetMs = -utcDiffSec * 1000; // MySQL UTC offset in ms (e.g., -21600000 for CST)
|
||||
const driverOffsetMs = -5 * 3600 * 1000; // Driver's -05:00 in ms (-18000000)
|
||||
const tzCorrectionMs = driverOffsetMs - mysqlOffsetMs;
|
||||
// CST (winter): -18000000 - (-21600000) = +3600000 (1 hour correction needed)
|
||||
// CDT (summer): -18000000 - (-18000000) = 0 (no correction needed)
|
||||
|
||||
if (tzCorrectionMs !== 0) {
|
||||
console.log(`MySQL timezone correction: ${tzCorrectionMs / 1000}s (server offset: ${utcDiffSec}s from UTC)`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjusts a Date/timestamp for the mysql2 driver timezone mismatch before
|
||||
* passing it as a query parameter to MySQL. This ensures that the string
|
||||
* mysql2 generates matches the timezone that DATETIME values are stored in.
|
||||
*/
|
||||
function adjustDateForMySQL(date) {
|
||||
if (!date || tzCorrectionMs === 0) return date;
|
||||
const d = date instanceof Date ? date : new Date(date);
|
||||
return new Date(d.getTime() - tzCorrectionMs);
|
||||
}
|
||||
prodConnection.adjustDateForMySQL = adjustDateForMySQL;
|
||||
|
||||
// Setup PostgreSQL connection pool for local
|
||||
const localPool = new Pool(sshConfig.localDbConfig);
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ BEGIN
|
||||
p.visible as is_visible, p.replenishable,
|
||||
COALESCE(p.price, 0.00) as current_price, COALESCE(p.regular_price, 0.00) as current_regular_price,
|
||||
COALESCE(p.cost_price, 0.00) as current_cost_price,
|
||||
COALESCE(p.landing_cost_price, p.cost_price, 0.00) as current_effective_cost, -- Use landing if available, else cost
|
||||
COALESCE(p.cost_price, 0.00) as current_effective_cost,
|
||||
p.stock_quantity as current_stock, -- Use actual current stock for forecast base
|
||||
p.created_at, p.first_received, p.date_last_sold,
|
||||
p.moq,
|
||||
@@ -214,7 +214,7 @@ BEGIN
|
||||
-- Final INSERT/UPDATE statement using all the prepared CTEs
|
||||
INSERT INTO public.product_metrics (
|
||||
pid, last_calculated, sku, title, brand, vendor, image_url, is_visible, is_replenishable,
|
||||
current_price, current_regular_price, current_cost_price, current_landing_cost_price,
|
||||
current_price, current_regular_price, current_cost_price,
|
||||
current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
|
||||
on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
|
||||
date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
|
||||
@@ -242,7 +242,7 @@ BEGIN
|
||||
SELECT
|
||||
-- Select columns in order, joining all CTEs by pid
|
||||
ci.pid, _start_time, ci.sku, ci.title, ci.brand, ci.vendor, ci.image_url, ci.is_visible, ci.replenishable,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price,
|
||||
ci.current_stock, (ci.current_stock * COALESCE(ci.current_effective_cost, 0.00))::numeric(12,2), (ci.current_stock * COALESCE(ci.current_price, 0.00))::numeric(12,2), (ci.current_stock * COALESCE(ci.current_regular_price, 0.00))::numeric(12,2),
|
||||
COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00)::numeric(12,2), (COALESCE(ooi.on_order_qty, 0) * COALESCE(ci.current_price, 0.00))::numeric(12,2), ooi.earliest_expected_date,
|
||||
|
||||
@@ -415,7 +415,7 @@ BEGIN
|
||||
-- *** IMPORTANT: List ALL columns here, ensuring order matches INSERT list ***
|
||||
-- Update ALL columns to ensure entire row is refreshed
|
||||
last_calculated = EXCLUDED.last_calculated, sku = EXCLUDED.sku, title = EXCLUDED.title, brand = EXCLUDED.brand, vendor = EXCLUDED.vendor, image_url = EXCLUDED.image_url, is_visible = EXCLUDED.is_visible, is_replenishable = EXCLUDED.is_replenishable,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price,
|
||||
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
|
||||
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
|
||||
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,
|
||||
|
||||
@@ -10,7 +10,7 @@ DECLARE
|
||||
_date DATE;
|
||||
_count INT;
|
||||
_total_records INT := 0;
|
||||
_begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2024-01-01'); -- Starting point for data rebuild
|
||||
_begin_date DATE := (SELECT MIN(date)::date FROM orders WHERE date >= '2020-01-01'); -- Starting point: captures all historical order data
|
||||
_end_date DATE := CURRENT_DATE;
|
||||
BEGIN
|
||||
RAISE NOTICE 'Beginning daily snapshots rebuild from % to %. Starting at %', _begin_date, _end_date, _start_time;
|
||||
@@ -36,7 +36,13 @@ BEGIN
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.landing_cost_price, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN
|
||||
COALESCE(
|
||||
o.costeach,
|
||||
get_weighted_avg_cost(p.pid, o.date::date),
|
||||
p.cost_price
|
||||
) * o.quantity
|
||||
ELSE 0 END), 0.00) AS cogs,
|
||||
COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue,
|
||||
|
||||
-- Aggregate Returns (Quantity < 0 or Status = Returned)
|
||||
@@ -63,15 +69,17 @@ BEGIN
|
||||
GROUP BY r.pid
|
||||
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
|
||||
),
|
||||
-- Get stock quantities for the day - note this is approximate since we're using current products data
|
||||
-- Use historical stock from stock_snapshots when available,
|
||||
-- falling back to current stock from products table
|
||||
StockData AS (
|
||||
SELECT
|
||||
p.pid,
|
||||
p.stock_quantity,
|
||||
COALESCE(p.landing_cost_price, p.cost_price, 0.00) as effective_cost_price,
|
||||
COALESCE(ss.stock_quantity, p.stock_quantity) AS stock_quantity,
|
||||
COALESCE(ss.stock_value, p.stock_quantity * COALESCE(p.cost_price, 0.00)) AS stock_value,
|
||||
COALESCE(p.price, 0.00) as current_price,
|
||||
COALESCE(p.regular_price, 0.00) as current_regular_price
|
||||
FROM public.products p
|
||||
LEFT JOIN stock_snapshots ss ON p.pid = ss.pid AND ss.snapshot_date = _date
|
||||
)
|
||||
INSERT INTO public.daily_product_snapshots (
|
||||
snapshot_date,
|
||||
@@ -99,9 +107,9 @@ BEGIN
|
||||
_date AS snapshot_date,
|
||||
COALESCE(sd.pid, rd.pid) AS pid,
|
||||
sd.sku,
|
||||
-- Use current stock as approximation, since historical stock data may not be available
|
||||
-- Historical stock from stock_snapshots, falls back to current stock
|
||||
s.stock_quantity AS eod_stock_quantity,
|
||||
s.stock_quantity * s.effective_cost_price AS eod_stock_cost,
|
||||
s.stock_value AS eod_stock_cost,
|
||||
s.stock_quantity * s.current_price AS eod_stock_retail,
|
||||
s.stock_quantity * s.current_regular_price AS eod_stock_gross,
|
||||
(s.stock_quantity <= 0) AS stockout_flag,
|
||||
@@ -111,10 +119,10 @@ BEGIN
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00),
|
||||
COALESCE(sd.discounts, 0.00),
|
||||
COALESCE(sd.returns_revenue, 0.00),
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue,
|
||||
COALESCE(sd.cogs, 0.00),
|
||||
COALESCE(sd.gross_regular_revenue, 0.00),
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||
-- Receiving metrics
|
||||
COALESCE(rd.units_received, 0),
|
||||
COALESCE(rd.cost_received, 0.00),
|
||||
|
||||
@@ -23,21 +23,21 @@ BEGIN
|
||||
-- Only include products with valid sales data in each time period
|
||||
COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
|
||||
SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
|
||||
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
|
||||
SUM(COALESCE(pm.revenue_7d, 0)) AS revenue_7d,
|
||||
|
||||
COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
|
||||
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
|
||||
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
|
||||
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
|
||||
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
|
||||
SUM(COALESCE(pm.revenue_30d, 0)) AS revenue_30d,
|
||||
SUM(COALESCE(pm.cogs_30d, 0)) AS cogs_30d,
|
||||
SUM(COALESCE(pm.profit_30d, 0)) AS profit_30d,
|
||||
|
||||
COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
|
||||
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
|
||||
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
|
||||
SUM(COALESCE(pm.revenue_365d, 0)) AS revenue_365d,
|
||||
|
||||
COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
|
||||
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
|
||||
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
|
||||
SUM(COALESCE(pm.lifetime_revenue, 0)) AS lifetime_revenue
|
||||
FROM public.product_metrics pm
|
||||
JOIN public.products p ON pm.pid = p.pid
|
||||
GROUP BY brand_group
|
||||
|
||||
@@ -28,8 +28,8 @@ BEGIN
|
||||
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
|
||||
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
|
||||
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
|
||||
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
|
||||
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
|
||||
SUM(COALESCE(pm.cogs_30d, 0)) AS cogs_30d,
|
||||
SUM(COALESCE(pm.profit_30d, 0)) AS profit_30d,
|
||||
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
|
||||
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
|
||||
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
|
||||
@@ -38,58 +38,56 @@ BEGIN
|
||||
JOIN public.product_metrics pm ON pc.pid = pm.pid
|
||||
GROUP BY pc.cat_id
|
||||
),
|
||||
-- Calculate rolled-up metrics (including all descendant categories)
|
||||
-- Map each category to ALL distinct products in it or any descendant.
|
||||
-- Uses the path array from category_hierarchy: for product P in category C,
|
||||
-- P contributes to C and every ancestor in C's path.
|
||||
-- DISTINCT ensures each (ancestor, pid) pair appears only once, preventing
|
||||
-- double-counting when a product belongs to multiple categories under the same parent.
|
||||
CategoryProducts AS (
|
||||
SELECT DISTINCT
|
||||
ancestor_cat_id,
|
||||
pc.pid
|
||||
FROM public.product_categories pc
|
||||
JOIN category_hierarchy ch ON pc.cat_id = ch.cat_id
|
||||
CROSS JOIN LATERAL unnest(ch.path) AS ancestor_cat_id
|
||||
),
|
||||
-- Calculate rolled-up metrics using deduplicated product sets
|
||||
RolledUpMetrics AS (
|
||||
SELECT
|
||||
ch.cat_id,
|
||||
-- Sum metrics from this category and all its descendants
|
||||
SUM(dcm.product_count) AS product_count,
|
||||
SUM(dcm.active_product_count) AS active_product_count,
|
||||
SUM(dcm.replenishable_product_count) AS replenishable_product_count,
|
||||
SUM(dcm.current_stock_units) AS current_stock_units,
|
||||
SUM(dcm.current_stock_cost) AS current_stock_cost,
|
||||
SUM(dcm.current_stock_retail) AS current_stock_retail,
|
||||
SUM(dcm.sales_7d) AS sales_7d,
|
||||
SUM(dcm.revenue_7d) AS revenue_7d,
|
||||
SUM(dcm.sales_30d) AS sales_30d,
|
||||
SUM(dcm.revenue_30d) AS revenue_30d,
|
||||
SUM(dcm.cogs_30d) AS cogs_30d,
|
||||
SUM(dcm.profit_30d) AS profit_30d,
|
||||
SUM(dcm.sales_365d) AS sales_365d,
|
||||
SUM(dcm.revenue_365d) AS revenue_365d,
|
||||
SUM(dcm.lifetime_sales) AS lifetime_sales,
|
||||
SUM(dcm.lifetime_revenue) AS lifetime_revenue
|
||||
FROM category_hierarchy ch
|
||||
LEFT JOIN DirectCategoryMetrics dcm ON
|
||||
dcm.cat_id = ch.cat_id OR
|
||||
dcm.cat_id = ANY(SELECT cat_id FROM category_hierarchy WHERE ch.cat_id = ANY(ancestor_ids))
|
||||
GROUP BY ch.cat_id
|
||||
),
|
||||
PreviousPeriodCategoryMetrics AS (
|
||||
-- Get previous period metrics for growth calculation
|
||||
SELECT
|
||||
pc.cat_id,
|
||||
SUM(CASE WHEN dps.snapshot_date >= CURRENT_DATE - INTERVAL '59 days'
|
||||
AND dps.snapshot_date < CURRENT_DATE - INTERVAL '29 days'
|
||||
THEN dps.units_sold ELSE 0 END) AS sales_prev_30d,
|
||||
SUM(CASE WHEN dps.snapshot_date >= CURRENT_DATE - INTERVAL '59 days'
|
||||
AND dps.snapshot_date < CURRENT_DATE - INTERVAL '29 days'
|
||||
THEN dps.net_revenue ELSE 0 END) AS revenue_prev_30d
|
||||
FROM public.daily_product_snapshots dps
|
||||
JOIN public.product_categories pc ON dps.pid = pc.pid
|
||||
GROUP BY pc.cat_id
|
||||
cp.ancestor_cat_id AS cat_id,
|
||||
COUNT(DISTINCT cp.pid) AS product_count,
|
||||
COUNT(DISTINCT CASE WHEN pm.is_visible THEN cp.pid END) AS active_product_count,
|
||||
COUNT(DISTINCT CASE WHEN pm.is_replenishable THEN cp.pid END) AS replenishable_product_count,
|
||||
SUM(pm.current_stock) AS current_stock_units,
|
||||
SUM(pm.current_stock_cost) AS current_stock_cost,
|
||||
SUM(pm.current_stock_retail) AS current_stock_retail,
|
||||
SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
|
||||
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
|
||||
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
|
||||
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
|
||||
SUM(COALESCE(pm.cogs_30d, 0)) AS cogs_30d,
|
||||
SUM(COALESCE(pm.profit_30d, 0)) AS profit_30d,
|
||||
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
|
||||
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
|
||||
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
|
||||
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
|
||||
FROM CategoryProducts cp
|
||||
JOIN public.product_metrics pm ON cp.pid = pm.pid
|
||||
GROUP BY cp.ancestor_cat_id
|
||||
),
|
||||
-- Previous period rolled up using same deduplicated product sets
|
||||
RolledUpPreviousPeriod AS (
|
||||
-- Calculate rolled-up previous period metrics
|
||||
SELECT
|
||||
ch.cat_id,
|
||||
SUM(ppcm.sales_prev_30d) AS sales_prev_30d,
|
||||
SUM(ppcm.revenue_prev_30d) AS revenue_prev_30d
|
||||
FROM category_hierarchy ch
|
||||
LEFT JOIN PreviousPeriodCategoryMetrics ppcm ON
|
||||
ppcm.cat_id = ch.cat_id OR
|
||||
ppcm.cat_id = ANY(SELECT cat_id FROM category_hierarchy WHERE ch.cat_id = ANY(ancestor_ids))
|
||||
GROUP BY ch.cat_id
|
||||
cp.ancestor_cat_id AS cat_id,
|
||||
SUM(CASE WHEN dps.snapshot_date >= CURRENT_DATE - INTERVAL '59 days'
|
||||
AND dps.snapshot_date < CURRENT_DATE - INTERVAL '29 days'
|
||||
THEN dps.units_sold ELSE 0 END) AS sales_prev_30d,
|
||||
SUM(CASE WHEN dps.snapshot_date >= CURRENT_DATE - INTERVAL '59 days'
|
||||
AND dps.snapshot_date < CURRENT_DATE - INTERVAL '29 days'
|
||||
THEN dps.net_revenue ELSE 0 END) AS revenue_prev_30d
|
||||
FROM CategoryProducts cp
|
||||
JOIN public.daily_product_snapshots dps ON cp.pid = dps.pid
|
||||
GROUP BY cp.ancestor_cat_id
|
||||
),
|
||||
AllCategories AS (
|
||||
-- Ensure all categories are included
|
||||
|
||||
@@ -24,21 +24,21 @@ BEGIN
|
||||
-- Only include products with valid sales data in each time period
|
||||
COUNT(DISTINCT CASE WHEN pm.sales_7d > 0 THEN pm.pid END) AS products_with_sales_7d,
|
||||
SUM(CASE WHEN pm.sales_7d > 0 THEN pm.sales_7d ELSE 0 END) AS sales_7d,
|
||||
SUM(CASE WHEN pm.revenue_7d > 0 THEN pm.revenue_7d ELSE 0 END) AS revenue_7d,
|
||||
SUM(COALESCE(pm.revenue_7d, 0)) AS revenue_7d,
|
||||
|
||||
COUNT(DISTINCT CASE WHEN pm.sales_30d > 0 THEN pm.pid END) AS products_with_sales_30d,
|
||||
SUM(CASE WHEN pm.sales_30d > 0 THEN pm.sales_30d ELSE 0 END) AS sales_30d,
|
||||
SUM(CASE WHEN pm.revenue_30d > 0 THEN pm.revenue_30d ELSE 0 END) AS revenue_30d,
|
||||
SUM(CASE WHEN pm.cogs_30d > 0 THEN pm.cogs_30d ELSE 0 END) AS cogs_30d,
|
||||
SUM(CASE WHEN pm.profit_30d != 0 THEN pm.profit_30d ELSE 0 END) AS profit_30d,
|
||||
SUM(COALESCE(pm.revenue_30d, 0)) AS revenue_30d,
|
||||
SUM(COALESCE(pm.cogs_30d, 0)) AS cogs_30d,
|
||||
SUM(COALESCE(pm.profit_30d, 0)) AS profit_30d,
|
||||
|
||||
COUNT(DISTINCT CASE WHEN pm.sales_365d > 0 THEN pm.pid END) AS products_with_sales_365d,
|
||||
SUM(CASE WHEN pm.sales_365d > 0 THEN pm.sales_365d ELSE 0 END) AS sales_365d,
|
||||
SUM(CASE WHEN pm.revenue_365d > 0 THEN pm.revenue_365d ELSE 0 END) AS revenue_365d,
|
||||
SUM(COALESCE(pm.revenue_365d, 0)) AS revenue_365d,
|
||||
|
||||
COUNT(DISTINCT CASE WHEN pm.lifetime_sales > 0 THEN pm.pid END) AS products_with_lifetime_sales,
|
||||
SUM(CASE WHEN pm.lifetime_sales > 0 THEN pm.lifetime_sales ELSE 0 END) AS lifetime_sales,
|
||||
SUM(CASE WHEN pm.lifetime_revenue > 0 THEN pm.lifetime_revenue ELSE 0 END) AS lifetime_revenue
|
||||
SUM(COALESCE(pm.lifetime_revenue, 0)) AS lifetime_revenue
|
||||
FROM public.product_metrics pm
|
||||
JOIN public.products p ON pm.pid = p.pid
|
||||
WHERE p.vendor IS NOT NULL AND p.vendor <> ''
|
||||
@@ -72,7 +72,7 @@ BEGIN
|
||||
END))::int AS avg_lead_time_days_hist -- Avg lead time from HISTORICAL received POs
|
||||
FROM public.purchase_orders po
|
||||
-- Join to receivings table to find when items were received
|
||||
LEFT JOIN public.receivings r ON r.pid = po.pid
|
||||
LEFT JOIN public.receivings r ON r.pid = po.pid AND r.supplier_id = po.supplier_id
|
||||
WHERE po.vendor IS NOT NULL AND po.vendor <> ''
|
||||
AND po.date >= CURRENT_DATE - INTERVAL '1 year' -- Look at POs created in the last year
|
||||
AND po.status = 'done' -- Only calculate lead time on completed POs
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
-- Migration: Map existing numeric order statuses to text values
|
||||
-- Run this ONCE on the production PostgreSQL database after deploying the updated orders import.
|
||||
-- This updates ~2.88M rows. On a busy system, consider running during low-traffic hours.
|
||||
-- The WHERE clause ensures idempotency - only rows with numeric statuses are updated.
|
||||
|
||||
UPDATE orders SET status = CASE status
|
||||
WHEN '0' THEN 'created'
|
||||
WHEN '10' THEN 'unfinished'
|
||||
WHEN '15' THEN 'canceled'
|
||||
WHEN '16' THEN 'combined'
|
||||
WHEN '20' THEN 'placed'
|
||||
WHEN '22' THEN 'placed_incomplete'
|
||||
WHEN '30' THEN 'canceled'
|
||||
WHEN '40' THEN 'awaiting_payment'
|
||||
WHEN '50' THEN 'awaiting_products'
|
||||
WHEN '55' THEN 'shipping_later'
|
||||
WHEN '56' THEN 'shipping_together'
|
||||
WHEN '60' THEN 'ready'
|
||||
WHEN '61' THEN 'flagged'
|
||||
WHEN '62' THEN 'fix_before_pick'
|
||||
WHEN '65' THEN 'manual_picking'
|
||||
WHEN '70' THEN 'in_pt'
|
||||
WHEN '80' THEN 'picked'
|
||||
WHEN '90' THEN 'awaiting_shipment'
|
||||
WHEN '91' THEN 'remote_wait'
|
||||
WHEN '92' THEN 'awaiting_pickup'
|
||||
WHEN '93' THEN 'fix_before_ship'
|
||||
WHEN '95' THEN 'shipped_confirmed'
|
||||
WHEN '100' THEN 'shipped'
|
||||
ELSE status
|
||||
END
|
||||
WHERE status ~ '^\d+$'; -- Only update rows that still have numeric statuses
|
||||
|
||||
-- Verify the migration
|
||||
SELECT status, COUNT(*) as count
|
||||
FROM orders
|
||||
GROUP BY status
|
||||
ORDER BY count DESC;
|
||||
@@ -0,0 +1,51 @@
|
||||
-- Migration 002: Fix discount double-counting in orders
|
||||
--
|
||||
-- PROBLEM: The orders import was calculating discount as:
|
||||
-- discount = (prod_price_reg - prod_price) * quantity <-- "sale savings" (WRONG)
|
||||
-- + prorated points discount
|
||||
-- + item-level promo discounts
|
||||
--
|
||||
-- Since `price` in the orders table already IS the sale price (prod_price, not prod_price_reg),
|
||||
-- the "sale savings" component double-counted the markdown. This resulted in inflated discounts
|
||||
-- and near-zero net_revenue for products sold on sale.
|
||||
--
|
||||
-- Example: Product with regular_price=$30, sale_price=$15, qty=2
|
||||
-- BEFORE (buggy): discount = ($30-$15)*2 + 0 + 0 = $30.00
|
||||
-- net_revenue = $15*2 - $30 = $0.00 (WRONG!)
|
||||
-- AFTER (fixed): discount = 0 + 0 + 0 = $0.00
|
||||
-- net_revenue = $15*2 - $0 = $30.00 (CORRECT!)
|
||||
--
|
||||
-- FIX: This cannot be fixed with a pure SQL migration because PostgreSQL doesn't store
|
||||
-- prod_price_reg. The discount column has the inflated value baked in, and we can't
|
||||
-- decompose which portion was the base_discount vs actual promo discounts.
|
||||
--
|
||||
-- REQUIRED ACTION: Run a FULL (non-incremental) orders re-import after deploying the
|
||||
-- fixed orders.js. This will recalculate all discounts using the corrected formula.
|
||||
--
|
||||
-- Steps:
|
||||
-- 1. Deploy updated orders.js (base_discount removed from discount calculation)
|
||||
-- 2. Run: node scripts/import/orders.js --full
|
||||
-- (or trigger a full sync through whatever mechanism is used)
|
||||
-- 3. After re-import, run the daily snapshots rebuild to propagate corrected revenue:
|
||||
-- psql -f scripts/metrics-new/backfill/rebuild_daily_snapshots.sql
|
||||
-- 4. Re-run metrics calculation:
|
||||
-- node scripts/metrics-new/calculate-metrics-new.js
|
||||
--
|
||||
-- VERIFICATION: After re-import, check the previously-affected products:
|
||||
SELECT
|
||||
o.pid,
|
||||
p.title,
|
||||
o.order_number,
|
||||
o.price,
|
||||
o.quantity,
|
||||
o.discount,
|
||||
(o.price * o.quantity) as gross_revenue,
|
||||
(o.price * o.quantity - o.discount) as net_revenue
|
||||
FROM orders o
|
||||
JOIN products p ON o.pid = p.pid
|
||||
WHERE o.pid IN (624756, 614513)
|
||||
ORDER BY o.date DESC
|
||||
LIMIT 10;
|
||||
|
||||
-- Expected: discount should be 0 (or small promo amount) for regular sales,
|
||||
-- and net_revenue should be close to gross_revenue.
|
||||
@@ -1,78 +1,112 @@
|
||||
-- Description: Calculates and updates daily aggregated product data for recent days.
|
||||
-- Uses UPSERT (INSERT ON CONFLICT UPDATE) for idempotency.
|
||||
-- Description: Calculates and updates daily aggregated product data.
|
||||
-- Self-healing: detects gaps (missing snapshots), stale data (snapshot
|
||||
-- aggregates that don't match source tables after backfills), and always
|
||||
-- reprocesses recent days to pick up new orders and data corrections.
|
||||
-- Dependencies: Core import tables (products, orders, purchase_orders), calculate_status table.
|
||||
-- Frequency: Hourly (Run ~5-10 minutes after hourly data import completes).
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
_module_name TEXT := 'daily_snapshots';
|
||||
_start_time TIMESTAMPTZ := clock_timestamp(); -- Time execution started
|
||||
_last_calc_time TIMESTAMPTZ;
|
||||
_target_date DATE; -- Will be set in the loop
|
||||
_start_time TIMESTAMPTZ := clock_timestamp();
|
||||
_target_date DATE;
|
||||
_total_records INT := 0;
|
||||
_has_orders BOOLEAN := FALSE;
|
||||
_process_days INT := 5; -- Number of days to check/process (today plus previous 4 days)
|
||||
_day_counter INT;
|
||||
_missing_days INT[] := ARRAY[]::INT[]; -- Array to store days with missing or incomplete data
|
||||
_days_processed INT := 0;
|
||||
_max_backfill_days INT := 90; -- Safety cap: max days to backfill per run
|
||||
_recent_recheck_days INT := 2; -- Always reprocess this many recent days (today + yesterday)
|
||||
_latest_snapshot DATE;
|
||||
_backfill_start DATE;
|
||||
BEGIN
|
||||
-- Get the timestamp before the last successful run of this module
|
||||
SELECT last_calculation_timestamp INTO _last_calc_time
|
||||
FROM public.calculate_status
|
||||
WHERE module_name = _module_name;
|
||||
|
||||
RAISE NOTICE 'Running % script. Start Time: %', _module_name, _start_time;
|
||||
|
||||
-- First, check which days need processing by comparing orders data with snapshot data
|
||||
FOR _day_counter IN 0..(_process_days-1) LOOP
|
||||
_target_date := CURRENT_DATE - (_day_counter * INTERVAL '1 day');
|
||||
|
||||
-- Check if this date needs updating by comparing orders to snapshot data
|
||||
-- If the date has orders but not enough snapshots, or if snapshots show zero sales but orders exist, it's incomplete
|
||||
SELECT
|
||||
CASE WHEN (
|
||||
-- We have orders for this date but not enough snapshots, or snapshots with wrong total
|
||||
(EXISTS (SELECT 1 FROM public.orders WHERE date::date = _target_date) AND
|
||||
(
|
||||
-- No snapshots exist for this date
|
||||
NOT EXISTS (SELECT 1 FROM public.daily_product_snapshots WHERE snapshot_date = _target_date) OR
|
||||
-- Or snapshots show zero sales but orders exist
|
||||
(SELECT COALESCE(SUM(units_sold), 0) FROM public.daily_product_snapshots WHERE snapshot_date = _target_date) = 0 OR
|
||||
-- Or the count of snapshot records is significantly less than distinct products in orders
|
||||
(SELECT COUNT(*) FROM public.daily_product_snapshots WHERE snapshot_date = _target_date) <
|
||||
(SELECT COUNT(DISTINCT pid) FROM public.orders WHERE date::date = _target_date) * 0.8
|
||||
)
|
||||
)
|
||||
) THEN TRUE ELSE FALSE END
|
||||
INTO _has_orders;
|
||||
|
||||
IF _has_orders THEN
|
||||
-- This day needs processing - add to our array
|
||||
_missing_days := _missing_days || _day_counter;
|
||||
RAISE NOTICE 'Day % needs updating (incomplete or missing data)', _target_date;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
-- If no days need updating, exit early
|
||||
IF array_length(_missing_days, 1) IS NULL THEN
|
||||
RAISE NOTICE 'No days need updating - all snapshot data appears complete';
|
||||
|
||||
-- Still update the calculate_status to record this run
|
||||
UPDATE public.calculate_status
|
||||
SET last_calculation_timestamp = _start_time
|
||||
WHERE module_name = _module_name;
|
||||
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Need to update % days with missing or incomplete data', array_length(_missing_days, 1);
|
||||
|
||||
-- Process only the days that need updating
|
||||
FOREACH _day_counter IN ARRAY _missing_days LOOP
|
||||
_target_date := CURRENT_DATE - (_day_counter * INTERVAL '1 day');
|
||||
RAISE NOTICE 'Processing date: %', _target_date;
|
||||
|
||||
-- Find the latest existing snapshot date (for logging only)
|
||||
SELECT MAX(snapshot_date) INTO _latest_snapshot
|
||||
FROM public.daily_product_snapshots;
|
||||
|
||||
-- Always scan the full backfill window to catch holes in the middle,
|
||||
-- not just gaps at the end. The gap fill and stale detection queries
|
||||
-- need to see the entire range to find missing or outdated snapshots.
|
||||
_backfill_start := CURRENT_DATE - _max_backfill_days;
|
||||
|
||||
IF _latest_snapshot IS NULL THEN
|
||||
RAISE NOTICE 'No existing snapshots found. Backfilling up to % days.', _max_backfill_days;
|
||||
ELSE
|
||||
RAISE NOTICE 'Latest snapshot: %. Scanning from % for gaps and stale data.', _latest_snapshot, _backfill_start;
|
||||
END IF;
|
||||
|
||||
-- Process all dates that need snapshots:
|
||||
-- 1. Gap fill: dates with orders/receivings but no snapshots (older than recent window)
|
||||
-- 2. Stale detection: existing snapshots where aggregates don't match source data
|
||||
-- (catches backfilled imports that arrived after snapshot was calculated)
|
||||
-- 3. Recent recheck: last N days always reprocessed (picks up new orders, corrections)
|
||||
FOR _target_date IN
|
||||
SELECT d FROM (
|
||||
-- Gap fill: find dates with activity but missing snapshots
|
||||
SELECT activity_dates.d
|
||||
FROM (
|
||||
SELECT DISTINCT date::date AS d FROM public.orders
|
||||
WHERE date::date >= _backfill_start AND date::date < CURRENT_DATE - _recent_recheck_days
|
||||
UNION
|
||||
SELECT DISTINCT received_date::date AS d FROM public.receivings
|
||||
WHERE received_date::date >= _backfill_start AND received_date::date < CURRENT_DATE - _recent_recheck_days
|
||||
) activity_dates
|
||||
WHERE NOT EXISTS (
|
||||
SELECT 1 FROM public.daily_product_snapshots dps WHERE dps.snapshot_date = activity_dates.d
|
||||
)
|
||||
UNION
|
||||
-- Stale detection: compare snapshot aggregates against source tables
|
||||
SELECT snap_agg.snapshot_date AS d
|
||||
FROM (
|
||||
SELECT snapshot_date,
|
||||
COALESCE(SUM(units_received), 0)::bigint AS snap_received,
|
||||
COALESCE(SUM(units_sold), 0)::bigint AS snap_sold
|
||||
FROM public.daily_product_snapshots
|
||||
WHERE snapshot_date >= _backfill_start
|
||||
AND snapshot_date < CURRENT_DATE - _recent_recheck_days
|
||||
GROUP BY snapshot_date
|
||||
) snap_agg
|
||||
LEFT JOIN (
|
||||
SELECT received_date::date AS d, SUM(qty_each)::bigint AS actual_received
|
||||
FROM public.receivings
|
||||
WHERE received_date::date >= _backfill_start
|
||||
AND received_date::date < CURRENT_DATE - _recent_recheck_days
|
||||
GROUP BY received_date::date
|
||||
) recv_agg ON snap_agg.snapshot_date = recv_agg.d
|
||||
LEFT JOIN (
|
||||
SELECT date::date AS d,
|
||||
SUM(CASE WHEN quantity > 0 AND COALESCE(status, 'pending') NOT IN ('canceled', 'returned')
|
||||
THEN quantity ELSE 0 END)::bigint AS actual_sold
|
||||
FROM public.orders
|
||||
WHERE date::date >= _backfill_start
|
||||
AND date::date < CURRENT_DATE - _recent_recheck_days
|
||||
GROUP BY date::date
|
||||
) orders_agg ON snap_agg.snapshot_date = orders_agg.d
|
||||
WHERE snap_agg.snap_received != COALESCE(recv_agg.actual_received, 0)
|
||||
OR snap_agg.snap_sold != COALESCE(orders_agg.actual_sold, 0)
|
||||
UNION
|
||||
-- Recent days: always reprocess
|
||||
SELECT d::date
|
||||
FROM generate_series(
|
||||
(CURRENT_DATE - _recent_recheck_days)::timestamp,
|
||||
CURRENT_DATE::timestamp,
|
||||
'1 day'::interval
|
||||
) d
|
||||
) dates_to_process
|
||||
ORDER BY d
|
||||
LOOP
|
||||
_days_processed := _days_processed + 1;
|
||||
|
||||
-- Classify why this date is being processed (for logging)
|
||||
IF _target_date >= CURRENT_DATE - _recent_recheck_days THEN
|
||||
RAISE NOTICE 'Processing date: % [recent recheck]', _target_date;
|
||||
ELSIF NOT EXISTS (SELECT 1 FROM public.daily_product_snapshots WHERE snapshot_date = _target_date) THEN
|
||||
RAISE NOTICE 'Processing date: % [gap fill — no existing snapshot]', _target_date;
|
||||
ELSE
|
||||
RAISE NOTICE 'Processing date: % [stale data — snapshot aggregates mismatch source]', _target_date;
|
||||
END IF;
|
||||
|
||||
-- IMPORTANT: First delete any existing data for this date to prevent duplication
|
||||
DELETE FROM public.daily_product_snapshots
|
||||
DELETE FROM public.daily_product_snapshots
|
||||
WHERE snapshot_date = _target_date;
|
||||
|
||||
-- Proceed with calculating daily metrics only for products with actual activity
|
||||
@@ -90,7 +124,6 @@ BEGIN
|
||||
COALESCE(
|
||||
o.costeach, -- First use order-specific cost if available
|
||||
get_weighted_avg_cost(p.pid, o.date::date), -- Then use weighted average cost
|
||||
p.landing_cost_price, -- Fallback to landing cost
|
||||
p.cost_price -- Final fallback to current cost
|
||||
) * o.quantity
|
||||
ELSE 0 END), 0.00) AS cogs,
|
||||
@@ -124,14 +157,16 @@ BEGIN
|
||||
HAVING COUNT(DISTINCT r.receiving_id) > 0 OR SUM(r.qty_each) > 0
|
||||
),
|
||||
CurrentStock AS (
|
||||
-- Select current stock values directly from products table
|
||||
-- Use historical stock from stock_snapshots when available,
|
||||
-- falling back to current stock from products table
|
||||
SELECT
|
||||
pid,
|
||||
stock_quantity,
|
||||
COALESCE(landing_cost_price, cost_price, 0.00) as effective_cost_price,
|
||||
COALESCE(price, 0.00) as current_price,
|
||||
COALESCE(regular_price, 0.00) as current_regular_price
|
||||
FROM public.products
|
||||
p.pid,
|
||||
COALESCE(ss.stock_quantity, p.stock_quantity) AS stock_quantity,
|
||||
COALESCE(ss.stock_value, p.stock_quantity * COALESCE(p.cost_price, 0.00)) AS stock_value,
|
||||
COALESCE(p.price, 0.00) AS current_price,
|
||||
COALESCE(p.regular_price, 0.00) AS current_regular_price
|
||||
FROM public.products p
|
||||
LEFT JOIN stock_snapshots ss ON p.pid = ss.pid AND ss.snapshot_date = _target_date
|
||||
),
|
||||
ProductsWithActivity AS (
|
||||
-- Quick pre-filter to only process products with activity
|
||||
@@ -171,7 +206,7 @@ BEGIN
|
||||
COALESCE(sd.sku, p.sku) AS sku, -- Get SKU from sales data or products table
|
||||
-- Inventory Metrics (Using CurrentStock)
|
||||
cs.stock_quantity AS eod_stock_quantity,
|
||||
cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
|
||||
cs.stock_value AS eod_stock_cost,
|
||||
cs.stock_quantity * cs.current_price AS eod_stock_retail,
|
||||
cs.stock_quantity * cs.current_regular_price AS eod_stock_gross,
|
||||
(cs.stock_quantity <= 0) AS stockout_flag,
|
||||
@@ -181,10 +216,10 @@ BEGIN
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00),
|
||||
COALESCE(sd.discounts, 0.00),
|
||||
COALESCE(sd.returns_revenue, 0.00),
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
|
||||
COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00) AS net_revenue,
|
||||
COALESCE(sd.cogs, 0.00),
|
||||
COALESCE(sd.gross_regular_revenue, 0.00),
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit, -- Basic profit: Net Revenue - COGS
|
||||
(COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) - COALESCE(sd.returns_revenue, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit,
|
||||
-- Receiving Metrics (From ReceivingData)
|
||||
COALESCE(rd.units_received, 0),
|
||||
COALESCE(rd.cost_received, 0.00),
|
||||
@@ -201,12 +236,18 @@ BEGIN
|
||||
RAISE NOTICE 'Created % daily snapshot records for % with sales/receiving activity', _total_records, _target_date;
|
||||
END LOOP;
|
||||
|
||||
-- Update the status table with the timestamp from the START of this run
|
||||
UPDATE public.calculate_status
|
||||
SET last_calculation_timestamp = _start_time
|
||||
WHERE module_name = _module_name;
|
||||
IF _days_processed = 0 THEN
|
||||
RAISE NOTICE 'No days need updating — all snapshot data is current.';
|
||||
ELSE
|
||||
RAISE NOTICE 'Processed % days total.', _days_processed;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Finished % processing for multiple dates. Duration: %', _module_name, clock_timestamp() - _start_time;
|
||||
-- Update the status table with the timestamp from the START of this run
|
||||
INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
|
||||
VALUES (_module_name, _start_time)
|
||||
ON CONFLICT (module_name) DO UPDATE SET last_calculation_timestamp = _start_time;
|
||||
|
||||
RAISE NOTICE 'Finished % script. Duration: %', _module_name, clock_timestamp() - _start_time;
|
||||
|
||||
END $$;
|
||||
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
-- Description: Populates lifecycle forecast columns on product_metrics from product_forecasts.
-- Runs AFTER update_product_metrics.sql so that lead time / days of stock settings are available.
-- Dependencies: product_metrics (fully populated), product_forecasts, settings tables.
-- Frequency: After each metrics run and/or after forecast engine runs.
-- All three UPDATE statements are guarded with IS DISTINCT FROM predicates, so
-- re-running the script touches only rows whose values would actually change.

DO $$
DECLARE
    _module_name TEXT := 'lifecycle_forecasts';
    _start_time TIMESTAMPTZ := clock_timestamp();
    _updated INT;  -- ROW_COUNT of the most recent UPDATE (reused per step)
BEGIN
    RAISE NOTICE 'Running % module. Start Time: %', _module_name, _start_time;

    -- Step 1: Set lifecycle_phase from product_forecasts (one phase per product)
    -- DISTINCT ON (pid) with ORDER BY pid, forecast_date picks the phase from
    -- each product's EARLIEST forecast row.
    UPDATE product_metrics pm
    SET lifecycle_phase = sub.lifecycle_phase
    FROM (
        SELECT DISTINCT ON (pid) pid, lifecycle_phase
        FROM product_forecasts
        ORDER BY pid, forecast_date
    ) sub
    WHERE pm.pid = sub.pid
      AND (pm.lifecycle_phase IS DISTINCT FROM sub.lifecycle_phase);

    GET DIAGNOSTICS _updated = ROW_COUNT;
    RAISE NOTICE 'Updated lifecycle_phase for % products', _updated;

    -- Step 2: Compute lifecycle-based lead time and planning period forecasts
    -- Uses each product's configured lead time and days of stock
    -- Settings precedence (via COALESCE order): per-product -> per-vendor ->
    -- global setting -> hardcoded fallback (14 days lead time / 30 days of stock).
    WITH forecast_sums AS (
        SELECT
            pf.pid,
            -- Units forecast through the lead-time horizon
            SUM(pf.forecast_units) FILTER (
                WHERE pf.forecast_date <= CURRENT_DATE + s.effective_lead_time
            ) AS lt_forecast,
            -- Units forecast through lead time + days-of-stock (planning period)
            SUM(pf.forecast_units) FILTER (
                WHERE pf.forecast_date <= CURRENT_DATE + s.effective_lead_time + s.effective_days_of_stock
            ) AS pp_forecast
        FROM product_forecasts pf
        JOIN (
            SELECT
                p.pid,
                COALESCE(sp.lead_time_days, sv.default_lead_time_days,
                    (SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_lead_time_days'), 14
                ) AS effective_lead_time,
                COALESCE(sp.days_of_stock, sv.default_days_of_stock,
                    (SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_days_of_stock'), 30
                ) AS effective_days_of_stock
            FROM products p
            LEFT JOIN settings_product sp ON p.pid = sp.pid
            LEFT JOIN settings_vendor sv ON p.vendor = sv.vendor
        ) s ON s.pid = pf.pid
        WHERE pf.forecast_date >= CURRENT_DATE  -- only future (and today's) forecast rows
        GROUP BY pf.pid
    )
    UPDATE product_metrics pm
    SET
        lifecycle_lead_time_forecast = COALESCE(fs.lt_forecast, 0),
        lifecycle_planning_period_forecast = COALESCE(fs.pp_forecast, 0)
    FROM forecast_sums fs
    WHERE pm.pid = fs.pid
      AND (pm.lifecycle_lead_time_forecast IS DISTINCT FROM COALESCE(fs.lt_forecast, 0)
        OR pm.lifecycle_planning_period_forecast IS DISTINCT FROM COALESCE(fs.pp_forecast, 0));

    GET DIAGNOSTICS _updated = ROW_COUNT;
    RAISE NOTICE 'Updated lifecycle forecasts for % products', _updated;

    -- Step 3: Reclassify demand_pattern using residual CV (de-trended)
    -- For launch/decay products, raw CV is high because of expected lifecycle decay.
    -- We subtract the expected brand curve value to get residuals, then compute CV on those.
    -- Products that track their brand curve closely → low residual CV → "stable"
    -- Products with erratic deviations from curve → higher residual CV → "variable"/"sporadic"
    WITH product_curve AS (
        -- Get each product's brand curve and age
        -- NOTE(review): the WHERE blc.amplitude IS NOT NULL filter makes this
        -- LEFT JOIN behave as an inner join — products without a brand-level
        -- curve are intentionally excluded from reclassification; confirm intent.
        SELECT
            pm.pid,
            pm.lifecycle_phase,
            pm.date_first_received,
            blc.amplitude,
            blc.decay_rate,
            blc.baseline
        FROM product_metrics pm
        JOIN products p ON p.pid = pm.pid
        LEFT JOIN brand_lifecycle_curves blc
            ON blc.brand = pm.brand
            AND blc.root_category IS NULL -- brand-only curve
        WHERE pm.lifecycle_phase IN ('launch', 'decay')
          AND pm.date_first_received IS NOT NULL
          AND blc.amplitude IS NOT NULL
    ),
    daily_residuals AS (
        -- Compute residual = actual - expected for each snapshot day
        -- Curve params are in WEEKLY units; divide by 7 to get daily expected
        -- (date - date yields integer days; /7.0 converts age to weeks for EXP decay)
        SELECT
            dps.pid,
            dps.units_sold,
            (pc.amplitude * EXP(-pc.decay_rate * (dps.snapshot_date - pc.date_first_received)::numeric / 7.0) + pc.baseline) / 7.0 AS expected,
            dps.units_sold - (pc.amplitude * EXP(-pc.decay_rate * (dps.snapshot_date - pc.date_first_received)::numeric / 7.0) + pc.baseline) / 7.0 AS residual
        FROM daily_product_snapshots dps
        WHERE dps.snapshot_date >= CURRENT_DATE - INTERVAL '29 days'  -- 30-day window incl. today
          AND dps.snapshot_date <= CURRENT_DATE
    ),
    residual_cv AS (
        SELECT
            pid,
            AVG(units_sold) AS avg_sales,
            -- res_cv is NULL (implicit ELSE) when fewer than 7 snapshot days exist
            -- or the expected curve value is effectively zero; such rows are
            -- skipped by the rc.res_cv IS NOT NULL predicate below.
            CASE WHEN COUNT(*) >= 7 AND AVG(ABS(expected)) > 0.01 THEN
                STDDEV_POP(residual) / GREATEST(AVG(ABS(expected)), 0.1)
            END AS res_cv
        FROM daily_residuals
        GROUP BY pid
    )
    UPDATE product_metrics pm
    SET demand_pattern = classify_demand_pattern(rc.avg_sales, rc.res_cv)
    FROM residual_cv rc
    WHERE pm.pid = rc.pid
      AND rc.res_cv IS NOT NULL
      AND pm.demand_pattern IS DISTINCT FROM classify_demand_pattern(rc.avg_sales, rc.res_cv);

    GET DIAGNOSTICS _updated = ROW_COUNT;
    RAISE NOTICE 'Reclassified demand_pattern for % launch/decay products', _updated;

    -- Update tracking
    INSERT INTO public.calculate_status (module_name, last_calculation_timestamp)
    VALUES (_module_name, clock_timestamp())
    ON CONFLICT (module_name) DO UPDATE SET
        last_calculation_timestamp = EXCLUDED.last_calculation_timestamp;

    RAISE NOTICE '% module complete. Duration: %', _module_name, clock_timestamp() - _start_time;
END $$;
|
||||
@@ -21,20 +21,30 @@ BEGIN
|
||||
RAISE NOTICE 'Running % module. Start Time: %', _module_name, _start_time;
|
||||
|
||||
-- 1. Calculate Average Lead Time
|
||||
-- For each completed PO, find the earliest receiving from the same supplier
|
||||
-- within 180 days, then average those per-PO lead times per product.
|
||||
RAISE NOTICE 'Calculating Average Lead Time...';
|
||||
WITH LeadTimes AS (
|
||||
WITH po_first_receiving AS (
|
||||
SELECT
|
||||
po.pid,
|
||||
-- Calculate lead time by looking at when items ordered on POs were received
|
||||
AVG(GREATEST(1, (r.received_date::date - po.date::date))) AS avg_days -- Use GREATEST(1,...) to avoid 0 or negative days
|
||||
po.po_id,
|
||||
po.date::date AS po_date,
|
||||
MIN(r.received_date::date) AS first_receive_date
|
||||
FROM public.purchase_orders po
|
||||
-- Join to receivings table to find actual receipts
|
||||
JOIN public.receivings r ON r.pid = po.pid
|
||||
WHERE po.status = 'done' -- Only include completed POs
|
||||
AND r.received_date >= po.date -- Ensure received date is not before order date
|
||||
-- Optional: add check to make sure receiving is related to PO if you have source_po_id
|
||||
-- AND (r.source_po_id = po.po_id OR r.source_po_id IS NULL)
|
||||
GROUP BY po.pid
|
||||
JOIN public.receivings r
|
||||
ON r.pid = po.pid
|
||||
AND r.supplier_id = po.supplier_id -- same supplier
|
||||
AND r.received_date >= po.date -- received after order
|
||||
AND r.received_date <= po.date + INTERVAL '180 days' -- within reasonable window
|
||||
WHERE po.status = 'done'
|
||||
GROUP BY po.pid, po.po_id, po.date
|
||||
),
|
||||
LeadTimes AS (
|
||||
SELECT
|
||||
pid,
|
||||
ROUND(AVG(GREATEST(1, first_receive_date - po_date))) AS avg_days
|
||||
FROM po_first_receiving
|
||||
GROUP BY pid
|
||||
)
|
||||
UPDATE public.product_metrics pm
|
||||
SET avg_lead_time_days = lt.avg_days::int
|
||||
|
||||
@@ -52,7 +52,7 @@ BEGIN
|
||||
COALESCE(p.price, 0.00) as current_price,
|
||||
COALESCE(p.regular_price, 0.00) as current_regular_price,
|
||||
COALESCE(p.cost_price, 0.00) as current_cost_price,
|
||||
COALESCE(p.landing_cost_price, p.cost_price, 0.00) as current_effective_cost, -- Use landing if available, else cost
|
||||
COALESCE(p.cost_price, 0.00) as current_effective_cost,
|
||||
p.stock_quantity as current_stock,
|
||||
p.created_at,
|
||||
p.first_received,
|
||||
@@ -61,16 +61,72 @@ BEGIN
|
||||
p.uom -- Assuming UOM logic is handled elsewhere or simple (e.g., 1=each)
|
||||
FROM public.products p
|
||||
),
|
||||
-- Stale POs: open, >90 days past expected, AND a newer PO exists for the same product.
|
||||
-- These are likely abandoned/superseded and should not consume receivings in FIFO.
|
||||
StalePOLines AS (
|
||||
SELECT po.po_id, po.pid
|
||||
FROM public.purchase_orders po
|
||||
WHERE po.status IN ('created', 'ordered', 'preordered', 'electronically_sent',
|
||||
'electronically_ready_send', 'receiving_started')
|
||||
AND po.expected_date IS NOT NULL
|
||||
AND po.expected_date < _current_date - INTERVAL '90 days'
|
||||
AND EXISTS (
|
||||
SELECT 1 FROM public.purchase_orders newer
|
||||
WHERE newer.pid = po.pid
|
||||
AND newer.status NOT IN ('canceled', 'done')
|
||||
AND COALESCE(newer.date_ordered, newer.date_created)
|
||||
> COALESCE(po.date_ordered, po.date_created)
|
||||
)
|
||||
),
|
||||
-- All non-canceled, non-stale POs in FIFO order per (pid, supplier).
|
||||
-- Includes closed ('done') POs so they consume receivings before open POs.
|
||||
POFifo AS (
|
||||
SELECT
|
||||
po.pid, po.supplier_id, po.po_id, po.ordered, po.status,
|
||||
po.po_cost_price, po.expected_date,
|
||||
SUM(po.ordered) OVER (
|
||||
PARTITION BY po.pid, po.supplier_id
|
||||
ORDER BY COALESCE(po.date_ordered, po.date_created), po.po_id
|
||||
) - po.ordered AS cumulative_before
|
||||
FROM public.purchase_orders po
|
||||
WHERE po.status != 'canceled'
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM StalePOLines s
|
||||
WHERE s.po_id = po.po_id AND s.pid = po.pid
|
||||
)
|
||||
),
|
||||
-- Total received per (product, supplier) across all receivings.
|
||||
SupplierReceived AS (
|
||||
SELECT pid, supplier_id, SUM(qty_each) AS total_received
|
||||
FROM public.receivings
|
||||
WHERE status IN ('partial_received', 'full_received', 'paid')
|
||||
GROUP BY pid, supplier_id
|
||||
),
|
||||
-- FIFO allocation: receivings fill oldest POs first per (pid, supplier).
|
||||
-- Only open PO lines are reported; closed POs just absorb receivings.
|
||||
OnOrderInfo AS (
|
||||
SELECT
|
||||
pid,
|
||||
SUM(ordered) AS on_order_qty,
|
||||
SUM(ordered * po_cost_price) AS on_order_cost,
|
||||
MIN(expected_date) AS earliest_expected_date
|
||||
FROM public.purchase_orders
|
||||
WHERE status IN ('created', 'ordered', 'preordered', 'electronically_sent', 'electronically_ready_send', 'receiving_started')
|
||||
AND status NOT IN ('canceled', 'done')
|
||||
GROUP BY pid
|
||||
po.pid,
|
||||
SUM(GREATEST(0,
|
||||
po.ordered - GREATEST(0, LEAST(po.ordered,
|
||||
COALESCE(sr.total_received, 0) - po.cumulative_before
|
||||
))
|
||||
)) AS on_order_qty,
|
||||
SUM(GREATEST(0,
|
||||
po.ordered - GREATEST(0, LEAST(po.ordered,
|
||||
COALESCE(sr.total_received, 0) - po.cumulative_before
|
||||
))
|
||||
) * po.po_cost_price) AS on_order_cost,
|
||||
MIN(po.expected_date) FILTER (WHERE
|
||||
po.ordered > GREATEST(0, LEAST(po.ordered,
|
||||
COALESCE(sr.total_received, 0) - po.cumulative_before
|
||||
))
|
||||
) AS earliest_expected_date
|
||||
FROM POFifo po
|
||||
LEFT JOIN SupplierReceived sr ON sr.pid = po.pid AND sr.supplier_id = po.supplier_id
|
||||
WHERE po.status IN ('created', 'ordered', 'preordered', 'electronically_sent',
|
||||
'electronically_ready_send', 'receiving_started')
|
||||
GROUP BY po.pid
|
||||
),
|
||||
HistoricalDates AS (
|
||||
-- Note: Calculating these MIN/MAX values hourly can be slow on large tables.
|
||||
@@ -142,6 +198,17 @@ BEGIN
|
||||
FROM public.daily_product_snapshots
|
||||
GROUP BY pid
|
||||
),
|
||||
BeginningStock AS (
|
||||
-- Get stock level from 30 days ago for sell-through calculation.
|
||||
-- Uses the closest available snapshot if exact date is missing (activity-only snapshots).
|
||||
SELECT DISTINCT ON (pid)
|
||||
pid,
|
||||
eod_stock_quantity AS beginning_stock_30d
|
||||
FROM public.daily_product_snapshots
|
||||
WHERE snapshot_date <= _current_date - INTERVAL '30 days'
|
||||
AND snapshot_date >= _current_date - INTERVAL '37 days'
|
||||
ORDER BY pid, snapshot_date DESC
|
||||
),
|
||||
FirstPeriodMetrics AS (
|
||||
SELECT
|
||||
pid,
|
||||
@@ -204,20 +271,33 @@ BEGIN
|
||||
GROUP BY pid
|
||||
),
|
||||
DemandVariability AS (
|
||||
-- Calculate variance and standard deviation of daily sales
|
||||
-- Calculate variance and standard deviation of daily sales over the full 30-day window
|
||||
-- including zero-sales days (not just activity days) for accurate variability metrics.
|
||||
-- Uses algebraic equivalents to avoid expensive CROSS JOIN with generate_series.
|
||||
-- For N=30 total days, k active days, sum S, sum_sq SS:
|
||||
-- mean = S/N, variance = (SS/N) - (S/N)^2 (population variance over all N days)
|
||||
SELECT
|
||||
pid,
|
||||
COUNT(*) AS days_with_data,
|
||||
AVG(units_sold) AS avg_daily_sales,
|
||||
VARIANCE(units_sold) AS sales_variance,
|
||||
STDDEV(units_sold) AS sales_std_dev,
|
||||
-- Coefficient of variation
|
||||
CASE
|
||||
WHEN AVG(units_sold) > 0 THEN STDDEV(units_sold) / AVG(units_sold)
|
||||
ELSE NULL
|
||||
SUM(units_sold)::numeric / 30.0 AS avg_daily_sales,
|
||||
CASE WHEN SUM(units_sold) > 0 THEN
|
||||
(SUM(units_sold::numeric * units_sold::numeric) / 30.0)
|
||||
- (SUM(units_sold)::numeric / 30.0) * (SUM(units_sold)::numeric / 30.0)
|
||||
END AS sales_variance,
|
||||
CASE WHEN SUM(units_sold) > 0 THEN
|
||||
(|/ GREATEST(0,
|
||||
(SUM(units_sold::numeric * units_sold::numeric) / 30.0)
|
||||
- (SUM(units_sold)::numeric / 30.0) * (SUM(units_sold)::numeric / 30.0)
|
||||
))::numeric
|
||||
END AS sales_std_dev,
|
||||
CASE WHEN SUM(units_sold) > 0 THEN
|
||||
((|/ GREATEST(0,
|
||||
(SUM(units_sold::numeric * units_sold::numeric) / 30.0)
|
||||
- (SUM(units_sold)::numeric / 30.0) * (SUM(units_sold)::numeric / 30.0)
|
||||
)) / (SUM(units_sold)::numeric / 30.0))::numeric
|
||||
END AS sales_cv
|
||||
FROM public.daily_product_snapshots
|
||||
WHERE snapshot_date >= _current_date - INTERVAL '29 days'
|
||||
WHERE snapshot_date >= _current_date - INTERVAL '29 days'
|
||||
AND snapshot_date <= _current_date
|
||||
GROUP BY pid
|
||||
),
|
||||
@@ -242,14 +322,51 @@ BEGIN
|
||||
GROUP BY pid
|
||||
),
|
||||
SeasonalityAnalysis AS (
|
||||
-- Simple seasonality detection
|
||||
SELECT
|
||||
p.pid,
|
||||
sp.seasonal_pattern,
|
||||
sp.seasonality_index,
|
||||
sp.peak_season
|
||||
FROM products p
|
||||
CROSS JOIN LATERAL detect_seasonal_pattern(p.pid) sp
|
||||
-- Set-based seasonality detection (replaces per-product function calls)
|
||||
-- Computes monthly CV and peak-to-average ratio across the last 12 months
|
||||
SELECT
|
||||
pid,
|
||||
CASE
|
||||
WHEN monthly_cv > 0.5 AND seasonality_index > 150 THEN 'strong'
|
||||
WHEN monthly_cv > 0.3 AND seasonality_index > 120 THEN 'moderate'
|
||||
ELSE 'none'
|
||||
END::varchar AS seasonal_pattern,
|
||||
CASE
|
||||
WHEN monthly_cv > 0.3 AND seasonality_index > 120 THEN seasonality_index
|
||||
ELSE 100::numeric
|
||||
END AS seasonality_index,
|
||||
CASE
|
||||
WHEN monthly_cv > 0.3 AND seasonality_index > 120
|
||||
THEN TRIM(TO_CHAR(TO_DATE(peak_month::text, 'MM'), 'Month'))
|
||||
ELSE NULL
|
||||
END::varchar AS peak_season
|
||||
FROM (
|
||||
SELECT
|
||||
pid,
|
||||
CASE WHEN overall_avg > 0 AND monthly_stddev IS NOT NULL
|
||||
THEN monthly_stddev / overall_avg ELSE 0 END AS monthly_cv,
|
||||
CASE WHEN overall_avg > 0
|
||||
THEN ROUND((max_month_avg / overall_avg * 100)::numeric, 2)
|
||||
ELSE 100 END AS seasonality_index,
|
||||
peak_month
|
||||
FROM (
|
||||
SELECT
|
||||
ms.pid,
|
||||
AVG(ms.month_avg) AS overall_avg,
|
||||
STDDEV(ms.month_avg) AS monthly_stddev,
|
||||
MAX(ms.month_avg) AS max_month_avg,
|
||||
(ARRAY_AGG(ms.mo ORDER BY ms.month_avg DESC))[1] AS peak_month
|
||||
FROM (
|
||||
SELECT pid, EXTRACT(MONTH FROM snapshot_date)::int AS mo, AVG(units_sold) AS month_avg
|
||||
FROM daily_product_snapshots
|
||||
WHERE snapshot_date >= CURRENT_DATE - INTERVAL '365 days'
|
||||
AND units_sold > 0
|
||||
GROUP BY pid, EXTRACT(MONTH FROM snapshot_date)
|
||||
) ms
|
||||
GROUP BY ms.pid
|
||||
HAVING COUNT(*) >= 3 -- Need at least 3 months for meaningful seasonality
|
||||
) agg
|
||||
) classified
|
||||
)
|
||||
-- Final UPSERT into product_metrics
|
||||
INSERT INTO public.product_metrics (
|
||||
@@ -257,7 +374,7 @@ BEGIN
|
||||
barcode, harmonized_tariff_code, vendor_reference, notions_reference, line, subline, artist,
|
||||
moq, rating, reviews, weight, length, width, height, country_of_origin, location,
|
||||
baskets, notifies, preorder_count, notions_inv_count,
|
||||
current_price, current_regular_price, current_cost_price, current_landing_cost_price,
|
||||
current_price, current_regular_price, current_cost_price,
|
||||
current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
|
||||
on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
|
||||
date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
|
||||
@@ -295,7 +412,7 @@ BEGIN
|
||||
ci.barcode, ci.harmonized_tariff_code, ci.vendor_reference, ci.notions_reference, ci.line, ci.subline, ci.artist,
|
||||
ci.moq, ci.rating, ci.reviews, ci.weight, ci.length, ci.width, ci.height, ci.country_of_origin, ci.location,
|
||||
ci.baskets, ci.notifies, ci.preorder_count, ci.notions_inv_count,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
|
||||
ci.current_price, ci.current_regular_price, ci.current_cost_price,
|
||||
ci.current_stock, ci.current_stock * ci.current_effective_cost, ci.current_stock * ci.current_price, ci.current_stock * ci.current_regular_price,
|
||||
COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00), COALESCE(ooi.on_order_qty, 0) * ci.current_price, ooi.earliest_expected_date,
|
||||
ci.created_at::date, COALESCE(ci.first_received::date, hd.date_first_received_calc), hd.date_last_received_calc, hd.date_first_sold, COALESCE(ci.date_last_sold, hd.max_order_date),
|
||||
@@ -321,10 +438,10 @@ BEGIN
|
||||
(GREATEST(0, ci.historical_total_sold - COALESCE(lr.lifetime_units_from_orders, 0)) *
|
||||
COALESCE(
|
||||
-- Use oldest known price from snapshots as proxy
|
||||
(SELECT revenue_7d / NULLIF(sales_7d, 0)
|
||||
FROM daily_product_snapshots
|
||||
WHERE pid = ci.pid AND sales_7d > 0
|
||||
ORDER BY snapshot_date ASC
|
||||
(SELECT net_revenue / NULLIF(units_sold, 0)
|
||||
FROM daily_product_snapshots
|
||||
WHERE pid = ci.pid AND units_sold > 0
|
||||
ORDER BY snapshot_date ASC
|
||||
LIMIT 1),
|
||||
ci.current_price
|
||||
))
|
||||
@@ -353,10 +470,10 @@ BEGIN
|
||||
(sa.stockout_days_30d / 30.0) * 100 AS stockout_rate_30d,
|
||||
sa.gross_regular_revenue_30d - sa.gross_revenue_30d AS markdown_30d,
|
||||
((sa.gross_regular_revenue_30d - sa.gross_revenue_30d) / NULLIF(sa.gross_regular_revenue_30d, 0)) * 100 AS markdown_rate_30d,
|
||||
-- Fix sell-through rate: Industry standard is Units Sold / (Beginning Inventory + Units Received)
|
||||
-- Approximating beginning inventory as current stock + units sold - units received
|
||||
-- Sell-through rate: Industry standard is Units Sold / (Beginning Inventory + Units Received)
|
||||
-- Uses actual snapshot from 30 days ago as beginning stock, falls back to avg_stock_units_30d
|
||||
(sa.sales_30d / NULLIF(
|
||||
ci.current_stock + sa.sales_30d + sa.returns_units_30d - sa.received_qty_30d,
|
||||
COALESCE(bs.beginning_stock_30d, sa.avg_stock_units_30d::int, 0) + sa.received_qty_30d,
|
||||
0
|
||||
)) * 100 AS sell_through_30d,
|
||||
|
||||
@@ -505,6 +622,7 @@ BEGIN
|
||||
LEFT JOIN PreviousPeriodMetrics ppm ON ci.pid = ppm.pid
|
||||
LEFT JOIN DemandVariability dv ON ci.pid = dv.pid
|
||||
LEFT JOIN ServiceLevels sl ON ci.pid = sl.pid
|
||||
LEFT JOIN BeginningStock bs ON ci.pid = bs.pid
|
||||
LEFT JOIN SeasonalityAnalysis season ON ci.pid = season.pid
|
||||
WHERE s.exclude_forecast IS FALSE OR s.exclude_forecast IS NULL -- Exclude products explicitly marked
|
||||
|
||||
@@ -514,7 +632,7 @@ BEGIN
|
||||
barcode = EXCLUDED.barcode, harmonized_tariff_code = EXCLUDED.harmonized_tariff_code, vendor_reference = EXCLUDED.vendor_reference, notions_reference = EXCLUDED.notions_reference, line = EXCLUDED.line, subline = EXCLUDED.subline, artist = EXCLUDED.artist,
|
||||
moq = EXCLUDED.moq, rating = EXCLUDED.rating, reviews = EXCLUDED.reviews, weight = EXCLUDED.weight, length = EXCLUDED.length, width = EXCLUDED.width, height = EXCLUDED.height, country_of_origin = EXCLUDED.country_of_origin, location = EXCLUDED.location,
|
||||
baskets = EXCLUDED.baskets, notifies = EXCLUDED.notifies, preorder_count = EXCLUDED.preorder_count, notions_inv_count = EXCLUDED.notions_inv_count,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price,
|
||||
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
|
||||
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
|
||||
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,
|
||||
@@ -567,11 +685,26 @@ BEGIN
|
||||
product_metrics.replenishment_units IS DISTINCT FROM EXCLUDED.replenishment_units OR
|
||||
product_metrics.stock_cover_in_days IS DISTINCT FROM EXCLUDED.stock_cover_in_days OR
|
||||
product_metrics.yesterday_sales IS DISTINCT FROM EXCLUDED.yesterday_sales OR
|
||||
-- Check a few other important fields that might change
|
||||
product_metrics.date_last_sold IS DISTINCT FROM EXCLUDED.date_last_sold OR
|
||||
product_metrics.earliest_expected_date IS DISTINCT FROM EXCLUDED.earliest_expected_date OR
|
||||
product_metrics.lifetime_sales IS DISTINCT FROM EXCLUDED.lifetime_sales OR
|
||||
product_metrics.lifetime_revenue_quality IS DISTINCT FROM EXCLUDED.lifetime_revenue_quality
|
||||
product_metrics.lifetime_revenue_quality IS DISTINCT FROM EXCLUDED.lifetime_revenue_quality OR
|
||||
-- Derived metrics that can change even when source fields don't
|
||||
product_metrics.profit_30d IS DISTINCT FROM EXCLUDED.profit_30d OR
|
||||
product_metrics.cogs_30d IS DISTINCT FROM EXCLUDED.cogs_30d OR
|
||||
product_metrics.margin_30d IS DISTINCT FROM EXCLUDED.margin_30d OR
|
||||
product_metrics.stockout_days_30d IS DISTINCT FROM EXCLUDED.stockout_days_30d OR
|
||||
product_metrics.sell_through_30d IS DISTINCT FROM EXCLUDED.sell_through_30d OR
|
||||
-- Growth and variability metrics
|
||||
product_metrics.sales_growth_30d_vs_prev IS DISTINCT FROM EXCLUDED.sales_growth_30d_vs_prev OR
|
||||
product_metrics.revenue_growth_30d_vs_prev IS DISTINCT FROM EXCLUDED.revenue_growth_30d_vs_prev OR
|
||||
product_metrics.demand_pattern IS DISTINCT FROM EXCLUDED.demand_pattern OR
|
||||
product_metrics.seasonal_pattern IS DISTINCT FROM EXCLUDED.seasonal_pattern OR
|
||||
product_metrics.seasonality_index IS DISTINCT FROM EXCLUDED.seasonality_index OR
|
||||
product_metrics.service_level_30d IS DISTINCT FROM EXCLUDED.service_level_30d OR
|
||||
product_metrics.fill_rate_30d IS DISTINCT FROM EXCLUDED.fill_rate_30d OR
|
||||
-- Time-based safety net: always update if more than 1 day stale
|
||||
product_metrics.last_calculated < NOW() - INTERVAL '1 day'
|
||||
;
|
||||
|
||||
-- Update the status table with the timestamp from the START of this run
|
||||
|
||||
@@ -5,6 +5,8 @@ const corsMiddleware = cors({
|
||||
origin: [
|
||||
'https://inventory.kent.pw',
|
||||
'http://localhost:5175',
|
||||
'https://acot.site',
|
||||
'https://tools.acherryontop.com',
|
||||
/^http:\/\/192\.168\.\d+\.\d+(:\d+)?$/,
|
||||
/^http:\/\/10\.\d+\.\d+\.\d+(:\d+)?$/
|
||||
],
|
||||
@@ -26,7 +28,7 @@ const corsErrorHandler = (err, req, res, next) => {
|
||||
res.status(403).json({
|
||||
error: 'CORS not allowed',
|
||||
origin: req.get('Origin'),
|
||||
message: 'Origin not in allowed list: https://inventory.kent.pw, localhost:5175, 192.168.x.x, or 10.x.x.x'
|
||||
message: 'Origin not in allowed list: https://inventory.kent.pw, https://acot.site, https://tools.acherryontop.com, localhost:5175, 192.168.x.x, or 10.x.x.x'
|
||||
});
|
||||
} else {
|
||||
next(err);
|
||||
|
||||
@@ -51,66 +51,64 @@ router.get('/:id', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Get prompt by type (general, system, company_specific)
|
||||
// Get prompt by type (any prompt_type value - extensible)
|
||||
router.get('/by-type', async (req, res) => {
|
||||
try {
|
||||
const { type, company } = req.query;
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
|
||||
if (!pool) {
|
||||
throw new Error('Database pool not initialized');
|
||||
}
|
||||
|
||||
// Validate prompt type
|
||||
if (!type || !['general', 'system', 'company_specific'].includes(type)) {
|
||||
return res.status(400).json({
|
||||
error: 'Valid type query parameter is required (general, system, or company_specific)'
|
||||
});
|
||||
}
|
||||
|
||||
// For company_specific type, company ID is required
|
||||
if (type === 'company_specific' && !company) {
|
||||
|
||||
// Validate type is provided
|
||||
if (!type || typeof type !== 'string' || type.trim().length === 0) {
|
||||
return res.status(400).json({
|
||||
error: 'Company ID is required for company_specific prompt type'
|
||||
error: 'Valid type query parameter is required'
|
||||
});
|
||||
}
|
||||
|
||||
// For general and system types, company should not be provided
|
||||
if ((type === 'general' || type === 'system') && company) {
|
||||
|
||||
// For company_specific types, company ID is required
|
||||
const isCompanySpecificType = type.endsWith('_company_specific');
|
||||
if (isCompanySpecificType && !company) {
|
||||
return res.status(400).json({
|
||||
error: 'Company ID should not be provided for general or system prompt types'
|
||||
error: 'Company ID is required for company_specific prompt types'
|
||||
});
|
||||
}
|
||||
|
||||
// Build the query based on the type
|
||||
|
||||
// For non-company-specific types, company should not be provided
|
||||
if (!isCompanySpecificType && company) {
|
||||
return res.status(400).json({
|
||||
error: 'Company ID should not be provided for non-company-specific prompt types'
|
||||
});
|
||||
}
|
||||
|
||||
// Build the query based on whether company is provided
|
||||
let query, params;
|
||||
if (type === 'company_specific') {
|
||||
if (company) {
|
||||
query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1 AND company = $2';
|
||||
params = [type, company];
|
||||
params = [type.trim(), company];
|
||||
} else {
|
||||
query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1';
|
||||
params = [type];
|
||||
query = 'SELECT * FROM ai_prompts WHERE prompt_type = $1 AND company IS NULL';
|
||||
params = [type.trim()];
|
||||
}
|
||||
|
||||
|
||||
// Execute the query
|
||||
const result = await pool.query(query, params);
|
||||
|
||||
|
||||
// Check if any prompt was found
|
||||
if (result.rows.length === 0) {
|
||||
let errorMessage;
|
||||
if (type === 'company_specific') {
|
||||
errorMessage = `AI prompt not found for company ${company}`;
|
||||
} else {
|
||||
errorMessage = `${type.charAt(0).toUpperCase() + type.slice(1)} AI prompt not found`;
|
||||
}
|
||||
const errorMessage = company
|
||||
? `AI prompt '${type}' not found for company ${company}`
|
||||
: `AI prompt '${type}' not found`;
|
||||
return res.status(404).json({ error: errorMessage });
|
||||
}
|
||||
|
||||
|
||||
// Return the first matching prompt
|
||||
res.json(result.rows[0]);
|
||||
} catch (error) {
|
||||
console.error('Error fetching AI prompt by type:', error);
|
||||
res.status(500).json({
|
||||
res.status(500).json({
|
||||
error: 'Failed to fetch AI prompt',
|
||||
details: error instanceof Error ? error.message : 'Unknown error'
|
||||
});
|
||||
@@ -130,27 +128,28 @@ router.post('/', async (req, res) => {
|
||||
if (!prompt_text || !prompt_type) {
|
||||
return res.status(400).json({ error: 'Prompt text and type are required' });
|
||||
}
|
||||
|
||||
// Validate prompt type
|
||||
if (!['general', 'company_specific', 'system'].includes(prompt_type)) {
|
||||
return res.status(400).json({ error: 'Prompt type must be either "general", "company_specific", or "system"' });
|
||||
}
|
||||
|
||||
// Validate company is provided for company-specific prompts
|
||||
if (prompt_type === 'company_specific' && !company) {
|
||||
return res.status(400).json({ error: 'Company is required for company-specific prompts' });
|
||||
|
||||
// Validate prompt_type is a non-empty string (no hardcoded list - extensible)
|
||||
if (typeof prompt_type !== 'string' || prompt_type.trim().length === 0) {
|
||||
return res.status(400).json({ error: 'Prompt type must be a non-empty string' });
|
||||
}
|
||||
|
||||
// Validate company is not provided for general or system prompts
|
||||
if ((prompt_type === 'general' || prompt_type === 'system') && company) {
|
||||
return res.status(400).json({ error: 'Company should not be provided for general or system prompts' });
|
||||
// For company-specific types (ending with _company_specific), require company
|
||||
const isCompanySpecificType = prompt_type.endsWith('_company_specific');
|
||||
if (isCompanySpecificType && !company) {
|
||||
return res.status(400).json({ error: 'Company is required for company-specific prompt types' });
|
||||
}
|
||||
|
||||
// For non-company-specific types, company should not be provided
|
||||
if (!isCompanySpecificType && company) {
|
||||
return res.status(400).json({ error: 'Company should not be provided for non-company-specific prompt types' });
|
||||
}
|
||||
|
||||
const pool = req.app.locals.pool;
|
||||
if (!pool) {
|
||||
throw new Error('Database pool not initialized');
|
||||
}
|
||||
|
||||
|
||||
const result = await pool.query(`
|
||||
INSERT INTO ai_prompts (
|
||||
prompt_text,
|
||||
@@ -160,35 +159,30 @@ router.post('/', async (req, res) => {
|
||||
RETURNING *
|
||||
`, [
|
||||
prompt_text,
|
||||
prompt_type,
|
||||
company
|
||||
prompt_type.trim(),
|
||||
company || null
|
||||
]);
|
||||
|
||||
res.status(201).json(result.rows[0]);
|
||||
} catch (error) {
|
||||
console.error('Error creating AI prompt:', error);
|
||||
|
||||
|
||||
// Check for unique constraint violations
|
||||
if (error instanceof Error && error.message.includes('unique constraint')) {
|
||||
if (error.message.includes('unique_company_prompt')) {
|
||||
return res.status(409).json({
|
||||
error: 'A prompt already exists for this company',
|
||||
if (error instanceof Error && error.message.includes('unique')) {
|
||||
if (error.message.includes('idx_singleton_with_company')) {
|
||||
return res.status(409).json({
|
||||
error: 'A prompt of this type already exists for this company',
|
||||
details: error.message
|
||||
});
|
||||
} else if (error.message.includes('idx_unique_general_prompt')) {
|
||||
return res.status(409).json({
|
||||
error: 'A general prompt already exists',
|
||||
details: error.message
|
||||
});
|
||||
} else if (error.message.includes('idx_unique_system_prompt')) {
|
||||
return res.status(409).json({
|
||||
error: 'A system prompt already exists',
|
||||
} else if (error.message.includes('idx_singleton_no_company')) {
|
||||
return res.status(409).json({
|
||||
error: 'A prompt of this type already exists',
|
||||
details: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
res.status(500).json({
|
||||
|
||||
res.status(500).json({
|
||||
error: 'Failed to create AI prompt',
|
||||
details: error instanceof Error ? error.message : 'Unknown error'
|
||||
});
|
||||
@@ -209,73 +203,70 @@ router.put('/:id', async (req, res) => {
|
||||
if (!prompt_text || !prompt_type) {
|
||||
return res.status(400).json({ error: 'Prompt text and type are required' });
|
||||
}
|
||||
|
||||
// Validate prompt type
|
||||
if (!['general', 'company_specific', 'system'].includes(prompt_type)) {
|
||||
return res.status(400).json({ error: 'Prompt type must be either "general", "company_specific", or "system"' });
|
||||
|
||||
// Validate prompt_type is a non-empty string (no hardcoded list - extensible)
|
||||
if (typeof prompt_type !== 'string' || prompt_type.trim().length === 0) {
|
||||
return res.status(400).json({ error: 'Prompt type must be a non-empty string' });
|
||||
}
|
||||
|
||||
// Validate company is provided for company-specific prompts
|
||||
if (prompt_type === 'company_specific' && !company) {
|
||||
return res.status(400).json({ error: 'Company is required for company-specific prompts' });
|
||||
|
||||
// For company-specific types, require company
|
||||
const isCompanySpecificType = prompt_type.endsWith('_company_specific');
|
||||
if (isCompanySpecificType && !company) {
|
||||
return res.status(400).json({ error: 'Company is required for company-specific prompt types' });
|
||||
}
|
||||
|
||||
// Validate company is not provided for general or system prompts
|
||||
if ((prompt_type === 'general' || prompt_type === 'system') && company) {
|
||||
return res.status(400).json({ error: 'Company should not be provided for general or system prompts' });
|
||||
|
||||
// For non-company-specific types, company should not be provided
|
||||
if (!isCompanySpecificType && company) {
|
||||
return res.status(400).json({ error: 'Company should not be provided for non-company-specific prompt types' });
|
||||
}
|
||||
|
||||
const pool = req.app.locals.pool;
|
||||
if (!pool) {
|
||||
throw new Error('Database pool not initialized');
|
||||
}
|
||||
|
||||
|
||||
// Check if the prompt exists
|
||||
const checkResult = await pool.query('SELECT * FROM ai_prompts WHERE id = $1', [id]);
|
||||
if (checkResult.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'AI prompt not found' });
|
||||
}
|
||||
|
||||
|
||||
const result = await pool.query(`
|
||||
UPDATE ai_prompts
|
||||
SET
|
||||
UPDATE ai_prompts
|
||||
SET
|
||||
prompt_text = $1,
|
||||
prompt_type = $2,
|
||||
company = $3
|
||||
company = $3,
|
||||
updated_at = CURRENT_TIMESTAMP
|
||||
WHERE id = $4
|
||||
RETURNING *
|
||||
`, [
|
||||
prompt_text,
|
||||
prompt_type,
|
||||
company,
|
||||
prompt_type.trim(),
|
||||
company || null,
|
||||
id
|
||||
]);
|
||||
|
||||
res.json(result.rows[0]);
|
||||
} catch (error) {
|
||||
console.error('Error updating AI prompt:', error);
|
||||
|
||||
|
||||
// Check for unique constraint violations
|
||||
if (error instanceof Error && error.message.includes('unique constraint')) {
|
||||
if (error.message.includes('unique_company_prompt')) {
|
||||
return res.status(409).json({
|
||||
error: 'A prompt already exists for this company',
|
||||
if (error instanceof Error && error.message.includes('unique')) {
|
||||
if (error.message.includes('idx_singleton_with_company')) {
|
||||
return res.status(409).json({
|
||||
error: 'A prompt of this type already exists for this company',
|
||||
details: error.message
|
||||
});
|
||||
} else if (error.message.includes('idx_unique_general_prompt')) {
|
||||
return res.status(409).json({
|
||||
error: 'A general prompt already exists',
|
||||
details: error.message
|
||||
});
|
||||
} else if (error.message.includes('idx_unique_system_prompt')) {
|
||||
return res.status(409).json({
|
||||
error: 'A system prompt already exists',
|
||||
} else if (error.message.includes('idx_singleton_no_company')) {
|
||||
return res.status(409).json({
|
||||
error: 'A prompt of this type already exists',
|
||||
details: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
res.status(500).json({
|
||||
|
||||
res.status(500).json({
|
||||
error: 'Failed to update AI prompt',
|
||||
details: error instanceof Error ? error.message : 'Unknown error'
|
||||
});
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
434
inventory-server/src/routes/ai.js
Normal file
434
inventory-server/src/routes/ai.js
Normal file
@@ -0,0 +1,434 @@
|
||||
/**
|
||||
* AI Routes
|
||||
*
|
||||
* API endpoints for AI-powered product validation features.
|
||||
* Provides embedding generation and similarity-based suggestions.
|
||||
*/
|
||||
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const aiService = require('../services/ai');
|
||||
const { getDbConnection, closeAllConnections } = require('../utils/dbConnection');
|
||||
|
||||
// Track initialization state
|
||||
let initializationPromise = null;
|
||||
|
||||
/**
|
||||
* Ensure AI service is initialized
|
||||
* Uses lazy initialization on first request
|
||||
*/
|
||||
async function ensureInitialized() {
|
||||
if (aiService.isReady()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (initializationPromise) {
|
||||
await initializationPromise;
|
||||
return aiService.isReady();
|
||||
}
|
||||
|
||||
initializationPromise = (async () => {
|
||||
try {
|
||||
console.log('[AI Routes] Initializing AI service...');
|
||||
|
||||
// Get database connection for taxonomy
|
||||
const { connection } = await getDbConnection();
|
||||
|
||||
const result = await aiService.initialize({
|
||||
openaiApiKey: process.env.OPENAI_API_KEY,
|
||||
groqApiKey: process.env.GROQ_API_KEY,
|
||||
mysqlConnection: connection,
|
||||
pool: null, // Will be set by setPool()
|
||||
logger: console
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
console.error('[AI Routes] AI service initialization failed:', result.message);
|
||||
return false;
|
||||
}
|
||||
|
||||
console.log('[AI Routes] AI service initialized:', {
|
||||
...result.stats,
|
||||
groqEnabled: result.groqEnabled
|
||||
});
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Failed to initialize AI service:', error);
|
||||
return false;
|
||||
}
|
||||
})();
|
||||
|
||||
await initializationPromise;
|
||||
return aiService.isReady();
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/ai/status
|
||||
* Get AI service status
|
||||
*/
|
||||
router.get('/status', async (req, res) => {
|
||||
try {
|
||||
const status = aiService.getStatus();
|
||||
res.json(status);
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/initialize
|
||||
* Manually trigger initialization (also happens automatically on first use)
|
||||
*/
|
||||
router.post('/initialize', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
const status = aiService.getStatus();
|
||||
|
||||
res.json({
|
||||
success: ready,
|
||||
...status
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Initialize error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/ai/taxonomy
|
||||
* Get all taxonomy data (categories, themes, colors) without embeddings
|
||||
*/
|
||||
router.get('/taxonomy', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
const taxonomy = aiService.getTaxonomyData();
|
||||
res.json(taxonomy);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Taxonomy error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/embedding
|
||||
* Generate embedding for a single product
|
||||
*
|
||||
* Body: { product: { name, description, company_name, line_name } }
|
||||
* Returns: { embedding: number[], latencyMs: number }
|
||||
*/
|
||||
router.post('/embedding', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
const { product } = req.body;
|
||||
|
||||
if (!product) {
|
||||
return res.status(400).json({ error: 'Product is required' });
|
||||
}
|
||||
|
||||
const result = await aiService.getProductEmbedding(product);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Embedding error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/embeddings
|
||||
* Generate embeddings for multiple products
|
||||
*
|
||||
* Body: { products: Array<{ name, description, company_name, line_name }> }
|
||||
* Returns: { embeddings: Array<{ index, embedding }>, latencyMs }
|
||||
*/
|
||||
router.post('/embeddings', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
const { products } = req.body;
|
||||
|
||||
if (!Array.isArray(products)) {
|
||||
return res.status(400).json({ error: 'Products array is required' });
|
||||
}
|
||||
|
||||
const result = await aiService.getProductEmbeddings(products);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Embeddings error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/suggestions
|
||||
* Get category/theme/color suggestions for a single product
|
||||
* Generates embedding and finds similar taxonomy items
|
||||
*
|
||||
* Body: { product: { name, description, company_name, line_name }, options?: { topCategories, topThemes, topColors } }
|
||||
* Returns: { categories: Array, themes: Array, colors: Array, latencyMs }
|
||||
*/
|
||||
router.post('/suggestions', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
const { product, options } = req.body;
|
||||
|
||||
if (!product) {
|
||||
return res.status(400).json({ error: 'Product is required' });
|
||||
}
|
||||
|
||||
const suggestions = await aiService.getSuggestionsForProduct(product, options);
|
||||
res.json(suggestions);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Suggestions error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/suggestions/batch
|
||||
* Get suggestions for multiple products
|
||||
* More efficient than calling /suggestions multiple times
|
||||
*
|
||||
* Body: { products: Array, options?: { topCategories, topThemes, topColors } }
|
||||
* Returns: { results: Array<{ index, categories, themes, colors }>, latencyMs }
|
||||
*/
|
||||
router.post('/suggestions/batch', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
const { products, options } = req.body;
|
||||
|
||||
if (!Array.isArray(products)) {
|
||||
return res.status(400).json({ error: 'Products array is required' });
|
||||
}
|
||||
|
||||
const startTime = Date.now();
|
||||
|
||||
// Generate all embeddings at once
|
||||
const { embeddings, latencyMs: embeddingLatency } = await aiService.getProductEmbeddings(products);
|
||||
|
||||
// Find suggestions for each embedding
|
||||
const results = embeddings.map(({ index, embedding }) => {
|
||||
const suggestions = aiService.findSimilarTaxonomy(embedding, options);
|
||||
return {
|
||||
index,
|
||||
...suggestions
|
||||
};
|
||||
});
|
||||
|
||||
const totalLatency = Date.now() - startTime;
|
||||
|
||||
res.json({
|
||||
results,
|
||||
latencyMs: totalLatency,
|
||||
embeddingLatencyMs: embeddingLatency,
|
||||
searchLatencyMs: totalLatency - embeddingLatency,
|
||||
productCount: products.length,
|
||||
embeddingCount: embeddings.length
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Batch suggestions error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/similar
|
||||
* Find similar taxonomy items given a pre-computed embedding
|
||||
* Useful when frontend has cached the embedding
|
||||
*
|
||||
* Body: { embedding: number[], options?: { topCategories, topThemes, topColors } }
|
||||
* Returns: { categories, themes, colors }
|
||||
*/
|
||||
router.post('/similar', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
const { embedding, options } = req.body;
|
||||
|
||||
if (!embedding || !Array.isArray(embedding)) {
|
||||
return res.status(400).json({ error: 'Embedding array is required' });
|
||||
}
|
||||
|
||||
const startTime = Date.now();
|
||||
const suggestions = aiService.findSimilarTaxonomy(embedding, options);
|
||||
|
||||
res.json({
|
||||
...suggestions,
|
||||
latencyMs: Date.now() - startTime
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Similar error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// INLINE AI VALIDATION ENDPOINTS (Groq-powered)
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* POST /api/ai/validate/inline/name
|
||||
* Validate a single product name for spelling, grammar, and naming conventions
|
||||
*
|
||||
* Body: { product: { name, company_name, company_id, line_name, description } }
|
||||
* Returns: { isValid, suggestion?, issues[], latencyMs }
|
||||
*/
|
||||
router.post('/validate/inline/name', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
if (!aiService.hasChatCompletion()) {
|
||||
return res.status(503).json({
|
||||
error: 'Chat completion not available - GROQ_API_KEY not configured'
|
||||
});
|
||||
}
|
||||
|
||||
const { product } = req.body;
|
||||
|
||||
if (!product) {
|
||||
return res.status(400).json({ error: 'Product is required' });
|
||||
}
|
||||
|
||||
// Get pool from app.locals (set by server.js)
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
const result = await aiService.runTask(aiService.TASK_IDS.VALIDATE_NAME, {
|
||||
product,
|
||||
pool
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
return res.status(500).json({
|
||||
error: result.error || 'Validation failed',
|
||||
code: result.code
|
||||
});
|
||||
}
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Name validation error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/validate/inline/description
|
||||
* Validate a single product description for quality and guideline compliance
|
||||
*
|
||||
* Body: { product: { name, description, company_name, company_id, categories } }
|
||||
* Returns: { isValid, suggestion?, issues[], latencyMs }
|
||||
*/
|
||||
router.post('/validate/inline/description', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
if (!aiService.hasChatCompletion()) {
|
||||
return res.status(503).json({
|
||||
error: 'Chat completion not available - GROQ_API_KEY not configured'
|
||||
});
|
||||
}
|
||||
|
||||
const { product } = req.body;
|
||||
|
||||
if (!product) {
|
||||
return res.status(400).json({ error: 'Product is required' });
|
||||
}
|
||||
|
||||
// Get pool from app.locals (set by server.js)
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
const result = await aiService.runTask(aiService.TASK_IDS.VALIDATE_DESCRIPTION, {
|
||||
product,
|
||||
pool
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
return res.status(500).json({
|
||||
error: result.error || 'Validation failed',
|
||||
code: result.code
|
||||
});
|
||||
}
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Description validation error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/ai/validate/sanity-check
|
||||
* Run consistency/sanity check on a batch of products
|
||||
*
|
||||
* Body: { products: Array<product data> }
|
||||
* Returns: { issues: Array<{ productIndex, field, issue, suggestion? }>, summary, latencyMs }
|
||||
*/
|
||||
router.post('/validate/sanity-check', async (req, res) => {
|
||||
try {
|
||||
const ready = await ensureInitialized();
|
||||
if (!ready) {
|
||||
return res.status(503).json({ error: 'AI service not available' });
|
||||
}
|
||||
|
||||
if (!aiService.hasChatCompletion()) {
|
||||
return res.status(503).json({
|
||||
error: 'Chat completion not available - GROQ_API_KEY not configured'
|
||||
});
|
||||
}
|
||||
|
||||
const { products } = req.body;
|
||||
|
||||
if (!Array.isArray(products) || products.length === 0) {
|
||||
return res.status(400).json({ error: 'Products array is required' });
|
||||
}
|
||||
|
||||
// Get pool from app.locals (set by server.js)
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
const result = await aiService.runTask(aiService.TASK_IDS.SANITY_CHECK, {
|
||||
products,
|
||||
pool
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
return res.status(500).json({
|
||||
error: result.error || 'Sanity check failed',
|
||||
code: result.code
|
||||
});
|
||||
}
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[AI Routes] Sanity check error:', error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
170
inventory-server/src/routes/hts-lookup.js
Normal file
170
inventory-server/src/routes/hts-lookup.js
Normal file
@@ -0,0 +1,170 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// GET /api/hts-lookup?search=term
// Finds matching products and groups them by harmonized tariff code.
// Each tariff-code group is additionally joined (by alphanumeric prefix
// match in either direction) against the htsdata reference table so the
// response carries official HTS descriptions and duty rates next to the
// matched products.
router.get('/', async (req, res) => {
  // Accept only a real string query param; anything else counts as missing.
  const searchTerm = typeof req.query.search === 'string' ? req.query.search.trim() : '';

  if (!searchTerm) {
    return res.status(400).json({ error: 'Search term is required' });
  }

  try {
    // Shared pg pool published on app.locals by the server bootstrap.
    const pool = req.app.locals.pool;
    // Substring match used by every ILIKE comparison below ($1).
    const likeTerm = `%${searchTerm}%`;

    // CTE overview:
    //   matched_products — products hit by the search term, plus a
    //     lowercase alphanumeric-only normalization of their tariff code.
    //   grouped          — those products aggregated per tariff code.
    //   hts_lookup       — htsdata rows with the same normalization of
    //     their "HTS Number".
    // The final LATERAL join attaches htsdata rows whose normalized
    // number is a prefix of the product code (or vice versa), ordered
    // from the shortest (most general) heading down.
    const { rows } = await pool.query(
      `
      WITH matched_products AS (
        SELECT
          pid,
          title,
          sku,
          barcode,
          brand,
          vendor,
          harmonized_tariff_code,
          NULLIF(
            LOWER(
              REGEXP_REPLACE(
                COALESCE(NULLIF(TRIM(harmonized_tariff_code), ''), ''),
                '[^0-9A-Za-z]',
                '',
                'g'
              )
            ),
            ''
          ) AS normalized_code
        FROM products
        WHERE visible = TRUE
          AND (
            title ILIKE $1
            OR sku ILIKE $1
            OR barcode ILIKE $1
            OR vendor ILIKE $1
            OR brand ILIKE $1
            OR vendor_reference ILIKE $1
            OR harmonized_tariff_code ILIKE $1
          )
      ),
      grouped AS (
        SELECT
          COALESCE(NULLIF(TRIM(harmonized_tariff_code), ''), 'Unspecified') AS harmonized_tariff_code,
          normalized_code,
          COUNT(*)::INT AS product_count,
          json_agg(
            json_build_object(
              'pid', pid,
              'title', title,
              'sku', sku,
              'barcode', barcode,
              'brand', brand,
              'vendor', vendor
            )
            ORDER BY title
          ) AS products
        FROM matched_products
        GROUP BY
          COALESCE(NULLIF(TRIM(harmonized_tariff_code), ''), 'Unspecified'),
          normalized_code
      ),
      hts_lookup AS (
        SELECT
          h."HTS Number" AS hts_number,
          h."Indent" AS indent,
          h."Description" AS description,
          h."Unit of Quantity" AS unit_of_quantity,
          h."General Rate of Duty" AS general_rate_of_duty,
          h."Special Rate of Duty" AS special_rate_of_duty,
          h."Column 2 Rate of Duty" AS column2_rate_of_duty,
          h."Quota Quantity" AS quota_quantity,
          h."Additional Duties" AS additional_duties,
          NULLIF(
            LOWER(
              REGEXP_REPLACE(
                COALESCE(h."HTS Number", ''),
                '[^0-9A-Za-z]',
                '',
                'g'
              )
            ),
            ''
          ) AS normalized_hts_number
        FROM htsdata h
      )
      SELECT
        g.harmonized_tariff_code,
        g.product_count,
        g.products,
        hts.hts_details
      FROM grouped g
      LEFT JOIN LATERAL (
        SELECT json_agg(
          json_build_object(
            'hts_number', h.hts_number,
            'indent', h.indent,
            'description', h.description,
            'unit_of_quantity', h.unit_of_quantity,
            'general_rate_of_duty', h.general_rate_of_duty,
            'special_rate_of_duty', h.special_rate_of_duty,
            'column2_rate_of_duty', h.column2_rate_of_duty,
            'quota_quantity', h.quota_quantity,
            'additional_duties', h.additional_duties
          )
          ORDER BY LENGTH(COALESCE(h.normalized_hts_number, '')) ASC NULLS LAST,
                   NULLIF(h.indent, '')::INT NULLS LAST
        ) AS hts_details
        FROM hts_lookup h
        WHERE COALESCE(g.normalized_code, '') <> ''
          AND COALESCE(h.normalized_hts_number, '') <> ''
          AND (
            g.normalized_code LIKE h.normalized_hts_number || '%'
            OR h.normalized_hts_number LIKE g.normalized_code || '%'
          )
      ) hts ON TRUE
      ORDER BY g.product_count DESC, g.harmonized_tariff_code ASC
      `,
      [likeTerm]
    );

    // Total product matches across all tariff-code groups.
    const totalMatches = rows.reduce((sum, row) => sum + (parseInt(row.product_count, 10) || 0), 0);

    // Re-shape each group explicitly so only the whitelisted fields are
    // sent to the client; hts_details/products may be NULL (no JSON rows
    // aggregated), hence the Array.isArray guards.
    res.json({
      search: searchTerm,
      total: totalMatches,
      results: rows.map(row => ({
        harmonized_tariff_code: row.harmonized_tariff_code,
        product_count: parseInt(row.product_count, 10) || 0,
        hts_details: Array.isArray(row.hts_details)
          ? row.hts_details.map(detail => ({
              hts_number: detail.hts_number,
              indent: detail.indent,
              description: detail.description,
              unit_of_quantity: detail.unit_of_quantity,
              general_rate_of_duty: detail.general_rate_of_duty,
              special_rate_of_duty: detail.special_rate_of_duty,
              column2_rate_of_duty: detail.column2_rate_of_duty,
              quota_quantity: detail.quota_quantity,
              additional_duties: detail.additional_duties
            }))
          : [],
        products: Array.isArray(row.products)
          ? row.products.map(product => ({
              pid: product.pid,
              title: product.title,
              sku: product.sku,
              barcode: product.barcode,
              brand: product.brand,
              vendor: product.vendor
            }))
          : []
      }))
    });
  } catch (error) {
    console.error('Error performing HTS lookup:', error);
    res.status(500).json({ error: 'Failed to lookup HTS codes' });
  }
});
|
||||
|
||||
// Expose the HTS lookup router for mounting by the server bootstrap.
module.exports = router;
|
||||
337
inventory-server/src/routes/import-sessions.js
Normal file
337
inventory-server/src/routes/import-sessions.js
Normal file
@@ -0,0 +1,337 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// List every import session (named and unnamed) for one user.
// Unnamed autosave sessions sort ahead of named ones; within each group
// the most recently updated session comes first. Only summary columns
// are returned (the data payload is reduced to a row count).
router.get('/', async (req, res) => {
  try {
    const userId = req.query.user_id;
    if (!userId) {
      return res.status(400).json({ error: 'user_id query parameter is required' });
    }

    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const listSql = `
      SELECT
        id,
        user_id,
        name,
        current_step,
        jsonb_array_length(data) as row_count,
        global_selections,
        created_at,
        updated_at
      FROM import_sessions
      WHERE user_id = $1
      ORDER BY
        CASE WHEN name IS NULL THEN 0 ELSE 1 END,
        updated_at DESC
    `;
    const { rows } = await pool.query(listSql, [userId]);

    res.json(rows);
  } catch (error) {
    console.error('Error fetching import sessions:', error);
    res.status(500).json({
      error: 'Failed to fetch import sessions',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Fetch a single import session, including its full payload, by primary key.
router.get('/:id', async (req, res) => {
  try {
    const sessionId = req.params.id;

    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const fetchSql = `
      SELECT * FROM import_sessions
      WHERE id = $1
    `;
    const { rows } = await pool.query(fetchSql, [sessionId]);

    if (rows.length === 0) {
      return res.status(404).json({ error: 'Import session not found' });
    }

    res.json(rows[0]);
  } catch (error) {
    console.error('Error fetching import session:', error);
    res.status(500).json({
      error: 'Failed to fetch import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Autosave: insert or refresh the single unnamed session per user.
// IMPORTANT: registered ahead of the /:id handlers so Express does not
// capture the literal "autosave" as an :id value.
router.put('/autosave', async (req, res) => {
  try {
    const {
      user_id,
      current_step,
      data,
      product_images,
      global_selections,
      validation_state
    } = req.body;

    // Required-field validation.
    if (!user_id) {
      return res.status(400).json({ error: 'user_id is required' });
    }
    if (!current_step) {
      return res.status(400).json({ error: 'current_step is required' });
    }
    if (!data || !Array.isArray(data)) {
      return res.status(400).json({ error: 'data must be an array' });
    }

    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Serialize optional JSON columns, storing NULL when absent.
    const asJson = (value) => (value ? JSON.stringify(value) : null);

    // Upsert keyed on the unnamed row per user — assumes a partial
    // unique index on (user_id) WHERE name IS NULL exists.
    const upsertSql = `
      INSERT INTO import_sessions (
        user_id,
        name,
        current_step,
        data,
        product_images,
        global_selections,
        validation_state
      ) VALUES ($1, NULL, $2, $3, $4, $5, $6)
      ON CONFLICT (user_id) WHERE name IS NULL
      DO UPDATE SET
        current_step = EXCLUDED.current_step,
        data = EXCLUDED.data,
        product_images = EXCLUDED.product_images,
        global_selections = EXCLUDED.global_selections,
        validation_state = EXCLUDED.validation_state,
        updated_at = CURRENT_TIMESTAMP
      RETURNING id, user_id, name, current_step, created_at, updated_at
    `;

    const { rows } = await pool.query(upsertSql, [
      user_id,
      current_step,
      JSON.stringify(data),
      asJson(product_images),
      asJson(global_selections),
      asJson(validation_state)
    ]);

    res.json(rows[0]);
  } catch (error) {
    console.error('Error autosaving import session:', error);
    res.status(500).json({
      error: 'Failed to autosave import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Clear a user's autosave by deleting their unnamed session.
// IMPORTANT: registered before /:id so "autosave" is never matched as an id.
router.delete('/autosave/:user_id', async (req, res) => {
  try {
    const userId = req.params.user_id;

    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(
      'DELETE FROM import_sessions WHERE user_id = $1 AND name IS NULL RETURNING id, user_id, name, current_step, created_at, updated_at',
      [userId]
    );

    if (rows.length === 0) {
      return res.status(404).json({ error: 'No autosave session found for user' });
    }

    res.json({ message: 'Autosave session deleted successfully' });
  } catch (error) {
    console.error('Error deleting autosave session:', error);
    res.status(500).json({
      error: 'Failed to delete autosave session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Create a brand-new named import session.
router.post('/', async (req, res) => {
  try {
    const {
      user_id,
      name,
      current_step,
      data,
      product_images,
      global_selections,
      validation_state
    } = req.body;

    // Required-field validation; a non-empty name distinguishes named
    // sessions from the per-user autosave row.
    if (!user_id) {
      return res.status(400).json({ error: 'user_id is required' });
    }
    if (!name || typeof name !== 'string' || name.trim().length === 0) {
      return res.status(400).json({ error: 'name is required for creating a named session' });
    }
    if (!current_step) {
      return res.status(400).json({ error: 'current_step is required' });
    }
    if (!data || !Array.isArray(data)) {
      return res.status(400).json({ error: 'data must be an array' });
    }

    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Serialize optional JSON columns, storing NULL when absent.
    const asJson = (value) => (value ? JSON.stringify(value) : null);

    const insertSql = `
      INSERT INTO import_sessions (
        user_id,
        name,
        current_step,
        data,
        product_images,
        global_selections,
        validation_state
      ) VALUES ($1, $2, $3, $4, $5, $6, $7)
      RETURNING id, user_id, name, current_step, created_at, updated_at
    `;

    const { rows } = await pool.query(insertSql, [
      user_id,
      name.trim(),
      current_step,
      JSON.stringify(data),
      asJson(product_images),
      asJson(global_selections),
      asJson(validation_state)
    ]);

    res.status(201).json(rows[0]);
  } catch (error) {
    console.error('Error creating import session:', error);
    res.status(500).json({
      error: 'Failed to create import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Update named session by ID.
// Replaces the session payload wholesale; `name` is only touched when it
// appears in the request body (so a plain payload save never renames).
router.put('/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const {
      name,
      current_step,
      data,
      product_images,
      global_selections,
      validation_state
    } = req.body;

    // Required-field validation (name is optional here, unlike POST /).
    if (!current_step) {
      return res.status(400).json({ error: 'current_step is required' });
    }
    if (!data || !Array.isArray(data)) {
      return res.status(400).json({ error: 'data must be an array' });
    }

    const pool = req.app.locals.pool;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    // Build update query - optionally include name if provided.
    // Every $n placeholder shifts by one when `name` joins the SET list,
    // hence the paired ternaries below; the parameter array branches to
    // match. Interpolation only ever inserts placeholder numbers, so the
    // query stays fully parameterized.
    const hasName = name !== undefined;
    const result = await pool.query(`
      UPDATE import_sessions
      SET
        ${hasName ? 'name = $1,' : ''}
        current_step = $${hasName ? 2 : 1},
        data = $${hasName ? 3 : 2},
        product_images = $${hasName ? 4 : 3},
        global_selections = $${hasName ? 5 : 4},
        validation_state = $${hasName ? 6 : 5},
        updated_at = CURRENT_TIMESTAMP
      WHERE id = $${hasName ? 7 : 6}
      RETURNING id, user_id, name, current_step, created_at, updated_at
    `, hasName ? [
      typeof name === 'string' ? name.trim() : name,
      current_step,
      JSON.stringify(data),
      product_images ? JSON.stringify(product_images) : null,
      global_selections ? JSON.stringify(global_selections) : null,
      validation_state ? JSON.stringify(validation_state) : null,
      id
    ] : [
      current_step,
      JSON.stringify(data),
      product_images ? JSON.stringify(product_images) : null,
      global_selections ? JSON.stringify(global_selections) : null,
      validation_state ? JSON.stringify(validation_state) : null,
      id
    ]);

    if (result.rows.length === 0) {
      return res.status(404).json({ error: 'Import session not found' });
    }

    res.json(result.rows[0]);
  } catch (error) {
    console.error('Error updating import session:', error);
    res.status(500).json({
      error: 'Failed to update import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Permanently remove a session (named or unnamed) by primary key.
router.delete('/:id', async (req, res) => {
  try {
    const sessionId = req.params.id;

    const { pool } = req.app.locals;
    if (!pool) {
      throw new Error('Database pool not initialized');
    }

    const { rows } = await pool.query(
      'DELETE FROM import_sessions WHERE id = $1 RETURNING id, user_id, name, current_step, created_at, updated_at',
      [sessionId]
    );

    if (rows.length === 0) {
      return res.status(404).json({ error: 'Import session not found' });
    }

    res.json({ message: 'Import session deleted successfully' });
  } catch (error) {
    console.error('Error deleting import session:', error);
    res.status(500).json({
      error: 'Failed to delete import session',
      details: error instanceof Error ? error.message : 'Unknown error'
    });
  }
});
|
||||
|
||||
// Router-level fallback error handler.
// The four-argument signature is what makes Express treat this as
// error-handling middleware; `next` is unused but must stay declared.
router.use((err, req, res, next) => {
  console.error('Import sessions route error:', err);
  const payload = {
    error: 'Internal server error',
    details: err.message
  };
  res.status(500).json(payload);
});
|
||||
|
||||
// Expose the import-sessions router for mounting by the server bootstrap.
module.exports = router;
|
||||
@@ -5,6 +5,8 @@ const mysql = require('mysql2/promise');
|
||||
const multer = require('multer');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const fsp = fs.promises;
|
||||
const sharp = require('sharp');
|
||||
|
||||
// Create uploads directory if it doesn't exist
|
||||
const uploadsDir = path.join('/var/www/html/inventory/uploads/products');
|
||||
@@ -35,6 +37,9 @@ const connectionCache = {
|
||||
}
|
||||
};
|
||||
|
||||
const MIN_IMAGE_DIMENSION = 1000;
|
||||
const MAX_IMAGE_SIZE_BYTES = 5 * 1024 * 1024;
|
||||
|
||||
// Function to schedule image deletion after 24 hours
|
||||
const scheduleImageDeletion = (filename, filePath) => {
|
||||
// Only schedule deletion for images in the products folder
|
||||
@@ -145,6 +150,255 @@ const cleanupImagesOnStartup = () => {
|
||||
// Run cleanup on server start
|
||||
cleanupImagesOnStartup();
|
||||
|
||||
// Convert a byte count to megabytes, rounded to two decimal places.
const bytesToMegabytes = (bytes) => {
  const megabytes = bytes / (1024 * 1024);
  return Number(megabytes.toFixed(2));
};
|
||||
|
||||
/**
 * Post-process an uploaded image in place: inspect it with sharp, convert
 * CMYK to sRGB, and iteratively shrink/recompress until it fits under
 * MAX_IMAGE_SIZE_BYTES (5MB) without dropping below MIN_IMAGE_DIMENSION.
 * The file at filePath is overwritten only when a transformation occurred.
 *
 * @param {string} filePath - Path of the already-saved upload.
 * @param {string} mimetype - Reported MIME type. NOTE(review): currently
 *   unused — format decisions come from sharp's own metadata.
 * @returns {{ notices: Array, warnings: Array<string>, metadata: Object, finalSize: number }}
 *   notices: structured messages; warnings: same messages as plain strings
 *   (legacy shape); metadata: dimensions/sizes/flags; finalSize: bytes on disk.
 */
const processUploadedImage = async (filePath, mimetype) => {
  const notices = [];
  const legacyWarnings = [];
  const metadata = {};

  // failOn: 'none' keeps sharp from rejecting slightly-corrupt images.
  const originalBuffer = await fsp.readFile(filePath);
  let baseMetadata = await sharp(originalBuffer, { failOn: 'none' }).metadata();

  metadata.width = baseMetadata.width || 0;
  metadata.height = baseMetadata.height || 0;
  metadata.size = originalBuffer.length;
  // 'space' vs 'colourspace' covers differing sharp versions/spellings.
  metadata.colorSpace = baseMetadata.space || baseMetadata.colourspace || null;

  // Warn (but do not reject) when the image is below the recommended minimum.
  if (
    baseMetadata.width &&
    baseMetadata.height &&
    (baseMetadata.width < MIN_IMAGE_DIMENSION || baseMetadata.height < MIN_IMAGE_DIMENSION)
  ) {
    const message = `Image is ${baseMetadata.width}x${baseMetadata.height}. Recommended minimum is ${MIN_IMAGE_DIMENSION}x${MIN_IMAGE_DIMENSION}.`;
    notices.push({
      message,
      level: 'warning',
      code: 'dimensions_too_small',
      source: 'server'
    });
    legacyWarnings.push(message);
  }

  // CMYK sources get converted to sRGB during encoding below.
  const colorSpace = (baseMetadata.space || baseMetadata.colourspace || '').toLowerCase();
  let shouldConvertToRgb = colorSpace === 'cmyk';

  if (shouldConvertToRgb) {
    // Notice is pushed up front; the actual conversion happens in encode().
    const message = 'Converted image from CMYK to RGB.';
    notices.push({
      message,
      level: 'info',
      code: 'converted_to_rgb',
      source: 'server'
    });
    legacyWarnings.push(message);
  }

  // GIFs are left untouched (re-encoding could break animation); just warn
  // if oversized and return the original stats.
  const format = (baseMetadata.format || '').toLowerCase();
  if (format === 'gif') {
    if (metadata.size > MAX_IMAGE_SIZE_BYTES) {
      const message = `GIF optimization is limited; resulting size is ${bytesToMegabytes(metadata.size)}MB (target 5MB).`;
      notices.push({
        message,
        level: 'warning',
        code: 'gif_size_limit',
        source: 'server'
      });
      legacyWarnings.push(message);
    }
    metadata.convertedToRgb = false;
    metadata.resized = false;
    return { notices, warnings: legacyWarnings, metadata, finalSize: metadata.size };
  }

  // Only lossy formats accept a quality knob; PNG relies on resize alone.
  const supportsQuality = ['jpeg', 'jpg', 'webp'].includes(format);
  let targetQuality = supportsQuality ? 90 : undefined;
  let finalQuality = undefined;

  let currentWidth = baseMetadata.width || null;
  let currentHeight = baseMetadata.height || null;

  let resized = false;
  let mutated = false;            // whether any re-encode happened at all
  let finalBuffer = originalBuffer;
  let finalInfo = baseMetadata;

  // Re-encode the ORIGINAL buffer (never a prior attempt, to avoid
  // generation loss) at the given dimensions/quality.
  const encode = async ({ width, height, quality }) => {
    let pipeline = sharp(originalBuffer, { failOn: 'none' });

    if (shouldConvertToRgb) {
      pipeline = pipeline.toColorspace('srgb');
    }

    if (width || height) {
      pipeline = pipeline.resize({
        width: width ?? undefined,
        height: height ?? undefined,
        fit: 'inside',
        withoutEnlargement: true,
      });
    }

    switch (format) {
      case 'png':
        pipeline = pipeline.png({
          compressionLevel: 9,
          adaptiveFiltering: true,
          palette: true,
        });
        break;
      case 'webp':
        pipeline = pipeline.webp({ quality: quality ?? 90 });
        break;
      case 'jpeg':
      case 'jpg':
      default:
        // Unknown formats fall through to JPEG encoding.
        pipeline = pipeline.jpeg({ quality: quality ?? 90, mozjpeg: true });
        break;
    }

    return pipeline.toBuffer({ resolveWithObject: true });
  };

  const canResize =
    (currentWidth && currentWidth > MIN_IMAGE_DIMENSION) ||
    (currentHeight && currentHeight > MIN_IMAGE_DIMENSION);

  if (metadata.size > MAX_IMAGE_SIZE_BYTES && (supportsQuality || canResize)) {
    // Shrink loop: each attempt scales dimensions by 0.85 (floored at the
    // minimum dimension); once resizing bottoms out, quality steps down
    // 90 -> 80 -> 70. Capped at 8 attempts.
    const maxAttempts = 8;

    for (let attempt = 0; attempt < maxAttempts; attempt++) {
      let targetWidth = currentWidth;
      let targetHeight = currentHeight;
      let resizedThisAttempt = false;

      if (currentWidth && currentWidth > MIN_IMAGE_DIMENSION) {
        targetWidth = Math.max(MIN_IMAGE_DIMENSION, Math.round(currentWidth * 0.85));
      }

      if (currentHeight && currentHeight > MIN_IMAGE_DIMENSION) {
        targetHeight = Math.max(MIN_IMAGE_DIMENSION, Math.round(currentHeight * 0.85));
      }

      if (
        (targetWidth && currentWidth && targetWidth < currentWidth) ||
        (targetHeight && currentHeight && targetHeight < currentHeight)
      ) {
        resized = true;
        resizedThisAttempt = true;
        currentWidth = targetWidth;
        currentHeight = targetHeight;
      } else if (!supportsQuality || (targetQuality && targetQuality <= 70)) {
        // Cannot resize further and quality cannot be adjusted
        break;
      }

      const qualityForAttempt = supportsQuality ? targetQuality : undefined;
      const { data, info } = await encode({
        width: currentWidth,
        height: currentHeight,
        quality: qualityForAttempt,
      });

      // Record the attempt's output; info reports the actual encoded size.
      mutated = true;
      finalBuffer = data;
      finalInfo = info;
      metadata.optimizedSize = data.length;
      if (info.width) metadata.width = info.width;
      if (info.height) metadata.height = info.height;
      if (info.width) currentWidth = info.width;
      if (info.height) currentHeight = info.height;

      if (supportsQuality && qualityForAttempt) {
        finalQuality = qualityForAttempt;
      }

      if (data.length <= MAX_IMAGE_SIZE_BYTES) {
        break;
      }

      // Prefer further resizing before dropping quality.
      if (resizedThisAttempt) {
        continue;
      }

      if (supportsQuality && targetQuality && targetQuality > 70) {
        const nextQuality = Math.max(70, targetQuality - 10);
        if (nextQuality === targetQuality) {
          break;
        }
        targetQuality = nextQuality;
        continue;
      }

      break;
    }

    if (finalBuffer.length > MAX_IMAGE_SIZE_BYTES) {
      const message = `Optimized image remains ${bytesToMegabytes(finalBuffer.length)}MB (target 5MB).`;
      notices.push({
        message,
        level: 'warning',
        code: 'size_over_limit',
        source: 'server'
      });
      legacyWarnings.push(message);
    }
  } else if (shouldConvertToRgb) {
    // Under the size limit but CMYK: a single re-encode for the
    // color-space conversion only.
    const { data, info } = await encode({ width: currentWidth, height: currentHeight });
    mutated = true;
    finalBuffer = data;
    finalInfo = info;
    metadata.optimizedSize = data.length;
    if (info.width) metadata.width = info.width;
    if (info.height) metadata.height = info.height;
    if (info.width) currentWidth = info.width;
    if (info.height) currentHeight = info.height;
  }

  if (mutated) {
    // Overwrite the upload in place with the optimized bytes.
    await fsp.writeFile(filePath, finalBuffer);
    metadata.optimizedSize = finalBuffer.length;
  } else {
    // No transformation occurred; still need to ensure we report original stats
    metadata.optimizedSize = metadata.size;
  }

  metadata.convertedToRgb = shouldConvertToRgb && mutated;
  metadata.resized = resized;
  if (finalQuality) {
    metadata.quality = finalQuality;
  }

  if (resized && metadata.width && metadata.height) {
    const message = `Image resized to ${metadata.width}x${metadata.height} during optimization.`;
    notices.push({
      message,
      level: 'info',
      code: 'resized',
      source: 'server'
    });
    legacyWarnings.push(message);
  }

  if (finalQuality && finalQuality < 90) {
    const message = `Image quality adjusted to ${finalQuality} to reduce file size.`;
    notices.push({
      message,
      level: 'info',
      code: 'quality_adjusted',
      source: 'server'
    });
    legacyWarnings.push(message);
  }

  return {
    notices,
    warnings: legacyWarnings,
    metadata,
    finalSize: finalBuffer.length,
  };
};
|
||||
|
||||
// Configure multer for file uploads
|
||||
const storage = multer.diskStorage({
|
||||
destination: function (req, file, cb) {
|
||||
@@ -178,7 +432,7 @@ const storage = multer.diskStorage({
|
||||
const upload = multer({
|
||||
storage: storage,
|
||||
limits: {
|
||||
fileSize: 5 * 1024 * 1024, // 5MB max file size
|
||||
fileSize: 15 * 1024 * 1024, // Allow bigger uploads; processing will reduce to 5MB
|
||||
},
|
||||
fileFilter: function (req, file, cb) {
|
||||
// Accept only image files
|
||||
@@ -345,7 +599,7 @@ async function setupSshTunnel() {
|
||||
}
|
||||
|
||||
// Image upload endpoint
|
||||
router.post('/upload-image', upload.single('image'), (req, res) => {
|
||||
router.post('/upload-image', upload.single('image'), async (req, res) => {
|
||||
try {
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ error: 'No image file provided' });
|
||||
@@ -375,9 +629,13 @@ router.post('/upload-image', upload.single('image'), (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Process the image (resize/compress/color-space) before responding
|
||||
const processingResult = await processUploadedImage(filePath, req.file.mimetype);
|
||||
req.file.size = processingResult.finalSize;
|
||||
|
||||
// Create URL for the uploaded file - using an absolute URL with domain
|
||||
// This will generate a URL like: https://inventory.acot.site/uploads/products/filename.jpg
|
||||
const baseUrl = 'https://inventory.acot.site';
|
||||
// This will generate a URL like: https://acot.site/uploads/products/filename.jpg
|
||||
const baseUrl = 'https://tools.acherryontop.com';
|
||||
const imageUrl = `${baseUrl}/uploads/products/${req.file.filename}`;
|
||||
|
||||
// Schedule this image for deletion in 24 hours
|
||||
@@ -390,11 +648,24 @@ router.post('/upload-image', upload.single('image'), (req, res) => {
|
||||
fileName: req.file.filename,
|
||||
mimetype: req.file.mimetype,
|
||||
fullPath: filePath,
|
||||
notices: processingResult.notices,
|
||||
warnings: processingResult.warnings,
|
||||
metadata: processingResult.metadata,
|
||||
message: 'Image uploaded successfully (will auto-delete after 24 hours)'
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error uploading image:', error);
|
||||
if (req?.file?.filename) {
|
||||
const cleanupPath = path.join(uploadsDir, req.file.filename);
|
||||
if (fs.existsSync(cleanupPath)) {
|
||||
try {
|
||||
fs.unlinkSync(cleanupPath);
|
||||
} catch (cleanupError) {
|
||||
console.error('Failed to remove file after processing error:', cleanupError);
|
||||
}
|
||||
}
|
||||
}
|
||||
res.status(500).json({ error: error.message || 'Failed to upload image' });
|
||||
}
|
||||
});
|
||||
@@ -444,6 +715,26 @@ router.delete('/delete-image', (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Flush every cached taxonomy query result and report how many entries
// were evicted.
router.post('/clear-taxonomy-cache', (req, res) => {
  try {
    const { queryCache } = connectionCache;
    const clearedEntries = queryCache.size;
    queryCache.clear();

    console.log(`Cleared ${clearedEntries} entries from taxonomy cache`);

    res.json({
      success: true,
      message: `Cache cleared (${clearedEntries} entries removed)`,
      clearedEntries
    });
  } catch (error) {
    console.error('Error clearing taxonomy cache:', error);
    res.status(500).json({ error: 'Failed to clear cache' });
  }
});
||||
|
||||
// Get all options for import fields
|
||||
router.get('/field-options', async (req, res) => {
|
||||
try {
|
||||
@@ -755,17 +1046,21 @@ router.get('/list-uploads', (req, res) => {
|
||||
|
||||
// Search products from production database
|
||||
router.get('/search-products', async (req, res) => {
|
||||
const { q, company, dateRange } = req.query;
|
||||
|
||||
if (!q) {
|
||||
return res.status(400).json({ error: 'Search term is required' });
|
||||
const { q, pid, company, dateRange } = req.query;
|
||||
|
||||
if (!q && !pid) {
|
||||
return res.status(400).json({ error: 'Search term or pid is required' });
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
|
||||
|
||||
// Build WHERE clause with additional filters
|
||||
let whereClause = `
|
||||
let whereClause;
|
||||
if (pid) {
|
||||
whereClause = `\n WHERE p.pid = ${connection.escape(Number(pid))}`;
|
||||
} else {
|
||||
whereClause = `
|
||||
WHERE (
|
||||
p.description LIKE ? OR
|
||||
p.itemnumber LIKE ? OR
|
||||
@@ -773,6 +1068,7 @@ router.get('/search-products', async (req, res) => {
|
||||
pc1.name LIKE ? OR
|
||||
s.companyname LIKE ?
|
||||
)`;
|
||||
}
|
||||
|
||||
// Add company filter if provided
|
||||
if (company) {
|
||||
@@ -831,8 +1127,9 @@ router.get('/search-products', async (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
const isPidSearch = !!pid;
|
||||
// Special case for wildcard search
|
||||
const isWildcardSearch = q === '*';
|
||||
const isWildcardSearch = !isPidSearch && q === '*';
|
||||
const searchPattern = isWildcardSearch ? '%' : `%${q}%`;
|
||||
const exactPattern = isWildcardSearch ? '%' : q;
|
||||
|
||||
@@ -847,10 +1144,11 @@ router.get('/search-products', async (req, res) => {
|
||||
p.harmonized_tariff_code,
|
||||
pcp.price_each AS price,
|
||||
p.sellingprice AS regular_price,
|
||||
CASE
|
||||
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
|
||||
THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0)
|
||||
ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
|
||||
CASE
|
||||
WHEN sid.supplier_id = 92 THEN
|
||||
CASE WHEN COALESCE(sid.notions_cost_each, 0) > 0 THEN sid.notions_cost_each ELSE sid.supplier_cost_each END
|
||||
ELSE
|
||||
CASE WHEN COALESCE(sid.supplier_cost_each, 0) > 0 THEN sid.supplier_cost_each ELSE sid.notions_cost_each END
|
||||
END AS cost_price,
|
||||
s.companyname AS vendor,
|
||||
sid.supplier_itemnumber AS vendor_reference,
|
||||
@@ -892,9 +1190,9 @@ router.get('/search-products', async (req, res) => {
|
||||
LEFT JOIN current_inventory ci ON p.pid = ci.pid
|
||||
${whereClause}
|
||||
GROUP BY p.pid
|
||||
${isWildcardSearch ? 'ORDER BY p.datein DESC' : `
|
||||
ORDER BY
|
||||
CASE
|
||||
${isPidSearch ? '' : isWildcardSearch ? 'ORDER BY p.datein DESC' : `
|
||||
ORDER BY
|
||||
CASE
|
||||
WHEN p.description LIKE ? THEN 1
|
||||
WHEN p.itemnumber = ? THEN 2
|
||||
WHEN p.upc = ? THEN 3
|
||||
@@ -904,10 +1202,12 @@ router.get('/search-products', async (req, res) => {
|
||||
END
|
||||
`}
|
||||
`;
|
||||
|
||||
// Prepare query parameters based on whether it's a wildcard search
|
||||
|
||||
// Prepare query parameters based on search type
|
||||
let queryParams;
|
||||
if (isWildcardSearch) {
|
||||
if (isPidSearch) {
|
||||
queryParams = [];
|
||||
} else if (isWildcardSearch) {
|
||||
queryParams = [
|
||||
searchPattern, // LIKE for description
|
||||
searchPattern, // LIKE for itemnumber
|
||||
@@ -955,6 +1255,443 @@ router.get('/search-products', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Shared SELECT for product queries (matches search-products fields)
|
||||
const PRODUCT_SELECT = `
|
||||
SELECT
|
||||
p.pid,
|
||||
p.description AS title,
|
||||
p.notes AS description,
|
||||
p.itemnumber AS sku,
|
||||
p.upc AS barcode,
|
||||
p.harmonized_tariff_code,
|
||||
pcp.price_each AS price,
|
||||
p.sellingprice AS regular_price,
|
||||
CASE
|
||||
WHEN sid.supplier_id = 92 THEN
|
||||
CASE WHEN COALESCE(sid.notions_cost_each, 0) > 0 THEN sid.notions_cost_each ELSE sid.supplier_cost_each END
|
||||
ELSE
|
||||
CASE WHEN COALESCE(sid.supplier_cost_each, 0) > 0 THEN sid.supplier_cost_each ELSE sid.notions_cost_each END
|
||||
END AS cost_price,
|
||||
s.companyname AS vendor,
|
||||
sid.supplier_itemnumber AS vendor_reference,
|
||||
sid.notions_itemnumber AS notions_reference,
|
||||
sid.supplier_id AS supplier,
|
||||
sid.notions_case_pack AS case_qty,
|
||||
pc1.name AS brand,
|
||||
p.company AS brand_id,
|
||||
pc2.name AS line,
|
||||
p.line AS line_id,
|
||||
pc3.name AS subline,
|
||||
p.subline AS subline_id,
|
||||
pc4.name AS artist,
|
||||
p.artist AS artist_id,
|
||||
COALESCE(CASE
|
||||
WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
|
||||
ELSE sid.supplier_qty_per_unit
|
||||
END, sid.notions_qty_per_unit) AS moq,
|
||||
p.weight,
|
||||
p.length,
|
||||
p.width,
|
||||
p.height,
|
||||
p.country_of_origin,
|
||||
ci.totalsold AS total_sold,
|
||||
p.datein AS first_received,
|
||||
pls.date_sold AS date_last_sold,
|
||||
IF(p.tax_code IS NULL, '', CAST(p.tax_code AS CHAR)) AS tax_code,
|
||||
CAST(p.size_cat AS CHAR) AS size_cat,
|
||||
CAST(p.shipping_restrictions AS CHAR) AS shipping_restrictions
|
||||
FROM products p
|
||||
LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
|
||||
LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
|
||||
LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
|
||||
LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id
|
||||
LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id
|
||||
LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
|
||||
LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
|
||||
LEFT JOIN product_last_sold pls ON p.pid = pls.pid
|
||||
LEFT JOIN current_inventory ci ON p.pid = ci.pid`;
|
||||
|
||||
// Load products for a specific line (company + line + optional subline)
|
||||
router.get('/line-products', async (req, res) => {
|
||||
const { company, line, subline } = req.query;
|
||||
if (!company || !line) {
|
||||
return res.status(400).json({ error: 'company and line are required' });
|
||||
}
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
let where = 'WHERE p.company = ? AND p.line = ?';
|
||||
const params = [Number(company), Number(line)];
|
||||
if (subline) {
|
||||
where += ' AND p.subline = ?';
|
||||
params.push(Number(subline));
|
||||
}
|
||||
const query = `${PRODUCT_SELECT} ${where} GROUP BY p.pid ORDER BY p.description`;
|
||||
const [results] = await connection.query(query, params);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('Error loading line products:', error);
|
||||
res.status(500).json({ error: 'Failed to load line products' });
|
||||
}
|
||||
});
|
||||
|
||||
// Load new products (last 45 days by release date, excluding preorders)
|
||||
router.get('/new-products', async (req, res) => {
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
const query = `${PRODUCT_SELECT}
|
||||
LEFT JOIN shop_inventory si2 ON p.pid = si2.pid AND si2.store = 0
|
||||
WHERE DATEDIFF(NOW(), p.date_ol) <= 45
|
||||
AND p.notnew = 0
|
||||
AND (si2.all IS NULL OR si2.all != 2)
|
||||
GROUP BY p.pid
|
||||
ORDER BY IF(p.date_ol != '0000-00-00', p.date_ol, p.date_created) DESC`;
|
||||
const [results] = await connection.query(query);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('Error loading new products:', error);
|
||||
res.status(500).json({ error: 'Failed to load new products' });
|
||||
}
|
||||
});
|
||||
|
||||
// Load preorder products
|
||||
router.get('/preorder-products', async (req, res) => {
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
const query = `${PRODUCT_SELECT}
|
||||
LEFT JOIN shop_inventory si2 ON p.pid = si2.pid AND si2.store = 0
|
||||
WHERE si2.all = 2
|
||||
GROUP BY p.pid
|
||||
ORDER BY IF(p.date_ol != '0000-00-00', p.date_ol, p.date_created) DESC`;
|
||||
const [results] = await connection.query(query);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('Error loading preorder products:', error);
|
||||
res.status(500).json({ error: 'Failed to load preorder products' });
|
||||
}
|
||||
});
|
||||
|
||||
// Load hidden recently-created products from local PG, enriched from MySQL
|
||||
router.get('/hidden-new-products', async (req, res) => {
|
||||
try {
|
||||
const pool = req.app.locals.pool;
|
||||
const pgResult = await pool.query(
|
||||
`SELECT pid FROM products WHERE visible = false AND created_at > NOW() - INTERVAL '90 days' ORDER BY created_at DESC LIMIT 500`
|
||||
);
|
||||
const pids = pgResult.rows.map(r => r.pid);
|
||||
if (pids.length === 0) return res.json([]);
|
||||
|
||||
const { connection } = await getDbConnection();
|
||||
const placeholders = pids.map(() => '?').join(',');
|
||||
const query = `${PRODUCT_SELECT} WHERE p.pid IN (${placeholders}) GROUP BY p.pid ORDER BY FIELD(p.pid, ${placeholders})`;
|
||||
const [results] = await connection.query(query, [...pids, ...pids]);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('Error loading hidden new products:', error);
|
||||
res.status(500).json({ error: 'Failed to load hidden new products' });
|
||||
}
|
||||
});
|
||||
|
||||
// Load landing page extras (featured lines) for new/preorder pages
|
||||
router.get('/landing-extras', async (req, res) => {
|
||||
const { catId, sid } = req.query;
|
||||
if (!catId) {
|
||||
return res.status(400).json({ error: 'catId is required' });
|
||||
}
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
const [results] = await connection.query(
|
||||
`SELECT extra_id, image, extra_cat_id, path, name, top_text, is_new
|
||||
FROM product_category_landing_extras
|
||||
WHERE cat_id = ? AND sid = ? AND section_cat_id = 0 AND hidden = 0
|
||||
ORDER BY \`order\` DESC, name ASC`,
|
||||
[Number(catId), Number(sid) || 0]
|
||||
);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('Error loading landing extras:', error);
|
||||
res.status(500).json({ error: 'Failed to load landing extras' });
|
||||
}
|
||||
});
|
||||
|
||||
// Load products by shop path (resolves category names to IDs)
|
||||
router.get('/path-products', async (req, res) => {
|
||||
res.set('Cache-Control', 'no-store');
|
||||
const { path: shopPath } = req.query;
|
||||
if (!shopPath) {
|
||||
return res.status(400).json({ error: 'path is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
|
||||
// Strip common URL prefixes (full URLs, /shop/, leading slash)
|
||||
const cleanPath = String(shopPath)
|
||||
.replace(/^https?:\/\/[^/]+/, '')
|
||||
.replace(/^\/shop\//, '/')
|
||||
.replace(/^\//, '');
|
||||
const parts = cleanPath.split('/');
|
||||
const filters = {};
|
||||
for (let i = 0; i < parts.length - 1; i += 2) {
|
||||
filters[parts[i]] = decodeURIComponent(parts[i + 1]).replace(/_/g, ' ');
|
||||
}
|
||||
|
||||
if (Object.keys(filters).length === 0) {
|
||||
return res.status(400).json({ error: 'No valid filters found in path' });
|
||||
}
|
||||
|
||||
// Resolve category names to IDs (order matters: company -> line -> subline)
|
||||
const typeMap = { company: 1, line: 2, subline: 3, section: 10, cat: 11, subcat: 12, subsubcat: 13 };
|
||||
const resolvedIds = {};
|
||||
const resolveOrder = ['company', 'line', 'subline', 'section', 'cat', 'subcat', 'subsubcat'];
|
||||
|
||||
for (const key of resolveOrder) {
|
||||
const value = filters[key];
|
||||
if (!value) continue;
|
||||
const type = typeMap[key];
|
||||
if (!type) continue;
|
||||
const types = key === 'cat' ? [11, 20] : key === 'subcat' ? [12, 21] : [type];
|
||||
|
||||
// For line/subline, filter by parent (master_cat_id) to disambiguate
|
||||
let parentFilter = '';
|
||||
const qParams = [value];
|
||||
if (key === 'line' && resolvedIds.company != null) {
|
||||
parentFilter = ' AND master_cat_id = ?';
|
||||
qParams.push(resolvedIds.company);
|
||||
} else if (key === 'subline' && resolvedIds.line != null) {
|
||||
parentFilter = ' AND master_cat_id = ?';
|
||||
qParams.push(resolvedIds.line);
|
||||
}
|
||||
|
||||
const [rows] = await connection.query(
|
||||
`SELECT cat_id FROM product_categories WHERE LOWER(name) = LOWER(?) AND type IN (${types.join(',')})${parentFilter} LIMIT 1`,
|
||||
qParams
|
||||
);
|
||||
if (rows.length > 0) {
|
||||
resolvedIds[key] = rows[0].cat_id;
|
||||
} else {
|
||||
return res.json([]);
|
||||
}
|
||||
}
|
||||
|
||||
// Build WHERE using resolved IDs
|
||||
const whereParts = [];
|
||||
const params = [];
|
||||
const directFields = { company: 'p.company', line: 'p.line', subline: 'p.subline' };
|
||||
|
||||
for (const [key, catId] of Object.entries(resolvedIds)) {
|
||||
if (directFields[key]) {
|
||||
whereParts.push(`${directFields[key]} = ?`);
|
||||
params.push(catId);
|
||||
} else {
|
||||
whereParts.push('EXISTS (SELECT 1 FROM product_category_index pci2 WHERE pci2.pid = p.pid AND pci2.cat_id = ?)');
|
||||
params.push(catId);
|
||||
}
|
||||
}
|
||||
|
||||
if (whereParts.length === 0) {
|
||||
return res.status(400).json({ error: 'No valid filters found in path' });
|
||||
}
|
||||
|
||||
const query = `${PRODUCT_SELECT} WHERE ${whereParts.join(' AND ')} GROUP BY p.pid ORDER BY p.description`;
|
||||
const [results] = await connection.query(query, params);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('Error loading path products:', error);
|
||||
res.status(500).json({ error: 'Failed to load products by path' });
|
||||
}
|
||||
});
|
||||
|
||||
// Get product images for a given PID from production DB
|
||||
router.get('/product-images/:pid', async (req, res) => {
|
||||
const pid = parseInt(req.params.pid, 10);
|
||||
if (!pid || pid <= 0) {
|
||||
return res.status(400).json({ error: 'Valid PID is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
const { connection } = await getDbConnection();
|
||||
|
||||
const [rows] = await connection.query(
|
||||
'SELECT iid, type, width, height, `order`, hidden FROM product_images WHERE pid = ? ORDER BY `order` DESC, type',
|
||||
[pid]
|
||||
);
|
||||
|
||||
// Group by iid and build image URLs using the same logic as the PHP codebase
|
||||
const typeMap = { 1: 'o', 2: 'l', 3: 't', 4: '100x100', 5: '175x175', 6: '300x300', 7: '600x600', 8: '500x500', 9: '150x150' };
|
||||
const padded = String(pid).padStart(10, '0');
|
||||
const pathPrefix = `${padded.substring(0, 4)}/${padded.substring(4, 7)}/`;
|
||||
|
||||
const imagesByIid = {};
|
||||
for (const row of rows) {
|
||||
const typeName = typeMap[row.type];
|
||||
if (!typeName) continue;
|
||||
if (!imagesByIid[row.iid]) {
|
||||
imagesByIid[row.iid] = { iid: row.iid, order: row.order, hidden: !!row.hidden, sizes: {} };
|
||||
}
|
||||
imagesByIid[row.iid].sizes[typeName] = {
|
||||
width: row.width,
|
||||
height: row.height,
|
||||
url: `https://sbing.com/i/products/${pathPrefix}${pid}-${typeName}-${row.iid}.jpg`,
|
||||
};
|
||||
}
|
||||
|
||||
const images = Object.values(imagesByIid).sort((a, b) => b.order - a.order);
|
||||
res.json(images);
|
||||
} catch (error) {
|
||||
console.error('Error fetching product images:', error);
|
||||
res.status(500).json({ error: 'Failed to fetch product images' });
|
||||
}
|
||||
});
|
||||
|
||||
const UPC_SUPPLIER_PREFIX_LEADING_DIGIT = '4';
|
||||
const UPC_MAX_SEQUENCE = 99999;
|
||||
const UPC_RESERVATION_TTL = 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
function buildSupplierPrefix(supplierId) {
|
||||
const numericId = Number.parseInt(String(supplierId), 10);
|
||||
if (Number.isNaN(numericId) || numericId < 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const padded = String(numericId).padStart(5, '0');
|
||||
const prefix = `${UPC_SUPPLIER_PREFIX_LEADING_DIGIT}${padded}`;
|
||||
return prefix.length === 6 ? prefix : null;
|
||||
}
|
||||
|
||||
function calculateUpcCheckDigit(upcWithoutCheckDigit) {
|
||||
if (!/^\d{11}$/.test(upcWithoutCheckDigit)) {
|
||||
throw new Error('UPC body must be 11 numeric characters');
|
||||
}
|
||||
|
||||
let sum = 0;
|
||||
for (let i = 0; i < upcWithoutCheckDigit.length; i += 1) {
|
||||
const digit = Number.parseInt(upcWithoutCheckDigit[i], 10);
|
||||
sum += (i % 2 === 0) ? digit * 3 : digit;
|
||||
}
|
||||
|
||||
const mod = sum % 10;
|
||||
return mod === 0 ? 0 : 10 - mod;
|
||||
}
|
||||
|
||||
const upcReservationCache = new Map();
|
||||
const upcGenerationLocks = new Map();
|
||||
|
||||
function getReservedSequence(prefix) {
|
||||
const entry = upcReservationCache.get(prefix);
|
||||
if (!entry) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (Date.now() > entry.expiresAt) {
|
||||
upcReservationCache.delete(prefix);
|
||||
return 0;
|
||||
}
|
||||
|
||||
return entry.lastSequence;
|
||||
}
|
||||
|
||||
function setReservedSequence(prefix, sequence) {
|
||||
upcReservationCache.set(prefix, {
|
||||
lastSequence: sequence,
|
||||
expiresAt: Date.now() + UPC_RESERVATION_TTL
|
||||
});
|
||||
}
|
||||
|
||||
async function runWithSupplierLock(prefix, task) {
|
||||
const previous = upcGenerationLocks.get(prefix) || Promise.resolve();
|
||||
const chained = previous.catch(() => {}).then(() => task());
|
||||
upcGenerationLocks.set(prefix, chained);
|
||||
|
||||
try {
|
||||
return await chained;
|
||||
} finally {
|
||||
if (upcGenerationLocks.get(prefix) === chained) {
|
||||
upcGenerationLocks.delete(prefix);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
router.post('/generate-upc', async (req, res) => {
|
||||
const { supplierId, increment } = req.body || {};
|
||||
|
||||
if (supplierId === undefined || supplierId === null || String(supplierId).trim() === '') {
|
||||
return res.status(400).json({ error: 'Supplier ID is required to generate a UPC' });
|
||||
}
|
||||
|
||||
const supplierPrefix = buildSupplierPrefix(supplierId);
|
||||
if (!supplierPrefix) {
|
||||
return res.status(400).json({ error: 'Supplier ID must be a non-negative number with at most 5 digits' });
|
||||
}
|
||||
|
||||
const step = Number.parseInt(increment, 10);
|
||||
const sequenceIncrement = Number.isNaN(step) || step < 1 ? 1 : step;
|
||||
|
||||
try {
|
||||
const result = await runWithSupplierLock(supplierPrefix, async () => {
|
||||
const { connection } = await getDbConnection();
|
||||
|
||||
const [rows] = await connection.query(
|
||||
`SELECT CAST(SUBSTRING(upc,7,5) AS UNSIGNED) AS num
|
||||
FROM products
|
||||
WHERE LEFT(upc, 6) = ? AND LENGTH(upc) = 12
|
||||
ORDER BY num DESC
|
||||
LIMIT 1`,
|
||||
[supplierPrefix]
|
||||
);
|
||||
|
||||
const lastSequenceFromDb = rows && rows.length > 0 && rows[0].num !== null
|
||||
? Number.parseInt(rows[0].num, 10) || 0
|
||||
: 0;
|
||||
|
||||
const cachedSequence = getReservedSequence(supplierPrefix);
|
||||
const baselineSequence = Math.max(lastSequenceFromDb, cachedSequence);
|
||||
|
||||
let nextSequence = baselineSequence + sequenceIncrement;
|
||||
let candidateUpc = null;
|
||||
let attempts = 0;
|
||||
|
||||
while (attempts < 10 && nextSequence <= UPC_MAX_SEQUENCE) {
|
||||
const sequencePart = String(nextSequence).padStart(5, '0');
|
||||
const upcBody = `${supplierPrefix}${sequencePart}`;
|
||||
const checkDigit = calculateUpcCheckDigit(upcBody);
|
||||
const fullUpc = `${upcBody}${checkDigit}`;
|
||||
|
||||
const [existing] = await connection.query(
|
||||
'SELECT 1 FROM products WHERE upc = ? LIMIT 1',
|
||||
[fullUpc]
|
||||
);
|
||||
|
||||
if (!existing || existing.length === 0) {
|
||||
candidateUpc = { upc: fullUpc, sequence: nextSequence };
|
||||
break;
|
||||
}
|
||||
|
||||
nextSequence += 1;
|
||||
attempts += 1;
|
||||
}
|
||||
|
||||
if (!candidateUpc) {
|
||||
const reason = nextSequence > UPC_MAX_SEQUENCE
|
||||
? 'UPC range exhausted for this supplier'
|
||||
: 'Unable to find an available UPC';
|
||||
const error = new Error(reason);
|
||||
error.status = 409;
|
||||
throw error;
|
||||
}
|
||||
|
||||
setReservedSequence(supplierPrefix, candidateUpc.sequence);
|
||||
return candidateUpc.upc;
|
||||
});
|
||||
|
||||
return res.json({ success: true, upc: result });
|
||||
} catch (error) {
|
||||
console.error('Error generating UPC:', error);
|
||||
const status = error.status && Number.isInteger(error.status) ? error.status : 500;
|
||||
const message = status === 500 ? 'Failed to generate UPC' : error.message;
|
||||
return res.status(status).json({ error: message, details: status === 500 ? error.message : undefined });
|
||||
}
|
||||
});
|
||||
|
||||
// Endpoint to check UPC and generate item number
|
||||
router.get('/check-upc-and-generate-sku', async (req, res) => {
|
||||
const { upc, supplierId } = req.query;
|
||||
@@ -974,7 +1711,7 @@ router.get('/check-upc-and-generate-sku', async (req, res) => {
|
||||
|
||||
if (upcCheck.length > 0) {
|
||||
return res.status(409).json({
|
||||
error: 'UPC already exists',
|
||||
error: 'A product with this UPC already exists',
|
||||
existingProductId: upcCheck[0].pid,
|
||||
existingItemNumber: upcCheck[0].itemnumber
|
||||
});
|
||||
@@ -1149,4 +1886,4 @@ router.get('/product-categories/:pid', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
module.exports = router;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { Pool } = require('pg'); // Assuming pg driver
|
||||
|
||||
// --- Configuration & Helpers ---
|
||||
|
||||
@@ -44,7 +43,6 @@ const COLUMN_MAP = {
|
||||
currentPrice: 'pm.current_price',
|
||||
currentRegularPrice: 'pm.current_regular_price',
|
||||
currentCostPrice: 'pm.current_cost_price',
|
||||
currentLandingCostPrice: 'pm.current_landing_cost_price',
|
||||
currentStock: 'pm.current_stock',
|
||||
currentStockCost: 'pm.current_stock_cost',
|
||||
currentStockRetail: 'pm.current_stock_retail',
|
||||
@@ -177,7 +175,7 @@ const COLUMN_MAP = {
|
||||
const COLUMN_TYPES = {
|
||||
// Numeric columns (use numeric operators and sorting)
|
||||
numeric: [
|
||||
'pid', 'currentPrice', 'currentRegularPrice', 'currentCostPrice', 'currentLandingCostPrice',
|
||||
'pid', 'currentPrice', 'currentRegularPrice', 'currentCostPrice',
|
||||
'currentStock', 'currentStockCost', 'currentStockRetail', 'currentStockGross',
|
||||
'onOrderQty', 'onOrderCost', 'onOrderRetail', 'ageDays',
|
||||
'sales7d', 'revenue7d', 'sales14d', 'revenue14d', 'sales30d', 'revenue30d',
|
||||
@@ -255,32 +253,98 @@ const SPECIAL_SORT_COLUMNS = {
|
||||
};
|
||||
|
||||
// Status priority for sorting (lower number = higher priority)
|
||||
// Values must match what's stored in the DB status column
|
||||
const STATUS_PRIORITY = {
|
||||
'Critical': 1,
|
||||
'At Risk': 2,
|
||||
'Reorder': 3,
|
||||
'Overstocked': 4,
|
||||
'Reorder Soon': 3,
|
||||
'Overstock': 4,
|
||||
'Healthy': 5,
|
||||
'New': 6
|
||||
// Any other status will be sorted alphabetically after these
|
||||
};
|
||||
|
||||
// Get database column name from frontend column name
|
||||
// Returns null for unknown keys so callers can skip them
|
||||
function getDbColumn(frontendColumn) {
|
||||
return COLUMN_MAP[frontendColumn] || 'pm.title'; // Default to title if not found
|
||||
return COLUMN_MAP[frontendColumn] || null;
|
||||
}
|
||||
|
||||
// Get column type for proper sorting
|
||||
// Get column type by searching through the COLUMN_TYPES arrays
|
||||
function getColumnType(frontendColumn) {
|
||||
return COLUMN_TYPES[frontendColumn] || 'string';
|
||||
if (COLUMN_TYPES.numeric.includes(frontendColumn)) return 'numeric';
|
||||
if (COLUMN_TYPES.date.includes(frontendColumn)) return 'date';
|
||||
if (COLUMN_TYPES.boolean.includes(frontendColumn)) return 'boolean';
|
||||
return 'string';
|
||||
}
|
||||
|
||||
// --- Route Handlers ---
|
||||
|
||||
// GET /metrics/summary - Aggregate KPI summary for the current view
|
||||
router.get('/summary', async (req, res) => {
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
try {
|
||||
// Build WHERE clause from same filters as main list endpoint
|
||||
const conditions = ['pm.is_visible = true', 'pm.is_replenishable = true'];
|
||||
const params = [];
|
||||
let paramCounter = 1;
|
||||
|
||||
// Handle showNonReplenishable
|
||||
if (req.query.showNonReplenishable === 'true') {
|
||||
// Remove the is_replenishable condition
|
||||
conditions.pop();
|
||||
}
|
||||
// Handle showInvisible
|
||||
if (req.query.showInvisible === 'true') {
|
||||
conditions.shift(); // Remove is_visible condition
|
||||
}
|
||||
|
||||
// Handle stock_status filter
|
||||
if (req.query.stock_status) {
|
||||
conditions.push(`pm.status = $${paramCounter++}`);
|
||||
params.push(req.query.stock_status);
|
||||
}
|
||||
|
||||
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
|
||||
|
||||
const sql = `
|
||||
SELECT
|
||||
COUNT(*)::int AS total_products,
|
||||
COALESCE(SUM(pm.current_stock_cost), 0)::numeric(15,2) AS total_stock_value,
|
||||
COALESCE(SUM(pm.current_stock_retail), 0)::numeric(15,2) AS total_stock_retail,
|
||||
COUNT(*) FILTER (WHERE pm.status IN ('Critical', 'Reorder Soon'))::int AS needs_reorder_count,
|
||||
COALESCE(SUM(pm.replenishment_cost) FILTER (WHERE pm.replenishment_units > 0), 0)::numeric(15,2) AS total_replenishment_cost,
|
||||
COALESCE(SUM(pm.replenishment_units) FILTER (WHERE pm.replenishment_units > 0), 0)::int AS total_replenishment_units,
|
||||
COALESCE(SUM(pm.overstocked_cost) FILTER (WHERE pm.overstocked_units > 0), 0)::numeric(15,2) AS total_overstock_value,
|
||||
COALESCE(SUM(pm.overstocked_units) FILTER (WHERE pm.overstocked_units > 0), 0)::int AS total_overstock_units,
|
||||
COALESCE(SUM(pm.on_order_qty), 0)::int AS total_on_order_units,
|
||||
COALESCE(SUM(pm.on_order_cost), 0)::numeric(15,2) AS total_on_order_cost,
|
||||
COALESCE(AVG(pm.stock_cover_in_days) FILTER (WHERE pm.stock_cover_in_days IS NOT NULL AND pm.current_stock > 0), 0)::numeric(10,1) AS avg_stock_cover_days,
|
||||
COUNT(*) FILTER (WHERE pm.current_stock = 0)::int AS out_of_stock_count,
|
||||
COALESCE(SUM(pm.forecast_lost_revenue) FILTER (WHERE pm.forecast_lost_revenue > 0), 0)::numeric(15,2) AS total_lost_revenue,
|
||||
COALESCE(SUM(pm.forecast_lost_sales_units) FILTER (WHERE pm.forecast_lost_sales_units > 0), 0)::int AS total_lost_sales_units,
|
||||
COUNT(*) FILTER (WHERE pm.status = 'Critical')::int AS critical_count,
|
||||
COUNT(*) FILTER (WHERE pm.status = 'Reorder Soon')::int AS reorder_count,
|
||||
COUNT(*) FILTER (WHERE pm.status = 'At Risk')::int AS at_risk_count,
|
||||
COUNT(*) FILTER (WHERE pm.status = 'Overstock')::int AS overstock_count,
|
||||
COUNT(*) FILTER (WHERE pm.status = 'Healthy')::int AS healthy_count,
|
||||
COUNT(*) FILTER (WHERE pm.status = 'New')::int AS new_count
|
||||
FROM public.product_metrics pm
|
||||
${whereClause}
|
||||
`;
|
||||
|
||||
const { rows } = await pool.query(sql, params);
|
||||
res.json(rows[0]);
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error fetching metrics summary:', error);
|
||||
res.status(500).json({ error: 'Failed to fetch metrics summary.' });
|
||||
}
|
||||
});
|
||||
|
||||
// GET /metrics/filter-options - Provide distinct values for filter dropdowns
|
||||
router.get('/filter-options', async (req, res) => {
|
||||
const pool = req.app.locals.pool;
|
||||
console.log('GET /metrics/filter-options');
|
||||
try {
|
||||
const [vendorRes, brandRes, abcClassRes] = await Promise.all([
|
||||
pool.query(`SELECT DISTINCT vendor FROM public.product_metrics WHERE vendor IS NOT NULL AND vendor <> '' ORDER BY vendor`),
|
||||
@@ -304,7 +368,6 @@ router.get('/filter-options', async (req, res) => {
|
||||
// GET /metrics/ - List all product metrics with filtering, sorting, pagination
|
||||
router.get('/', async (req, res) => {
|
||||
const pool = req.app.locals.pool;
|
||||
console.log('GET /metrics received query:', req.query);
|
||||
|
||||
try {
|
||||
// --- Pagination ---
|
||||
@@ -317,11 +380,9 @@ router.get('/', async (req, res) => {
|
||||
|
||||
// --- Sorting ---
|
||||
const sortQueryKey = req.query.sort || 'title'; // Default sort field key
|
||||
const dbColumn = getDbColumn(sortQueryKey);
|
||||
const sortDbColumn = getDbColumn(sortQueryKey) || 'pm.title';
|
||||
const columnType = getColumnType(sortQueryKey);
|
||||
|
||||
console.log(`Sorting request: ${sortQueryKey} -> ${dbColumn} (${columnType})`);
|
||||
|
||||
const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
|
||||
|
||||
// Always put nulls last regardless of sort direction or column type
|
||||
@@ -332,29 +393,29 @@ router.get('/', async (req, res) => {
|
||||
|
||||
if (SPECIAL_SORT_COLUMNS[sortQueryKey] === 'abs') {
|
||||
// Sort by absolute value for columns where negative values matter
|
||||
orderByClause = `ABS(${dbColumn}::numeric) ${sortDirection} ${nullsOrder}`;
|
||||
} else if (columnType === 'number' || SPECIAL_SORT_COLUMNS[sortQueryKey] === true) {
|
||||
orderByClause = `ABS(${sortDbColumn}::numeric) ${sortDirection} ${nullsOrder}`;
|
||||
} else if (columnType === 'numeric' || SPECIAL_SORT_COLUMNS[sortQueryKey] === true) {
|
||||
// For numeric columns, cast to numeric to ensure proper sorting
|
||||
orderByClause = `${dbColumn}::numeric ${sortDirection} ${nullsOrder}`;
|
||||
orderByClause = `${sortDbColumn}::numeric ${sortDirection} ${nullsOrder}`;
|
||||
} else if (columnType === 'date') {
|
||||
// For date columns, cast to timestamp to ensure proper sorting
|
||||
orderByClause = `CASE WHEN ${dbColumn} IS NULL THEN 1 ELSE 0 END, ${dbColumn}::timestamp ${sortDirection}`;
|
||||
} else if (columnType === 'status' || SPECIAL_SORT_COLUMNS[sortQueryKey] === 'priority') {
|
||||
orderByClause = `CASE WHEN ${sortDbColumn} IS NULL THEN 1 ELSE 0 END, ${sortDbColumn}::timestamp ${sortDirection}`;
|
||||
} else if (SPECIAL_SORT_COLUMNS[sortQueryKey] === 'priority') {
|
||||
// Special handling for status column, using priority for known statuses
|
||||
orderByClause = `
|
||||
CASE WHEN ${dbColumn} IS NULL THEN 999
|
||||
WHEN ${dbColumn} = 'Critical' THEN 1
|
||||
WHEN ${dbColumn} = 'At Risk' THEN 2
|
||||
WHEN ${dbColumn} = 'Reorder' THEN 3
|
||||
WHEN ${dbColumn} = 'Overstocked' THEN 4
|
||||
WHEN ${dbColumn} = 'Healthy' THEN 5
|
||||
WHEN ${dbColumn} = 'New' THEN 6
|
||||
CASE WHEN ${sortDbColumn} IS NULL THEN 999
|
||||
WHEN ${sortDbColumn} = 'Critical' THEN 1
|
||||
WHEN ${sortDbColumn} = 'At Risk' THEN 2
|
||||
WHEN ${sortDbColumn} = 'Reorder Soon' THEN 3
|
||||
WHEN ${sortDbColumn} = 'Overstock' THEN 4
|
||||
WHEN ${sortDbColumn} = 'Healthy' THEN 5
|
||||
WHEN ${sortDbColumn} = 'New' THEN 6
|
||||
ELSE 100
|
||||
END ${sortDirection} ${nullsOrder},
|
||||
${dbColumn} ${sortDirection}`;
|
||||
${sortDbColumn} ${sortDirection}`;
|
||||
} else {
|
||||
// For string and boolean columns, no special casting needed
|
||||
orderByClause = `CASE WHEN ${dbColumn} IS NULL THEN 1 ELSE 0 END, ${dbColumn} ${sortDirection}`;
|
||||
orderByClause = `CASE WHEN ${sortDbColumn} IS NULL THEN 1 ELSE 0 END, ${sortDbColumn} ${sortDirection}`;
|
||||
}
|
||||
|
||||
// --- Filtering ---
|
||||
@@ -389,26 +450,26 @@ router.get('/', async (req, res) => {
|
||||
let operator = '='; // Default operator
|
||||
let value = req.query[key];
|
||||
|
||||
// Check for operator suffixes (e.g., sales30d_gt, title_like)
|
||||
const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
|
||||
// Check for operator suffixes (e.g., sales30d_gt, title_ilike, isVisible_is_true)
|
||||
const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|starts_with|ends_with|not_contains|between|in|is_empty|is_not_empty|is_true|is_false)$/);
|
||||
if (operatorMatch) {
|
||||
filterKey = operatorMatch[1]; // e.g., "sales30d"
|
||||
operator = operatorMatch[2]; // e.g., "gt"
|
||||
}
|
||||
|
||||
// Get the database column for this filter key
|
||||
const dbColumn = getDbColumn(filterKey);
|
||||
const filterDbColumn = getDbColumn(filterKey);
|
||||
const valueType = getColumnType(filterKey);
|
||||
|
||||
if (!dbColumn) {
|
||||
|
||||
if (!filterDbColumn) {
|
||||
console.warn(`Invalid filter key ignored: ${key}`);
|
||||
continue; // Skip if the key doesn't map to a known column
|
||||
}
|
||||
|
||||
// --- Build WHERE clause fragment ---
|
||||
let needsParam = true; // Declared outside try so catch can access it
|
||||
try {
|
||||
let conditionFragment = '';
|
||||
let needsParam = true; // Most operators need a parameter
|
||||
|
||||
switch (operator.toLowerCase()) {
|
||||
case 'eq': operator = '='; break;
|
||||
@@ -417,48 +478,65 @@ router.get('/', async (req, res) => {
|
||||
case 'gte': operator = '>='; break;
|
||||
case 'lt': operator = '<'; break;
|
||||
case 'lte': operator = '<='; break;
|
||||
case 'like': operator = 'LIKE'; value = `%${value}%`; break; // Add wildcards for LIKE
|
||||
case 'ilike': operator = 'ILIKE'; value = `%${value}%`; break; // Add wildcards for ILIKE
|
||||
case 'like': operator = 'ILIKE'; value = `%${value}%`; break;
|
||||
case 'ilike': operator = 'ILIKE'; value = `%${value}%`; break;
|
||||
case 'starts_with': operator = 'ILIKE'; value = `${value}%`; break;
|
||||
case 'ends_with': operator = 'ILIKE'; value = `%${value}`; break;
|
||||
case 'not_contains': operator = 'NOT ILIKE'; value = `%${value}%`; break;
|
||||
case 'is_empty':
|
||||
conditionFragment = `(${filterDbColumn} IS NULL OR ${filterDbColumn}::text = '')`;
|
||||
needsParam = false;
|
||||
break;
|
||||
case 'is_not_empty':
|
||||
conditionFragment = `(${filterDbColumn} IS NOT NULL AND ${filterDbColumn}::text <> '')`;
|
||||
needsParam = false;
|
||||
break;
|
||||
case 'is_true':
|
||||
conditionFragment = `${filterDbColumn} = true`;
|
||||
needsParam = false;
|
||||
break;
|
||||
case 'is_false':
|
||||
conditionFragment = `${filterDbColumn} = false`;
|
||||
needsParam = false;
|
||||
break;
|
||||
case 'between':
|
||||
const [val1, val2] = String(value).split(',');
|
||||
if (val1 !== undefined && val2 !== undefined) {
|
||||
conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
|
||||
conditionFragment = `${filterDbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
|
||||
params.push(parseValue(val1, valueType), parseValue(val2, valueType));
|
||||
needsParam = false; // Params added manually
|
||||
needsParam = false;
|
||||
} else {
|
||||
console.warn(`Invalid 'between' value for ${key}: ${value}`);
|
||||
continue; // Skip this filter
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case 'in':
|
||||
const inValues = String(value).split(',');
|
||||
if (inValues.length > 0) {
|
||||
const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
|
||||
conditionFragment = `${dbColumn} IN (${placeholders})`;
|
||||
params.push(...inValues.map(v => parseValue(v, valueType))); // Add all parsed values
|
||||
needsParam = false; // Params added manually
|
||||
conditionFragment = `${filterDbColumn} IN (${placeholders})`;
|
||||
params.push(...inValues.map(v => parseValue(v, valueType)));
|
||||
needsParam = false;
|
||||
} else {
|
||||
console.warn(`Invalid 'in' value for ${key}: ${value}`);
|
||||
continue; // Skip this filter
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// Add other operators as needed (IS NULL, IS NOT NULL, etc.)
|
||||
case '=': // Keep default '='
|
||||
default: operator = '='; break; // Ensure default is handled
|
||||
case '=':
|
||||
default: operator = '='; break;
|
||||
}
|
||||
|
||||
if (needsParam) {
|
||||
conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
|
||||
conditionFragment = `${filterDbColumn} ${operator} $${paramCounter++}`;
|
||||
params.push(parseValue(value, valueType));
|
||||
}
|
||||
|
||||
if (conditionFragment) {
|
||||
conditions.push(`(${conditionFragment})`); // Wrap condition in parentheses
|
||||
conditions.push(`(${conditionFragment})`);
|
||||
}
|
||||
|
||||
} catch (parseError) {
|
||||
console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
|
||||
// Decrement counter if param wasn't actually used due to error
|
||||
if (needsParam) paramCounter--;
|
||||
}
|
||||
}
|
||||
@@ -466,13 +544,8 @@ router.get('/', async (req, res) => {
|
||||
// --- Construct and Execute Queries ---
|
||||
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
|
||||
|
||||
// Debug log of conditions and parameters
|
||||
console.log('Constructed WHERE conditions:', conditions);
|
||||
console.log('Parameters:', params);
|
||||
|
||||
// Count Query
|
||||
const countSql = `SELECT COUNT(*) AS total FROM public.product_metrics pm ${whereClause}`;
|
||||
console.log('Executing Count Query:', countSql, params);
|
||||
const countPromise = pool.query(countSql, params);
|
||||
|
||||
// Data Query (Select all columns from metrics table for now)
|
||||
@@ -484,16 +557,6 @@ router.get('/', async (req, res) => {
|
||||
LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
|
||||
`;
|
||||
const dataParams = [...params, limit, offset];
|
||||
|
||||
// Log detailed query information for debugging
|
||||
console.log('Executing Data Query:');
|
||||
console.log(' - Sort Column:', dbColumn);
|
||||
console.log(' - Column Type:', columnType);
|
||||
console.log(' - Sort Direction:', sortDirection);
|
||||
console.log(' - Order By Clause:', orderByClause);
|
||||
console.log(' - Full SQL:', dataSql);
|
||||
console.log(' - Parameters:', dataParams);
|
||||
|
||||
const dataPromise = pool.query(dataSql, dataParams);
|
||||
|
||||
// Execute queries in parallel
|
||||
@@ -501,7 +564,6 @@ router.get('/', async (req, res) => {
|
||||
|
||||
const total = parseInt(countResult.rows[0].total, 10);
|
||||
const metrics = dataResult.rows;
|
||||
console.log(`Total: ${total}, Fetched: ${metrics.length} for page ${page}`);
|
||||
|
||||
// --- Respond ---
|
||||
res.json({
|
||||
@@ -535,7 +597,6 @@ router.get('/:pid', async (req, res) => {
|
||||
return res.status(400).json({ error: 'Invalid Product ID.' });
|
||||
}
|
||||
|
||||
console.log(`GET /metrics/${pid}`);
|
||||
try {
|
||||
const { rows } = await pool.query(
|
||||
`SELECT * FROM public.product_metrics WHERE pid = $1`,
|
||||
@@ -543,11 +604,8 @@ router.get('/:pid', async (req, res) => {
|
||||
);
|
||||
|
||||
if (rows.length === 0) {
|
||||
console.log(`Metrics not found for PID: ${pid}`);
|
||||
return res.status(404).json({ error: 'Metrics not found for this product.' });
|
||||
}
|
||||
|
||||
console.log(`Metrics found for PID: ${pid}`);
|
||||
// Data is pre-calculated, return the first (only) row
|
||||
res.json(rows[0]);
|
||||
|
||||
@@ -566,7 +624,7 @@ function parseValue(value, type) {
|
||||
if (value === null || value === undefined || value === '') return null; // Allow empty strings? Or handle differently?
|
||||
|
||||
switch (type) {
|
||||
case 'number':
|
||||
case 'numeric':
|
||||
const num = parseFloat(value);
|
||||
if (isNaN(num)) throw new Error(`Invalid number format: "${value}"`);
|
||||
return num;
|
||||
|
||||
724
inventory-server/src/routes/newsletter.js
Normal file
724
inventory-server/src/routes/newsletter.js
Normal file
@@ -0,0 +1,724 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
|
||||
// Shared CTE fragment for the reference date.
// Uses MAX(last_calculated) from product_metrics so time-relative logic works
// correctly even when the local data snapshot is behind real-time.
const REF_DATE_CTE = `
  ref AS (SELECT COALESCE(MAX(last_calculated), NOW()) as d FROM product_metrics)
`;

// Category definitions matching production website logic:
//
// NEW: date_online within 31 days (matches prod's date_ol), NOT preorder
// PRE-ORDER: preorder_count > 0, NOT new
// CLEARANCE: (regular_price - price) / regular_price >= 0.35 (matches prod's 35% clearance threshold)
// DAILY DEALS: product_daily_deals table
// BACK IN STOCK: date_last_received > date_first_received, received within 14d,
//                first received > 30d ago, excludes new products (prod excludes datein < 30d)
// BESTSELLERS: shop_score > 20 + in stock + recent sales (matches prod's /shop/hot page)
//
// Mutual exclusivity:
// - New and Pre-order are exclusive: if preorder_count > 0, it's preorder not new
// - Back in stock excludes new products and preorder products
// - Clearance is independent (a bestseller can also be clearance)
//
// Frozen: these SQL fragments are interpolated into queries based on a
// request-supplied category name, so the table must never be mutated at runtime.
const CATEGORY_FILTERS = Object.freeze({
  new: "AND is_new = true",
  preorder: "AND is_preorder = true",
  clearance: "AND is_clearance = true",
  daily_deals: "AND is_daily_deal = true",
  back_in_stock: "AND is_back_in_stock = true",
  bestsellers: "AND shop_score > 20 AND COALESCE(current_stock, 0) > 0 AND COALESCE(sales_30d, 0) > 0",
  never_featured: "AND times_featured IS NULL AND line_last_featured_at IS NULL",
  no_interest: "AND COALESCE(total_sold, 0) = 0 AND COALESCE(current_stock, 0) > 0 AND COALESCE(date_online, product_created_at) <= CURRENT_DATE - INTERVAL '30 days'",
});
||||
|
||||
/**
 * Builds the WITH-clause body (everything after `WITH `) shared by the
 * newsletter recommendation queries. The resulting CTE chain is:
 *   ref                -> reference date (see REF_DATE_CTE)
 *   newsletter_history -> per-product Klaviyo feature counts/dates
 *   line_history       -> per-product-line feature counts/dates
 *   line_sizes         -> visible product count per line
 *   scored             -> products joined to metrics with category flags
 *                         and the composite recommendation `score`
 * Callers interpolate this into a query as `WITH ${buildScoredCTE(...)}`
 * and then SELECT from `scored`.
 *
 * @param {{forCount?: boolean}} [options] - forCount=true selects only the
 *   columns the flag/score expressions need, for cheap COUNT(*) queries.
 * @returns {string} SQL text of the CTE chain (no leading `WITH`).
 */
function buildScoredCTE({ forCount = false } = {}) {
  // forCount=true returns minimal columns for COUNT(*)
  const selectColumns = forCount ? `
    p.pid,
    p.created_at as product_created_at,
    p.date_online,
    p.shop_score,
    p.preorder_count,
    p.price,
    p.regular_price,
    p.total_sold,
    p.line,
    pm.current_stock,
    pm.on_order_qty,
    pm.sales_30d,
    pm.sales_7d,
    pm.date_last_received,
    pm.date_first_received,
    nh.times_featured,
    nh.last_featured_at,
    lh.line_last_featured_at,
    dd.deal_id,
    dd.deal_price
  ` : `
    p.pid,
    p.title,
    p.sku,
    p.brand,
    p.vendor,
    p.price,
    p.regular_price,
    p.shop_score,
    p.image_175 as image,
    p.permalink,
    p.stock_quantity,
    p.preorder_count,
    p.tags,
    p.categories,
    p.line,
    p.created_at as product_created_at,
    p.date_online,
    p.first_received,
    p.date_last_sold,
    p.total_sold,
    p.baskets,
    p.notifies,
    pm.sales_7d,
    pm.sales_30d,
    pm.revenue_30d,
    pm.current_stock,
    pm.on_order_qty,
    pm.abc_class,
    pm.date_first_received,
    pm.date_last_received,
    pm.sales_velocity_daily,
    pm.sells_out_in_days,
    pm.sales_growth_30d_vs_prev,
    pm.margin_30d,
    -- Direct product feature history
    nh.times_featured,
    nh.last_featured_at,
    EXTRACT(DAY FROM ref.d - nh.last_featured_at)::int as days_since_featured,
    -- Line-level feature history
    lh.line_products_featured,
    lh.line_total_features,
    lh.line_last_featured_at,
    lh.line_products_featured_30d,
    lh.line_products_featured_7d,
    ls.line_product_count,
    EXTRACT(DAY FROM ref.d - lh.line_last_featured_at)::int as line_days_since_featured,
    COALESCE(nh.last_featured_at, lh.line_last_featured_at) as effective_last_featured,
    EXTRACT(DAY FROM ref.d - COALESCE(nh.last_featured_at, lh.line_last_featured_at))::int as effective_days_since_featured,
    EXTRACT(DAY FROM ref.d - COALESCE(p.date_online, p.created_at))::int as age_days
  `;

  return `
    ${REF_DATE_CTE},
    newsletter_history AS (
      SELECT
        pid,
        COUNT(*) as times_featured,
        MAX(sent_at) as last_featured_at,
        MIN(sent_at) as first_featured_at
      FROM klaviyo_campaign_products
      GROUP BY pid
    ),
    line_history AS (
      SELECT
        p2.line,
        COUNT(DISTINCT kcp.pid) as line_products_featured,
        COUNT(*) as line_total_features,
        MAX(kcp.sent_at) as line_last_featured_at,
        COUNT(DISTINCT kcp.pid) FILTER (
          WHERE kcp.sent_at > (SELECT d FROM ref) - INTERVAL '30 days'
        ) as line_products_featured_30d,
        COUNT(DISTINCT kcp.pid) FILTER (
          WHERE kcp.sent_at > (SELECT d FROM ref) - INTERVAL '7 days'
        ) as line_products_featured_7d
      FROM products p2
      JOIN klaviyo_campaign_products kcp ON kcp.pid = p2.pid
      WHERE p2.line IS NOT NULL AND p2.line != ''
      GROUP BY p2.line
    ),
    line_sizes AS (
      SELECT line, COUNT(*) as line_product_count
      FROM products
      WHERE visible = true AND line IS NOT NULL AND line != ''
      GROUP BY line
    ),
    scored AS (
      SELECT
        ${selectColumns},

        -- === CATEGORY FLAGS (production-accurate, mutually exclusive where needed) ===

        -- NEW: date_online within 31 days of reference date, AND not on preorder
        -- Uses date_online (prod's date_ol) instead of created_at for accuracy
        CASE
          WHEN p.preorder_count > 0 THEN false
          WHEN COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '31 days' THEN true
          ELSE false
        END as is_new,

        -- PRE-ORDER: has preorder quantity
        CASE
          WHEN p.preorder_count > 0 THEN true
          ELSE false
        END as is_preorder,

        -- CLEARANCE: 35%+ discount off regular price (matches prod threshold), price must be > 0
        CASE
          WHEN p.price > 0 AND p.regular_price > 0 AND p.price < p.regular_price
            AND ((p.regular_price - p.price) / p.regular_price * 100) >= 35
          THEN true
          ELSE false
        END as is_clearance,

        -- DAILY DEALS: product has an active deal for today
        CASE WHEN dd.deal_id IS NOT NULL THEN true ELSE false END as is_daily_deal,
        dd.deal_price,

        -- DISCOUNT %
        CASE
          WHEN p.price > 0 AND p.regular_price > 0 AND p.price < p.regular_price
          THEN ROUND(((p.regular_price - p.price) / p.regular_price * 100)::numeric, 0)
          ELSE 0
        END as discount_pct,

        CASE WHEN pm.current_stock > 0 AND pm.current_stock <= 5 THEN true ELSE false END as is_low_stock,

        -- BACK IN STOCK: restocked product, not new, not preorder
        -- Matches prod: date_refill within X days, date_refill > datein,
        -- NOT datein within last 30 days (excludes new products)
        -- We use date_last_received/date_first_received as our equivalents
        CASE
          WHEN p.preorder_count > 0 THEN false
          WHEN COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '31 days' THEN false
          WHEN pm.date_last_received > ref.d - INTERVAL '14 days'
            AND pm.date_last_received > pm.date_first_received
            AND pm.date_first_received < ref.d - INTERVAL '30 days'
            AND pm.current_stock > 0
          THEN true
          ELSE false
        END as is_back_in_stock,

        -- === RECOMMENDATION SCORE ===
        (
          -- New product boost (first 31 days by date_online, not preorder)
          CASE
            WHEN p.preorder_count > 0 THEN 0
            WHEN COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '14 days' THEN 50
            WHEN COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '31 days' THEN 35
            ELSE 0
          END
          -- Pre-order boost
          + CASE WHEN p.preorder_count > 0 THEN 30 ELSE 0 END
          -- Clearance boost (scaled by discount depth)
          + CASE
              WHEN p.price > 0 AND p.regular_price > 0 AND p.price < p.regular_price
                AND ((p.regular_price - p.price) / p.regular_price * 100) >= 35
              THEN LEAST(((p.regular_price - p.price) / p.regular_price * 50)::int, 25)
              ELSE 0
            END
          -- Sales velocity boost (prod's "hot" logic: recent purchase count)
          + CASE WHEN COALESCE(pm.sales_7d, 0) >= 5 THEN 15
                 WHEN COALESCE(pm.sales_7d, 0) >= 2 THEN 10
                 WHEN COALESCE(pm.sales_7d, 0) >= 1 THEN 5
                 ELSE 0 END
          -- Back in stock boost (only for actual restocks, not new arrivals)
          + CASE
              WHEN p.preorder_count = 0
                AND COALESCE(p.date_online, p.created_at) <= ref.d - INTERVAL '31 days'
                AND pm.date_last_received > ref.d - INTERVAL '14 days'
                AND pm.date_last_received > pm.date_first_received
                AND pm.date_first_received < ref.d - INTERVAL '30 days'
                AND pm.current_stock > 0
              THEN 25
              ELSE 0
            END
          -- High interest (baskets + notifies)
          + LEAST((COALESCE(p.baskets, 0) + COALESCE(p.notifies, 0)) / 2, 15)
          -- Recency penalty: line-aware effective last featured (tuned for daily sends)
          + CASE
              WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) IS NULL THEN 10
              WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) > ref.d - INTERVAL '2 days' THEN -30
              WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) > ref.d - INTERVAL '5 days' THEN -15
              WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) > ref.d - INTERVAL '10 days' THEN -5
              ELSE 5
            END
          -- Over-featured penalty (direct product only, tuned for daily sends)
          + CASE
              WHEN COALESCE(nh.times_featured, 0) > 15 THEN -10
              WHEN COALESCE(nh.times_featured, 0) > 8 THEN -5
              ELSE 0
            END
          -- Line saturation penalty (uses 7-day window for daily send cadence)
          + CASE
              WHEN lh.line_products_featured_7d IS NOT NULL
                AND ls.line_product_count IS NOT NULL
                AND ls.line_product_count > 0
                AND (lh.line_products_featured_7d::float / ls.line_product_count) > 0.7
              THEN -10
              WHEN lh.line_products_featured_7d IS NOT NULL
                AND lh.line_products_featured_7d >= 4
              THEN -5
              ELSE 0
            END
          -- Price tier adjustment (deprioritize very low-price items)
          + CASE
              WHEN COALESCE(p.price, 0) < 3 THEN -15
              WHEN COALESCE(p.price, 0) < 8 THEN -5
              WHEN COALESCE(p.price, 0) >= 25 THEN 5
              ELSE 0
            END
          -- ABC class boost
          + CASE WHEN pm.abc_class = 'A' THEN 10
                 WHEN pm.abc_class = 'B' THEN 5
                 ELSE 0 END
          -- Stock penalty
          + CASE
              WHEN COALESCE(pm.current_stock, 0) <= 0 AND COALESCE(p.preorder_count, 0) = 0 THEN -100
              WHEN COALESCE(pm.current_stock, 0) <= 2 AND COALESCE(p.preorder_count, 0) = 0 THEN -20
              ELSE 0
            END
        ) as score

      FROM ref, products p
      LEFT JOIN product_metrics pm ON pm.pid = p.pid
      LEFT JOIN newsletter_history nh ON nh.pid = p.pid
      LEFT JOIN line_history lh ON lh.line = p.line AND p.line IS NOT NULL AND p.line != ''
      LEFT JOIN line_sizes ls ON ls.line = p.line AND p.line IS NOT NULL AND p.line != ''
      LEFT JOIN product_daily_deals dd ON dd.pid = p.pid AND dd.deal_date = CURRENT_DATE
      WHERE p.visible = true
    )
  `;
}
|
||||
|
||||
// GET /api/newsletter/recommendations
// Paginated list of scored products, optionally restricted to one category
// from CATEGORY_FILTERS. Responds with { products, pagination }.
router.get('/recommendations', async (req, res) => {
  const pool = req.app.locals.pool;

  try {
    // Parse pagination with an explicit radix and clamp to >= 1 so a bad,
    // zero, or negative "page"/"limit" can never yield a negative LIMIT or
    // OFFSET (which Postgres rejects with an error).
    const page = Math.max(1, parseInt(req.query.page, 10) || 1);
    const limit = Math.max(1, parseInt(req.query.limit, 10) || 50);
    const offset = (page - 1) * limit;
    const category = req.query.category || 'all';

    // Only honor own keys of CATEGORY_FILTERS: a plain [category] lookup
    // would also resolve inherited keys (e.g. ?category=constructor) and
    // interpolate non-SQL garbage into the query text.
    const categoryFilter = Object.prototype.hasOwnProperty.call(CATEGORY_FILTERS, category)
      ? CATEGORY_FILTERS[category]
      : '';

    const query = `
      WITH ${buildScoredCTE()}
      SELECT *
      FROM scored
      WHERE score > -50
      ${categoryFilter}
      ORDER BY score DESC, COALESCE(sales_7d, 0) DESC
      LIMIT $1 OFFSET $2
    `;

    const countQuery = `
      WITH ${buildScoredCTE({ forCount: true })}
      SELECT COUNT(*) FROM scored
      WHERE score > -50
      ${categoryFilter}
    `;

    // Data page and total count are independent; run them in parallel.
    const [dataResult, countResult] = await Promise.all([
      pool.query(query, [limit, offset]),
      pool.query(countQuery)
    ]);

    const total = parseInt(countResult.rows[0].count, 10);

    res.json({
      products: dataResult.rows,
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit
      }
    });
  } catch (error) {
    console.error('Error fetching newsletter recommendations:', error);
    res.status(500).json({ error: 'Failed to fetch newsletter recommendations' });
  }
});
|
||||
|
||||
// GET /api/newsletter/history/:pid
// Lists every Klaviyo campaign that featured the given product, newest first.
router.get('/history/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  const { pid } = req.params;

  const historySql = `
      SELECT campaign_id, campaign_name, sent_at, product_url
      FROM klaviyo_campaign_products
      WHERE pid = $1
      ORDER BY sent_at DESC
    `;

  try {
    const result = await pool.query(historySql, [pid]);
    res.json({ history: result.rows });
  } catch (error) {
    console.error('Error fetching newsletter history:', error);
    res.status(500).json({ error: 'Failed to fetch newsletter history' });
  }
});
|
||||
|
||||
// GET /api/newsletter/stats
// Aggregate dashboard counters for the newsletter planner, computed in a
// single query. All time windows are measured against ref.d (the snapshot's
// MAX(last_calculated), falling back to NOW()) so classification stays
// correct even when the local data snapshot lags real time.
// Returns one row: { unfeatured_new, back_in_stock_ready,
// high_score_available, last_campaign_date, avg_days_since_featured,
// never_featured }.
router.get('/stats', async (req, res) => {
  const pool = req.app.locals.pool;

  try {
    const { rows } = await pool.query(`
      WITH ref AS (SELECT COALESCE(MAX(last_calculated), NOW()) as d FROM product_metrics),
      featured_pids AS (
        SELECT DISTINCT pid FROM klaviyo_campaign_products
      ),
      recent_pids AS (
        SELECT DISTINCT pid FROM klaviyo_campaign_products
        WHERE sent_at > (SELECT d FROM ref) - INTERVAL '2 days'
      )
      SELECT
        -- Unfeatured new products
        (SELECT COUNT(*) FROM products p, ref
         WHERE p.visible = true AND p.preorder_count = 0
           AND COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '31 days'
           AND p.pid NOT IN (SELECT pid FROM featured_pids)
        ) as unfeatured_new,
        -- Back in stock, not yet featured since restock
        (SELECT COUNT(*) FROM products p
         JOIN product_metrics pm ON pm.pid = p.pid
         CROSS JOIN ref
         WHERE p.visible = true
           AND p.preorder_count = 0
           AND COALESCE(p.date_online, p.created_at) <= ref.d - INTERVAL '31 days'
           AND pm.date_last_received > ref.d - INTERVAL '14 days'
           AND pm.date_last_received > pm.date_first_received
           AND pm.date_first_received < ref.d - INTERVAL '30 days'
           AND pm.current_stock > 0
           AND p.pid NOT IN (
             SELECT pid FROM klaviyo_campaign_products
             WHERE sent_at > pm.date_last_received
           )
        ) as back_in_stock_ready,
        -- High score products available (score 40+, not featured in last 2 days)
        (SELECT COUNT(*) FROM (
          WITH ${buildScoredCTE({ forCount: true })}
          SELECT pid FROM scored
          WHERE score >= 40
            AND pid NOT IN (SELECT pid FROM recent_pids)
        ) hs) as high_score_available,
        -- Last campaign date
        (SELECT MAX(sent_at) FROM klaviyo_campaign_products) as last_campaign_date,
        -- Avg days since last featured (across visible in-stock catalog)
        (SELECT ROUND(AVG(days)::numeric, 1) FROM (
          SELECT EXTRACT(DAY FROM ref.d - MAX(kcp.sent_at))::int as days
          FROM products p
          CROSS JOIN ref
          JOIN klaviyo_campaign_products kcp ON kcp.pid = p.pid
          JOIN product_metrics pm ON pm.pid = p.pid
          WHERE p.visible = true AND COALESCE(pm.current_stock, 0) > 0
          GROUP BY p.pid, ref.d
        ) avg_calc) as avg_days_since_featured,
        -- Never featured (visible, in stock or preorder)
        (SELECT COUNT(*) FROM products p
         LEFT JOIN product_metrics pm ON pm.pid = p.pid
         WHERE p.visible = true
           AND (COALESCE(pm.current_stock, 0) > 0 OR p.preorder_count > 0)
           AND p.pid NOT IN (SELECT pid FROM featured_pids)
        ) as never_featured
    `);

    // Single-row scalar-subquery SELECT; return the row directly.
    res.json(rows[0]);
  } catch (error) {
    console.error('Error fetching newsletter stats:', error);
    res.status(500).json({ error: 'Failed to fetch newsletter stats' });
  }
});
|
||||
|
||||
// GET /api/newsletter/score-breakdown/:pid
// Returns the individual scoring factors for a single product (debug endpoint).
// Each column mirrors one additive term of the `score` expression built in
// buildScoredCTE; the breakdown is intended to sum to that score.
// NOTE(review): the factor expressions are duplicated from buildScoredCTE and
// must be kept in sync manually when scoring weights change.
router.get('/score-breakdown/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  const { pid } = req.params;

  try {
    const { rows } = await pool.query(`
      WITH ${REF_DATE_CTE},
      newsletter_history AS (
        SELECT pid, COUNT(*) as times_featured, MAX(sent_at) as last_featured_at
        FROM klaviyo_campaign_products GROUP BY pid
      ),
      line_history AS (
        SELECT p2.line,
          COUNT(DISTINCT kcp.pid) FILTER (WHERE kcp.sent_at > (SELECT d FROM ref) - INTERVAL '7 days') as line_products_featured_7d
        FROM products p2
        JOIN klaviyo_campaign_products kcp ON kcp.pid = p2.pid
        WHERE p2.line IS NOT NULL AND p2.line != ''
        GROUP BY p2.line
      ),
      line_sizes AS (
        SELECT line, COUNT(*) as line_product_count
        FROM products WHERE visible = true AND line IS NOT NULL AND line != '' GROUP BY line
      )
      SELECT
        -- New product boost
        CASE
          WHEN p.preorder_count > 0 THEN 0
          WHEN COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '14 days' THEN 50
          WHEN COALESCE(p.date_online, p.created_at) > ref.d - INTERVAL '31 days' THEN 35
          ELSE 0
        END as new_boost,
        -- Pre-order boost
        CASE WHEN p.preorder_count > 0 THEN 30 ELSE 0 END as preorder_boost,
        -- Clearance boost
        CASE
          WHEN p.price > 0 AND p.regular_price > 0 AND p.price < p.regular_price
            AND ((p.regular_price - p.price) / p.regular_price * 100) >= 35
          THEN LEAST(((p.regular_price - p.price) / p.regular_price * 50)::int, 25)
          ELSE 0
        END as clearance_boost,
        -- Sales velocity
        CASE WHEN COALESCE(pm.sales_7d, 0) >= 5 THEN 15
             WHEN COALESCE(pm.sales_7d, 0) >= 2 THEN 10
             WHEN COALESCE(pm.sales_7d, 0) >= 1 THEN 5
             ELSE 0 END as velocity_boost,
        -- Back in stock
        CASE
          WHEN p.preorder_count = 0
            AND COALESCE(p.date_online, p.created_at) <= ref.d - INTERVAL '31 days'
            AND pm.date_last_received > ref.d - INTERVAL '14 days'
            AND pm.date_last_received > pm.date_first_received
            AND pm.date_first_received < ref.d - INTERVAL '30 days'
            AND pm.current_stock > 0
          THEN 25 ELSE 0
        END as back_in_stock_boost,
        -- Interest
        LEAST((COALESCE(p.baskets, 0) + COALESCE(p.notifies, 0)) / 2, 15) as interest_boost,
        -- Recency
        CASE
          WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) IS NULL THEN 10
          WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) > ref.d - INTERVAL '2 days' THEN -30
          WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) > ref.d - INTERVAL '5 days' THEN -15
          WHEN COALESCE(nh.last_featured_at, lh.line_last_featured_at) > ref.d - INTERVAL '10 days' THEN -5
          ELSE 5
        END as recency_adj,
        -- Over-featured
        CASE
          WHEN COALESCE(nh.times_featured, 0) > 15 THEN -10
          WHEN COALESCE(nh.times_featured, 0) > 8 THEN -5
          ELSE 0
        END as over_featured_adj,
        -- Line saturation
        CASE
          WHEN lh2.line_products_featured_7d IS NOT NULL
            AND ls.line_product_count IS NOT NULL AND ls.line_product_count > 0
            AND (lh2.line_products_featured_7d::float / ls.line_product_count) > 0.7
          THEN -10
          WHEN lh2.line_products_featured_7d IS NOT NULL AND lh2.line_products_featured_7d >= 4
          THEN -5
          ELSE 0
        END as line_saturation_adj,
        -- Price tier
        CASE
          WHEN COALESCE(p.price, 0) < 3 THEN -15
          WHEN COALESCE(p.price, 0) < 8 THEN -5
          WHEN COALESCE(p.price, 0) >= 25 THEN 5
          ELSE 0
        END as price_tier_adj,
        -- ABC class
        CASE WHEN pm.abc_class = 'A' THEN 10 WHEN pm.abc_class = 'B' THEN 5 ELSE 0 END as abc_boost,
        -- Stock penalty
        CASE
          WHEN COALESCE(pm.current_stock, 0) <= 0 AND COALESCE(p.preorder_count, 0) = 0 THEN -100
          WHEN COALESCE(pm.current_stock, 0) <= 2 AND COALESCE(p.preorder_count, 0) = 0 THEN -20
          ELSE 0
        END as stock_penalty
      FROM ref, products p
      LEFT JOIN product_metrics pm ON pm.pid = p.pid
      LEFT JOIN newsletter_history nh ON nh.pid = p.pid
      LEFT JOIN LATERAL (
        SELECT MAX(kcp.sent_at) as line_last_featured_at
        FROM products p3
        JOIN klaviyo_campaign_products kcp ON kcp.pid = p3.pid
        WHERE p3.line = p.line AND p.line IS NOT NULL AND p.line != ''
      ) lh ON true
      LEFT JOIN line_history lh2 ON lh2.line = p.line AND p.line IS NOT NULL AND p.line != ''
      LEFT JOIN line_sizes ls ON ls.line = p.line AND p.line IS NOT NULL AND p.line != ''
      WHERE p.pid = $1
    `, [pid]);

    if (rows.length === 0) {
      return res.status(404).json({ error: 'Product not found' });
    }
    res.json(rows[0]);
  } catch (error) {
    console.error('Error fetching score breakdown:', error);
    res.status(500).json({ error: 'Failed to fetch score breakdown' });
  }
});
|
||||
|
||||
// GET /api/newsletter/campaigns
// Returns all campaigns with their product lists, attached links, and a
// summary block ({ campaigns, summary }).
router.get('/campaigns', async (req, res) => {
  const pool = req.app.locals.pool;

  try {
    // All three queries are independent, so fire them concurrently.
    const [campaignsResult, linksResult, summaryResult] = await Promise.all([
      pool.query(`
        SELECT
          kcp.campaign_id,
          kcp.campaign_name,
          kcp.sent_at,
          COUNT(*) as product_count,
          json_agg(json_build_object(
            'pid', kcp.pid,
            'title', p.title,
            'sku', p.sku,
            'brand', p.brand,
            'line', p.line,
            'image', p.image_175,
            'product_url', kcp.product_url
          ) ORDER BY p.brand, p.line, p.title) as products
        FROM klaviyo_campaign_products kcp
        LEFT JOIN products p ON p.pid = kcp.pid
        GROUP BY kcp.campaign_id, kcp.campaign_name, kcp.sent_at
        ORDER BY kcp.sent_at DESC
      `),
      pool.query(`
        SELECT campaign_id, campaign_name, sent_at, link_url, link_type
        FROM klaviyo_campaign_links
        ORDER BY sent_at DESC
      `),
      pool.query(`
        SELECT
          COUNT(DISTINCT campaign_id) as total_campaigns,
          COUNT(DISTINCT pid) as total_unique_products,
          ROUND(COUNT(*)::numeric / NULLIF(COUNT(DISTINCT campaign_id), 0), 1) as avg_products_per_campaign
        FROM klaviyo_campaign_products
      `)
    ]);

    // Bucket links under their campaign_id so each campaign row can carry
    // its own link list.
    const linksByCampaign = new Map();
    for (const link of linksResult.rows) {
      const bucket = linksByCampaign.get(link.campaign_id);
      if (bucket) {
        bucket.push(link);
      } else {
        linksByCampaign.set(link.campaign_id, [link]);
      }
    }

    const campaigns = campaignsResult.rows.map((campaign) => ({
      ...campaign,
      links: linksByCampaign.get(campaign.campaign_id) ?? []
    }));

    res.json({
      campaigns,
      summary: summaryResult.rows[0]
    });
  } catch (error) {
    console.error('Error fetching campaigns:', error);
    res.status(500).json({ error: 'Failed to fetch campaigns' });
  }
});
|
||||
|
||||
// GET /api/newsletter/campaigns/products
// Returns product-level aggregate stats across all campaigns, most-featured
// first ({ products }).
router.get('/campaigns/products', async (req, res) => {
  const pool = req.app.locals.pool;

  const productStatsSql = `
      SELECT
        kcp.pid,
        p.title,
        p.sku,
        p.brand,
        p.image_175 as image,
        p.permalink,
        COUNT(*) as times_featured,
        MIN(kcp.sent_at) as first_featured_at,
        MAX(kcp.sent_at) as last_featured_at,
        EXTRACT(DAY FROM NOW() - MAX(kcp.sent_at))::int as days_since_featured,
        EXTRACT(DAY FROM MAX(kcp.sent_at) - MIN(kcp.sent_at))::int as featured_span_days,
        CASE WHEN COUNT(*) > 1
          THEN ROUND(EXTRACT(DAY FROM MAX(kcp.sent_at) - MIN(kcp.sent_at))::numeric / (COUNT(*) - 1), 1)
          ELSE NULL
        END as avg_days_between_features,
        json_agg(json_build_object(
          'campaign_id', kcp.campaign_id,
          'campaign_name', kcp.campaign_name,
          'sent_at', kcp.sent_at
        ) ORDER BY kcp.sent_at DESC) as campaigns
      FROM klaviyo_campaign_products kcp
      LEFT JOIN products p ON p.pid = kcp.pid
      GROUP BY kcp.pid, p.title, p.sku, p.brand, p.image_175, p.permalink
      ORDER BY COUNT(*) DESC, MAX(kcp.sent_at) DESC
    `;

  try {
    const result = await pool.query(productStatsSql);
    res.json({ products: result.rows });
  } catch (error) {
    console.error('Error fetching campaign products:', error);
    res.status(500).json({ error: 'Failed to fetch campaign products' });
  }
});
|
||||
|
||||
// GET /api/newsletter/campaigns/brands
// Returns brand-level aggregate stats across all campaigns ({ brands }).
router.get('/campaigns/brands', async (req, res) => {
  const pool = req.app.locals.pool;

  const brandStatsSql = `
      SELECT
        COALESCE(p.brand, 'Unknown') as brand,
        COUNT(DISTINCT kcp.pid) as product_count,
        COUNT(*) as times_featured,
        MIN(kcp.sent_at) as first_featured_at,
        MAX(kcp.sent_at) as last_featured_at,
        EXTRACT(DAY FROM NOW() - MAX(kcp.sent_at))::int as days_since_featured,
        CASE WHEN COUNT(DISTINCT kcp.campaign_id) > 1
          THEN ROUND(EXTRACT(DAY FROM MAX(kcp.sent_at) - MIN(kcp.sent_at))::numeric / (COUNT(DISTINCT kcp.campaign_id) - 1), 1)
          ELSE NULL
        END as avg_days_between_features,
        json_agg(DISTINCT jsonb_build_object(
          'campaign_id', kcp.campaign_id,
          'campaign_name', kcp.campaign_name,
          'sent_at', kcp.sent_at
        )) as campaigns
      FROM klaviyo_campaign_products kcp
      LEFT JOIN products p ON p.pid = kcp.pid
      GROUP BY COALESCE(p.brand, 'Unknown')
      ORDER BY COUNT(*) DESC, MAX(kcp.sent_at) DESC
    `;

  try {
    const result = await pool.query(brandStatsSql);
    res.json({ brands: result.rows });
  } catch (error) {
    console.error('Error fetching campaign brands:', error);
    res.status(500).json({ error: 'Failed to fetch campaign brands' });
  }
});
|
||||
|
||||
// GET /api/newsletter/campaigns/links
// Returns link-level aggregate stats across all campaigns ({ links }).
router.get('/campaigns/links', async (req, res) => {
  const pool = req.app.locals.pool;

  const linkStatsSql = `
      SELECT
        link_url,
        link_type,
        COUNT(*) as times_used,
        MIN(sent_at) as first_used_at,
        MAX(sent_at) as last_used_at,
        EXTRACT(DAY FROM NOW() - MAX(sent_at))::int as days_since_used,
        json_agg(DISTINCT campaign_name ORDER BY campaign_name) as campaign_names
      FROM klaviyo_campaign_links
      GROUP BY link_url, link_type
      ORDER BY COUNT(*) DESC, MAX(sent_at) DESC
    `;

  try {
    const result = await pool.query(linkStatsSql);
    res.json({ links: result.rows });
  } catch (error) {
    console.error('Error fetching campaign links:', error);
    res.status(500).json({ error: 'Failed to fetch campaign links' });
  }
});
|
||||
|
||||
// Export the newsletter router (mounted under /api/newsletter per the
// route comments above).
module.exports = router;
|
||||
197
inventory-server/src/routes/products.js
Executable file → Normal file
197
inventory-server/src/routes/products.js
Executable file → Normal file
@@ -1,11 +1,27 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const multer = require('multer');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { importProductsFromCSV } = require('../utils/csvImporter');
|
||||
const { PurchaseOrderStatus, ReceivingStatus } = require('../types/status-codes');
|
||||
|
||||
// Configure multer for file uploads
|
||||
const upload = multer({ dest: 'uploads/' });
|
||||
// Configure multer for file uploads without silent fallbacks
|
||||
const configuredUploadsDir = process.env.UPLOADS_DIR;
|
||||
const uploadsDir = configuredUploadsDir
|
||||
? (path.isAbsolute(configuredUploadsDir)
|
||||
? configuredUploadsDir
|
||||
: path.resolve(__dirname, '../../', configuredUploadsDir))
|
||||
: path.resolve(__dirname, '../../uploads');
|
||||
|
||||
try {
|
||||
fs.mkdirSync(uploadsDir, { recursive: true });
|
||||
} catch (error) {
|
||||
console.error(`Failed to initialize uploads directory at ${uploadsDir}:`, error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
const upload = multer({ dest: uploadsDir });
|
||||
|
||||
// Get unique brands
|
||||
router.get('/brands', async (req, res) => {
|
||||
@@ -129,7 +145,6 @@ router.get('/', async (req, res) => {
|
||||
stock: 'p.stock_quantity',
|
||||
price: 'p.price',
|
||||
costPrice: 'p.cost_price',
|
||||
landingCost: 'p.landing_cost_price',
|
||||
dailySalesAvg: 'pm.daily_sales_avg',
|
||||
weeklySalesAvg: 'pm.weekly_sales_avg',
|
||||
monthlySalesAvg: 'pm.monthly_sales_avg',
|
||||
@@ -438,6 +453,47 @@ router.get('/trending', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Quick product search for the product editor (uses local Postgres).
// The query is split on whitespace; every term must match at least one
// searchable field (AND across terms, OR across fields). Results are ranked
// by which field the whole query matched, then by lifetime sales.
router.get('/search', async (req, res) => {
  const pool = req.app.locals.pool;
  const q = req.query.q;
  // req.query.q can be an array (?q=a&q=b) or object; anything but a
  // non-empty string is a client error (previously an array caused a 500).
  if (typeof q !== 'string' || !q.trim()) {
    return res.status(400).json({ error: 'Search query is required' });
  }

  // Escape LIKE/ILIKE metacharacters so user input matches literally
  // (e.g. searching "100%" should not act as a wildcard). Postgres' default
  // pattern escape character is backslash.
  const escapeLike = (s) => s.replace(/[\\%_]/g, '\\$&');

  try {
    const terms = q.trim().split(/\s+/).filter(Boolean);
    // Each term must match at least one of: title, sku, barcode, brand, vendor
    const conditions = terms.map((_, i) => {
      const p = i * 5;
      return `(p.title ILIKE $${p + 1} OR p.sku ILIKE $${p + 2} OR p.barcode ILIKE $${p + 3} OR p.brand ILIKE $${p + 4} OR p.vendor ILIKE $${p + 5})`;
    });
    const params = terms.flatMap(t => {
      const like = `%${escapeLike(t)}%`;
      return [like, like, like, like, like];
    });

    // The trailing parameter ($params.length + 1) is the full query string,
    // used only for ranking exact-field matches ahead of partial ones.
    const { rows } = await pool.query(`
      SELECT pid, title, sku, barcode, brand, line, regular_price, image_175
      FROM products p
      WHERE ${conditions.join(' AND ')}
      ORDER BY
        CASE WHEN p.sku ILIKE $${params.length + 1} THEN 0
             WHEN p.barcode ILIKE $${params.length + 1} THEN 1
             WHEN p.title ILIKE $${params.length + 1} THEN 2
             ELSE 3
        END,
        p.total_sold DESC NULLS LAST
      LIMIT 50
    `, [...params, `%${escapeLike(q.trim())}%`]);

    res.json(rows);
  } catch (error) {
    console.error('Error searching products:', error);
    res.status(500).json({ error: 'Search failed' });
  }
});
|
||||
|
||||
// Get a single product
|
||||
router.get('/:id', async (req, res) => {
|
||||
try {
|
||||
@@ -471,27 +527,26 @@ router.get('/:id', async (req, res) => {
|
||||
const { rows: productRows } = await pool.query(`
|
||||
SELECT
|
||||
p.*,
|
||||
pm.daily_sales_avg,
|
||||
pm.weekly_sales_avg,
|
||||
pm.monthly_sales_avg,
|
||||
pm.days_of_inventory,
|
||||
pm.reorder_point,
|
||||
pm.safety_stock,
|
||||
pm.stock_status,
|
||||
pm.avg_sales_per_day_30d AS daily_sales_avg,
|
||||
pm.sales_7d AS weekly_sales_avg,
|
||||
pm.avg_sales_per_month_30d AS monthly_sales_avg,
|
||||
pm.stock_cover_in_days AS days_of_inventory,
|
||||
pm.replenishment_units AS reorder_point,
|
||||
pm.config_safety_stock AS safety_stock,
|
||||
pm.status AS stock_status,
|
||||
pm.abc_class,
|
||||
pm.avg_margin_percent,
|
||||
pm.total_revenue,
|
||||
pm.inventory_value,
|
||||
pm.turnover_rate,
|
||||
pm.gmroi,
|
||||
pm.cost_of_goods_sold,
|
||||
pm.gross_profit,
|
||||
pm.margin_30d AS avg_margin_percent,
|
||||
pm.lifetime_revenue AS total_revenue,
|
||||
pm.current_stock_cost AS inventory_value,
|
||||
pm.stockturn_30d AS turnover_rate,
|
||||
pm.gmroi_30d AS gmroi,
|
||||
pm.cogs_30d AS cost_of_goods_sold,
|
||||
pm.profit_30d AS gross_profit,
|
||||
pm.avg_lead_time_days,
|
||||
pm.current_lead_time,
|
||||
pm.target_lead_time,
|
||||
pm.lead_time_status,
|
||||
pm.reorder_qty,
|
||||
pm.overstocked_amt
|
||||
pm.config_lead_time AS current_lead_time,
|
||||
pm.config_lead_time AS target_lead_time,
|
||||
pm.to_order_units AS reorder_qty,
|
||||
pm.overstocked_units AS overstocked_amt
|
||||
FROM products p
|
||||
LEFT JOIN product_metrics pm ON p.pid = pm.pid
|
||||
WHERE p.pid = $1
|
||||
@@ -565,7 +620,6 @@ router.get('/:id', async (req, res) => {
|
||||
price: parseFloat(productRows[0].price),
|
||||
regular_price: parseFloat(productRows[0].regular_price),
|
||||
cost_price: parseFloat(productRows[0].cost_price),
|
||||
landing_cost_price: parseFloat(productRows[0].landing_cost_price),
|
||||
stock_quantity: parseInt(productRows[0].stock_quantity),
|
||||
moq: parseInt(productRows[0].moq),
|
||||
uom: parseInt(productRows[0].uom),
|
||||
@@ -675,32 +729,33 @@ router.get('/:id/time-series', async (req, res) => {
|
||||
LIMIT 10
|
||||
`, [id]);
|
||||
|
||||
// Get recent purchase orders with detailed status
|
||||
// Get recent purchase orders with received quantities from the receivings table
|
||||
const { rows: recentPurchases } = await pool.query(`
|
||||
SELECT
|
||||
TO_CHAR(date, 'YYYY-MM-DD') as date,
|
||||
TO_CHAR(expected_date, 'YYYY-MM-DD') as expected_date,
|
||||
TO_CHAR(received_date, 'YYYY-MM-DD') as received_date,
|
||||
po_id,
|
||||
ordered,
|
||||
received,
|
||||
status,
|
||||
receiving_status,
|
||||
cost_price,
|
||||
notes,
|
||||
CASE
|
||||
WHEN received_date IS NOT NULL THEN
|
||||
(received_date - date)
|
||||
WHEN expected_date < CURRENT_DATE AND status < $2 THEN
|
||||
(CURRENT_DATE - expected_date)
|
||||
ELSE NULL
|
||||
SELECT
|
||||
TO_CHAR(po.date, 'YYYY-MM-DD') as date,
|
||||
TO_CHAR(po.expected_date, 'YYYY-MM-DD') as expected_date,
|
||||
TO_CHAR(MAX(r.received_date), 'YYYY-MM-DD') as received_date,
|
||||
po.po_id,
|
||||
po.ordered,
|
||||
COALESCE(SUM(r.qty_each), 0)::integer as received,
|
||||
po.status,
|
||||
po.po_cost_price as cost_price,
|
||||
po.notes,
|
||||
CASE
|
||||
WHEN MAX(r.received_date) IS NOT NULL THEN
|
||||
EXTRACT(DAY FROM MAX(r.received_date) - po.date)::integer
|
||||
WHEN po.expected_date < CURRENT_DATE AND po.status NOT IN ('done', 'canceled') THEN
|
||||
(CURRENT_DATE - po.expected_date)
|
||||
ELSE NULL
|
||||
END as lead_time_days
|
||||
FROM purchase_orders
|
||||
WHERE pid = $1
|
||||
AND status != $3
|
||||
ORDER BY date DESC
|
||||
FROM purchase_orders po
|
||||
LEFT JOIN receivings r ON r.receiving_id = po.po_id AND r.pid = po.pid AND r.status != 'canceled'
|
||||
WHERE po.pid = $1
|
||||
AND po.status != 'canceled'
|
||||
GROUP BY po.id, po.po_id, po.date, po.expected_date, po.ordered, po.status, po.po_cost_price, po.notes
|
||||
ORDER BY po.date DESC
|
||||
LIMIT 10
|
||||
`, [id, PurchaseOrderStatus.ReceivingStarted, PurchaseOrderStatus.Canceled]);
|
||||
`, [id]);
|
||||
|
||||
res.json({
|
||||
monthly_sales: formattedMonthlySales,
|
||||
@@ -716,8 +771,7 @@ router.get('/:id/time-series', async (req, res) => {
|
||||
...po,
|
||||
ordered: parseInt(po.ordered),
|
||||
received: parseInt(po.received),
|
||||
status: parseInt(po.status),
|
||||
receiving_status: parseInt(po.receiving_status),
|
||||
status: po.status, // Text-based status (e.g., 'done', 'ordered', 'receiving_started')
|
||||
cost_price: parseFloat(po.cost_price),
|
||||
lead_time_days: po.lead_time_days ? parseInt(po.lead_time_days) : null
|
||||
}))
|
||||
@@ -728,4 +782,49 @@ router.get('/:id/time-series', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
// GET /products/:id/forecast
// Returns the 90-day daily forecast for a single product from product_forecasts
router.get('/:id/forecast', async (req, res) => {
  const { id } = req.params;
  try {
    const pool = req.app.locals.pool;

    const { rows } = await pool.query(`
      SELECT
        forecast_date AS date,
        forecast_units AS units,
        forecast_revenue AS revenue,
        lifecycle_phase AS phase,
        forecast_method AS method,
        confidence_lower,
        confidence_upper
      FROM product_forecasts
      WHERE pid = $1
      ORDER BY forecast_date
    `, [id]);

    // No forecast rows: respond with an empty payload rather than a 404
    if (!rows.length) {
      return res.json({ forecast: [], phase: null, method: null });
    }

    // parseFloat(...) || 0 coerces NULL/NaN values to 0
    const toNumber = (value) => parseFloat(value) || 0;
    // phase/method are reported from the first row
    const { phase, method } = rows[0];

    const forecast = rows.map((row) => {
      const day = row.date instanceof Date ? row.date.toISOString().split('T')[0] : row.date;
      return {
        date: day,
        units: toNumber(row.units),
        revenue: toNumber(row.revenue),
        confidenceLower: toNumber(row.confidence_lower),
        confidenceUpper: toNumber(row.confidence_upper),
      };
    });

    res.json({ phase, method, forecast });
  } catch (error) {
    console.error('Error fetching product forecast:', error);
    res.status(500).json({ error: 'Failed to fetch product forecast' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
|
||||
@@ -1185,4 +1185,96 @@ router.get('/delivery-metrics', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
// PO Pipeline — expected arrivals timeline + overdue summary
// GET /pipeline — responds with:
//   arrivals: weekly buckets of expected PO arrivals (count, value, vendor count)
//   overdue:  count/value of open POs past their expected date
//   summary:  open PO count, vendor count, and FIFO-accurate on-order value
router.get('/pipeline', async (req, res) => {
  try {
    const pool = req.app.locals.pool;

    // Stale PO filter (reused across queries). A PO counts as "stale" when it
    // is still in an open status, is 90+ days past its expected date, AND a
    // newer open PO for the same product exists — i.e. it was likely
    // superseded/abandoned and should not inflate the pipeline numbers.
    const staleFilter = `
      WITH stale AS (
        SELECT po_id, pid
        FROM purchase_orders po
        WHERE po.status IN ('created', 'ordered', 'preordered', 'electronically_sent',
          'electronically_ready_send', 'receiving_started')
        AND po.expected_date IS NOT NULL
        AND po.expected_date < CURRENT_DATE - INTERVAL '90 days'
        AND EXISTS (
          SELECT 1 FROM purchase_orders newer
          WHERE newer.pid = po.pid
          AND newer.status NOT IN ('canceled', 'done')
          AND COALESCE(newer.date_ordered, newer.date_created)
            > COALESCE(po.date_ordered, po.date_created)
        )
      )`;

    // Expected arrivals by week (excludes stale POs)
    const { rows: arrivals } = await pool.query(`
      ${staleFilter}
      SELECT
        DATE_TRUNC('week', po.expected_date)::date AS week,
        COUNT(DISTINCT po.po_id) AS po_count,
        ROUND(SUM(po.po_cost_price * po.ordered)::numeric, 0) AS expected_value,
        COUNT(DISTINCT po.vendor) AS vendor_count
      FROM purchase_orders po
      WHERE po.status IN ('ordered', 'electronically_sent')
        AND po.expected_date IS NOT NULL
        AND NOT EXISTS (SELECT 1 FROM stale s WHERE s.po_id = po.po_id AND s.pid = po.pid)
      GROUP BY 1
      ORDER BY 1
    `);

    // Overdue POs (excludes stale). Aggregate query always yields one row.
    const { rows: [overdue] } = await pool.query(`
      ${staleFilter}
      SELECT
        COUNT(DISTINCT po.po_id) AS po_count,
        ROUND(COALESCE(SUM(po.po_cost_price * po.ordered), 0)::numeric, 0) AS total_value
      FROM purchase_orders po
      WHERE po.status IN ('ordered', 'electronically_sent')
        AND po.expected_date IS NOT NULL
        AND po.expected_date < CURRENT_DATE
        AND NOT EXISTS (SELECT 1 FROM stale s WHERE s.po_id = po.po_id AND s.pid = po.pid)
    `);

    // Summary: on-order value from product_metrics (FIFO-accurate), PO counts from purchase_orders with staleness filter
    const { rows: [summary] } = await pool.query(`
      ${staleFilter}
      SELECT
        COUNT(DISTINCT po.po_id) AS total_open_pos,
        COUNT(DISTINCT po.vendor) AS vendor_count
      FROM purchase_orders po
      WHERE po.status IN ('ordered', 'electronically_sent')
        AND NOT EXISTS (SELECT 1 FROM stale s WHERE s.po_id = po.po_id AND s.pid = po.pid)
    `);

    // On-order dollar value comes from product_metrics rather than the PO
    // rows above, so it reflects the metrics pipeline's costing.
    const { rows: [onOrderTotal] } = await pool.query(`
      SELECT ROUND(COALESCE(SUM(on_order_cost), 0)::numeric, 0) AS total_on_order_value
      FROM product_metrics
      WHERE is_visible = true
    `);

    // Coerce pg's string-typed numerics to numbers; default missing values to 0.
    res.json({
      arrivals: arrivals.map(r => ({
        week: r.week,
        poCount: Number(r.po_count) || 0,
        expectedValue: Number(r.expected_value) || 0,
        vendorCount: Number(r.vendor_count) || 0,
      })),
      overdue: {
        count: Number(overdue.po_count) || 0,
        value: Number(overdue.total_value) || 0,
      },
      summary: {
        totalOpenPOs: Number(summary.total_open_pos) || 0,
        totalOnOrderValue: Number(onOrderTotal.total_on_order_value) || 0,
        vendorCount: Number(summary.vendor_count) || 0,
      },
    });
  } catch (error) {
    console.error('Error fetching PO pipeline:', error);
    res.status(500).json({ error: 'Failed to fetch PO pipeline' });
  }
});
|
||||
|
||||
module.exports = router;
|
||||
@@ -194,7 +194,7 @@ router.post('/upload', upload.single('image'), async (req, res) => {
|
||||
}
|
||||
|
||||
// Create URL for the uploaded file
|
||||
const baseUrl = 'https://inventory.acot.site';
|
||||
const baseUrl = 'https://tools.acherryontop.com';
|
||||
const imageUrl = `${baseUrl}/uploads/reusable/${req.file.filename}`;
|
||||
|
||||
const pool = req.app.locals.pool;
|
||||
|
||||
27
inventory-server/src/server.js
Executable file → Normal file
27
inventory-server/src/server.js
Executable file → Normal file
@@ -15,12 +15,16 @@ const configRouter = require('./routes/config');
|
||||
const metricsRouter = require('./routes/metrics');
|
||||
const importRouter = require('./routes/import');
|
||||
const aiValidationRouter = require('./routes/ai-validation');
|
||||
const aiRouter = require('./routes/ai');
|
||||
const templatesRouter = require('./routes/templates');
|
||||
const aiPromptsRouter = require('./routes/ai-prompts');
|
||||
const reusableImagesRouter = require('./routes/reusable-images');
|
||||
const categoriesAggregateRouter = require('./routes/categoriesAggregate');
|
||||
const vendorsAggregateRouter = require('./routes/vendorsAggregate');
|
||||
const brandsAggregateRouter = require('./routes/brandsAggregate');
|
||||
const htsLookupRouter = require('./routes/hts-lookup');
|
||||
const importSessionsRouter = require('./routes/import-sessions');
|
||||
const newsletterRouter = require('./routes/newsletter');
|
||||
|
||||
// Get the absolute path to the .env file
|
||||
const envPath = '/var/www/html/inventory/.env';
|
||||
@@ -44,8 +48,21 @@ try {
|
||||
console.error('Error loading .env file:', error);
|
||||
}
|
||||
|
||||
// Ensure required directories exist
|
||||
['logs', 'uploads'].forEach(dir => {
|
||||
// Resolve important directories relative to the project root
|
||||
const serverRoot = path.resolve(__dirname, '..');
|
||||
const configuredUploadsDir = process.env.UPLOADS_DIR;
|
||||
const uploadsDir = configuredUploadsDir
|
||||
? (path.isAbsolute(configuredUploadsDir)
|
||||
? configuredUploadsDir
|
||||
: path.resolve(serverRoot, configuredUploadsDir))
|
||||
: path.resolve(serverRoot, 'uploads');
|
||||
|
||||
// Persist the resolved uploads directory so downstream modules share the same path
|
||||
process.env.UPLOADS_DIR = uploadsDir;
|
||||
|
||||
const requiredDirs = [path.resolve(serverRoot, 'logs'), uploadsDir];
|
||||
|
||||
requiredDirs.forEach(dir => {
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
@@ -110,9 +127,13 @@ async function startServer() {
|
||||
app.use('/api/brands-aggregate', brandsAggregateRouter);
|
||||
app.use('/api/import', importRouter);
|
||||
app.use('/api/ai-validation', aiValidationRouter);
|
||||
app.use('/api/ai', aiRouter);
|
||||
app.use('/api/templates', templatesRouter);
|
||||
app.use('/api/ai-prompts', aiPromptsRouter);
|
||||
app.use('/api/reusable-images', reusableImagesRouter);
|
||||
app.use('/api/hts-lookup', htsLookupRouter);
|
||||
app.use('/api/import-sessions', importSessionsRouter);
|
||||
app.use('/api/newsletter', newsletterRouter);
|
||||
|
||||
// Basic health check route
|
||||
app.get('/health', (req, res) => {
|
||||
@@ -209,4 +230,4 @@ const setupSSE = (req, res) => {
|
||||
};
|
||||
|
||||
// Start the server
|
||||
startServer();
|
||||
startServer();
|
||||
|
||||
82
inventory-server/src/services/ai/embeddings/similarity.js
Normal file
82
inventory-server/src/services/ai/embeddings/similarity.js
Normal file
@@ -0,0 +1,82 @@
|
||||
/**
|
||||
* Vector similarity utilities
|
||||
*/
|
||||
|
||||
/**
|
||||
* Compute cosine similarity between two vectors
|
||||
* @param {number[]} a
|
||||
* @param {number[]} b
|
||||
* @returns {number} Similarity score between -1 and 1
|
||||
*/
|
||||
function cosineSimilarity(a, b) {
|
||||
if (!a || !b || a.length !== b.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let dotProduct = 0;
|
||||
let normA = 0;
|
||||
let normB = 0;
|
||||
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
dotProduct += a[i] * b[i];
|
||||
normA += a[i] * a[i];
|
||||
normB += b[i] * b[i];
|
||||
}
|
||||
|
||||
const denominator = Math.sqrt(normA) * Math.sqrt(normB);
|
||||
if (denominator === 0) return 0;
|
||||
|
||||
return dotProduct / denominator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find top K most similar items from a collection
|
||||
* @param {number[]} queryEmbedding - The embedding to search for
|
||||
* @param {Array<{id: any, embedding: number[]}>} items - Items with embeddings
|
||||
* @param {number} topK - Number of results to return
|
||||
* @returns {Array<{id: any, similarity: number}>}
|
||||
*/
|
||||
function findTopMatches(queryEmbedding, items, topK = 10) {
|
||||
if (!queryEmbedding || !items || items.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const scored = items.map(item => ({
|
||||
id: item.id,
|
||||
similarity: cosineSimilarity(queryEmbedding, item.embedding)
|
||||
}));
|
||||
|
||||
scored.sort((a, b) => b.similarity - a.similarity);
|
||||
|
||||
return scored.slice(0, topK);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find matches above a similarity threshold
|
||||
* @param {number[]} queryEmbedding
|
||||
* @param {Array<{id: any, embedding: number[]}>} items
|
||||
* @param {number} threshold - Minimum similarity (0-1)
|
||||
* @returns {Array<{id: any, similarity: number}>}
|
||||
*/
|
||||
function findMatchesAboveThreshold(queryEmbedding, items, threshold = 0.5) {
|
||||
if (!queryEmbedding || !items || items.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const scored = items
|
||||
.map(item => ({
|
||||
id: item.id,
|
||||
similarity: cosineSimilarity(queryEmbedding, item.embedding)
|
||||
}))
|
||||
.filter(item => item.similarity >= threshold);
|
||||
|
||||
scored.sort((a, b) => b.similarity - a.similarity);
|
||||
|
||||
return scored;
|
||||
}
|
||||
|
||||
// Public API: similarity primitives shared by the embedding services.
module.exports = {
  cosineSimilarity,
  findTopMatches,
  findMatchesAboveThreshold
};
|
||||
@@ -0,0 +1,323 @@
|
||||
/**
 * Taxonomy Embedding Service
 *
 * Generates and caches embeddings for categories, themes, and colors.
 * Excludes "Black Friday", "Gifts", "Deals" categories and their children.
 *
 * All taxonomy data and embeddings are held in process memory; initialize()
 * must complete before the findSimilar* methods return anything.
 */

const { findTopMatches } = require('./similarity');

// Categories to exclude (and all their children). Compared case-insensitively
// against top-level (type 10) category names.
const EXCLUDED_CATEGORY_NAMES = ['black friday', 'gifts', 'deals'];

class TaxonomyEmbeddings {
  /**
   * @param {Object} deps
   * @param {Object} deps.provider - Embedding provider exposing embedBatchChunked()
   * @param {Object} [deps.logger] - Logger with info/error; defaults to console
   */
  constructor({ provider, logger }) {
    this.provider = provider;
    this.logger = logger || console;

    // Cached taxonomy with embeddings
    this.categories = [];
    this.themes = [];
    this.colors = [];

    // Raw data without embeddings (for lookup)
    this.categoryMap = new Map();
    this.themeMap = new Map();
    this.colorMap = new Map();

    this.initialized = false;
    this.initializing = false;
  }

  /**
   * Initialize embeddings - fetch taxonomy and generate embeddings.
   * Idempotent: repeat calls return cached counts. Concurrent callers poll
   * every 100ms until the first caller finishes.
   *
   * @param {Object} connection - mysql2-style connection (query() resolves to
   *   [rows]) for taxonomy data — TODO confirm against caller
   * @returns {Promise<{categories: number, themes: number, colors: number}>}
   * @throws Rethrows any fetch/embedding failure after logging it
   */
  async initialize(connection) {
    if (this.initialized) {
      return { categories: this.categories.length, themes: this.themes.length, colors: this.colors.length };
    }

    if (this.initializing) {
      // Wait for existing initialization
      while (this.initializing) {
        await new Promise(resolve => setTimeout(resolve, 100));
      }
      return { categories: this.categories.length, themes: this.themes.length, colors: this.colors.length };
    }

    this.initializing = true;

    try {
      this.logger.info('[TaxonomyEmbeddings] Starting initialization...');

      // Fetch raw taxonomy data
      const [categories, themes, colors] = await Promise.all([
        this._fetchCategories(connection),
        this._fetchThemes(connection),
        this._fetchColors(connection)
      ]);

      this.logger.info(`[TaxonomyEmbeddings] Fetched ${categories.length} categories, ${themes.length} themes, ${colors.length} colors`);

      // Generate embeddings in parallel
      const [catEmbeddings, themeEmbeddings, colorEmbeddings] = await Promise.all([
        this._generateEmbeddings(categories, 'categories'),
        this._generateEmbeddings(themes, 'themes'),
        this._generateEmbeddings(colors, 'colors')
      ]);

      // Store with embeddings
      this.categories = catEmbeddings;
      this.themes = themeEmbeddings;
      this.colors = colorEmbeddings;

      // Build lookup maps
      this.categoryMap = new Map(this.categories.map(c => [c.id, c]));
      this.themeMap = new Map(this.themes.map(t => [t.id, t]));
      this.colorMap = new Map(this.colors.map(c => [c.id, c]));

      this.initialized = true;
      this.logger.info('[TaxonomyEmbeddings] Initialization complete');

      return {
        categories: this.categories.length,
        themes: this.themes.length,
        colors: this.colors.length
      };
    } catch (error) {
      this.logger.error('[TaxonomyEmbeddings] Initialization failed:', error);
      throw error;
    } finally {
      this.initializing = false;
    }
  }

  /**
   * Find similar categories for a product embedding.
   * Returns [] until initialize() has completed.
   * @param {number[]} productEmbedding
   * @param {number} [topK=10]
   * @returns {Array<{id: any, name: string, fullPath: string, similarity: number}>}
   */
  findSimilarCategories(productEmbedding, topK = 10) {
    if (!this.initialized || !productEmbedding) {
      return [];
    }

    const matches = findTopMatches(productEmbedding, this.categories, topK);

    return matches.map(match => {
      const cat = this.categoryMap.get(match.id);
      return {
        id: match.id,
        name: cat?.name || '',
        fullPath: cat?.fullPath || '',
        similarity: match.similarity
      };
    });
  }

  /**
   * Find similar themes for a product embedding.
   * Returns [] until initialize() has completed.
   * @param {number[]} productEmbedding
   * @param {number} [topK=5]
   * @returns {Array<{id: any, name: string, fullPath: string, similarity: number}>}
   */
  findSimilarThemes(productEmbedding, topK = 5) {
    if (!this.initialized || !productEmbedding) {
      return [];
    }

    const matches = findTopMatches(productEmbedding, this.themes, topK);

    return matches.map(match => {
      const theme = this.themeMap.get(match.id);
      return {
        id: match.id,
        name: theme?.name || '',
        fullPath: theme?.fullPath || '',
        similarity: match.similarity
      };
    });
  }

  /**
   * Find similar colors for a product embedding.
   * Returns [] until initialize() has completed.
   * @param {number[]} productEmbedding
   * @param {number} [topK=5]
   * @returns {Array<{id: any, name: string, similarity: number}>}
   */
  findSimilarColors(productEmbedding, topK = 5) {
    if (!this.initialized || !productEmbedding) {
      return [];
    }

    const matches = findTopMatches(productEmbedding, this.colors, topK);

    return matches.map(match => {
      const color = this.colorMap.get(match.id);
      return {
        id: match.id,
        name: color?.name || '',
        similarity: match.similarity
      };
    });
  }

  /**
   * Get all taxonomy data (without embeddings) for frontend
   */
  getTaxonomyData() {
    return {
      categories: this.categories.map(({ id, name, fullPath, parentId }) => ({ id, name, fullPath, parentId })),
      themes: this.themes.map(({ id, name, fullPath, parentId }) => ({ id, name, fullPath, parentId })),
      colors: this.colors.map(({ id, name }) => ({ id, name }))
    };
  }

  /**
   * Check if service is ready
   */
  isReady() {
    return this.initialized;
  }

  // ============================================================================
  // Private Methods
  // ============================================================================

  // Fetch hierarchical categories, drop the excluded subtrees, and attach a
  // "Parent > Child" fullPath plus the text used for embedding generation.
  async _fetchCategories(connection) {
    // Fetch hierarchical categories (types 10-13)
    const [rows] = await connection.query(`
      SELECT cat_id, name, master_cat_id, type
      FROM product_categories
      WHERE type IN (10, 11, 12, 13)
      ORDER BY type, name
    `);

    // Build lookup for hierarchy
    const byId = new Map(rows.map(r => [r.cat_id, r]));

    // Find IDs of excluded top-level categories and all their descendants
    const excludedIds = new Set();

    // First pass: find excluded top-level categories
    for (const row of rows) {
      if (row.type === 10 && EXCLUDED_CATEGORY_NAMES.includes(row.name.toLowerCase())) {
        excludedIds.add(row.cat_id);
      }
    }

    // Multiple passes to find all descendants (transitive closure over
    // master_cat_id; loops until no new descendants are discovered)
    let foundNew = true;
    while (foundNew) {
      foundNew = false;
      for (const row of rows) {
        if (!excludedIds.has(row.cat_id) && excludedIds.has(row.master_cat_id)) {
          excludedIds.add(row.cat_id);
          foundNew = true;
        }
      }
    }

    this.logger.info(`[TaxonomyEmbeddings] Excluding ${excludedIds.size} categories (Black Friday, Gifts, Deals and children)`);

    // Build category objects with full paths, excluding filtered ones
    const categories = [];

    for (const row of rows) {
      if (excludedIds.has(row.cat_id)) {
        continue;
      }

      const path = [];
      let current = row;

      // Walk up the tree to build full path
      while (current) {
        path.unshift(current.name);
        current = current.master_cat_id ? byId.get(current.master_cat_id) : null;
      }

      categories.push({
        id: row.cat_id,
        name: row.name,
        parentId: row.master_cat_id,
        type: row.type,
        fullPath: path.join(' > '),
        embeddingText: path.join(' ')
      });
    }

    return categories;
  }

  // Fetch themes and attach fullPath/embeddingText (no exclusion list here).
  async _fetchThemes(connection) {
    // Fetch themes (types 20-21)
    const [rows] = await connection.query(`
      SELECT cat_id, name, master_cat_id, type
      FROM product_categories
      WHERE type IN (20, 21)
      ORDER BY type, name
    `);

    const byId = new Map(rows.map(r => [r.cat_id, r]));
    const themes = [];

    for (const row of rows) {
      const path = [];
      let current = row;

      while (current) {
        path.unshift(current.name);
        current = current.master_cat_id ? byId.get(current.master_cat_id) : null;
      }

      themes.push({
        id: row.cat_id,
        name: row.name,
        parentId: row.master_cat_id,
        type: row.type,
        fullPath: path.join(' > '),
        embeddingText: path.join(' ')
      });
    }

    return themes;
  }

  // Fetch the flat color list; the color name doubles as the embedding text.
  async _fetchColors(connection) {
    const [rows] = await connection.query(`
      SELECT color, name, hex_color
      FROM product_color_list
      ORDER BY \`order\`
    `);

    return rows.map(row => ({
      id: row.color,
      name: row.name,
      hexColor: row.hex_color,
      embeddingText: row.name
    }));
  }

  // Attach an `embedding` vector to each item by streaming batches through
  // the provider. `label` is only used for log messages.
  async _generateEmbeddings(items, label) {
    if (items.length === 0) {
      return items;
    }

    const startTime = Date.now();
    const texts = items.map(item => item.embeddingText);
    const results = [...items];

    // Process in batches; chunk.startIndex maps each batch back to its
    // position in the original items array
    let batchNum = 0;
    for await (const chunk of this.provider.embedBatchChunked(texts, { batchSize: 100 })) {
      batchNum++;
      for (let i = 0; i < chunk.embeddings.length; i++) {
        const globalIndex = chunk.startIndex + i;
        results[globalIndex] = {
          ...results[globalIndex],
          embedding: chunk.embeddings[i]
        };
      }
    }

    const elapsed = Date.now() - startTime;
    this.logger.info(`[TaxonomyEmbeddings] Generated ${items.length} ${label} embeddings in ${elapsed}ms`);

    return results;
  }
}

module.exports = { TaxonomyEmbeddings };
|
||||
386
inventory-server/src/services/ai/index.js
Normal file
386
inventory-server/src/services/ai/index.js
Normal file
@@ -0,0 +1,386 @@
|
||||
/**
|
||||
* AI Service
|
||||
*
|
||||
* Main entry point for AI functionality including:
|
||||
* - Embeddings for taxonomy suggestions (OpenAI)
|
||||
* - Chat completions for validation tasks (Groq)
|
||||
* - Task registry for AI operations
|
||||
*/
|
||||
|
||||
const { OpenAIProvider } = require('./providers/openaiProvider');
|
||||
const { GroqProvider, MODELS: GROQ_MODELS } = require('./providers/groqProvider');
|
||||
const { TaxonomyEmbeddings } = require('./embeddings/taxonomyEmbeddings');
|
||||
const { cosineSimilarity, findTopMatches } = require('./embeddings/similarity');
|
||||
const { getRegistry, TASK_IDS, registerAllTasks } = require('./tasks');
|
||||
|
||||
let initialized = false;
|
||||
let initializing = false;
|
||||
let openaiProvider = null;
|
||||
let groqProvider = null;
|
||||
let taxonomyEmbeddings = null;
|
||||
let logger = console;
|
||||
|
||||
// Store pool reference for task access
|
||||
let appPool = null;
|
||||
|
||||
/**
|
||||
* Initialize the AI service
|
||||
* @param {Object} options
|
||||
* @param {string} options.openaiApiKey - OpenAI API key (for embeddings)
|
||||
* @param {string} [options.groqApiKey] - Groq API key (for chat completions)
|
||||
* @param {Object} options.mysqlConnection - MySQL connection for taxonomy data
|
||||
* @param {Object} [options.pool] - PostgreSQL pool for prompt loading
|
||||
* @param {Object} [options.logger] - Logger instance
|
||||
*/
|
||||
async function initialize({ openaiApiKey, groqApiKey, mysqlConnection, pool, logger: customLogger }) {
|
||||
if (initialized) {
|
||||
return { success: true, message: 'Already initialized' };
|
||||
}
|
||||
|
||||
if (initializing) {
|
||||
// Wait for existing initialization
|
||||
while (initializing) {
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
}
|
||||
return { success: initialized, message: initialized ? 'Initialized' : 'Initialization failed' };
|
||||
}
|
||||
|
||||
initializing = true;
|
||||
|
||||
try {
|
||||
if (customLogger) {
|
||||
logger = customLogger;
|
||||
}
|
||||
|
||||
if (!openaiApiKey) {
|
||||
throw new Error('OpenAI API key is required');
|
||||
}
|
||||
|
||||
logger.info('[AI] Initializing AI service...');
|
||||
|
||||
// Store pool reference for tasks
|
||||
if (pool) {
|
||||
appPool = pool;
|
||||
}
|
||||
|
||||
// Create OpenAI provider (for embeddings)
|
||||
openaiProvider = new OpenAIProvider({ apiKey: openaiApiKey });
|
||||
|
||||
// Create Groq provider (for chat completions) if API key provided
|
||||
if (groqApiKey) {
|
||||
groqProvider = new GroqProvider({ apiKey: groqApiKey });
|
||||
logger.info('[AI] Groq provider initialized for chat completions');
|
||||
} else {
|
||||
logger.warn('[AI] No Groq API key provided - chat completion tasks will not be available');
|
||||
}
|
||||
|
||||
// Create and initialize taxonomy embeddings
|
||||
taxonomyEmbeddings = new TaxonomyEmbeddings({
|
||||
provider: openaiProvider,
|
||||
logger
|
||||
});
|
||||
|
||||
const stats = await taxonomyEmbeddings.initialize(mysqlConnection);
|
||||
|
||||
// Register validation tasks if Groq is available
|
||||
if (groqProvider) {
|
||||
registerValidationTasks();
|
||||
}
|
||||
|
||||
initialized = true;
|
||||
logger.info('[AI] AI service initialized', {
|
||||
...stats,
|
||||
groqEnabled: !!groqProvider,
|
||||
tasksRegistered: getRegistry().list()
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: 'Initialized',
|
||||
stats,
|
||||
groqEnabled: !!groqProvider
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('[AI] Initialization failed:', error);
|
||||
return { success: false, message: error.message };
|
||||
} finally {
|
||||
initializing = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Register validation tasks with the task registry
 * Called during initialization only when Groq is available, because these
 * tasks depend on chat completions to run.
 */
function registerValidationTasks() {
  registerAllTasks(logger);
  logger.info('[AI] Validation tasks registered');
}
|
||||
|
||||
/**
 * Check if the service is fully ready (initialized and taxonomy embeddings usable).
 *
 * Coerces to a real boolean: the previous `initialized && taxonomyEmbeddings?.isReady()`
 * could evaluate to `undefined` (e.g. before init completes), and an `undefined`
 * `ready` field vanishes when the status object is JSON-serialized. Truthiness
 * is unchanged for every caller.
 *
 * @returns {boolean}
 */
function isReady() {
  return Boolean(initialized && taxonomyEmbeddings?.isReady());
}
|
||||
|
||||
/**
 * Build weighted product text for embedding.
 *
 * The product name is repeated three times so it dominates the embedding
 * signal, and long descriptions are cut at 500 characters so verbose
 * marketing copy cannot drown out the product itself.
 *
 * @param {Object} product - Product with name, description, company, line
 * @returns {string} - Combined text for embedding ('' when nothing usable)
 */
function buildProductText(product) {
  const name = product.name?.trim();
  const description = product.description?.trim();
  const company = (product.company_name || product.company)?.trim();
  const line = (product.line_name || product.line)?.trim();

  const pieces = [];

  // Triple-weight the name - it carries the strongest signal
  if (name) {
    pieces.push(name, name, name);
  }

  // Company and line provide context
  if (company) {
    pieces.push(company);
  }
  if (line) {
    pieces.push(line);
  }

  // Cap the description so it cannot overwhelm the rest
  if (description) {
    pieces.push(
      description.length > 500 ? `${description.substring(0, 500)}...` : description
    );
  }

  return pieces.join(' ').trim();
}
|
||||
|
||||
/**
 * Generate an embedding vector for a single product.
 *
 * @param {Object} product - Product with name, description, company, line
 * @returns {Promise<{embedding: number[]|null, latencyMs: number}>} embedding is
 *   null when the product yields no usable text.
 * @throws {Error} If the service has not been initialized.
 */
async function getProductEmbedding(product) {
  if (!initialized || !openaiProvider) {
    throw new Error('AI service not initialized');
  }

  const text = buildProductText(product);
  if (!text) {
    // Nothing to embed - skip the API call entirely
    return { embedding: null, latencyMs: 0 };
  }

  const { embeddings, latencyMs } = await openaiProvider.embed(text);
  return { embedding: embeddings[0], latencyMs };
}
|
||||
|
||||
/**
 * Generate embeddings for multiple products in a single batch call.
 * Products that produce no usable text are skipped; results carry the
 * original array index so callers can line them back up.
 *
 * @param {Object[]} products - Array of products
 * @returns {Promise<{embeddings: Array<{index: number, embedding: number[]}>, latencyMs: number}>}
 * @throws {Error} If the service has not been initialized.
 */
async function getProductEmbeddings(products) {
  if (!initialized || !openaiProvider) {
    throw new Error('AI service not initialized');
  }

  // Pair each text with its source index, dropping empty texts
  const indexed = products
    .map((product, index) => ({ text: buildProductText(product), index }))
    .filter(entry => entry.text);

  if (indexed.length === 0) {
    return { embeddings: [], latencyMs: 0 };
  }

  const result = await openaiProvider.embed(indexed.map(entry => entry.text));

  // Re-attach each embedding to the original product position
  const embeddings = indexed.map((entry, position) => ({
    index: entry.index,
    embedding: result.embeddings[position]
  }));

  return { embeddings, latencyMs: result.latencyMs };
}
|
||||
|
||||
/**
 * Find the taxonomy entries most similar to a product embedding.
 *
 * @param {number[]} productEmbedding
 * @param {Object} options - Optional limits: topCategories (10), topThemes (5), topColors (5)
 * @returns {{categories: Array, themes: Array, colors: Array}}
 * @throws {Error} If the service has not been initialized.
 */
function findSimilarTaxonomy(productEmbedding, options = {}) {
  if (!initialized || !taxonomyEmbeddings) {
    throw new Error('AI service not initialized');
  }

  // ?? keeps the null-or-undefined fallback the callers rely on
  const categoryLimit = options.topCategories ?? 10;
  const themeLimit = options.topThemes ?? 5;
  const colorLimit = options.topColors ?? 5;

  return {
    categories: taxonomyEmbeddings.findSimilarCategories(productEmbedding, categoryLimit),
    themes: taxonomyEmbeddings.findSimilarThemes(productEmbedding, themeLimit),
    colors: taxonomyEmbeddings.findSimilarColors(productEmbedding, colorLimit)
  };
}
|
||||
|
||||
/**
 * Embed a product and look up similar taxonomy entries in one call.
 *
 * @param {Object} product
 * @param {Object} options - Limits forwarded to findSimilarTaxonomy
 * @returns {Promise<Object>} Suggestions plus latency breakdown
 */
async function getSuggestionsForProduct(product, options = {}) {
  const { embedding, latencyMs: embeddingLatencyMs } = await getProductEmbedding(product);

  // No usable text on the product - return empty suggestions
  if (!embedding) {
    return {
      categories: [],
      themes: [],
      colors: [],
      latencyMs: embeddingLatencyMs
    };
  }

  const searchStartedAt = Date.now();
  const matches = findSimilarTaxonomy(embedding, options);
  const searchLatencyMs = Date.now() - searchStartedAt;

  return {
    ...matches,
    latencyMs: embeddingLatencyMs + searchLatencyMs,
    embeddingLatencyMs,
    searchLatencyMs
  };
}
|
||||
|
||||
/**
 * Return taxonomy reference data (without embedding vectors) for the frontend.
 * @throws {Error} If the service has not been initialized.
 */
function getTaxonomyData() {
  if (!(initialized && taxonomyEmbeddings)) {
    throw new Error('AI service not initialized');
  }
  return taxonomyEmbeddings.getTaxonomyData();
}
|
||||
|
||||
/**
 * Snapshot of service health: initialization state, configured providers,
 * taxonomy sizes, and registered task IDs.
 * @returns {Object}
 */
function getStatus() {
  const registry = getRegistry();

  // Taxonomy counts are only meaningful once embeddings exist
  const taxonomyStats = taxonomyEmbeddings
    ? {
        categories: taxonomyEmbeddings.categories?.length || 0,
        themes: taxonomyEmbeddings.themes?.length || 0,
        colors: taxonomyEmbeddings.colors?.length || 0
      }
    : null;

  return {
    initialized,
    ready: isReady(),
    hasOpenAI: !!openaiProvider,
    hasGroq: !!groqProvider,
    hasTaxonomy: !!taxonomyEmbeddings,
    taxonomyStats,
    tasks: {
      registered: registry.list(),
      count: registry.size()
    }
  };
}
|
||||
|
||||
/**
 * Run an AI task by ID through the task registry.
 *
 * @param {string} taskId - Task identifier from TASK_IDS
 * @param {Object} payload - Task-specific input
 * @returns {Promise<Object>} Task result
 * @throws {Error} If the service is uninitialized or Groq is unavailable.
 */
async function runTask(taskId, payload = {}) {
  if (!initialized) {
    throw new Error('AI service not initialized');
  }
  if (!groqProvider) {
    throw new Error('Groq provider not available - chat completion tasks require GROQ_API_KEY');
  }

  // Merge injected dependencies into the task input; a pool supplied in the
  // payload (e.g. from a route handler) wins over the stored module-level pool.
  const taskInput = {
    ...payload,
    provider: groqProvider,
    pool: payload.pool || appPool,
    logger
  };
  return getRegistry().runTask(taskId, taskInput);
}
|
||||
|
||||
/**
 * Get the Groq provider instance (for direct use if needed)
 * @returns {GroqProvider|null} Null when no Groq API key was supplied at init.
 */
function getGroqProvider() {
  return groqProvider;
}
|
||||
|
||||
/**
 * Get the PostgreSQL pool (for tasks that need DB access)
 * @returns {Object|null} Null when initialize() was called without a pool.
 */
function getPool() {
  return appPool;
}
|
||||
|
||||
/**
 * Report whether chat-completion tasks can run (i.e. a Groq provider exists).
 * @returns {boolean}
 */
function hasChatCompletion() {
  return Boolean(groqProvider);
}
|
||||
|
||||
// Public API of the AI service. Call initialize() before any other function;
// everything else reads the module-level singleton state it sets up.
module.exports = {
  // Initialization
  initialize,
  isReady,
  getStatus,

  // Embeddings (OpenAI)
  getProductEmbedding,
  getProductEmbeddings,
  findSimilarTaxonomy,
  getSuggestionsForProduct,
  getTaxonomyData,

  // Chat completions (Groq)
  runTask,
  hasChatCompletion,
  getGroqProvider,
  getPool,

  // Constants
  TASK_IDS,
  GROQ_MODELS,

  // Re-export utilities
  cosineSimilarity,
  findTopMatches
};
|
||||
176
inventory-server/src/services/ai/prompts/descriptionPrompts.js
Normal file
176
inventory-server/src/services/ai/prompts/descriptionPrompts.js
Normal file
@@ -0,0 +1,176 @@
|
||||
/**
|
||||
* Description Validation Prompts
|
||||
*
|
||||
* Functions for building and parsing description validation prompts.
|
||||
* System and general prompts are loaded from the database.
|
||||
*/
|
||||
|
||||
/**
 * Sanitize an issue string from the AI response.
 * Models occasionally emit malformed escape sequences and trailing artifacts;
 * this strips them so issues render cleanly.
 *
 * @param {string} issue - Raw issue string
 * @returns {string} Cleaned issue string ('' for non-string input)
 */
function sanitizeIssue(issue) {
  if (typeof issue !== 'string' || !issue) return '';

  // Applied in order - each step targets one malformation pattern
  const repairs = [
    [/\\+$/, ''],       // trailing backslashes (incomplete escapes)
    [/\\",?\)?$/, ''],  // malformed escaped quote at end of string
    [/\\\\"/g, '"'],    // double-escaped quotes
    [/\\"/g, '"'],      // unnecessary single-escaped quotes
    [/[,\s]+$/, '']     // trailing punctuation artifacts
  ];

  return repairs
    .reduce((text, [pattern, replacement]) => text.replace(pattern, replacement), issue)
    .trim();
}
|
||||
|
||||
/**
 * Build the user prompt for description validation.
 * Stitches together database-loaded guidelines, company rules, the product
 * under review, and the required JSON response format.
 *
 * @param {Object} product - Product data
 * @param {string} product.name - Product name
 * @param {string} product.description - Current description
 * @param {string} [product.company_name] - Company name
 * @param {string} [product.categories] - Product categories
 * @param {Object} prompts - Prompts loaded from database
 * @param {string} prompts.general - General description guidelines
 * @param {string} [prompts.companySpecific] - Company-specific rules
 * @returns {string} Complete user prompt
 */
function buildDescriptionUserPrompt(product, prompts) {
  const lines = [];

  // General guidelines come first, separated by a blank line
  if (prompts.general) {
    lines.push(prompts.general, '');
  }

  // Then any company-specific rules
  if (prompts.companySpecific) {
    lines.push(
      `COMPANY-SPECIFIC RULES FOR ${product.company_name || 'THIS COMPANY'}:`,
      prompts.companySpecific,
      ''
    );
  }

  // Product under review
  lines.push('PRODUCT TO VALIDATE:');
  lines.push(`NAME: "${product.name || ''}"`);
  lines.push(`COMPANY: ${product.company_name || 'Unknown'}`);
  if (product.categories) {
    lines.push(`CATEGORIES: ${product.categories}`);
  }

  lines.push('', 'CURRENT DESCRIPTION:', `"${product.description || '(empty)'}"`);

  // Response contract the model must follow
  lines.push(
    '',
    'CRITICAL RULES:',
    '- If isValid is false, you MUST provide a suggestion with the improved description',
    '- If there are ANY issues, isValid MUST be false and suggestion MUST contain the corrected text',
    '- Only set isValid to true if there are ZERO issues and the description needs no changes',
    '',
    'RESPOND WITH JSON:',
    JSON.stringify({
      isValid: 'true if perfect, false if ANY changes needed',
      suggestion: 'REQUIRED when isValid is false - the complete improved description',
      issues: ['list each problem found (empty array only if isValid is true)']
    }, null, 2)
  );

  return lines.join('\n');
}
|
||||
|
||||
/**
|
||||
* Parse the AI response for description validation
|
||||
*
|
||||
* @param {Object|null} parsed - Parsed JSON from AI
|
||||
* @param {string} content - Raw response content
|
||||
* @returns {Object}
|
||||
*/
|
||||
function parseDescriptionResponse(parsed, content) {
|
||||
// If we got valid parsed JSON, use it
|
||||
if (parsed && typeof parsed.isValid === 'boolean') {
|
||||
// Sanitize issues - AI sometimes returns malformed escape sequences
|
||||
const rawIssues = Array.isArray(parsed.issues) ? parsed.issues : [];
|
||||
const issues = rawIssues
|
||||
.map(sanitizeIssue)
|
||||
.filter(issue => issue.length > 0);
|
||||
|
||||
const suggestion = parsed.suggestion || null;
|
||||
|
||||
// IMPORTANT: LLMs sometimes return contradictory data (isValid: true with issues).
|
||||
// If there are issues, treat as invalid regardless of what the AI said.
|
||||
// Also if there's a suggestion, the AI thought something needed to change.
|
||||
const isValid = parsed.isValid && issues.length === 0 && !suggestion;
|
||||
|
||||
return { isValid, suggestion, issues };
|
||||
}
|
||||
|
||||
// Handle case where isValid is a string "true"/"false" instead of boolean
|
||||
if (parsed && typeof parsed.isValid === 'string') {
|
||||
const rawIssues = Array.isArray(parsed.issues) ? parsed.issues : [];
|
||||
const issues = rawIssues
|
||||
.map(sanitizeIssue)
|
||||
.filter(issue => issue.length > 0);
|
||||
const suggestion = parsed.suggestion || null;
|
||||
const rawIsValid = parsed.isValid.toLowerCase() !== 'false';
|
||||
|
||||
// Same defensive logic: if there are issues, it's not valid
|
||||
const isValid = rawIsValid && issues.length === 0 && !suggestion;
|
||||
|
||||
return { isValid, suggestion, issues };
|
||||
}
|
||||
|
||||
// Try to extract from content if parsing failed
|
||||
try {
|
||||
// Look for isValid pattern
|
||||
const isValidMatch = content.match(/"isValid"\s*:\s*(true|false)/i);
|
||||
const isValid = isValidMatch ? isValidMatch[1].toLowerCase() === 'true' : true;
|
||||
|
||||
// Look for suggestion (might be multiline)
|
||||
const suggestionMatch = content.match(/"suggestion"\s*:\s*"((?:[^"\\]|\\.)*)"/s);
|
||||
let suggestion = suggestionMatch ? suggestionMatch[1] : null;
|
||||
if (suggestion) {
|
||||
// Unescape common escapes
|
||||
suggestion = suggestion.replace(/\\n/g, '\n').replace(/\\"/g, '"');
|
||||
}
|
||||
|
||||
// Look for issues array
|
||||
const issuesMatch = content.match(/"issues"\s*:\s*\[([\s\S]*?)\]/);
|
||||
let issues = [];
|
||||
if (issuesMatch) {
|
||||
const issuesContent = issuesMatch[1];
|
||||
const issueStrings = issuesContent.match(/"([^"]+)"/g);
|
||||
if (issueStrings) {
|
||||
issues = issueStrings
|
||||
.map(s => sanitizeIssue(s.replace(/"/g, '')))
|
||||
.filter(issue => issue.length > 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Same logic: if there are issues, it's not valid
|
||||
const finalIsValid = isValid && issues.length === 0 && !suggestion;
|
||||
|
||||
return { isValid: finalIsValid, suggestion, issues };
|
||||
} catch {
|
||||
// Default to valid if we can't parse anything
|
||||
return { isValid: true, suggestion: null, issues: [] };
|
||||
}
|
||||
}
|
||||
|
||||
// Prompt construction and response parsing for description validation.
module.exports = {
  buildDescriptionUserPrompt,
  parseDescriptionResponse
};
|
||||
187
inventory-server/src/services/ai/prompts/namePrompts.js
Normal file
187
inventory-server/src/services/ai/prompts/namePrompts.js
Normal file
@@ -0,0 +1,187 @@
|
||||
/**
|
||||
* Name Validation Prompts
|
||||
*
|
||||
* Functions for building and parsing name validation prompts.
|
||||
* System and general prompts are loaded from the database.
|
||||
*/
|
||||
|
||||
/**
 * Sanitize an issue string from the AI response.
 * Strips malformed escape sequences and trailing artifacts the model
 * occasionally emits.
 *
 * @param {string} issue - Raw issue string
 * @returns {string} Cleaned issue string ('' for non-string input)
 */
function sanitizeIssue(issue) {
  if (typeof issue !== 'string' || !issue) return '';

  // Order matters: each step targets one specific malformation pattern
  const repairs = [
    [/\\+$/, ''],       // trailing backslashes (incomplete escapes)
    [/\\",?\)?$/, ''],  // malformed escaped quote at end of string
    [/\\\\"/g, '"'],    // double-escaped quotes
    [/\\"/g, '"'],      // unnecessary single-escaped quotes
    [/[,\s]+$/, '']     // trailing punctuation artifacts
  ];

  return repairs
    .reduce((text, [pattern, replacement]) => text.replace(pattern, replacement), issue)
    .trim();
}
|
||||
|
||||
/**
 * Build the user prompt for name validation.
 * Combines database-loaded conventions, company rules, the product under
 * review (with sibling names for context), and the JSON response format.
 *
 * @param {Object} product - Product data
 * @param {string} product.name - Current product name
 * @param {string} [product.company_name] - Company name
 * @param {string} [product.line_name] - Product line name
 * @param {string} [product.subline_name] - Product subline name
 * @param {string[]} [product.siblingNames] - Names of other products in the same line
 * @param {Object} prompts - Prompts loaded from database
 * @param {string} prompts.general - General naming conventions
 * @param {string} [prompts.companySpecific] - Company-specific rules
 * @returns {string} Complete user prompt
 */
function buildNameUserPrompt(product, prompts) {
  const lines = [];

  // General conventions first, separated by a blank line
  if (prompts.general) {
    lines.push(prompts.general, '');
  }

  // Then any company-specific rules
  if (prompts.companySpecific) {
    lines.push(
      `COMPANY-SPECIFIC RULES FOR ${product.company_name || 'THIS COMPANY'}:`,
      prompts.companySpecific,
      ''
    );
  }

  // Product under review
  lines.push('PRODUCT TO VALIDATE:');
  lines.push(`NAME: "${product.name || ''}"`);
  lines.push(`COMPANY: ${product.company_name || 'Unknown'}`);
  lines.push(`LINE: ${product.line_name || 'None'}`);
  if (product.subline_name) {
    lines.push(`SUBLINE: ${product.subline_name}`);
  }

  // Sibling names give the model context for consistent naming
  const siblings = product.siblingNames;
  if (siblings && siblings.length > 0) {
    lines.push('', `OTHER PRODUCTS IN THIS LINE (${siblings.length + 1} total including this one):`);
    for (const siblingName of siblings) {
      lines.push(`- ${siblingName}`);
    }
  }

  // Response contract the model must follow
  lines.push(
    '',
    'RESPOND WITH JSON:',
    JSON.stringify({
      isValid: 'true/false',
      suggestion: 'corrected name if changes needed, or null if valid',
      issues: ['issue 1', 'issue 2 (empty array if valid)']
    }, null, 2)
  );

  return lines.join('\n');
}
|
||||
|
||||
/**
|
||||
* Parse the AI response for name validation
|
||||
*
|
||||
* @param {Object|null} parsed - Parsed JSON from AI
|
||||
* @param {string} content - Raw response content
|
||||
* @returns {Object}
|
||||
*/
|
||||
function parseNameResponse(parsed, content) {
|
||||
// Debug: Log what we're trying to parse
|
||||
console.log('[parseNameResponse] Input:', {
|
||||
hasParsed: !!parsed,
|
||||
parsedIsValid: parsed?.isValid,
|
||||
parsedType: typeof parsed?.isValid,
|
||||
contentPreview: content?.substring(0, 3000)
|
||||
});
|
||||
|
||||
// If we got valid parsed JSON, use it
|
||||
if (parsed && typeof parsed.isValid === 'boolean') {
|
||||
// Sanitize issues - AI sometimes returns malformed escape sequences
|
||||
const rawIssues = Array.isArray(parsed.issues) ? parsed.issues : [];
|
||||
const issues = rawIssues
|
||||
.map(sanitizeIssue)
|
||||
.filter(issue => issue.length > 0);
|
||||
const suggestion = parsed.suggestion || null;
|
||||
|
||||
// IMPORTANT: LLMs sometimes return contradictory data (isValid: true with issues).
|
||||
// If there are issues, treat as invalid regardless of what the AI said.
|
||||
const isValid = parsed.isValid && issues.length === 0 && !suggestion;
|
||||
|
||||
return { isValid, suggestion, issues };
|
||||
}
|
||||
|
||||
// Handle case where isValid is a string "true"/"false" instead of boolean
|
||||
if (parsed && typeof parsed.isValid === 'string') {
|
||||
const rawIssues = Array.isArray(parsed.issues) ? parsed.issues : [];
|
||||
const issues = rawIssues
|
||||
.map(sanitizeIssue)
|
||||
.filter(issue => issue.length > 0);
|
||||
const suggestion = parsed.suggestion || null;
|
||||
const rawIsValid = parsed.isValid.toLowerCase() !== 'false';
|
||||
|
||||
// Same defensive logic: if there are issues, it's not valid
|
||||
const isValid = rawIsValid && issues.length === 0 && !suggestion;
|
||||
|
||||
console.log('[parseNameResponse] Parsed isValid as string:', parsed.isValid, '→', isValid);
|
||||
return { isValid, suggestion, issues };
|
||||
}
|
||||
|
||||
// Try to extract from content if parsing failed
|
||||
try {
|
||||
// Look for isValid pattern - handle both boolean and quoted string
|
||||
// Matches: "isValid": true, "isValid": false, "isValid": "true", "isValid": "false"
|
||||
const isValidMatch = content.match(/"isValid"\s*:\s*"?(true|false)"?/i);
|
||||
const isValid = isValidMatch ? isValidMatch[1].toLowerCase() === 'true' : true;
|
||||
|
||||
console.log('[parseNameResponse] Regex extraction:', {
|
||||
isValidMatch: isValidMatch?.[0],
|
||||
isValidValue: isValidMatch?.[1],
|
||||
resultIsValid: isValid
|
||||
});
|
||||
|
||||
// Look for suggestion - handle escaped quotes and null
|
||||
const suggestionMatch = content.match(/"suggestion"\s*:\s*(?:"([^"\\]*(?:\\.[^"\\]*)*)"|null)/);
|
||||
const suggestion = suggestionMatch ? (suggestionMatch[1] || null) : null;
|
||||
|
||||
// Look for issues array
|
||||
const issuesMatch = content.match(/"issues"\s*:\s*\[([\s\S]*?)\]/);
|
||||
let issues = [];
|
||||
if (issuesMatch) {
|
||||
const issuesContent = issuesMatch[1];
|
||||
const issueStrings = issuesContent.match(/"([^"]+)"/g);
|
||||
if (issueStrings) {
|
||||
issues = issueStrings
|
||||
.map(s => sanitizeIssue(s.replace(/"/g, '')))
|
||||
.filter(issue => issue.length > 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Same defensive logic: if there are issues, it's not valid
|
||||
const finalIsValid = isValid && issues.length === 0 && !suggestion;
|
||||
|
||||
return { isValid: finalIsValid, suggestion, issues };
|
||||
} catch {
|
||||
// Default to valid if we can't parse anything
|
||||
return { isValid: true, suggestion: null, issues: [] };
|
||||
}
|
||||
}
|
||||
|
||||
// Prompt construction and response parsing for name validation.
module.exports = {
  buildNameUserPrompt,
  parseNameResponse
};
|
||||
194
inventory-server/src/services/ai/prompts/promptLoader.js
Normal file
194
inventory-server/src/services/ai/prompts/promptLoader.js
Normal file
@@ -0,0 +1,194 @@
|
||||
/**
|
||||
* Prompt Loader
|
||||
*
|
||||
* Utilities to load AI prompts from the ai_prompts PostgreSQL table.
|
||||
* Supports loading prompts by base type (e.g., 'name_validation' loads
|
||||
* name_validation_system, name_validation_general, and optionally
|
||||
* name_validation_company_specific).
|
||||
*/
|
||||
|
||||
/**
 * Load a single prompt by exact type from the ai_prompts table.
 * Returns null (and logs) on any query error or when no row matches.
 *
 * @param {Object} pool - PostgreSQL pool
 * @param {string} promptType - Exact prompt type (e.g. 'name_validation_system')
 * @param {string} [company] - Company identifier (for company_specific types)
 * @returns {Promise<string|null>} Prompt text or null if not found
 */
async function loadPromptByType(pool, promptType, company = null) {
  // Global prompts are stored with company IS NULL, so the two cases need
  // different WHERE clauses rather than a single parameterized one.
  const [sql, params] = company
    ? ['SELECT prompt_text FROM ai_prompts WHERE prompt_type = $1 AND company = $2', [promptType, company]]
    : ['SELECT prompt_text FROM ai_prompts WHERE prompt_type = $1 AND company IS NULL', [promptType]];

  try {
    const result = await pool.query(sql, params);
    return result.rows[0]?.prompt_text || null;
  } catch (error) {
    console.error(`[PromptLoader] Error loading ${promptType} prompt:`, error.message);
    return null;
  }
}
|
||||
|
||||
/**
 * Load all prompts for a task type: system, general, and (when a company is
 * given) the company-specific variant.
 *
 * @param {Object} pool - PostgreSQL pool
 * @param {string} baseType - Base type name (e.g. 'name_validation')
 * @param {string|null} [company] - Optional company ID for company-specific prompts
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadPromptsByType(pool, baseType, company = null) {
  // System and general prompts are company-agnostic - fetch them concurrently
  const [system, general] = await Promise.all([
    loadPromptByType(pool, `${baseType}_system`),
    loadPromptByType(pool, `${baseType}_general`)
  ]);

  const companySpecific = company
    ? await loadPromptByType(pool, `${baseType}_company_specific`, company)
    : null;

  return { system, general, companySpecific };
}
|
||||
|
||||
/**
 * Convenience wrapper: load the prompt set for product-name validation.
 * @param {Object} pool - PostgreSQL pool
 * @param {string|null} [company] - Optional company ID
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadNameValidationPrompts(pool, company = null) {
  return loadPromptsByType(pool, 'name_validation', company);
}
|
||||
|
||||
/**
 * Convenience wrapper: load the prompt set for description validation.
 * @param {Object} pool - PostgreSQL pool
 * @param {string|null} [company] - Optional company ID
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadDescriptionValidationPrompts(pool, company = null) {
  return loadPromptsByType(pool, 'description_validation', company);
}
|
||||
|
||||
/**
 * Convenience wrapper: load the sanity-check prompt set.
 * Sanity checks have no company-specific variant, so company is always null.
 * @param {Object} pool - PostgreSQL pool
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: null}>}
 */
async function loadSanityCheckPrompts(pool) {
  return loadPromptsByType(pool, 'sanity_check', null);
}
|
||||
|
||||
/**
 * Convenience wrapper: load the prompt set for bulk (GPT-5) validation.
 * @param {Object} pool - PostgreSQL pool
 * @param {string|null} [company] - Optional company ID
 * @returns {Promise<{system: string|null, general: string|null, companySpecific: string|null}>}
 */
async function loadBulkValidationPrompts(pool, company = null) {
  return loadPromptsByType(pool, 'bulk_validation', company);
}
|
||||
|
||||
/**
 * Load bulk-validation prompts for many companies in one round trip.
 * Company-specific failures are logged and swallowed so the shared
 * system/general prompts are still returned.
 *
 * @param {Object} pool - PostgreSQL pool
 * @param {string[]} companyIds - Array of company IDs
 * @returns {Promise<{system: string|null, general: string|null, companyPrompts: Map<string, string>}>}
 */
async function loadBulkValidationPromptsForCompanies(pool, companyIds = []) {
  // Shared prompts, fetched concurrently
  const [system, general] = await Promise.all([
    loadPromptByType(pool, 'bulk_validation_system'),
    loadPromptByType(pool, 'bulk_validation_general')
  ]);

  // One query covers every requested company's specific prompt
  const companyPrompts = new Map();
  if (companyIds.length > 0) {
    try {
      const result = await pool.query(
        `SELECT company, prompt_text FROM ai_prompts
         WHERE prompt_type = 'bulk_validation_company_specific'
         AND company = ANY($1)`,
        [companyIds]
      );
      for (const row of result.rows) {
        companyPrompts.set(row.company, row.prompt_text);
      }
    } catch (error) {
      console.error('[PromptLoader] Error loading company-specific prompts:', error.message);
    }
  }

  return { system, general, companyPrompts };
}
|
||||
|
||||
/**
 * Validate that required prompts exist, throwing when any are missing.
 *
 * @param {Object} prompts - Prompts object from loadPromptsByType
 * @param {string} baseType - Base type used to build names in the error message
 * @param {Object} options - Validation options
 * @param {boolean} [options.requireSystem=true] - Require the system prompt
 * @param {boolean} [options.requireGeneral=true] - Require the general prompt
 * @throws {Error} If any required prompt is absent
 */
function validateRequiredPrompts(prompts, baseType, options = {}) {
  const { requireSystem = true, requireGeneral = true } = options;

  const missing = [];
  if (requireSystem && !prompts.system) {
    missing.push(`${baseType}_system`);
  }
  if (requireGeneral && !prompts.general) {
    missing.push(`${baseType}_general`);
  }

  if (missing.length === 0) {
    return;
  }

  throw new Error(
    `Missing required AI prompts: ${missing.join(', ')}. ` +
    `Please add these prompts in Settings > AI Validation Prompts.`
  );
}
|
||||
|
||||
// Prompt-loading API: core loaders plus task-specific convenience wrappers
// and a validator for required prompt presence.
module.exports = {
  // Core loader
  loadPromptByType,
  loadPromptsByType,

  // Task-specific loaders
  loadNameValidationPrompts,
  loadDescriptionValidationPrompts,
  loadSanityCheckPrompts,
  loadBulkValidationPrompts,
  loadBulkValidationPromptsForCompanies,

  // Validation
  validateRequiredPrompts
};
|
||||
128
inventory-server/src/services/ai/prompts/sanityCheckPrompts.js
Normal file
128
inventory-server/src/services/ai/prompts/sanityCheckPrompts.js
Normal file
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* Sanity Check Prompts
|
||||
*
|
||||
* Functions for building and parsing batch product consistency validation prompts.
|
||||
* System and general prompts are loaded from the database.
|
||||
*/
|
||||
|
||||
/**
 * Assemble the user prompt for the batch sanity check.
 * Combines the database-loaded rules with a trimmed JSON view of the products
 * and an explicit example of the expected JSON response shape.
 *
 * @param {Object[]} products - Array of product data (limited fields for context)
 * @param {Object} prompts - Prompts loaded from database
 * @param {string} prompts.general - General sanity check rules
 * @returns {string} Complete user prompt
 */
function buildSanityCheckUserPrompt(products, prompts) {
  // Trim each product down to the fields the model should reason about,
  // preferring resolved display names over raw ids where both exist.
  const summaries = products.map((product, index) => ({
    index,
    name: product.name,
    supplier: product.supplier_name || product.supplier,
    company: product.company_name || product.company,
    supplier_no: product.supplier_no,
    msrp: product.msrp,
    cost_each: product.cost_each,
    qty_per_unit: product.qty_per_unit,
    case_qty: product.case_qty,
    tax_cat: product.tax_cat_name || product.tax_cat,
    size_cat: product.size_cat_name || product.size_cat,
    themes: product.theme_names || product.themes,
    categories: product.category_names || product.categories,
    weight: product.weight,
    length: product.length,
    width: product.width,
    height: product.height
  }));

  // Example payload shown to the model so it mirrors the expected schema.
  const responseExample = {
    issues: [
      {
        productIndex: 0,
        field: 'msrp',
        issue: 'Description of the issue found',
        suggestion: 'Suggested fix or verification (optional)'
      }
    ],
    summary: '2-3 sentences summarizing the overall product quality'
  };

  const sections = [];

  // Database-provided rules lead the prompt, separated by a blank line.
  if (prompts.general) {
    sections.push(prompts.general, '');
  }

  sections.push(
    `PRODUCTS TO REVIEW (${products.length} items):`,
    JSON.stringify(summaries, null, 2),
    '',
    'RESPOND WITH JSON:',
    JSON.stringify(responseExample, null, 2),
    '',
    'If no issues are found, return empty issues array with positive summary.'
  );

  return sections.join('\n');
}
|
||||
|
||||
/**
 * Parse the AI response for sanity check.
 * Prefers well-formed JSON; falls back to regex extraction from the raw text,
 * and degrades to an empty result when nothing usable can be recovered.
 *
 * @param {Object|null} parsed - Parsed JSON from AI
 * @param {string} content - Raw response content
 * @returns {Object} { issues, summary }
 */
function parseSanityCheckResponse(parsed, content) {
  // Normalize one raw issue into the canonical shape, tolerating the
  // alternate key names the model sometimes emits.
  const normalizeIssue = (raw) => ({
    productIndex: raw.productIndex ?? raw.index ?? 0,
    field: raw.field || 'unknown',
    issue: raw.issue || raw.message || '',
    suggestion: raw.suggestion || null
  });

  // Happy path: the provider already gave us valid JSON with an issues array.
  if (parsed && Array.isArray(parsed.issues)) {
    return {
      issues: parsed.issues.map(normalizeIssue),
      summary: parsed.summary || 'Review complete'
    };
  }

  // Fallback: scrape what we can out of the raw text.
  try {
    let issues = [];

    const issuesMatch = content.match(/"issues"\s*:\s*\[([\s\S]*?)\]/);
    if (issuesMatch) {
      try {
        issues = JSON.parse(`[${issuesMatch[1]}]`).map(normalizeIssue);
      } catch {
        // Array fragment was not parseable; keep issues empty.
      }
    }

    const summaryMatch = content.match(/"summary"\s*:\s*"([^"]+)"/);
    const summary = summaryMatch ? summaryMatch[1] : 'Review complete';

    return { issues, summary };
  } catch {
    // content was null/undefined or otherwise unusable.
    return { issues: [], summary: 'Could not parse review results' };
  }
}
|
||||
|
||||
// Public API: prompt construction and response parsing for the sanity check task.
module.exports = {
  buildSanityCheckUserPrompt,
  parseSanityCheckResponse
};
|
||||
203
inventory-server/src/services/ai/providers/groqProvider.js
Normal file
203
inventory-server/src/services/ai/providers/groqProvider.js
Normal file
@@ -0,0 +1,203 @@
|
||||
/**
|
||||
* Groq Provider - Handles chat completions via Groq's OpenAI-compatible API
|
||||
*
|
||||
* Uses Groq's fast inference for real-time AI validation tasks.
|
||||
* Supports models like openai/gpt-oss-120b (complex) and openai/gpt-oss-20b (simple).
|
||||
*/
|
||||
|
||||
// Base endpoint for Groq's OpenAI-compatible REST API.
const GROQ_BASE_URL = 'https://api.groq.com/openai/v1';

// Default models
const MODELS = {
  LARGE: 'openai/gpt-oss-120b', // For complex tasks (descriptions, sanity checks)
  SMALL: 'openai/gpt-oss-20b'   // For simple tasks (name validation)
};

/**
 * Thin HTTP client for Groq chat completions, with timeout handling and
 * best-effort JSON extraction from model output.
 */
class GroqProvider {
  /**
   * @param {Object} options
   * @param {string} options.apiKey - Groq API key (required)
   * @param {string} [options.baseUrl] - Override base URL
   * @param {number} [options.timeoutMs=30000] - Default timeout
   * @throws {Error} If no API key is provided
   */
  constructor({ apiKey, baseUrl = GROQ_BASE_URL, timeoutMs = 30000 }) {
    if (!apiKey) {
      throw new Error('Groq API key is required');
    }
    this.apiKey = apiKey;
    this.baseUrl = baseUrl;
    this.timeoutMs = timeoutMs;
  }

  /**
   * Send a chat completion request.
   *
   * @param {Object} params
   * @param {Array<{role: string, content: string}>} params.messages - Conversation messages
   * @param {string} [params.model] - Model to use (defaults to LARGE)
   * @param {number} [params.temperature=0.3] - Response randomness (0-2)
   * @param {number} [params.maxTokens=500] - Max tokens in response
   * @param {Object} [params.responseFormat] - For JSON mode: { type: 'json_object' }
   * @param {number} [params.timeoutMs] - Request timeout override
   * @returns {Promise<{content: string, parsed: Object|null, usage: Object, latencyMs: number, model: string}>}
   *   `parsed` is null unless a responseFormat was requested and JSON could be recovered.
   */
  async chatCompletion({
    messages,
    model = MODELS.LARGE,
    temperature = 0.3,
    maxTokens = 500,
    responseFormat = null,
    timeoutMs = this.timeoutMs
  }) {
    const started = Date.now();

    const body = {
      model,
      messages,
      temperature,
      // Groq uses max_completion_tokens rather than OpenAI's max_tokens.
      max_completion_tokens: maxTokens
    };

    // Enable JSON mode if requested
    if (responseFormat?.type === 'json_object') {
      body.response_format = { type: 'json_object' };
    }

    // Debug: Log request being sent
    console.log('[Groq] Request:', {
      model: body.model,
      temperature: body.temperature,
      maxTokens: body.max_completion_tokens,
      hasResponseFormat: !!body.response_format,
      messageCount: body.messages?.length,
      systemPromptLength: body.messages?.[0]?.content?.length,
      userPromptLength: body.messages?.[1]?.content?.length
    });

    const response = await this._makeRequest('chat/completions', body, timeoutMs);

    // Debug: Log raw response structure
    console.log('[Groq] Raw response:', {
      hasChoices: !!response.choices,
      choicesLength: response.choices?.length,
      firstChoice: response.choices?.[0] ? {
        finishReason: response.choices[0].finish_reason,
        hasMessage: !!response.choices[0].message,
        contentLength: response.choices[0].message?.content?.length,
        contentPreview: response.choices[0].message?.content?.substring(0, 200)
      } : null,
      usage: response.usage,
      model: response.model
    });

    const content = response.choices?.[0]?.message?.content || '';
    const usage = response.usage || {};

    // Attempt to parse JSON if response format was requested
    let parsed = null;
    if (responseFormat && content) {
      try {
        parsed = JSON.parse(content);
      } catch {
        // Content isn't valid JSON - try to extract JSON from markdown
        parsed = this._extractJson(content);
      }
    }

    return {
      content,
      parsed,
      usage: {
        promptTokens: usage.prompt_tokens || 0,
        completionTokens: usage.completion_tokens || 0,
        totalTokens: usage.total_tokens || 0
      },
      latencyMs: Date.now() - started,
      model: response.model || model
    };
  }

  /**
   * Extract JSON from content that might be wrapped in markdown code blocks.
   * Returns the parsed value, or null if nothing parseable is found.
   * @private
   */
  _extractJson(content) {
    // Try to find JSON in code blocks
    const codeBlockMatch = content.match(/```(?:json)?\s*([\s\S]*?)```/);
    if (codeBlockMatch) {
      try {
        return JSON.parse(codeBlockMatch[1].trim());
      } catch {
        // Fall through
      }
    }

    // Try to find JSON object/array directly
    const jsonMatch = content.match(/(\{[\s\S]*\}|\[[\s\S]*\])/);
    if (jsonMatch) {
      try {
        return JSON.parse(jsonMatch[1]);
      } catch {
        // Fall through
      }
    }

    return null;
  }

  /**
   * Make an HTTP POST to the Groq API with an abort-based timeout.
   * API errors carry `status`, `code`, and (for JSON-mode failures)
   * `failedGeneration`; timeouts carry `code = 'TIMEOUT'`.
   * @private
   */
  async _makeRequest(endpoint, body, timeoutMs) {
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), timeoutMs);

    try {
      const response = await fetch(`${this.baseUrl}/${endpoint}`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body),
        signal: controller.signal
      });

      if (!response.ok) {
        const error = await response.json().catch(() => ({}));
        const message = error.error?.message || `Groq API error: ${response.status}`;
        const err = new Error(message);
        err.status = response.status;
        err.code = error.error?.code;
        // Include failed_generation if available (for JSON mode failures)
        if (error.error?.failed_generation) {
          err.failedGeneration = error.error.failed_generation;
          console.error('[Groq] JSON validation failed. Model output:', error.error.failed_generation);
        }
        throw err;
      }

      return response.json();
    } catch (error) {
      if (error.name === 'AbortError') {
        // Preserve the original AbortError as the cause so the stack isn't lost.
        const err = new Error(`Groq request timed out after ${timeoutMs}ms`, { cause: error });
        err.code = 'TIMEOUT';
        throw err;
      }
      throw error;
    } finally {
      clearTimeout(timeout);
    }
  }

  /**
   * Check if the provider is properly configured.
   * @returns {boolean}
   */
  isConfigured() {
    return !!this.apiKey;
  }
}
|
||||
|
||||
// Export the provider class plus model/endpoint constants for reuse and tests.
module.exports = { GroqProvider, MODELS, GROQ_BASE_URL };
|
||||
117
inventory-server/src/services/ai/providers/openaiProvider.js
Normal file
117
inventory-server/src/services/ai/providers/openaiProvider.js
Normal file
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* OpenAI Provider - Handles embedding generation
|
||||
*/
|
||||
|
||||
// Default embedding model and output dimensionality.
const EMBEDDING_MODEL = 'text-embedding-3-small';
const EMBEDDING_DIMENSIONS = 1536;
// OpenAI's hard limit on inputs per embeddings request.
const MAX_BATCH_SIZE = 2048;

/**
 * Thin HTTP client for OpenAI embeddings, with timeout handling and
 * chunked batch processing.
 */
class OpenAIProvider {
  /**
   * @param {Object} options
   * @param {string} options.apiKey - OpenAI API key (required)
   * @param {string} [options.baseUrl] - Override base URL
   * @param {number} [options.timeoutMs=60000] - Default request timeout
   * @throws {Error} If no API key is provided
   */
  constructor({ apiKey, baseUrl = 'https://api.openai.com/v1', timeoutMs = 60000 }) {
    if (!apiKey) {
      throw new Error('OpenAI API key is required');
    }
    this.apiKey = apiKey;
    this.baseUrl = baseUrl;
    this.timeoutMs = timeoutMs;
  }

  /**
   * Generate embeddings for one or more texts.
   *
   * @param {string|string[]} input - Text or array of texts
   * @param {Object} options
   * @param {string} [options.model] - Embedding model override
   * @param {number} [options.dimensions] - Output dimensions (embedding-3 models only)
   * @param {number} [options.timeoutMs] - Request timeout override
   * @returns {Promise<{embeddings: number[][], usage: Object, model: string, latencyMs: number}>}
   * @throws {Error} If the batch exceeds MAX_BATCH_SIZE or the API call fails
   */
  async embed(input, options = {}) {
    const texts = Array.isArray(input) ? input : [input];
    const model = options.model || EMBEDDING_MODEL;
    const dimensions = options.dimensions || EMBEDDING_DIMENSIONS;
    const timeoutMs = options.timeoutMs || this.timeoutMs;

    if (texts.length > MAX_BATCH_SIZE) {
      throw new Error(`Batch size ${texts.length} exceeds max of ${MAX_BATCH_SIZE}`);
    }

    const started = Date.now();

    // Clean and truncate input texts (newlines collapsed; long texts clipped
    // to stay within the model's input limits).
    const cleanedTexts = texts.map(t =>
      (t || '').replace(/\n+/g, ' ').trim().substring(0, 8000)
    );

    const body = {
      input: cleanedTexts,
      model,
      encoding_format: 'float'
    };

    // Only embedding-3 models support dimensions parameter
    if (model.includes('embedding-3')) {
      body.dimensions = dimensions;
    }

    const response = await this._makeRequest('embeddings', body, timeoutMs);

    // Sort by index to ensure order matches input
    const sortedData = response.data.sort((a, b) => a.index - b.index);

    return {
      embeddings: sortedData.map(item => item.embedding),
      usage: {
        promptTokens: response.usage?.prompt_tokens || 0,
        totalTokens: response.usage?.total_tokens || 0
      },
      model: response.model || model,
      latencyMs: Date.now() - started
    };
  }

  /**
   * Generator for processing large batches in chunks.
   * Yields one result object per chunk, including the [startIndex, endIndex)
   * range of the input texts it covers.
   */
  async *embedBatchChunked(texts, options = {}) {
    const batchSize = Math.min(options.batchSize || 100, MAX_BATCH_SIZE);

    for (let i = 0; i < texts.length; i += batchSize) {
      const chunk = texts.slice(i, i + batchSize);
      const result = await this.embed(chunk, options);

      yield {
        embeddings: result.embeddings,
        startIndex: i,
        endIndex: i + chunk.length,
        usage: result.usage,
        model: result.model,
        latencyMs: result.latencyMs
      };
    }
  }

  /**
   * Make an HTTP POST to the OpenAI API with an abort-based timeout.
   * Timeouts are translated into an Error with `code = 'TIMEOUT'`,
   * mirroring GroqProvider._makeRequest.
   * @private
   */
  async _makeRequest(endpoint, body, timeoutMs) {
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), timeoutMs);

    try {
      const response = await fetch(`${this.baseUrl}/${endpoint}`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body),
        signal: controller.signal
      });

      if (!response.ok) {
        const error = await response.json().catch(() => ({}));
        throw new Error(error.error?.message || `OpenAI API error: ${response.status}`);
      }

      return response.json();
    } catch (error) {
      // Consistency with GroqProvider: surface timeouts as a descriptive
      // error instead of a bare AbortError, preserving the original as cause.
      if (error.name === 'AbortError') {
        const err = new Error(`OpenAI request timed out after ${timeoutMs}ms`, { cause: error });
        err.code = 'TIMEOUT';
        throw err;
      }
      throw error;
    } finally {
      clearTimeout(timeout);
    }
  }
}
|
||||
|
||||
// Export the provider class plus embedding constants for reuse and tests.
module.exports = { OpenAIProvider, EMBEDDING_MODEL, EMBEDDING_DIMENSIONS };
|
||||
@@ -0,0 +1,158 @@
|
||||
/**
|
||||
* Description Validation Task
|
||||
*
|
||||
* Validates a product description for quality, accuracy, and guideline compliance.
|
||||
* Uses Groq with the larger model for better reasoning about content quality.
|
||||
* Loads all prompts from the database (no hardcoded prompts).
|
||||
*/
|
||||
|
||||
const { MODELS } = require('../providers/groqProvider');
|
||||
const {
|
||||
loadDescriptionValidationPrompts,
|
||||
validateRequiredPrompts
|
||||
} = require('../prompts/promptLoader');
|
||||
const {
|
||||
buildDescriptionUserPrompt,
|
||||
parseDescriptionResponse
|
||||
} = require('../prompts/descriptionPrompts');
|
||||
|
||||
// Unique identifier used when registering and invoking this task.
const TASK_ID = 'validate.description';
|
||||
|
||||
/**
 * Create the description validation task.
 *
 * Factory so the registry can construct the task lazily; the returned object
 * holds no state of its own — all dependencies (provider, pool, logger)
 * arrive via the run payload.
 *
 * @returns {Object} Task definition with `id`, `description`, and `run`
 */
function createDescriptionValidationTask() {
  return {
    id: TASK_ID,
    description: 'Validate product description for quality and guideline compliance',

    /**
     * Run the description validation.
     *
     * Strategy: request a chat completion in JSON mode first; if the provider
     * rejects the generation, either recover the model's failed output
     * (`failedGeneration`) or retry once without JSON mode.
     *
     * @param {Object} payload
     * @param {Object} payload.product - Product data
     * @param {string} payload.product.name - Product name (for context)
     * @param {string} payload.product.description - Description to validate
     * @param {string} [payload.product.company_name] - Company name
     * @param {string} [payload.product.company_id] - Company ID for loading specific rules
     * @param {string} [payload.product.categories] - Product categories
     * @param {Object} payload.provider - Groq provider instance
     * @param {Object} payload.pool - PostgreSQL pool
     * @param {Object} [payload.logger] - Logger instance (defaults to console)
     * @returns {Promise<Object>} Parsed validation result plus latency/usage/model
     *   metadata; or a `skipped: true` result when there is nothing to validate.
     * @throws {Error} If provider/pool are missing, required prompts are absent,
     *   or the AI call fails on both attempts
     */
    async run(payload) {
      const { product, provider, pool, logger } = payload;
      const log = logger || console;

      // Nothing to validate — report a trivially-valid, skipped result.
      if (!product?.name && !product?.description) {
        return {
          isValid: true,
          suggestion: null,
          issues: [],
          skipped: true,
          reason: 'No name or description provided'
        };
      }

      if (!provider) {
        throw new Error('Groq provider not available');
      }

      if (!pool) {
        throw new Error('Database pool not available');
      }

      try {
        // Load prompts from database; company-specific rules are keyed by
        // id first, falling back to company name.
        const companyKey = product.company_id || product.company_name || product.company;
        const prompts = await loadDescriptionValidationPrompts(pool, companyKey);

        // Throws if the system/general prompts are missing.
        validateRequiredPrompts(prompts, 'description_validation');

        // Build the user prompt with database-loaded prompts.
        const userPrompt = buildDescriptionUserPrompt(product, prompts);

        let response;
        let result;

        try {
          // First attempt: JSON mode for structured output.
          response = await provider.chatCompletion({
            messages: [
              { role: 'system', content: prompts.system },
              { role: 'user', content: userPrompt }
            ],
            model: MODELS.LARGE, // openai/gpt-oss-120b - better for content analysis
            temperature: 0.3, // Slightly higher for creative suggestions
            maxTokens: 2000, // Reasoning models need extra tokens for thinking
            responseFormat: { type: 'json_object' }
          });

          // Log full raw response for debugging.
          log.info('[DescriptionValidation] Raw AI response:', {
            parsed: response.parsed,
            content: response.content,
            contentLength: response.content?.length
          });

          // Parse the response.
          result = parseDescriptionResponse(response.parsed, response.content);
        } catch (jsonError) {
          // JSON mode failed. If the provider returned the model's raw failed
          // generation, salvage it instead of making a second call.
          if (jsonError.failedGeneration) {
            log.warn('[DescriptionValidation] JSON mode failed, attempting to parse failed_generation:', {
              failedGeneration: jsonError.failedGeneration
            });
            result = parseDescriptionResponse(null, jsonError.failedGeneration);
            // Synthesize response metadata — no successful call to take it from.
            response = { latencyMs: 0, usage: {}, model: MODELS.LARGE };
          } else {
            // Otherwise retry once without JSON mode.
            log.warn('[DescriptionValidation] JSON mode failed, retrying without JSON mode');
            response = await provider.chatCompletion({
              messages: [
                { role: 'system', content: prompts.system },
                { role: 'user', content: userPrompt }
              ],
              model: MODELS.LARGE,
              temperature: 0.3,
              maxTokens: 2000 // Reasoning models need extra tokens for thinking
              // No responseFormat - let the model respond freely
            });
            log.info('[DescriptionValidation] Raw AI response (no JSON mode):', {
              parsed: response.parsed,
              content: response.content,
              contentLength: response.content?.length
            });
            result = parseDescriptionResponse(response.parsed, response.content);
          }
        }

        log.info(`[DescriptionValidation] Validated description for "${product.name}" in ${response.latencyMs}ms`, {
          isValid: result.isValid,
          hasSuggestion: !!result.suggestion,
          issueCount: result.issues.length
        });

        return {
          ...result,
          latencyMs: response.latencyMs,
          usage: response.usage,
          model: response.model
        };
      } catch (error) {
        log.error('[DescriptionValidation] Error:', error.message);
        throw error;
      }
    }
  };
}
|
||||
|
||||
// Export the task id and factory so the registry can create this task.
module.exports = {
  TASK_ID,
  createDescriptionValidationTask
};
|
||||
186
inventory-server/src/services/ai/tasks/index.js
Normal file
186
inventory-server/src/services/ai/tasks/index.js
Normal file
@@ -0,0 +1,186 @@
|
||||
/**
|
||||
* AI Task Registry
|
||||
*
|
||||
* Simple registry pattern for AI tasks. Each task has:
|
||||
* - id: Unique identifier
|
||||
* - run: Async function that executes the task
|
||||
*
|
||||
* This allows adding new AI capabilities without modifying core code.
|
||||
*/
|
||||
|
||||
const { createNameValidationTask, TASK_ID: NAME_TASK_ID } = require('./nameValidationTask');
|
||||
const { createDescriptionValidationTask, TASK_ID: DESC_TASK_ID } = require('./descriptionValidationTask');
|
||||
const { createSanityCheckTask, TASK_ID: SANITY_TASK_ID } = require('./sanityCheckTask');
|
||||
|
||||
/**
 * Task IDs - frozen constants for type safety.
 * Values are re-exported from each task module so the registry and callers
 * share a single source of truth; frozen to prevent runtime mutation.
 */
const TASK_IDS = Object.freeze({
  // Inline validation (triggered on field blur)
  VALIDATE_NAME: NAME_TASK_ID,
  VALIDATE_DESCRIPTION: DESC_TASK_ID,

  // Batch operations (triggered on user action)
  SANITY_CHECK: SANITY_TASK_ID
});
|
||||
|
||||
/**
 * Task Registry
 *
 * Maps task ids to task definitions and dispatches execution, wrapping
 * results and errors in a uniform { success, taskId, ... } envelope.
 */
class TaskRegistry {
  constructor() {
    // Task id -> task definition.
    this.tasks = new Map();
  }

  /**
   * Register a task.
   * @param {Object} task
   * @param {string} task.id - Unique task identifier
   * @param {Function} task.run - Async function: (payload) => result
   * @param {string} [task.description] - Human-readable description
   * @returns {TaskRegistry} this, for chaining
   * @throws {Error} On missing id/run or duplicate registration
   */
  register(task) {
    const id = task?.id;
    if (!id) throw new Error('Task must have an id');
    if (typeof task.run !== 'function') throw new Error(`Task ${id} must have a run function`);
    if (this.tasks.has(id)) throw new Error(`Task ${id} is already registered`);

    this.tasks.set(id, task);
    return this;
  }

  /**
   * Get a task by ID.
   * @param {string} taskId
   * @returns {Object|null} The task definition, or null if unknown
   */
  get(taskId) {
    return this.tasks.get(taskId) ?? null;
  }

  /**
   * Check if a task exists.
   * @param {string} taskId
   * @returns {boolean}
   */
  has(taskId) {
    return this.tasks.has(taskId);
  }

  /**
   * Run a task by ID.
   * Task failures are captured and returned as { success: false, ... };
   * only an unknown task id rejects.
   * @param {string} taskId
   * @param {Object} payload - Task-specific input
   * @returns {Promise<Object>} Task result envelope
   * @throws {Error} If taskId is not registered
   */
  async runTask(taskId, payload = {}) {
    const task = this.get(taskId);
    if (!task) throw new Error(`Unknown task: ${taskId}`);

    try {
      return { success: true, taskId, ...(await task.run(payload)) };
    } catch (error) {
      return { success: false, taskId, error: error.message, code: error.code };
    }
  }

  /**
   * List all registered task IDs.
   * @returns {string[]}
   */
  list() {
    return [...this.tasks.keys()];
  }

  /**
   * Get count of registered tasks.
   * @returns {number}
   */
  size() {
    return this.tasks.size;
  }
}
|
||||
|
||||
// Lazily-created singleton registry shared by the whole process.
let registry = null;

/**
 * Get or create the task registry
 * @returns {TaskRegistry}
 */
function getRegistry() {
  registry ??= new TaskRegistry();
  return registry;
}

/**
 * Reset the registry (mainly for testing)
 */
function resetRegistry() {
  registry = null;
}
|
||||
|
||||
/**
 * Register all validation tasks with the registry.
 * Call this during initialization after the registry is created.
 * Idempotent: tasks already present are left untouched.
 *
 * @param {Object} [logger] - Optional logger
 * @returns {TaskRegistry} The shared registry with all tasks registered
 */
function registerAllTasks(logger = console) {
  const reg = getRegistry();

  // Task id paired with its factory, in registration order.
  const definitions = [
    [TASK_IDS.VALIDATE_NAME, createNameValidationTask],
    [TASK_IDS.VALIDATE_DESCRIPTION, createDescriptionValidationTask],
    [TASK_IDS.SANITY_CHECK, createSanityCheckTask]
  ];

  for (const [taskId, factory] of definitions) {
    if (reg.has(taskId)) continue;
    reg.register(factory());
    logger.info(`[Tasks] Registered: ${taskId}`);
  }

  return reg;
}
|
||||
|
||||
module.exports = {
|
||||
// Constants
|
||||
TASK_IDS,
|
||||
|
||||
// Registry
|
||||
TaskRegistry,
|
||||
getRegistry,
|
||||
resetRegistry,
|
||||
registerAllTasks,
|
||||
|
||||
// Task factories (for custom registration)
|
||||
createNameValidationTask,
|
||||
createDescriptionValidationTask,
|
||||
createSanityCheckTask
|
||||
};
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user