43 Commits

Author SHA1 Message Date
9e1989ac66 Cleanup 2025-02-01 14:08:27 -05:00
5bfd6f6d04 Fix import script order count issues 2025-02-01 12:54:33 -05:00
1003ff3cf2 More incremental import fixes 2025-02-01 11:42:51 -05:00
2d0089dc52 Incremental import order fixes 2025-02-01 11:03:42 -05:00
50b86d6d8a Fix/add data to PO script 2025-02-01 10:51:47 -05:00
07f14c0017 Fix/add data to orders script and fix other import errors 2025-02-01 01:06:45 -05:00
e77b488cd4 Fix/add data to products script 2025-01-31 18:44:11 -05:00
d57239c40c Finish up import script incremental and reliability updates 2025-01-31 16:01:21 -05:00
1c932e0df5 More import script updates and fixes, better import_history tracking 2025-01-31 13:12:43 -05:00
a867117c3c Import script incremental fixes 2025-01-31 11:12:38 -05:00
996d3d36af Streamline incremental imports 2025-01-31 10:01:50 -05:00
d0abe9d9a2 - Modify import scripts to handle edge cases with empty arrays and null conditions
- Improve parameter handling in incremental update queries for purchase orders and products
2025-01-31 01:50:21 -05:00
5e4d1c3bd8 Improve import scripts with enhanced incremental update tracking and performance
- Add record tracking for added and updated records in import scripts
- Modify products import to use a dynamic 'needs_update' flag for selective updates
- Enhance order import with more comprehensive timestamp checks
- Update import-from-prod.js to handle and clean up previously running imports
- Improve error handling and connection management in import processes
2025-01-31 01:39:48 -05:00
1be97d6610 Enhance purchase order import with advanced receiving tracking and fulfillment logic
- Implement FIFO-based receiving fulfillment tracking
- Add detailed receiving history with excess and partial fulfillment support
- Improve vendor name resolution and fallback handling
- Optimize incremental update queries by removing redundant conditions
- Enhance receiving status calculation with more granular tracking
2025-01-31 01:25:48 -05:00
b506f89dd7 Optimize order and product import scripts with improved performance and incremental update handling
- Refactor orders import to use temporary tables for more efficient data processing
- Improve batch processing and memory management in order import script
- Update product import to use temporary tables for inventory status
- Modify purchase orders import to use updated timestamp for incremental updates
- Enhance error handling and logging for import processes
2025-01-30 21:13:53 -05:00
c433f1aae8 Enhance import scripts with incremental update support and improved error handling
- Update import-from-prod.js to support granular incremental updates for different import types
- Modify orders.js to handle complex order data retrieval with better performance and error tracking
- Add support for incremental updates in products.js import function
- Improve logging and progress tracking for import processes
2025-01-30 15:49:47 -05:00
31d4011902 Add back product-category import and product time estimates 2025-01-30 00:00:30 -05:00
6c5f119ee5 Import fixes/optimizations 2025-01-29 21:48:56 -05:00
3c5fb9e435 Optimize product import with dynamic batching and memory management 2025-01-29 19:14:58 -05:00
2b329a55a4 Increase product import batch size 2025-01-29 18:51:55 -05:00
0d377466aa Optimize database import queries with improved index selection 2025-01-29 18:42:29 -05:00
fb5bf4a144 Optimize order import with improved tax info retrieval and performance 2025-01-29 18:21:31 -05:00
4d8a677c5b Remove import history tracking from purchase orders import script 2025-01-29 16:33:37 -05:00
655c071960 Limit order and purchase order imports to last 5 years 2025-01-29 16:25:24 -05:00
d2a2dbc812 Add incremental import support and tracking for database synchronization 2025-01-29 16:22:00 -05:00
d60b2d4fae Refactor import scripts with improved progress tracking and time formatting 2025-01-29 13:49:59 -05:00
81a724db9d Fix elapsed time calculation in import scripts 2025-01-29 13:40:01 -05:00
84baa7e7d3 Break up prod import script into pieces and move csv scripts into folder 2025-01-29 13:28:15 -05:00
814d5d1a84 Break up prod import script into pieces and move csv scripts into folder 2025-01-29 00:18:04 -05:00
b578549763 Remove CSV scripts from frontend 2025-01-29 00:04:47 -05:00
d56f1e1437 Move sales seasonality table to config schema and finish up standardizing scripts 2025-01-28 23:57:09 -05:00
ebebd37f11 Improve metrics reset script with robust table management and error handling 2025-01-28 22:12:24 -05:00
9c34e24909 Enhance metrics calculation scripts with improved progress tracking and cancellation support 2025-01-28 20:54:05 -05:00
a1e3803ca3 Clean up linter errors 2025-01-28 20:32:08 -05:00
a661b6a11e Misc frontend fixes 2025-01-28 20:30:26 -05:00
1410dc5571 Frontend fixes - categories, deal with new hierarchy, misc fixes 2025-01-28 17:02:11 -05:00
b1f252bea8 Frontend fixes - dashboard, products, forecasting 2025-01-28 14:26:44 -05:00
7e341a152c Update frontend to match part 4 - dashboard 2025-01-28 13:57:44 -05:00
25a0bc8d4c Update frontend to match part 3 2025-01-28 13:40:28 -05:00
57b0e9a120 Update frontend to match part 2 2025-01-28 01:43:45 -05:00
64d9ab2f83 Update frontend to match part 1 2025-01-28 01:30:48 -05:00
8323ae7703 Optimize and fix calculate scripts 2025-01-27 13:16:21 -05:00
5781b45f37 Update calculate script to account for import changes 2025-01-27 10:41:18 -05:00
67 changed files with 6833 additions and 4417 deletions

1
.gitignore vendored
View File

@@ -57,3 +57,4 @@ csv/**/*
**/csv/**/* **/csv/**/*
!csv/.gitkeep !csv/.gitkeep
inventory/tsconfig.tsbuildinfo inventory/tsconfig.tsbuildinfo
inventory-server/scripts/.fuse_hidden00000fa20000000a

181
docs/metrics-changes.md Normal file
View File

@@ -0,0 +1,181 @@
# Metrics System Changes
## Schema Changes
### Product Identifiers
- Changed `product_id` to `pid` throughout all metrics tables and queries
- Changed `category_id` to `cat_id` in category-related queries
### Purchase Orders
- Changed status check from `status = 'closed'` to `receiving_status >= 30`
- Added comment `-- Partial or fully received` for clarity
- Now using `received_date` instead of relying on status changes
### New Product Fields
- Added support for `notions_inv_count`
- Added support for `date_last_sold`
- Added support for `total_sold`
- Using `visible` flag for active product counts
### Field Size Updates
- Increased size of financial fields to handle larger numbers:
- Changed category metrics `total_value` from `DECIMAL(10,3)` to `DECIMAL(15,3)`
- Changed brand metrics financial fields from `DECIMAL(10,2)` to `DECIMAL(15,2)`
- Affects `total_stock_cost`, `total_stock_retail`, `total_revenue`
## Metrics File Changes
### Product Metrics (`product-metrics.js`)
- Updated SQL queries to use new field names
- Enhanced stock status calculations
- Added financial metrics:
- `gross_profit`
- `gmroi`
- `avg_margin_percent`
- `inventory_value`
- Improved reorder quantity calculations with:
- Enhanced safety stock calculation
- Lead time consideration
- Service level factors
- Added NaN/NULL value handling:
- Added `sanitizeValue` helper function
- Properly converts JavaScript NaN to SQL NULL
- Ensures all numeric fields have valid values
### Vendor Metrics (`vendor-metrics.js`)
- Updated field references to use `pid`
- Modified purchase order status checks
- Enhanced vendor performance metrics:
- Order fill rate calculation
- On-time delivery rate
- Lead time tracking
### Category Metrics (`category-metrics.js`)
- Updated to use `cat_id` instead of `category_id`
- Enhanced category performance tracking:
- Active vs total products
- Category growth rate
- Turnover rate
- Added time-based metrics for:
- Product counts
- Revenue tracking
- Margin analysis
- Added NULL brand handling:
- Uses 'Unbranded' for NULL brand values
- Maintains data integrity in category sales metrics
### Brand Metrics (`brand-metrics.js`)
- Updated product references to use `pid`
- Enhanced brand performance metrics:
- Stock value calculations
- Revenue tracking
- Growth rate analysis
- Added time-based aggregates for:
- Stock levels
- Sales performance
- Margin analysis
- Increased field sizes to handle large retailers
### Sales Forecasts (`sales-forecasts.js`)
- Updated to use new product identifiers
- Enhanced forecast calculations:
- Day-of-week patterns
- Seasonality factors
- Confidence levels
- Added category-level forecasts with:
- Units and revenue predictions
- Confidence scoring
- Seasonal adjustments
### Time Aggregates (`time-aggregates.js`)
- Updated field references to use `pid`
- Enhanced financial metrics:
- GMROI calculations
- Profit margin tracking
- Added inventory value tracking
- Improved purchase order integration
## Database Impact
### Tables Modified
- `product_metrics`
- `vendor_metrics`
- `vendor_time_metrics`
- `category_metrics`
- `category_time_metrics`
- `brand_metrics`
- `brand_time_metrics`
- `sales_forecasts`
- `category_forecasts`
- `product_time_aggregates`
### New Fields Added
Several tables have new fields for:
- Enhanced financial tracking
- Improved inventory metrics
- Better performance monitoring
- More accurate forecasting
## Frontend Considerations
### Data Access Changes
- All product lookups need to use `pid` instead of `product_id`
- Category references should use `cat_id`
- Purchase order status checks need updating
- Handle 'Unbranded' as a valid brand value
### New Features Available
- Enhanced stock status indicators
- More detailed financial metrics
- Improved forecasting data
- Better category and brand performance tracking
### UI Updates Needed
- Update all API calls to use new field names
- Modify data displays for new metrics
- Add new financial performance indicators
- Update stock status logic
- Enhance forecast displays
## API Route Updates Needed
### Product Routes
- Update ID field references
- Modify stock status calculations
- Add new financial metrics endpoints
### Category Routes
- Update to use `cat_id`
- Add new performance metrics
- Include time-based aggregates
### Vendor Routes
- Update product reference handling
- Add enhanced performance metrics
- Include new time-based data
### Reporting Routes
- Update all ID references
- Add new metrics support
- Include enhanced forecasting data
## Migration Considerations
### Data Migration
- Update existing records to use new IDs
- Backfill new metrics where possible
- Verify data integrity after changes
- Handle NULL to 'Unbranded' brand conversion
### Code Updates
- Update all API endpoints
- Modify database queries
- Update frontend components
- Revise reporting logic
### Testing Requirements
- Verify ID changes throughout system
- Test new metrics calculations
- Validate forecasting accuracy
- Check performance impact
- Verify NULL value handling

View File

@@ -0,0 +1,270 @@
# Schema Update Changes Required
## Core Field Name Changes
### Global Changes
- Update all references from `product_id` to `pid` in all tables and queries
- This includes foreign key references in related tables
- Update TypeScript interfaces and types (e.g., `interface Product { pid: number; ... }`)
- Update API request/response types
- Update all references from `category_id` to `cat_id` in category-related queries
- This affects the `categories` table and all tables with category foreign keys
- Update purchase order status to use numeric codes instead of strings
- Status codes: 0=canceled, 1=created, 10=electronically_ready_send, 11=ordered, 12=preordered, 13=electronically_sent, 15=receiving_started, 50=done
- Receiving status codes: 0=canceled, 1=created, 30=partial_received, 40=full_received, 50=paid
- Handle NULL brand values as 'Unbranded'
- Add COALESCE(brand, 'Unbranded') in all brand-related queries
- Update frontend brand filters to handle 'Unbranded' as a valid value
## Backend Route Changes
### Product Routes
1. Update ID field references in all product routes:
- `/api/products/:id` -> `/api/products/:pid`
- All query parameters using `product_id` should be changed to `pid`
- Update all SQL queries to use `pid` instead of `product_id`
- Update `/api/products/:id/metrics` -> `/api/products/:pid/metrics`
- Update `/api/products/:id/time-series` -> `/api/products/:pid/time-series`
- Update request parameter validation in routes
- Example query change:
```sql
-- Old
SELECT * FROM products WHERE product_id = ?
-- New
SELECT * FROM products WHERE pid = ?
```
2. Update purchase order status checks:
- Change `status = 'closed'` to `receiving_status >= 30` in all relevant queries
- Update any route logic that checks PO status to use the new numeric status codes
- Example status check change:
```sql
-- Old
WHERE po.status = 'closed'
-- New
WHERE po.receiving_status >= 30 -- Partial or fully received
```
### Category Routes
1. Update ID references:
- `/api/categories/:id` -> `/api/categories/:cat_id`
- Update all SQL queries to use `cat_id` instead of `category_id`
- Update join conditions in category-related queries
- Example join change:
```sql
-- Old
JOIN categories c ON p.category_id = c.category_id
-- New
JOIN categories c ON p.cat_id = c.cat_id
```
2. Update category metrics queries:
- Modify field size handling for financial fields (DECIMAL(15,3) instead of DECIMAL(10,3))
- Update category performance calculations to use new field sizes
- Example field size change:
```sql
-- Old
total_value DECIMAL(10,3)
-- New
total_value DECIMAL(15,3)
```
### Vendor Routes
1. Update product references:
- Change all queries to use `pid` instead of `product_id`
- Update purchase order status checks to use new numeric codes
- Example vendor query change:
```sql
-- Old
SELECT v.*, p.product_id FROM vendors v
JOIN products p ON p.vendor = v.name
WHERE p.product_id = ?
-- New
SELECT v.*, p.pid FROM vendors v
JOIN products p ON p.vendor = v.name
WHERE p.pid = ?
```
2. Update vendor metrics queries:
- Add COALESCE for NULL brand handling:
```sql
-- Old
GROUP BY brand
-- New
GROUP BY COALESCE(brand, 'Unbranded')
```
- Update field references in performance metrics calculations
### Dashboard Routes
1. Update all dashboard endpoints:
- `/dashboard/best-sellers`:
```typescript
interface BestSellerProduct {
pid: number; // Changed from product_id
sku: string;
title: string;
units_sold: number;
revenue: string; // Handle as string due to DECIMAL(15,2) precision
profit: string; // Handle as string due to DECIMAL(15,2) precision
}
```
- `/dashboard/overstock/products`:
```typescript
interface OverstockedProduct {
pid: number; // Changed from product_id
sku: string;
title: string;
stock_quantity: number;
overstocked_amt: number;
excess_cost: string; // Handle as string due to DECIMAL(15,3)
excess_retail: string; // Handle as string due to DECIMAL(15,3)
}
```
### Analytics Routes
1. Update analytics endpoints:
- `/analytics/stats` - Update all ID references and decimal handling
- `/analytics/profit` - Update decimal precision in calculations
- `/analytics/vendors` - Add brand NULL handling
- Example analytics query change:
```sql
-- Old
SELECT product_id, SUM(price * quantity) as revenue
FROM orders
GROUP BY product_id
-- New
SELECT pid, CAST(SUM(price * quantity) AS DECIMAL(15,3)) as revenue
FROM orders
GROUP BY pid
```
## Frontend Component Changes
### Product Components
1. Update API calls:
```typescript
// Old
fetch(`/api/products/${product_id}`)
// New
fetch(`/api/products/${pid}`)
```
- Update route parameters in React Router:
```typescript
// Old
<Route path="/products/:product_id" />
// New
<Route path="/products/:pid" />
```
- Update useParams usage:
```typescript
// Old
const { product_id } = useParams();
// New
const { pid } = useParams();
```
2. Update data display:
```typescript
// Old
<td>{formatCurrency(product.price)}</td>
// New
<td>{formatCurrency(Number(product.price))}</td>
```
### Dashboard Components
1. Update metrics displays:
```typescript
// Old
interface ProductMetrics {
product_id: number;
total_value: number;
}
// New
interface ProductMetrics {
pid: number;
total_value: string; // Handle as string due to DECIMAL(15,3)
}
```
2. Update stock status displays:
```typescript
// Old
const isReceived = po.status === 'closed';
// New
const isReceived = po.receiving_status >= 30;
```
### Product Filters Component
1. Update filter options:
```typescript
// Old
const productFilter = { id: 'product_id', value: id };
// New
const productFilter = { id: 'pid', value: id };
```
2. Update status filters:
```typescript
// Old
const poStatusOptions = [
{ label: 'Closed', value: 'closed' }
];
// New
const poStatusOptions = [
{ label: 'Received', value: '30' } // Using numeric codes
];
```
## Data Type Considerations
### Financial Fields
- Update TypeScript types:
```typescript
// Old
price: number;
// New
price: string; // Handle as string due to DECIMAL(15,3)
```
- Update formatting:
```typescript
// Old
formatCurrency(value)
// New
formatCurrency(Number(value))
```
### Status Codes
- Add status code mapping:
```typescript
const PO_STATUS_MAP = {
0: 'Canceled',
1: 'Created',
10: 'Ready to Send',
11: 'Ordered',
12: 'Preordered',
13: 'Sent',
15: 'Receiving Started',
50: 'Done'
};
```
## Testing Requirements
1. API Route Testing:
```typescript
// Test decimal handling
expect(typeof response.total_value).toBe('string');
expect(response.total_value).toMatch(/^\d+\.\d{3}$/);
// Test status codes
expect(response.receiving_status).toBeGreaterThanOrEqual(30);
// Test brand handling (NULL brands must be returned as 'Unbranded')
expect(typeof response.brand).toBe('string');
expect(response.brand.length).toBeGreaterThan(0);
```
## Notes
- All numeric status code comparisons should use >= for status checks to handle future status codes
- All financial values should be handled as strings in TypeScript/JavaScript to preserve precision
- Brand grouping should always use COALESCE(brand, 'Unbranded') in SQL queries
- All ID parameters in routes should be validated as numbers

View File

@@ -14,7 +14,8 @@ CREATE TABLE IF NOT EXISTS stock_thresholds (
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id), PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE, FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor) UNIQUE KEY unique_category_vendor (category_id, vendor),
INDEX idx_st_metrics (category_id, vendor)
); );
-- Lead time threshold configurations -- Lead time threshold configurations
@@ -44,7 +45,8 @@ CREATE TABLE IF NOT EXISTS sales_velocity_config (
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id), PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE, FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor) UNIQUE KEY unique_category_vendor (category_id, vendor),
INDEX idx_sv_metrics (category_id, vendor)
); );
-- ABC Classification configurations -- ABC Classification configurations
@@ -68,7 +70,8 @@ CREATE TABLE IF NOT EXISTS safety_stock_config (
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (id), PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE, FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE KEY unique_category_vendor (category_id, vendor) UNIQUE KEY unique_category_vendor (category_id, vendor),
INDEX idx_ss_metrics (category_id, vendor)
); );
-- Turnover rate configurations -- Turnover rate configurations
@@ -85,6 +88,16 @@ CREATE TABLE IF NOT EXISTS turnover_config (
UNIQUE KEY unique_category_vendor (category_id, vendor) UNIQUE KEY unique_category_vendor (category_id, vendor)
); );
-- Create table for sales seasonality factors
CREATE TABLE IF NOT EXISTS sales_seasonality (
month INT NOT NULL,
seasonality_factor DECIMAL(5,3) DEFAULT 0,
last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (month),
CHECK (month BETWEEN 1 AND 12),
CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
);
-- Insert default global thresholds if not exists -- Insert default global thresholds if not exists
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days) INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (1, NULL, NULL, 7, 14, 90) VALUES (1, NULL, NULL, 7, 14, 90)
@@ -126,6 +139,13 @@ ON DUPLICATE KEY UPDATE
calculation_period_days = VALUES(calculation_period_days), calculation_period_days = VALUES(calculation_period_days),
target_rate = VALUES(target_rate); target_rate = VALUES(target_rate);
-- Insert default seasonality factors (neutral)
INSERT INTO sales_seasonality (month, seasonality_factor)
VALUES
(1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
(7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP;
-- View to show thresholds with category names -- View to show thresholds with category names
CREATE OR REPLACE VIEW stock_thresholds_view AS CREATE OR REPLACE VIEW stock_thresholds_view AS
SELECT SELECT
@@ -149,4 +169,28 @@ ORDER BY
ELSE 4 ELSE 4
END, END,
c.name, c.name,
st.vendor; st.vendor;
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT,
INDEX idx_last_sync (last_sync_timestamp)
);
CREATE TABLE IF NOT EXISTS import_history (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP NULL,
duration_seconds INT,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds / 60.0) STORED,
records_added INT DEFAULT 0,
records_updated INT DEFAULT 0,
is_incremental BOOLEAN DEFAULT FALSE,
status ENUM('running', 'completed', 'failed', 'cancelled') DEFAULT 'running',
error_message TEXT,
additional_info JSON,
INDEX idx_table_time (table_name, start_time),
INDEX idx_status (status)
);

View File

@@ -152,7 +152,7 @@ CREATE TABLE IF NOT EXISTS category_metrics (
product_count INT DEFAULT 0, product_count INT DEFAULT 0,
active_products INT DEFAULT 0, active_products INT DEFAULT 0,
-- Financial metrics -- Financial metrics
total_value DECIMAL(10,3) DEFAULT 0, total_value DECIMAL(15,3) DEFAULT 0,
avg_margin DECIMAL(5,2), avg_margin DECIMAL(5,2),
turnover_rate DECIMAL(12,3), turnover_rate DECIMAL(12,3),
growth_rate DECIMAL(5,2), growth_rate DECIMAL(5,2),
@@ -193,8 +193,8 @@ CREATE TABLE IF NOT EXISTS category_time_metrics (
product_count INT DEFAULT 0, product_count INT DEFAULT 0,
active_products INT DEFAULT 0, active_products INT DEFAULT 0,
-- Financial metrics -- Financial metrics
total_value DECIMAL(10,3) DEFAULT 0, total_value DECIMAL(15,3) DEFAULT 0,
total_revenue DECIMAL(10,3) DEFAULT 0, total_revenue DECIMAL(15,3) DEFAULT 0,
avg_margin DECIMAL(5,2), avg_margin DECIMAL(5,2),
turnover_rate DECIMAL(12,3), turnover_rate DECIMAL(12,3),
PRIMARY KEY (category_id, year, month), PRIMARY KEY (category_id, year, month),
@@ -228,10 +228,10 @@ CREATE TABLE IF NOT EXISTS brand_metrics (
active_products INT DEFAULT 0, active_products INT DEFAULT 0,
-- Stock metrics -- Stock metrics
total_stock_units INT DEFAULT 0, total_stock_units INT DEFAULT 0,
total_stock_cost DECIMAL(10,2) DEFAULT 0, total_stock_cost DECIMAL(15,2) DEFAULT 0,
total_stock_retail DECIMAL(10,2) DEFAULT 0, total_stock_retail DECIMAL(15,2) DEFAULT 0,
-- Sales metrics -- Sales metrics
total_revenue DECIMAL(10,2) DEFAULT 0, total_revenue DECIMAL(15,2) DEFAULT 0,
avg_margin DECIMAL(5,2) DEFAULT 0, avg_margin DECIMAL(5,2) DEFAULT 0,
growth_rate DECIMAL(5,2) DEFAULT 0, growth_rate DECIMAL(5,2) DEFAULT 0,
PRIMARY KEY (brand), PRIMARY KEY (brand),
@@ -250,10 +250,10 @@ CREATE TABLE IF NOT EXISTS brand_time_metrics (
active_products INT DEFAULT 0, active_products INT DEFAULT 0,
-- Stock metrics -- Stock metrics
total_stock_units INT DEFAULT 0, total_stock_units INT DEFAULT 0,
total_stock_cost DECIMAL(10,2) DEFAULT 0, total_stock_cost DECIMAL(15,2) DEFAULT 0,
total_stock_retail DECIMAL(10,2) DEFAULT 0, total_stock_retail DECIMAL(15,2) DEFAULT 0,
-- Sales metrics -- Sales metrics
total_revenue DECIMAL(10,2) DEFAULT 0, total_revenue DECIMAL(15,2) DEFAULT 0,
avg_margin DECIMAL(5,2) DEFAULT 0, avg_margin DECIMAL(5,2) DEFAULT 0,
PRIMARY KEY (brand, year, month), PRIMARY KEY (brand, year, month),
INDEX idx_brand_date (year, month) INDEX idx_brand_date (year, month)
@@ -287,26 +287,6 @@ CREATE TABLE IF NOT EXISTS category_forecasts (
INDEX idx_category_forecast_last_calculated (last_calculated_at) INDEX idx_category_forecast_last_calculated (last_calculated_at)
); );
-- Create table for sales seasonality factors
CREATE TABLE IF NOT EXISTS sales_seasonality (
month INT NOT NULL,
seasonality_factor DECIMAL(5,3) DEFAULT 0,
last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (month),
CHECK (month BETWEEN 1 AND 12),
CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
);
-- Insert default seasonality factors (neutral)
INSERT INTO sales_seasonality (month, seasonality_factor)
VALUES
(1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
(7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP;
-- Re-enable foreign key checks
SET FOREIGN_KEY_CHECKS = 1;
-- Create view for inventory health -- Create view for inventory health
CREATE OR REPLACE VIEW inventory_health AS CREATE OR REPLACE VIEW inventory_health AS
WITH product_thresholds AS ( WITH product_thresholds AS (
@@ -427,4 +407,24 @@ FROM
LEFT JOIN LEFT JOIN
categories p ON c.parent_id = p.cat_id categories p ON c.parent_id = p.cat_id
LEFT JOIN LEFT JOIN
category_metrics cm ON c.cat_id = cm.category_id; category_metrics cm ON c.cat_id = cm.category_id;
-- Re-enable foreign key checks
SET FOREIGN_KEY_CHECKS = 1;
-- Create table for sales seasonality factors
CREATE TABLE IF NOT EXISTS sales_seasonality (
month INT NOT NULL,
seasonality_factor DECIMAL(5,3) DEFAULT 0,
last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (month),
CHECK (month BETWEEN 1 AND 12),
CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
);
-- Insert default seasonality factors (neutral)
INSERT INTO sales_seasonality (month, seasonality_factor)
VALUES
(1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
(7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
ON DUPLICATE KEY UPDATE last_updated = CURRENT_TIMESTAMP;

View File

@@ -39,7 +39,7 @@ CREATE TABLE products (
tags TEXT, tags TEXT,
moq INT DEFAULT 1, moq INT DEFAULT 1,
uom INT DEFAULT 1, uom INT DEFAULT 1,
rating TINYINT UNSIGNED DEFAULT 0, rating DECIMAL(10,2) DEFAULT 0.00,
reviews INT UNSIGNED DEFAULT 0, reviews INT UNSIGNED DEFAULT 0,
weight DECIMAL(10,3), weight DECIMAL(10,3),
length DECIMAL(10,3), length DECIMAL(10,3),
@@ -52,7 +52,7 @@ CREATE TABLE products (
notifies INT UNSIGNED DEFAULT 0, notifies INT UNSIGNED DEFAULT 0,
date_last_sold DATE, date_last_sold DATE,
PRIMARY KEY (pid), PRIMARY KEY (pid),
UNIQUE KEY unique_sku (SKU), INDEX idx_sku (SKU),
INDEX idx_vendor (vendor), INDEX idx_vendor (vendor),
INDEX idx_brand (brand), INDEX idx_brand (brand),
INDEX idx_location (location), INDEX idx_location (location),
@@ -113,16 +113,19 @@ CREATE TABLE IF NOT EXISTS orders (
tax DECIMAL(10,3) DEFAULT 0.000, tax DECIMAL(10,3) DEFAULT 0.000,
tax_included TINYINT(1) DEFAULT 0, tax_included TINYINT(1) DEFAULT 0,
shipping DECIMAL(10,3) DEFAULT 0.000, shipping DECIMAL(10,3) DEFAULT 0.000,
costeach DECIMAL(10,3) DEFAULT 0.000,
customer VARCHAR(50) NOT NULL, customer VARCHAR(50) NOT NULL,
customer_name VARCHAR(100), customer_name VARCHAR(100),
status VARCHAR(20) DEFAULT 'pending', status VARCHAR(20) DEFAULT 'pending',
canceled TINYINT(1) DEFAULT 0, canceled TINYINT(1) DEFAULT 0,
PRIMARY KEY (id), PRIMARY KEY (id),
UNIQUE KEY unique_order_line (order_number, pid),
KEY order_number (order_number), KEY order_number (order_number),
KEY pid (pid), KEY pid (pid),
KEY customer (customer), KEY customer (customer),
KEY date (date), KEY date (date),
KEY status (status) KEY status (status),
INDEX idx_orders_metrics (pid, date, canceled)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Create purchase_orders table with its indexes -- Create purchase_orders table with its indexes
@@ -134,7 +137,9 @@ CREATE TABLE purchase_orders (
expected_date DATE, expected_date DATE,
pid BIGINT NOT NULL, pid BIGINT NOT NULL,
sku VARCHAR(50) NOT NULL, sku VARCHAR(50) NOT NULL,
name VARCHAR(100) NOT NULL COMMENT 'Product name from products.description',
cost_price DECIMAL(10, 3) NOT NULL, cost_price DECIMAL(10, 3) NOT NULL,
po_cost_price DECIMAL(10, 3) NOT NULL COMMENT 'Original cost from PO, before receiving adjustments',
status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done', status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done',
receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid', receiving_status TINYINT UNSIGNED DEFAULT 1 COMMENT '0=canceled,1=created,30=partial_received,40=full_received,50=paid',
notes TEXT, notes TEXT,
@@ -146,12 +151,12 @@ CREATE TABLE purchase_orders (
received_by INT, received_by INT,
receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag', receiving_history JSON COMMENT 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag',
FOREIGN KEY (pid) REFERENCES products(pid), FOREIGN KEY (pid) REFERENCES products(pid),
FOREIGN KEY (sku) REFERENCES products(SKU),
INDEX idx_po_id (po_id), INDEX idx_po_id (po_id),
INDEX idx_vendor (vendor), INDEX idx_vendor (vendor),
INDEX idx_status (status), INDEX idx_status (status),
INDEX idx_receiving_status (receiving_status), INDEX idx_receiving_status (receiving_status),
INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received), INDEX idx_purchase_orders_metrics (pid, date, status, ordered, received),
INDEX idx_po_metrics (pid, date, receiving_status, received_date),
INDEX idx_po_product_date (pid, date), INDEX idx_po_product_date (pid, date),
INDEX idx_po_product_status (pid, status), INDEX idx_po_product_status (pid, status),
UNIQUE KEY unique_po_product (po_id, pid) UNIQUE KEY unique_po_product (po_id, pid)

View File

@@ -5,6 +5,16 @@ process.chdir(path.dirname(__filename));
require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') }); require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });
// Configuration flags for controlling which metrics to calculate
// Set to 1 to skip the corresponding calculation, 0 to run it
const SKIP_PRODUCT_METRICS = 1; // Skip all product metrics
const SKIP_TIME_AGGREGATES = 1; // Skip time aggregates
const SKIP_FINANCIAL_METRICS = 1; // Skip financial metrics
const SKIP_VENDOR_METRICS = 1; // Skip vendor metrics
const SKIP_CATEGORY_METRICS = 1; // Skip category metrics
const SKIP_BRAND_METRICS = 1; // Skip brand metrics
const SKIP_SALES_FORECASTS = 1; // Skip sales forecasts
// Add error handler for uncaught exceptions // Add error handler for uncaught exceptions
process.on('uncaughtException', (error) => { process.on('uncaughtException', (error) => {
console.error('Uncaught Exception:', error); console.error('Uncaught Exception:', error);
@@ -43,9 +53,6 @@ const calculateCategoryMetrics = require('./metrics/category-metrics');
const calculateBrandMetrics = require('./metrics/brand-metrics'); const calculateBrandMetrics = require('./metrics/brand-metrics');
const calculateSalesForecasts = require('./metrics/sales-forecasts'); const calculateSalesForecasts = require('./metrics/sales-forecasts');
// Set to 1 to skip product metrics and only calculate the remaining metrics
const SKIP_PRODUCT_METRICS = 1;
// Add cancel handler // Add cancel handler
let isCancelled = false; let isCancelled = false;
@@ -137,60 +144,199 @@ async function calculateMetrics() {
} }
// Calculate time-based aggregates // Calculate time-based aggregates
processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount); if (!SKIP_TIME_AGGREGATES) {
processedCount = await calculateTimeAggregates(startTime, totalProducts, processedCount);
} else {
console.log('Skipping time aggregates calculation');
}
// Calculate financial metrics // Calculate financial metrics
processedCount = await calculateFinancialMetrics(startTime, totalProducts, processedCount); if (!SKIP_FINANCIAL_METRICS) {
processedCount = await calculateFinancialMetrics(startTime, totalProducts, processedCount);
} else {
console.log('Skipping financial metrics calculation');
}
// Calculate vendor metrics // Calculate vendor metrics
processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount); if (!SKIP_VENDOR_METRICS) {
processedCount = await calculateVendorMetrics(startTime, totalProducts, processedCount);
} else {
console.log('Skipping vendor metrics calculation');
}
// Calculate category metrics // Calculate category metrics
processedCount = await calculateCategoryMetrics(startTime, totalProducts, processedCount); if (!SKIP_CATEGORY_METRICS) {
processedCount = await calculateCategoryMetrics(startTime, totalProducts, processedCount);
} else {
console.log('Skipping category metrics calculation');
}
// Calculate brand metrics // Calculate brand metrics
processedCount = await calculateBrandMetrics(startTime, totalProducts, processedCount); if (!SKIP_BRAND_METRICS) {
processedCount = await calculateBrandMetrics(startTime, totalProducts, processedCount);
} else {
console.log('Skipping brand metrics calculation');
}
// Calculate sales forecasts // Calculate sales forecasts
processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount); if (!SKIP_SALES_FORECASTS) {
processedCount = await calculateSalesForecasts(startTime, totalProducts, processedCount);
} else {
console.log('Skipping sales forecasts calculation');
}
// Calculate ABC classification // Calculate ABC classification
outputProgress({
status: 'running',
operation: 'Starting ABC classification',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1'); const [abcConfig] = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 }; const abcThresholds = abcConfig[0] || { a_threshold: 20, b_threshold: 50 };
// First, create and populate the rankings table with an index
await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');
await connection.query(` await connection.query(`
WITH revenue_rankings AS ( CREATE TEMPORARY TABLE temp_revenue_ranks (
SELECT pid BIGINT NOT NULL,
product_id, total_revenue DECIMAL(10,3),
total_revenue, rank_num INT,
PERCENT_RANK() OVER (ORDER BY COALESCE(total_revenue, 0) DESC) * 100 as revenue_percentile total_count INT,
FROM product_metrics PRIMARY KEY (pid),
), INDEX (rank_num)
classification_update AS ( ) ENGINE=MEMORY
SELECT `);
product_id,
outputProgress({
status: 'running',
operation: 'Creating revenue rankings',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
await connection.query(`
INSERT INTO temp_revenue_ranks
SELECT
pid,
total_revenue,
@rank := @rank + 1 as rank_num,
@total_count := @rank as total_count
FROM (
SELECT pid, total_revenue
FROM product_metrics
WHERE total_revenue > 0
ORDER BY total_revenue DESC
) ranked,
(SELECT @rank := 0) r
`);
// Get total count for percentage calculation
const [rankingCount] = await connection.query('SELECT MAX(rank_num) as total_count FROM temp_revenue_ranks');
const totalCount = rankingCount[0].total_count || 1;
outputProgress({
status: 'running',
operation: 'Updating ABC classifications',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Process updates in batches
let abcProcessedCount = 0;
const batchSize = 5000;
while (true) {
if (isCancelled) return processedCount;
// First get a batch of PIDs that need updating
const [pids] = await connection.query(`
SELECT pm.pid
FROM product_metrics pm
LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
WHERE pm.abc_class IS NULL
OR pm.abc_class !=
CASE CASE
WHEN revenue_percentile <= ? THEN 'A' WHEN tr.rank_num IS NULL THEN 'C'
WHEN revenue_percentile <= ? THEN 'B' WHEN (tr.rank_num / ?) * 100 <= ? THEN 'A'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'B'
ELSE 'C' ELSE 'C'
END as abc_class END
FROM revenue_rankings LIMIT ?
) `, [totalCount, abcThresholds.a_threshold,
UPDATE product_metrics pm totalCount, abcThresholds.b_threshold,
JOIN classification_update cu ON pm.product_id = cu.product_id batchSize]);
SET pm.abc_class = cu.abc_class,
pm.last_calculated_at = NOW() if (pids.length === 0) {
`, [abcThresholds.a_threshold, abcThresholds.b_threshold]); break;
}
// Then update just those PIDs
const [result] = await connection.query(`
UPDATE product_metrics pm
LEFT JOIN temp_revenue_ranks tr ON pm.pid = tr.pid
SET pm.abc_class =
CASE
WHEN tr.rank_num IS NULL THEN 'C'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'A'
WHEN (tr.rank_num / ?) * 100 <= ? THEN 'B'
ELSE 'C'
END,
pm.last_calculated_at = NOW()
WHERE pm.pid IN (?)
`, [totalCount, abcThresholds.a_threshold,
totalCount, abcThresholds.b_threshold,
pids.map(row => row.pid)]);
abcProcessedCount += result.affectedRows;
processedCount = Math.floor(totalProducts * (0.99 + (abcProcessedCount / totalCount) * 0.01));
outputProgress({
status: 'running',
operation: 'ABC classification progress',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
// Small delay between batches to allow other transactions
await new Promise(resolve => setTimeout(resolve, 100));
}
// Clean up
await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_revenue_ranks');
// Final success message // Final success message
global.outputProgress({ outputProgress({
status: 'complete', status: 'complete',
operation: 'Metrics calculation complete', operation: 'Metrics calculation complete',
current: totalProducts, current: totalProducts,
total: totalProducts, total: totalProducts,
elapsed: global.formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
remaining: '0s', remaining: '0s',
rate: global.calculateRate(startTime, totalProducts), rate: calculateRate(startTime, totalProducts),
percentage: '100' percentage: '100'
}); });

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,182 @@
const { outputProgress, formatElapsedTime } = require('../metrics/utils/progress');
/**
 * Imports product categories from production into the local database.
 *
 * Categories are processed in a fixed type order (top-level types 10/20
 * first, then child types 11/21/12/13) so parents exist before their
 * children are inserted. Children whose parent is missing locally are
 * skipped; if any were skipped, an error carrying the skipped list is
 * thrown after all types have been processed.
 *
 * @param {object} prodConnection  MySQL connection to the production DB.
 * @param {object} localConnection MySQL connection to the local DB.
 * @returns {Promise<{status: string, totalImported: number}>}
 * @throws {Error} on query failure, or (with a `skippedCategories` array
 *                 attached) when some categories had missing parents.
 */
async function importCategories(prodConnection, localConnection) {
  outputProgress({
    operation: "Starting categories import",
    status: "running",
  });

  const startTime = Date.now();
  // Parents must be imported before children: 10/20 are top level,
  // 11/21 hang off them, 12/13 are deeper levels.
  const typeOrder = [10, 20, 11, 21, 12, 13];
  let totalInserted = 0;
  let skippedCategories = [];

  try {
    // Process each type in order with its own query
    for (const type of typeOrder) {
      const [categories] = await prodConnection.query(
        `
        SELECT
          pc.cat_id,
          pc.name,
          pc.type,
          CASE
            WHEN pc.type IN (10, 20) THEN NULL -- Top level categories should have no parent
            WHEN pc.master_cat_id IS NULL THEN NULL
            ELSE pc.master_cat_id
          END as parent_id,
          pc.combined_name as description
        FROM product_categories pc
        WHERE pc.type = ?
        ORDER BY pc.cat_id
        `,
        [type]
      );

      if (categories.length === 0) continue;

      console.log(`\nProcessing ${categories.length} type ${type} categories`);
      if (type === 10) {
        console.log("Type 10 categories:", JSON.stringify(categories, null, 2));
      }

      // For types that can have parents (11, 21, 12, 13), verify parent existence
      let categoriesToInsert = categories;
      if (![10, 20].includes(type)) {
        // Get all distinct, non-null parent IDs
        const parentIds = [
          ...new Set(
            categories.map((c) => c.parent_id).filter((id) => id !== null)
          ),
        ];

        // Check which parents exist locally. Guard the empty-array case:
        // mysql2 expands "IN (?)" with [] to the invalid SQL "IN ()", so
        // only run the query when there is at least one parent to check.
        let existingParentIds = new Set();
        if (parentIds.length > 0) {
          const [existingParents] = await localConnection.query(
            "SELECT cat_id FROM categories WHERE cat_id IN (?)",
            [parentIds]
          );
          existingParentIds = new Set(existingParents.map((p) => p.cat_id));
        }

        // Filter categories and track skipped ones
        categoriesToInsert = categories.filter(
          (cat) =>
            cat.parent_id === null || existingParentIds.has(cat.parent_id)
        );
        const invalidCategories = categories.filter(
          (cat) =>
            cat.parent_id !== null && !existingParentIds.has(cat.parent_id)
        );

        if (invalidCategories.length > 0) {
          const skippedInfo = invalidCategories.map((c) => ({
            id: c.cat_id,
            name: c.name,
            type: c.type,
            missing_parent: c.parent_id,
          }));
          skippedCategories.push(...skippedInfo);
          console.log(
            "\nSkipping categories with missing parents:",
            invalidCategories
              .map(
                (c) =>
                  `${c.cat_id} - ${c.name} (missing parent: ${c.parent_id})`
              )
              .join("\n")
          );
        }

        if (categoriesToInsert.length === 0) {
          console.log(
            `No valid categories of type ${type} to insert - all had missing parents`
          );
          continue;
        }
      }

      console.log(
        `Inserting ${categoriesToInsert.length} type ${type} categories`
      );

      const placeholders = categoriesToInsert
        .map(() => "(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)")
        .join(",");
      const values = categoriesToInsert.flatMap((cat) => [
        cat.cat_id,
        cat.name,
        cat.type,
        cat.parent_id,
        cat.description,
        "active",
      ]);

      // Upsert so re-running the import refreshes existing rows in place
      await localConnection.query(
        `
        INSERT INTO categories (cat_id, name, type, parent_id, description, status, created_at, updated_at)
        VALUES ${placeholders}
        ON DUPLICATE KEY UPDATE
          name = VALUES(name),
          type = VALUES(type),
          parent_id = VALUES(parent_id),
          description = VALUES(description),
          status = VALUES(status),
          updated_at = CURRENT_TIMESTAMP
        `,
        values
      );

      totalInserted += categoriesToInsert.length;
      outputProgress({
        status: "running",
        operation: "Categories import",
        current: totalInserted,
        total: totalInserted,
        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
      });
    }

    // After all imports, if we skipped any categories, throw an error
    if (skippedCategories.length > 0) {
      const error = new Error(
        "Categories import completed with errors - some categories were skipped due to missing parents"
      );
      error.skippedCategories = skippedCategories;
      throw error;
    }

    outputProgress({
      status: "complete",
      operation: "Categories import completed",
      current: totalInserted,
      total: totalInserted,
      duration: formatElapsedTime((Date.now() - startTime) / 1000),
    });

    return {
      status: "complete",
      totalImported: totalInserted
    };
  } catch (error) {
    console.error("Error importing categories:", error);
    if (error.skippedCategories) {
      console.error(
        "Skipped categories:",
        JSON.stringify(error.skippedCategories, null, 2)
      );
    }
    outputProgress({
      status: "error",
      operation: "Categories import failed",
      error: error.message,
      skippedCategories: error.skippedCategories
    });
    throw error;
  }
}
module.exports = importCategories;

View File

@@ -0,0 +1,568 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
const { importMissingProducts, setupTemporaryTables, cleanupTemporaryTables, materializeCalculations } = require('./products');
/**
* Imports orders from a production MySQL database to a local MySQL database.
* It can run in two modes:
* 1. Incremental update mode (default): Only fetch orders that have changed since the last sync time.
* 2. Full update mode: Fetch all eligible orders within the last 5 years regardless of timestamp.
*
* @param {object} prodConnection - A MySQL connection to production DB (MySQL 5.7).
* @param {object} localConnection - A MySQL connection to local DB (MySQL 8.0).
* @param {boolean} incrementalUpdate - Set to false for a full sync; true for incremental.
*
* @returns {object} Information about the sync operation.
*/
async function importOrders(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();
  const skippedOrders = new Set();
  const missingProducts = new Set();
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let processedCount = 0;
  let importedCount = 0;
  let totalOrderItems = 0;
  let totalUniqueOrders = 0;
  // Cumulative counter of unique orders processed across all final-phase batches
  let cumulativeProcessedOrders = 0;

  try {
    // Staging tables for the pieces of order data gathered from production.
    // These MUST be TEMPORARY: they are scratch space private to this
    // connection, and the cleanup below uses DROP TEMPORARY TABLE, which
    // does not remove regular tables. The previous CREATE TABLE (without
    // TEMPORARY) left persistent tables behind, so stale rows from earlier
    // runs leaked into later joins. TEMPORARY also matches the products
    // importer's convention.
    await localConnection.query(`
      CREATE TEMPORARY TABLE IF NOT EXISTS temp_order_items (
        order_id INT UNSIGNED NOT NULL,
        pid INT UNSIGNED NOT NULL,
        SKU VARCHAR(50) NOT NULL,
        price DECIMAL(10,2) NOT NULL,
        quantity INT NOT NULL,
        base_discount DECIMAL(10,2) DEFAULT 0,
        PRIMARY KEY (order_id, pid)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
    `);
    await localConnection.query(`
      CREATE TEMPORARY TABLE IF NOT EXISTS temp_order_meta (
        order_id INT UNSIGNED NOT NULL,
        date DATE NOT NULL,
        customer VARCHAR(100) NOT NULL,
        customer_name VARCHAR(150) NOT NULL,
        status INT,
        canceled TINYINT(1),
        PRIMARY KEY (order_id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
    `);
    await localConnection.query(`
      CREATE TEMPORARY TABLE IF NOT EXISTS temp_order_discounts (
        order_id INT UNSIGNED NOT NULL,
        pid INT UNSIGNED NOT NULL,
        discount DECIMAL(10,2) NOT NULL,
        PRIMARY KEY (order_id, pid)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
    `);
    await localConnection.query(`
      CREATE TEMPORARY TABLE IF NOT EXISTS temp_order_taxes (
        order_id INT UNSIGNED NOT NULL,
        pid INT UNSIGNED NOT NULL,
        tax DECIMAL(10,2) NOT NULL,
        PRIMARY KEY (order_id, pid)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
    `);
    await localConnection.query(`
      CREATE TEMPORARY TABLE IF NOT EXISTS temp_order_costs (
        order_id INT UNSIGNED NOT NULL,
        pid INT UNSIGNED NOT NULL,
        costeach DECIMAL(10,3) DEFAULT 0.000,
        PRIMARY KEY (order_id, pid)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
    `);

    // Discover the local orders table's columns so the final INSERT can be
    // built dynamically in ordinal position order.
    const [columns] = await localConnection.query(`
      SELECT COLUMN_NAME
      FROM INFORMATION_SCHEMA.COLUMNS
      WHERE TABLE_NAME = 'orders'
      ORDER BY ORDINAL_POSITION
    `);
    const columnNames = columns.map(col => col.COLUMN_NAME);

    // Get last sync info; fall back to the epoch for a first-ever sync
    const [syncInfo] = await localConnection.query(
      "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'orders'"
    );
    const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
    console.log('Orders: Using last sync time:', lastSyncTime);

    // First get count of order items (1-year window incremental, 5-year full)
    const [[{ total }]] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM order_items oi
      USE INDEX (PRIMARY)
      JOIN _order o ON oi.order_id = o.order_id
      WHERE o.order_status >= 15
      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
      AND o.date_placed_onlydate IS NOT NULL
      ${incrementalUpdate ? `
      AND (
        o.stamp > ?
        OR oi.stamp > ?
        OR EXISTS (
          SELECT 1 FROM order_discount_items odi
          WHERE odi.order_id = o.order_id
          AND odi.pid = oi.prod_pid
        )
        OR EXISTS (
          SELECT 1 FROM order_tax_info oti
          JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
          WHERE oti.order_id = o.order_id
          AND otip.pid = oi.prod_pid
          AND oti.stamp > ?
        )
      )
      ` : ''}
    `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);
    totalOrderItems = total;
    console.log('Orders: Found changes:', totalOrderItems);

    // Fetch the changed order items (same predicate as the count above)
    const [orderItems] = await prodConnection.query(`
      SELECT
        oi.order_id,
        oi.prod_pid as pid,
        oi.prod_itemnumber as SKU,
        oi.prod_price as price,
        oi.qty_ordered as quantity,
        COALESCE(oi.prod_price_reg - oi.prod_price, 0) as base_discount,
        oi.stamp as last_modified
      FROM order_items oi
      USE INDEX (PRIMARY)
      JOIN _order o ON oi.order_id = o.order_id
      WHERE o.order_status >= 15
      AND o.date_placed_onlydate >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
      AND o.date_placed_onlydate IS NOT NULL
      ${incrementalUpdate ? `
      AND (
        o.stamp > ?
        OR oi.stamp > ?
        OR EXISTS (
          SELECT 1 FROM order_discount_items odi
          WHERE odi.order_id = o.order_id
          AND odi.pid = oi.prod_pid
        )
        OR EXISTS (
          SELECT 1 FROM order_tax_info oti
          JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
          WHERE oti.order_id = o.order_id
          AND otip.pid = oi.prod_pid
          AND oti.stamp > ?
        )
      )
      ` : ''}
    `, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime] : []);

    console.log('Orders: Processing', orderItems.length, 'order items');

    // Stage order items in batches of 5000
    for (let i = 0; i < orderItems.length; i += 5000) {
      const batch = orderItems.slice(i, Math.min(i + 5000, orderItems.length));
      const placeholders = batch.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
      const values = batch.flatMap(item => [
        item.order_id, item.pid, item.SKU, item.price, item.quantity, item.base_discount
      ]);
      await localConnection.query(`
        INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
        VALUES ${placeholders}
        ON DUPLICATE KEY UPDATE
          SKU = VALUES(SKU),
          price = VALUES(price),
          quantity = VALUES(quantity),
          base_discount = VALUES(base_discount)
      `, values);

      processedCount = i + batch.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order items: ${processedCount} of ${totalOrderItems}`,
        current: processedCount,
        total: totalOrderItems
      });
    }

    // Get unique order IDs
    const orderIds = [...new Set(orderItems.map(item => item.order_id))];
    totalUniqueOrders = orderIds.length;
    console.log('Total unique order IDs:', totalUniqueOrders);

    // Reset processed count for order processing phase
    processedCount = 0;

    // Stage order metadata in batches
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);
      console.log(`Processing batch ${i/5000 + 1}, size: ${batchIds.length}`);
      console.log('Sample of batch IDs:', batchIds.slice(0, 5));

      const [orders] = await prodConnection.query(`
        SELECT
          o.order_id,
          o.date_placed_onlydate as date,
          o.order_cid as customer,
          CONCAT(COALESCE(u.firstname, ''), ' ', COALESCE(u.lastname, '')) as customer_name,
          o.order_status as status,
          CASE WHEN o.date_cancelled != '0000-00-00 00:00:00' THEN 1 ELSE 0 END as canceled
        FROM _order o
        LEFT JOIN users u ON o.order_cid = u.cid
        WHERE o.order_id IN (?)
      `, [batchIds]);

      console.log(`Retrieved ${orders.length} orders for ${batchIds.length} IDs`);
      const duplicates = orders.filter((order, index, self) =>
        self.findIndex(o => o.order_id === order.order_id) !== index
      );
      if (duplicates.length > 0) {
        console.log('Found duplicates:', duplicates);
      }

      const placeholders = orders.map(() => "(?, ?, ?, ?, ?, ?)").join(",");
      const values = orders.flatMap(order => [
        order.order_id, order.date, order.customer, order.customer_name, order.status, order.canceled
      ]);
      await localConnection.query(`
        INSERT INTO temp_order_meta VALUES ${placeholders}
        ON DUPLICATE KEY UPDATE
          date = VALUES(date),
          customer = VALUES(customer),
          customer_name = VALUES(customer_name),
          status = VALUES(status),
          canceled = VALUES(canceled)
      `, values);

      processedCount = i + orders.length;
      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Loading order metadata: ${processedCount} of ${totalUniqueOrders}`,
        current: processedCount,
        total: totalUniqueOrders
      });
    }

    // Reset processed count for final phase
    processedCount = 0;

    // Stage promotional discounts in batches
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);
      const [discounts] = await prodConnection.query(`
        SELECT order_id, pid, SUM(amount) as discount
        FROM order_discount_items
        WHERE order_id IN (?)
        GROUP BY order_id, pid
      `, [batchIds]);

      if (discounts.length > 0) {
        const placeholders = discounts.map(() => "(?, ?, ?)").join(",");
        const values = discounts.flatMap(d => [d.order_id, d.pid, d.discount]);
        await localConnection.query(`
          INSERT INTO temp_order_discounts VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE
            discount = VALUES(discount)
        `, values);
      }
    }

    // Stage tax information in batches; only the latest tax record
    // (MAX(stamp)) per order is used
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);
      const [taxes] = await prodConnection.query(`
        SELECT DISTINCT
          oti.order_id,
          otip.pid,
          otip.item_taxes_to_collect as tax
        FROM order_tax_info oti
        JOIN (
          SELECT order_id, MAX(stamp) as max_stamp
          FROM order_tax_info
          WHERE order_id IN (?)
          GROUP BY order_id
        ) latest ON oti.order_id = latest.order_id AND oti.stamp = latest.max_stamp
        JOIN order_tax_info_products otip ON oti.taxinfo_id = otip.taxinfo_id
      `, [batchIds]);

      if (taxes.length > 0) {
        // Remove any duplicates before inserting (last one wins per order+pid)
        const uniqueTaxes = new Map();
        taxes.forEach(t => {
          const key = `${t.order_id}-${t.pid}`;
          uniqueTaxes.set(key, t);
        });
        const values = Array.from(uniqueTaxes.values()).flatMap(t => [t.order_id, t.pid, t.tax]);
        if (values.length > 0) {
          const placeholders = Array(uniqueTaxes.size).fill("(?, ?, ?)").join(",");
          await localConnection.query(`
            INSERT INTO temp_order_taxes VALUES ${placeholders}
            ON DUPLICATE KEY UPDATE tax = VALUES(tax)
          `, values);
        }
      }
    }

    // Stage costeach values in batches
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);
      const [costs] = await prodConnection.query(`
        SELECT orderid as order_id, pid, costeach
        FROM order_costs
        WHERE orderid IN (?)
      `, [batchIds]);

      if (costs.length > 0) {
        const placeholders = costs.map(() => '(?, ?, ?)').join(",");
        const values = costs.flatMap(c => [c.order_id, c.pid, c.costeach]);
        await localConnection.query(`
          INSERT INTO temp_order_costs (order_id, pid, costeach)
          VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE costeach = VALUES(costeach)
        `, values);
      }
    }

    // Now combine all the data and insert into orders table.
    // Pre-check all products at once instead of per batch; the ternary
    // guards the empty-array case ("IN ()" is invalid SQL).
    const allOrderPids = [...new Set(orderItems.map(item => item.pid))];
    const [existingProducts] = allOrderPids.length > 0 ? await localConnection.query(
      "SELECT pid FROM products WHERE pid IN (?)",
      [allOrderPids]
    ) : [[]];
    const existingPids = new Set(existingProducts.map(p => p.pid));

    // Process in larger batches
    for (let i = 0; i < orderIds.length; i += 5000) {
      const batchIds = orderIds.slice(i, i + 5000);

      // Get combined data for this batch
      const [orders] = await localConnection.query(`
        SELECT
          oi.order_id as order_number,
          oi.pid,
          oi.SKU,
          om.date,
          oi.price,
          oi.quantity,
          oi.base_discount + COALESCE(od.discount, 0) as discount,
          COALESCE(ot.tax, 0) as tax,
          0 as tax_included,
          0 as shipping,
          om.customer,
          om.customer_name,
          om.status,
          om.canceled,
          COALESCE(tc.costeach, 0) as costeach
        FROM temp_order_items oi
        JOIN temp_order_meta om ON oi.order_id = om.order_id
        LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
        LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
        LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
        WHERE oi.order_id IN (?)
      `, [batchIds]);

      // Filter orders and track missing products - do this in a single pass
      const validOrders = [];
      const values = [];
      const processedOrderItems = new Set(); // Track unique order items
      const processedOrders = new Set(); // Track unique orders

      for (const order of orders) {
        if (!existingPids.has(order.pid)) {
          missingProducts.add(order.pid);
          skippedOrders.add(order.order_number);
          continue;
        }
        validOrders.push(order);
        values.push(...columnNames.map(col => order[col] ?? null));
        processedOrderItems.add(`${order.order_number}-${order.pid}`);
        processedOrders.add(order.order_number);
      }

      if (validOrders.length > 0) {
        // Pre-compute the placeholders string once
        const singlePlaceholder = `(${columnNames.map(() => "?").join(",")})`;
        const placeholders = Array(validOrders.length).fill(singlePlaceholder).join(",");

        const result = await localConnection.query(`
          INSERT INTO orders (${columnNames.join(",")})
          VALUES ${placeholders}
          ON DUPLICATE KEY UPDATE
            SKU = VALUES(SKU),
            date = VALUES(date),
            price = VALUES(price),
            quantity = VALUES(quantity),
            discount = VALUES(discount),
            tax = VALUES(tax),
            tax_included = VALUES(tax_included),
            shipping = VALUES(shipping),
            customer = VALUES(customer),
            customer_name = VALUES(customer_name),
            status = VALUES(status),
            canceled = VALUES(canceled),
            costeach = VALUES(costeach)
        `, validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat());

        // With ON DUPLICATE KEY UPDATE, MySQL reports affectedRows as
        // 1 per new row and 2 per updated row — split the total accordingly.
        const affectedRows = result[0].affectedRows;
        const updates = Math.floor(affectedRows / 2);
        const inserts = affectedRows - (updates * 2);

        recordsAdded += inserts;
        recordsUpdated += updates;
        importedCount += processedOrderItems.size; // Count unique order items processed
      }

      // Update progress based on unique orders processed
      cumulativeProcessedOrders += processedOrders.size;

      outputProgress({
        status: "running",
        operation: "Orders import",
        message: `Imported ${importedCount} order items (${cumulativeProcessedOrders} of ${totalUniqueOrders} orders processed)`,
        current: cumulativeProcessedOrders,
        total: totalUniqueOrders,
        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
        remaining: estimateRemaining(startTime, cumulativeProcessedOrders, totalUniqueOrders),
        rate: calculateRate(startTime, cumulativeProcessedOrders)
      });
    }

    // Now try to import any orders that were skipped due to missing products
    // (the products importer may have added them since the first pass)
    if (skippedOrders.size > 0) {
      try {
        outputProgress({
          status: "running",
          operation: "Orders import",
          message: `Retrying import of ${skippedOrders.size} orders with previously missing products`,
        });

        // Get the orders that were skipped
        const [skippedProdOrders] = await localConnection.query(`
          SELECT DISTINCT
            oi.order_id as order_number,
            oi.pid,
            oi.SKU,
            om.date,
            oi.price,
            oi.quantity,
            oi.base_discount + COALESCE(od.discount, 0) as discount,
            COALESCE(ot.tax, 0) as tax,
            0 as tax_included,
            0 as shipping,
            om.customer,
            om.customer_name,
            om.status,
            om.canceled,
            COALESCE(tc.costeach, 0) as costeach
          FROM temp_order_items oi
          JOIN temp_order_meta om ON oi.order_id = om.order_id
          LEFT JOIN temp_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
          LEFT JOIN temp_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
          LEFT JOIN temp_order_costs tc ON oi.order_id = tc.order_id AND oi.pid = tc.pid
          WHERE oi.order_id IN (?)
        `, [Array.from(skippedOrders)]);

        // Check which products exist now
        const skippedPids = [...new Set(skippedProdOrders.map(o => o.pid))];
        const [existingProducts] = skippedPids.length > 0 ? await localConnection.query(
          "SELECT pid FROM products WHERE pid IN (?)",
          [skippedPids]
        ) : [[]];
        const existingPids = new Set(existingProducts.map(p => p.pid));

        // Filter orders that can now be imported
        const validOrders = skippedProdOrders.filter(order => existingPids.has(order.pid));
        const retryOrderItems = new Set(); // Track unique order items in retry

        if (validOrders.length > 0) {
          const placeholders = validOrders.map(() => `(${columnNames.map(() => "?").join(", ")})`).join(",");
          const values = validOrders.map(o => columnNames.map(col => o[col] ?? null)).flat();

          const result = await localConnection.query(`
            INSERT INTO orders (${columnNames.join(", ")})
            VALUES ${placeholders}
            ON DUPLICATE KEY UPDATE
              SKU = VALUES(SKU),
              date = VALUES(date),
              price = VALUES(price),
              quantity = VALUES(quantity),
              discount = VALUES(discount),
              tax = VALUES(tax),
              tax_included = VALUES(tax_included),
              shipping = VALUES(shipping),
              customer = VALUES(customer),
              customer_name = VALUES(customer_name),
              status = VALUES(status),
              canceled = VALUES(canceled),
              costeach = VALUES(costeach)
          `, values);

          const affectedRows = result[0].affectedRows;
          const updates = Math.floor(affectedRows / 2);
          const inserts = affectedRows - (updates * 2);

          // Track unique order items
          validOrders.forEach(order => {
            retryOrderItems.add(`${order.order_number}-${order.pid}`);
          });

          outputProgress({
            status: "running",
            operation: "Orders import",
            message: `Successfully imported ${retryOrderItems.size} previously skipped order items`,
          });

          // Update the main counters
          recordsAdded += inserts;
          recordsUpdated += updates;
          importedCount += retryOrderItems.size;
        }
      } catch (error) {
        // Best-effort retry: failure here must not abort the whole import
        console.warn('Warning: Failed to retry skipped orders:', error.message);
        console.warn(`Skipped ${skippedOrders.size} orders due to ${missingProducts.size} missing products`);
      }
    }

    // Clean up staging tables after ALL processing is complete.
    // One statement per query so this works regardless of the connection's
    // multipleStatements setting.
    for (const table of [
      'temp_order_items',
      'temp_order_meta',
      'temp_order_discounts',
      'temp_order_taxes',
      'temp_order_costs'
    ]) {
      await localConnection.query(`DROP TEMPORARY TABLE IF EXISTS ${table};`);
    }

    // Only update sync status if we get here (no errors thrown)
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('orders', NOW())
      ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
    `);

    return {
      status: "complete",
      totalImported: Math.floor(importedCount),
      recordsAdded: recordsAdded || 0,
      recordsUpdated: Math.floor(recordsUpdated),
      totalSkipped: skippedOrders.size,
      missingProducts: missingProducts.size,
      incrementalUpdate,
      lastSyncTime
    };
  } catch (error) {
    console.error("Error during orders import:", error);
    throw error;
  }
}
module.exports = importOrders;

View File

@@ -0,0 +1,739 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
// Utility functions

// Root of the product image CDN; "0000" is a fixed path segment.
const imageUrlBase = 'https://sbing.com/i/products/0000/';

/**
 * Builds the thumbnail, 175x175 and full-size image URLs for a product.
 *
 * @param {number|string} pid - Product id.
 * @param {number} [iid=1]    - Image index within the product's gallery.
 * @returns {{image: string, image_175: string, image_full: string}}
 */
const getImageUrls = (pid, iid = 1) => {
  // Zero-pad the pid to six digits and use only the first three characters
  // as the directory shard; the filename itself uses the unpadded pid.
  const shard = pid.toString().padStart(6, '0').slice(0, 3);
  const basePath = `${imageUrlBase}${shard}/${pid}`;
  return {
    image: `${basePath}-t-${iid}.jpg`,
    image_175: `${basePath}-175x175-${iid}.jpg`,
    image_full: `${basePath}-o-${iid}.jpg`,
  };
};
async function setupAndCleanupTempTables(connection, operation = 'setup') {
if (operation === 'setup') {
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_products (
pid BIGINT NOT NULL,
title VARCHAR(255),
description TEXT,
SKU VARCHAR(50),
stock_quantity INT DEFAULT 0,
pending_qty INT DEFAULT 0,
preorder_count INT DEFAULT 0,
notions_inv_count INT DEFAULT 0,
price DECIMAL(10,3) NOT NULL DEFAULT 0,
regular_price DECIMAL(10,3) NOT NULL DEFAULT 0,
cost_price DECIMAL(10,3),
vendor VARCHAR(100),
vendor_reference VARCHAR(100),
notions_reference VARCHAR(100),
brand VARCHAR(100),
line VARCHAR(100),
subline VARCHAR(100),
artist VARCHAR(100),
category_ids TEXT,
created_at DATETIME,
first_received DATETIME,
landing_cost_price DECIMAL(10,3),
barcode VARCHAR(50),
harmonized_tariff_code VARCHAR(50),
updated_at DATETIME,
visible BOOLEAN,
replenishable BOOLEAN,
permalink VARCHAR(255),
moq DECIMAL(10,3),
rating DECIMAL(10,2),
reviews INT,
weight DECIMAL(10,3),
length DECIMAL(10,3),
width DECIMAL(10,3),
height DECIMAL(10,3),
country_of_origin VARCHAR(100),
location VARCHAR(100),
total_sold INT,
baskets INT,
notifies INT,
date_last_sold DATETIME,
needs_update BOOLEAN DEFAULT TRUE,
PRIMARY KEY (pid),
INDEX idx_needs_update (needs_update)
) ENGINE=InnoDB;
`);
} else {
await connection.query('DROP TEMPORARY TABLE IF EXISTS temp_products;');
}
}
/**
 * Pull product rows from the production DB and stage them into the local
 * temp_products temporary table (created by setupAndCleanupTempTables).
 * Every staged row is flagged needs_update = TRUE so importProducts can
 * later diff it against the live `products` table.
 *
 * @param {object} prodConnection    mysql2/promise connection to production
 * @param {object} localConnection   mysql2/promise connection to the local DB
 * @param {boolean} incrementalUpdate when true, fetch only rows whose
 *        product/inventory/price/b2b timestamps exceed lastSyncTime;
 *        when false the WHERE clause degenerates to TRUE (full pull)
 * @param {string} lastSyncTime      cutoff bound into every placeholder of
 *        the incremental WHERE clause
 */
async function materializeCalculations(prodConnection, localConnection, incrementalUpdate = true, lastSyncTime = '1970-01-01') {
outputProgress({
status: "running",
operation: "Products import",
message: "Fetching product data from production"
});
// Get all product data in a single optimized query.
// Derived columns worth knowing about:
//  - stock_quantity: local availability minus open, unpicked, unshipped
//    order quantity (the same correlated subquery as pending_qty).
//  - cost_price: average cost of on-hand inventory, falling back to the
//    most recently received cost when nothing is on hand.
//  - replenishable: 0 when reorder is negative or all sale/receive/refill
//    dates are missing or older than 5 years.
// All five incremental placeholders are bound to lastSyncTime (see the
// params argument at the end of this call).
const [prodData] = await prodConnection.query(`
SELECT
p.pid,
p.description AS title,
p.notes AS description,
p.itemnumber AS SKU,
p.date_created,
p.datein AS first_received,
p.location,
p.upc AS barcode,
p.harmonized_tariff_code,
p.stamp AS updated_at,
CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible,
CASE
WHEN p.reorder < 0 THEN 0
WHEN (
(IFNULL(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURDATE(), INTERVAL 5 YEAR))
OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
) THEN 0
ELSE 1
END AS replenishable,
COALESCE(si.available_local, 0) - COALESCE(
(SELECT SUM(oi.qty_ordered - oi.qty_placed)
FROM order_items oi
JOIN _order o ON oi.order_id = o.order_id
WHERE oi.prod_pid = p.pid
AND o.date_placed != '0000-00-00 00:00:00'
AND o.date_shipped = '0000-00-00 00:00:00'
AND oi.pick_finished = 0
AND oi.qty_back = 0
AND o.order_status != 15
AND o.order_status < 90
AND oi.qty_ordered >= oi.qty_placed
AND oi.qty_ordered > 0
), 0
) as stock_quantity,
COALESCE(
(SELECT SUM(oi.qty_ordered - oi.qty_placed)
FROM order_items oi
JOIN _order o ON oi.order_id = o.order_id
WHERE oi.prod_pid = p.pid
AND o.date_placed != '0000-00-00 00:00:00'
AND o.date_shipped = '0000-00-00 00:00:00'
AND oi.pick_finished = 0
AND oi.qty_back = 0
AND o.order_status != 15
AND o.order_status < 90
AND oi.qty_ordered >= oi.qty_placed
AND oi.qty_ordered > 0
), 0
) as pending_qty,
COALESCE(ci.onpreorder, 0) as preorder_count,
COALESCE(pnb.inventory, 0) as notions_inv_count,
COALESCE(pcp.price_each, 0) as price,
COALESCE(p.sellingprice, 0) AS regular_price,
CASE
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0)
ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
END AS cost_price,
NULL as landing_cost_price,
s.companyname AS vendor,
CASE
WHEN s.companyname = 'Notions' THEN sid.notions_itemnumber
ELSE sid.supplier_itemnumber
END AS vendor_reference,
sid.notions_itemnumber AS notions_reference,
CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
pc1.name AS brand,
pc2.name AS line,
pc3.name AS subline,
pc4.name AS artist,
COALESCE(CASE
WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
ELSE sid.supplier_qty_per_unit
END, sid.notions_qty_per_unit) AS moq,
p.rating,
p.rating_votes AS reviews,
p.weight,
p.length,
p.width,
p.height,
p.country_of_origin,
(SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets,
(SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies,
p.totalsold AS total_sold,
pls.date_sold as date_last_sold,
GROUP_CONCAT(DISTINCT CASE
WHEN pc.cat_id IS NOT NULL
AND pc.type IN (10, 20, 11, 21, 12, 13)
AND pci.cat_id NOT IN (16, 17)
THEN pci.cat_id
END) as category_ids
FROM products p
LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
LEFT JOIN current_inventory ci ON p.pid = ci.pid
LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
LEFT JOIN product_category_index pci ON p.pid = pci.pid
LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id
LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id
LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id
LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
LEFT JOIN product_last_sold pls ON p.pid = pls.pid
WHERE ${incrementalUpdate ? `
p.stamp > ? OR
ci.stamp > ? OR
pcp.date_deactive > ? OR
pcp.date_active > ? OR
pnb.date_updated > ?
` : 'TRUE'}
GROUP BY p.pid
`, incrementalUpdate ? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime] : []);
outputProgress({
status: "running",
operation: "Products import",
message: `Processing ${prodData.length} product records`
});
// Insert all product data into temp table in batches of 1000 rows per
// multi-row INSERT to keep packets small.
for (let i = 0; i < prodData.length; i += 1000) {
const batch = prodData.slice(i, i + 1000);
// Column order here must match the column list of the INSERT below.
const values = batch.map(row => [
row.pid,
row.title,
row.description,
row.SKU,
// Set stock quantity to 0 if it's over 5000
row.stock_quantity > 5000 ? 0 : Math.max(0, row.stock_quantity),
row.pending_qty,
row.preorder_count,
row.notions_inv_count,
row.price,
row.regular_price,
row.cost_price,
row.vendor,
row.vendor_reference,
row.notions_reference,
row.brand,
row.line,
row.subline,
row.artist,
row.category_ids,
row.date_created, // map to created_at
row.first_received,
row.landing_cost_price,
row.barcode,
row.harmonized_tariff_code,
row.updated_at,
row.visible,
row.replenishable,
row.permalink,
row.moq,
// NOTE(review): toFixed returns a string; MySQL coerces it into the
// DECIMAL(10,2) rating column. A rating of 0 is stored as NULL here.
row.rating ? Number(row.rating).toFixed(2) : null,
row.reviews,
row.weight,
row.length,
row.width,
row.height,
row.country_of_origin,
row.location,
row.total_sold,
row.baskets,
row.notifies,
row.date_last_sold,
true // Mark as needing update
]);
if (values.length > 0) {
// Upsert so re-runs within the same session refresh staged rows
// instead of failing on the pid primary key.
await localConnection.query(`
INSERT INTO temp_products (
pid, title, description, SKU,
stock_quantity, pending_qty, preorder_count, notions_inv_count,
price, regular_price, cost_price,
vendor, vendor_reference, notions_reference,
brand, line, subline, artist,
category_ids, created_at, first_received,
landing_cost_price, barcode, harmonized_tariff_code,
updated_at, visible, replenishable, permalink,
moq, rating, reviews, weight, length, width,
height, country_of_origin, location, total_sold,
baskets, notifies, date_last_sold, needs_update
)
VALUES ?
ON DUPLICATE KEY UPDATE
title = VALUES(title),
description = VALUES(description),
SKU = VALUES(SKU),
stock_quantity = VALUES(stock_quantity),
pending_qty = VALUES(pending_qty),
preorder_count = VALUES(preorder_count),
notions_inv_count = VALUES(notions_inv_count),
price = VALUES(price),
regular_price = VALUES(regular_price),
cost_price = VALUES(cost_price),
vendor = VALUES(vendor),
vendor_reference = VALUES(vendor_reference),
notions_reference = VALUES(notions_reference),
brand = VALUES(brand),
line = VALUES(line),
subline = VALUES(subline),
artist = VALUES(artist),
category_ids = VALUES(category_ids),
created_at = VALUES(created_at),
first_received = VALUES(first_received),
landing_cost_price = VALUES(landing_cost_price),
barcode = VALUES(barcode),
harmonized_tariff_code = VALUES(harmonized_tariff_code),
updated_at = VALUES(updated_at),
visible = VALUES(visible),
replenishable = VALUES(replenishable),
permalink = VALUES(permalink),
moq = VALUES(moq),
rating = VALUES(rating),
reviews = VALUES(reviews),
weight = VALUES(weight),
length = VALUES(length),
width = VALUES(width),
height = VALUES(height),
country_of_origin = VALUES(country_of_origin),
location = VALUES(location),
total_sold = VALUES(total_sold),
baskets = VALUES(baskets),
notifies = VALUES(notifies),
date_last_sold = VALUES(date_last_sold),
needs_update = TRUE
`, [values]);
}
outputProgress({
status: "running",
operation: "Products import",
message: `Processed ${Math.min(i + 1000, prodData.length)} of ${prodData.length} product records`,
current: i + batch.length,
total: prodData.length
});
}
outputProgress({
status: "running",
operation: "Products import",
message: "Finished materializing calculations"
});
}
/**
 * Sync the local `products` table from production.
 *
 * Pipeline:
 *  1. Stage changed production rows into temp_products
 *     (setupAndCleanupTempTables + materializeCalculations).
 *  2. Page through staged rows in pid order, diff each against the
 *     existing local row, and apply only genuine inserts/updates.
 *  3. Insert product->category links for categories that exist locally.
 *  4. On success only, stamp sync_status so the next incremental run
 *     starts from this point; a failed run keeps the previous cutoff.
 *
 * The temp table is dropped in a finally block so an error mid-run cannot
 * leave temp_products behind on a pooled connection (previously cleanup
 * was skipped on the error path).
 *
 * @param {object} prodConnection    mysql2/promise connection to production
 * @param {object} localConnection   mysql2/promise connection to the local DB
 * @param {boolean} incrementalUpdate only fetch rows changed since last sync
 * @returns {Promise<object>} summary with recordsAdded/recordsUpdated counts
 */
async function importProducts(prodConnection, localConnection, incrementalUpdate = true) {
  const startTime = Date.now();
  let recordsAdded = 0;
  let recordsUpdated = 0;
  try {
    // Local `products` column list drives both the change diff and the
    // INSERT column order, so local schema changes are picked up
    // automatically.
    const [columns] = await localConnection.query(`
      SELECT COLUMN_NAME
      FROM INFORMATION_SCHEMA.COLUMNS
      WHERE TABLE_NAME = 'products'
      ORDER BY ORDINAL_POSITION
    `);
    const columnNames = columns.map(col => col.COLUMN_NAME);

    // Last successful sync time; epoch default forces a full diff on the
    // first ever run.
    const [syncInfo] = await localConnection.query(
      "SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'products'"
    );
    const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
    console.log('Products: Using last sync time:', lastSyncTime);

    // Stage changed rows from production into temp_products.
    await setupAndCleanupTempTables(localConnection, 'setup');
    await materializeCalculations(prodConnection, localConnection, incrementalUpdate, lastSyncTime);

    // Number of staged rows still flagged for processing.
    const [[{ actualTotal }]] = await localConnection.query(`
      SELECT COUNT(DISTINCT pid) as actualTotal
      FROM temp_products
      WHERE needs_update = 1
    `);
    console.log('Products: Found changes:', actualTotal);

    const BATCH_SIZE = 5000;
    let processed = 0;
    while (processed < actualTotal) {
      // ORDER BY pid makes LIMIT/OFFSET paging deterministic; without an
      // ORDER BY, MySQL may return rows in any order and successive pages
      // could overlap or skip rows.
      const [batch] = await localConnection.query(`
        SELECT * FROM temp_products
        WHERE needs_update = 1
        ORDER BY pid
        LIMIT ? OFFSET ?
      `, [BATCH_SIZE, processed]);
      if (!batch || batch.length === 0) break;

      // Image URLs are derived from the pid rather than stored upstream.
      batch.forEach(row => {
        const urls = getImageUrls(row.pid);
        row.image = urls.image;
        row.image_175 = urls.image_175;
        row.image_full = urls.image_full;
      });

      // Fetch the current local rows for this batch in one query so the
      // batch can be split into true inserts vs. real changes.
      const [existingProducts] = await localConnection.query(
        `SELECT ${columnNames.join(',')} FROM products WHERE pid IN (?)`,
        [batch.map(p => p.pid)]
      );
      const existingPidsMap = new Map(existingProducts.map(p => [p.pid, p]));

      const insertsAndUpdates = batch.reduce((acc, product) => {
        if (existingPidsMap.has(product.pid)) {
          const existing = existingPidsMap.get(product.pid);
          // Only queue an update when at least one column truly differs.
          const hasChanges = columnNames.some(col => {
            const newVal = product[col] ?? null;
            const oldVal = existing[col] ?? null;
            if (col === "managing_stock") return false; // Skip this as it's always 1
            if (typeof newVal === 'number' && typeof oldVal === 'number') {
              // Tolerate tiny float noise from DECIMAL round-trips.
              return Math.abs(newVal - oldVal) > 0.00001;
            }
            return newVal !== oldVal;
          });
          if (hasChanges) {
            acc.updates.push(product);
          }
        } else {
          acc.inserts.push(product);
        }
        return acc;
      }, { inserts: [], updates: [] });

      // Plain INSERT for rows that don't exist locally yet.
      if (insertsAndUpdates.inserts.length > 0) {
        const insertValues = insertsAndUpdates.inserts.map(product =>
          columnNames.map(col => {
            const val = product[col] ?? null;
            if (col === "managing_stock") return 1;
            return val;
          })
        );
        const insertPlaceholders = insertsAndUpdates.inserts
          .map(() => `(${Array(columnNames.length).fill('?').join(',')})`)
          .join(',');
        const insertResult = await localConnection.query(`
          INSERT INTO products (${columnNames.join(',')})
          VALUES ${insertPlaceholders}
        `, insertValues.flat());
        recordsAdded += insertResult[0].affectedRows;
      }

      // Upsert for rows that exist locally but changed.
      if (insertsAndUpdates.updates.length > 0) {
        const updateValues = insertsAndUpdates.updates.map(product =>
          columnNames.map(col => {
            const val = product[col] ?? null;
            if (col === "managing_stock") return 1;
            return val;
          })
        );
        const updatePlaceholders = insertsAndUpdates.updates
          .map(() => `(${Array(columnNames.length).fill('?').join(',')})`)
          .join(',');
        await localConnection.query(`
          INSERT INTO products (${columnNames.join(',')})
          VALUES ${updatePlaceholders}
          ON DUPLICATE KEY UPDATE
          ${columnNames
            .filter(col => col !== 'pid')
            .map(col => `${col} = VALUES(${col})`)
            .join(',')};
        `, updateValues.flat());
        // The batch was pre-diffed, so every queued row is a real update.
        recordsUpdated += insertsAndUpdates.updates.length;
      }

      // Rebuild category links for staged rows carrying category_ids
      // (a comma-separated list built by GROUP_CONCAT upstream).
      if (batch.some(p => p.category_ids)) {
        const categoryRelationships = batch
          .filter(p => p.category_ids)
          .flatMap(product =>
            product.category_ids
              .split(',')
              .map(id => id.trim())
              .filter(id => id)
              .map(Number)
              .filter(id => !isNaN(id))
              .map(catId => [catId, product.pid])
          );
        if (categoryRelationships.length > 0) {
          // Only link categories that exist locally to avoid orphan rows.
          const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))];
          const [existingCats] = await localConnection.query(
            "SELECT cat_id FROM categories WHERE cat_id IN (?)",
            [uniqueCatIds]
          );
          const existingCatIds = new Set(existingCats.map(c => c.cat_id));
          const validRelationships = categoryRelationships.filter(([catId]) =>
            existingCatIds.has(catId)
          );
          if (validRelationships.length > 0) {
            const catPlaceholders = validRelationships
              .map(() => "(?, ?)")
              .join(",");
            await localConnection.query(
              `INSERT IGNORE INTO product_categories (cat_id, pid)
              VALUES ${catPlaceholders}`,
              validRelationships.flat()
            );
          }
        }
      }

      processed += batch.length;
      outputProgress({
        status: "running",
        operation: "Products import",
        message: `Processed ${processed} of ${actualTotal} products`,
        current: processed,
        total: actualTotal,
        elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
        remaining: estimateRemaining(startTime, processed, actualTotal),
        rate: calculateRate(startTime, processed)
      });
    }

    // Only advance the sync cursor after a fully successful run, so a
    // failed import is retried from the previous cutoff.
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('products', NOW())
      ON DUPLICATE KEY UPDATE last_sync_timestamp = NOW()
    `);
    return {
      status: "complete",
      totalImported: actualTotal,
      recordsAdded: recordsAdded || 0,
      recordsUpdated: recordsUpdated || 0,
      incrementalUpdate,
      lastSyncTime
    };
  } finally {
    // Always drop the temp table, even when an error was thrown above.
    try {
      await setupAndCleanupTempTables(localConnection, 'cleanup');
    } catch (cleanupError) {
      // Log but don't rethrow: a cleanup failure must not mask the
      // original error propagating out of the try block.
      console.error('Products: temp table cleanup failed:', cleanupError.message);
    }
  }
}
async function importMissingProducts(prodConnection, localConnection, missingPids) {
try {
// Get column names first
const [columns] = await localConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'products'
ORDER BY ORDINAL_POSITION
`);
const columnNames = columns.map((col) => col.COLUMN_NAME);
// Get the missing products with all their data in one optimized query
const [products] = await prodConnection.query(`
SELECT
p.pid,
p.description AS title,
p.notes AS description,
p.itemnumber AS SKU,
p.date_created,
p.datein AS first_received,
p.location,
p.upc AS barcode,
p.harmonized_tariff_code,
p.stamp AS updated_at,
CASE WHEN si.show + si.buyable > 0 THEN 1 ELSE 0 END AS visible,
CASE
WHEN p.reorder < 0 THEN 0
WHEN (
(IFNULL(pls.date_sold, '0000-00-00') = '0000-00-00' OR pls.date_sold <= DATE_SUB(CURDATE(), INTERVAL 5 YEAR))
OR (p.datein = '0000-00-00 00:00:00' OR p.datein <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
OR (p.date_refill = '0000-00-00 00:00:00' OR p.date_refill <= DATE_SUB(NOW(), INTERVAL 5 YEAR))
) THEN 0
ELSE 1
END AS replenishable,
COALESCE(si.available_local, 0) as stock_quantity,
COALESCE(
(SELECT SUM(oi.qty_ordered - oi.qty_placed)
FROM order_items oi
JOIN _order o ON oi.order_id = o.order_id
WHERE oi.prod_pid = p.pid
AND o.date_placed != '0000-00-00 00:00:00'
AND o.date_shipped = '0000-00-00 00:00:00'
AND oi.pick_finished = 0
AND oi.qty_back = 0
AND o.order_status != 15
AND o.order_status < 90
AND oi.qty_ordered >= oi.qty_placed
AND oi.qty_ordered > 0
), 0
) as pending_qty,
COALESCE(ci.onpreorder, 0) as preorder_count,
COALESCE(pnb.inventory, 0) as notions_inv_count,
COALESCE(pcp.price_each, 0) as price,
COALESCE(p.sellingprice, 0) AS regular_price,
CASE
WHEN EXISTS (SELECT 1 FROM product_inventory WHERE pid = p.pid AND count > 0)
THEN (SELECT ROUND(AVG(costeach), 5) FROM product_inventory WHERE pid = p.pid AND count > 0)
ELSE (SELECT costeach FROM product_inventory WHERE pid = p.pid ORDER BY daterec DESC LIMIT 1)
END AS cost_price,
NULL AS landing_cost_price,
p.rating,
p.rating_votes AS reviews,
p.weight,
p.length,
p.width,
p.height,
(SELECT COUNT(*) FROM mybasket mb WHERE mb.item = p.pid AND mb.qty > 0) AS baskets,
(SELECT COUNT(*) FROM product_notify pn WHERE pn.pid = p.pid) AS notifies,
p.totalsold AS total_sold,
p.country_of_origin,
pls.date_sold as date_last_sold,
GROUP_CONCAT(DISTINCT CASE WHEN pc.cat_id IS NOT NULL THEN pci.cat_id END) as category_ids
FROM products p
LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
LEFT JOIN product_category_index pci ON p.pid = pci.pid
LEFT JOIN product_categories pc ON pci.cat_id = pc.cat_id
AND pc.type IN (10, 20, 11, 21, 12, 13)
AND pci.cat_id NOT IN (16, 17)
LEFT JOIN product_categories pc1 ON p.company = pc1.cat_id
LEFT JOIN product_categories pc2 ON p.line = pc2.cat_id
LEFT JOIN product_categories pc3 ON p.subline = pc3.cat_id
LEFT JOIN product_categories pc4 ON p.artist = pc4.cat_id
LEFT JOIN product_last_sold pls ON p.pid = pls.pid
LEFT JOIN current_inventory ci ON p.pid = ci.pid
LEFT JOIN product_current_prices pcp ON p.pid = pcp.pid AND pcp.active = 1
LEFT JOIN product_notions_b2b pnb ON p.pid = pnb.pid
WHERE p.pid IN (?)
GROUP BY p.pid
`, [missingPids]);
// Add image URLs
products.forEach(product => {
const urls = getImageUrls(product.pid);
product.image = urls.image;
product.image_175 = urls.image_175;
product.image_full = urls.image_full;
});
let recordsAdded = 0;
let recordsUpdated = 0;
if (products.length > 0) {
// Map values in the same order as columns
const productValues = products.flatMap(product =>
columnNames.map(col => {
const val = product[col] ?? null;
if (col === "managing_stock") return 1;
if (typeof val === "number") return val || 0;
return val;
})
);
// Generate placeholders for all products
const placeholders = products
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
.join(",");
// Build and execute the query
const query = `
INSERT INTO products (${columnNames.join(",")})
VALUES ${placeholders}
ON DUPLICATE KEY UPDATE ${columnNames
.filter((col) => col !== "pid")
.map((col) => `${col} = VALUES(${col})`)
.join(",")};
`;
const result = await localConnection.query(query, productValues);
recordsAdded = result.affectedRows - result.changedRows;
recordsUpdated = result.changedRows;
// Handle category relationships if any
const categoryRelationships = [];
products.forEach(product => {
if (product.category_ids) {
const catIds = product.category_ids
.split(",")
.map(id => id.trim())
.filter(id => id)
.map(Number);
catIds.forEach(catId => {
if (catId) categoryRelationships.push([catId, product.pid]);
});
}
});
if (categoryRelationships.length > 0) {
// Verify categories exist before inserting relationships
const uniqueCatIds = [...new Set(categoryRelationships.map(([catId]) => catId))];
const [existingCats] = await localConnection.query(
"SELECT cat_id FROM categories WHERE cat_id IN (?)",
[uniqueCatIds]
);
const existingCatIds = new Set(existingCats.map(c => c.cat_id));
// Filter relationships to only include existing categories
const validRelationships = categoryRelationships.filter(([catId]) =>
existingCatIds.has(catId)
);
if (validRelationships.length > 0) {
const catPlaceholders = validRelationships
.map(() => "(?, ?)")
.join(",");
await localConnection.query(
`INSERT IGNORE INTO product_categories (cat_id, pid)
VALUES ${catPlaceholders}`,
validRelationships.flat()
);
}
}
}
return {
status: "complete",
totalImported: products.length,
recordsAdded,
recordsUpdated
};
} catch (error) {
throw error;
}
}
// Public API of this import script:
//  - importProducts: full/incremental sync of the local products table
//  - importMissingProducts: targeted backfill of specific pids that other
//    import scripts discovered were missing locally
module.exports = {
importProducts,
importMissingProducts
};

View File

@@ -0,0 +1,543 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
async function importPurchaseOrders(prodConnection, localConnection, incrementalUpdate = true) {
const startTime = Date.now();
let recordsAdded = 0;
let recordsUpdated = 0;
try {
// Get last sync info
const [syncInfo] = await localConnection.query(
"SELECT last_sync_timestamp FROM sync_status WHERE table_name = 'purchase_orders'"
);
const lastSyncTime = syncInfo?.[0]?.last_sync_timestamp || '1970-01-01';
console.log('Purchase Orders: Using last sync time:', lastSyncTime);
// Insert temporary table creation query for purchase orders
await localConnection.query(`
CREATE TABLE IF NOT EXISTS temp_purchase_orders (
po_id INT UNSIGNED NOT NULL,
pid INT UNSIGNED NOT NULL,
vendor VARCHAR(255),
date DATE,
expected_date DATE,
status INT,
notes TEXT,
PRIMARY KEY (po_id, pid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
`);
outputProgress({
operation: `Starting ${incrementalUpdate ? 'incremental' : 'full'} purchase orders import`,
status: "running",
});
// Get column names for the insert
const [columns] = await localConnection.query(`
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'purchase_orders'
ORDER BY ORDINAL_POSITION
`);
const columnNames = columns
.map((col) => col.COLUMN_NAME)
.filter((name) => name !== "id");
// Build incremental conditions
const incrementalWhereClause = incrementalUpdate
? `AND (
p.date_updated > ?
OR p.date_ordered > ?
OR p.date_estin > ?
OR r.date_updated > ?
OR r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)`
: "";
const incrementalParams = incrementalUpdate
? [lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime]
: [];
// First get all relevant PO IDs with basic info
const [[{ total }]] = await prodConnection.query(`
SELECT COUNT(*) as total
FROM (
SELECT DISTINCT pop.po_id, pop.pid
FROM po p
USE INDEX (idx_date_created)
JOIN po_products pop ON p.po_id = pop.po_id
JOIN suppliers s ON p.supplier_id = s.supplierid
WHERE p.date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
p.date_updated > ?
OR p.date_ordered > ?
OR p.date_estin > ?
)
` : ''}
UNION
SELECT DISTINCT r.receiving_id as po_id, rp.pid
FROM receivings_products rp
USE INDEX (received_date)
LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)
` : ''}
) all_items
`, incrementalUpdate ? [
lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
] : []);
console.log('Purchase Orders: Found changes:', total);
const [poList] = await prodConnection.query(`
SELECT DISTINCT
COALESCE(p.po_id, r.receiving_id) as po_id,
COALESCE(
NULLIF(s1.companyname, ''),
NULLIF(s2.companyname, ''),
'Unknown Vendor'
) as vendor,
CASE
WHEN p.po_id IS NOT NULL THEN
DATE(COALESCE(
NULLIF(p.date_ordered, '0000-00-00 00:00:00'),
p.date_created
))
WHEN r.receiving_id IS NOT NULL THEN
DATE(r.date_created)
END as date,
CASE
WHEN p.date_estin = '0000-00-00' THEN NULL
WHEN p.date_estin IS NULL THEN NULL
WHEN p.date_estin NOT REGEXP '^[0-9]{4}-[0-9]{2}-[0-9]{2}$' THEN NULL
ELSE p.date_estin
END as expected_date,
COALESCE(p.status, 50) as status,
p.short_note as notes,
p.notes as long_note
FROM (
SELECT po_id FROM po
USE INDEX (idx_date_created)
WHERE date_ordered >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
date_ordered > ?
OR date_updated > ?
OR date_estin > ?
)
` : ''}
UNION
SELECT DISTINCT r.receiving_id as po_id
FROM receivings r
JOIN receivings_products rp USE INDEX (received_date) ON r.receiving_id = rp.receiving_id
WHERE rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL ${incrementalUpdate ? '1' : '5'} YEAR)
${incrementalUpdate ? `
AND (
r.date_created > ?
OR r.date_checked > ?
OR rp.stamp > ?
OR rp.received_date > ?
)
` : ''}
) ids
LEFT JOIN po p ON ids.po_id = p.po_id
LEFT JOIN suppliers s1 ON p.supplier_id = s1.supplierid
LEFT JOIN receivings r ON ids.po_id = r.receiving_id
LEFT JOIN suppliers s2 ON r.supplier_id = s2.supplierid
ORDER BY po_id
`, incrementalUpdate ? [
lastSyncTime, lastSyncTime, lastSyncTime, // PO conditions
lastSyncTime, lastSyncTime, lastSyncTime, lastSyncTime // Receiving conditions
] : []);
console.log('Sample PO dates:', poList.slice(0, 5).map(po => ({
po_id: po.po_id,
raw_date_ordered: po.raw_date_ordered,
raw_date_created: po.raw_date_created,
raw_date_estin: po.raw_date_estin,
computed_date: po.date,
expected_date: po.expected_date
})));
const totalItems = total;
let processed = 0;
const BATCH_SIZE = 5000;
const PROGRESS_INTERVAL = 500;
let lastProgressUpdate = Date.now();
outputProgress({
operation: `Starting purchase orders import - Processing ${totalItems} purchase order items`,
status: "running",
});
for (let i = 0; i < poList.length; i += BATCH_SIZE) {
const batch = poList.slice(i, Math.min(i + BATCH_SIZE, poList.length));
const poIds = batch.map(po => po.po_id);
// Get all products for these POs in one query
const [poProducts] = await prodConnection.query(`
SELECT
pop.po_id,
pop.pid,
pr.itemnumber as sku,
pr.description as name,
pop.cost_each,
pop.qty_each as ordered
FROM po_products pop
USE INDEX (PRIMARY)
JOIN products pr ON pop.pid = pr.pid
WHERE pop.po_id IN (?)
`, [poIds]);
// Process PO products in smaller sub-batches to avoid packet size issues
const SUB_BATCH_SIZE = 5000;
for (let j = 0; j < poProducts.length; j += SUB_BATCH_SIZE) {
const productBatch = poProducts.slice(j, j + SUB_BATCH_SIZE);
const productPids = [...new Set(productBatch.map(p => p.pid))];
const batchPoIds = [...new Set(productBatch.map(p => p.po_id))];
// Get receivings for this batch with employee names
const [receivings] = await prodConnection.query(`
SELECT
r.po_id,
rp.pid,
rp.receiving_id,
rp.qty_each,
rp.cost_each,
COALESCE(rp.received_date, r.date_created) as received_date,
rp.received_by,
CONCAT(e.firstname, ' ', e.lastname) as received_by_name,
CASE
WHEN r.po_id IS NULL THEN 2 -- No PO
WHEN r.po_id IN (?) THEN 0 -- Original PO
ELSE 1 -- Different PO
END as is_alt_po
FROM receivings_products rp
USE INDEX (received_date)
LEFT JOIN receivings r ON r.receiving_id = rp.receiving_id
LEFT JOIN employees e ON rp.received_by = e.employeeid
WHERE rp.pid IN (?)
AND rp.received_date >= DATE_SUB(CURRENT_DATE, INTERVAL 5 YEAR)
ORDER BY r.po_id, rp.pid, rp.received_date
`, [batchPoIds, productPids]);
// Create maps for this sub-batch
const poProductMap = new Map();
productBatch.forEach(product => {
const key = `${product.po_id}-${product.pid}`;
poProductMap.set(key, product);
});
const receivingMap = new Map();
const altReceivingMap = new Map();
const noPOReceivingMap = new Map();
receivings.forEach(receiving => {
const key = `${receiving.po_id}-${receiving.pid}`;
if (receiving.is_alt_po === 2) {
// No PO
if (!noPOReceivingMap.has(receiving.pid)) {
noPOReceivingMap.set(receiving.pid, []);
}
noPOReceivingMap.get(receiving.pid).push(receiving);
} else if (receiving.is_alt_po === 1) {
// Different PO
if (!altReceivingMap.has(receiving.pid)) {
altReceivingMap.set(receiving.pid, []);
}
altReceivingMap.get(receiving.pid).push(receiving);
} else {
// Original PO
if (!receivingMap.has(key)) {
receivingMap.set(key, []);
}
receivingMap.get(key).push(receiving);
}
});
// Verify PIDs exist
const [existingPids] = await localConnection.query(
'SELECT pid FROM products WHERE pid IN (?)',
[productPids]
);
const validPids = new Set(existingPids.map(p => p.pid));
// First check which PO lines already exist and get their current values
const poLines = Array.from(poProductMap.values())
.filter(p => validPids.has(p.pid))
.map(p => [p.po_id, p.pid]);
const [existingPOs] = await localConnection.query(
`SELECT ${columnNames.join(',')} FROM purchase_orders WHERE (po_id, pid) IN (${poLines.map(() => "(?,?)").join(",")})`,
poLines.flat()
);
const existingPOMap = new Map(
existingPOs.map(po => [`${po.po_id}-${po.pid}`, po])
);
// Split into inserts and updates
const insertsAndUpdates = { inserts: [], updates: [] };
let batchProcessed = 0;
for (const po of batch) {
const poProducts = Array.from(poProductMap.values())
.filter(p => p.po_id === po.po_id && validPids.has(p.pid));
for (const product of poProducts) {
const key = `${po.po_id}-${product.pid}`;
const receivingHistory = receivingMap.get(key) || [];
const altReceivingHistory = altReceivingMap.get(product.pid) || [];
const noPOReceivingHistory = noPOReceivingMap.get(product.pid) || [];
// Combine all receivings and sort by date
const allReceivings = [
...receivingHistory.map(r => ({ ...r, type: 'original' })),
...altReceivingHistory.map(r => ({ ...r, type: 'alternate' })),
...noPOReceivingHistory.map(r => ({ ...r, type: 'no_po' }))
].sort((a, b) => new Date(a.received_date || '9999-12-31') - new Date(b.received_date || '9999-12-31'));
// Split receivings into original PO and others
const originalPOReceivings = allReceivings.filter(r => r.type === 'original');
const otherReceivings = allReceivings.filter(r => r.type !== 'original');
// Track FIFO fulfillment
let remainingToFulfill = product.ordered;
const fulfillmentTracking = [];
let totalReceived = 0;
let actualCost = null; // Will store the cost of the first receiving that fulfills this PO
let firstFulfillmentReceiving = null;
let lastFulfillmentReceiving = null;
for (const receiving of allReceivings) {
const qtyToApply = Math.min(remainingToFulfill, receiving.qty_each);
if (qtyToApply > 0) {
// If this is the first receiving being applied, use its cost
if (actualCost === null) {
actualCost = receiving.cost_each;
firstFulfillmentReceiving = receiving;
}
lastFulfillmentReceiving = receiving;
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: qtyToApply,
qty_total: receiving.qty_each,
cost: receiving.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
remaining_qty: receiving.qty_each - qtyToApply
});
remainingToFulfill -= qtyToApply;
} else {
// Track excess receivings
fulfillmentTracking.push({
receiving_id: receiving.receiving_id,
qty_applied: 0,
qty_total: receiving.qty_each,
cost: receiving.cost_each,
date: receiving.received_date,
received_by: receiving.received_by,
received_by_name: receiving.received_by_name || 'Unknown',
type: receiving.type,
is_excess: true
});
}
totalReceived += receiving.qty_each;
}
const receiving_status = !totalReceived ? 1 : // created
remainingToFulfill > 0 ? 30 : // partial
40; // full
function formatDate(dateStr) {
if (!dateStr) return null;
if (dateStr === '0000-00-00' || dateStr === '0000-00-00 00:00:00') return null;
if (typeof dateStr === 'string' && !dateStr.match(/^\d{4}-\d{2}-\d{2}/)) return null;
try {
const date = new Date(dateStr);
if (isNaN(date.getTime())) return null;
if (date.getFullYear() < 1900 || date.getFullYear() > 2100) return null;
return date.toISOString().split('T')[0];
} catch (e) {
return null;
}
}
const rowValues = columnNames.map(col => {
switch (col) {
case 'po_id': return po.po_id;
case 'vendor': return po.vendor;
case 'date': return formatDate(po.date);
case 'expected_date': return formatDate(po.expected_date);
case 'pid': return product.pid;
case 'sku': return product.sku;
case 'name': return product.name;
case 'cost_price': return actualCost || product.cost_each;
case 'po_cost_price': return product.cost_each;
case 'status': return po.status;
case 'notes': return po.notes;
case 'long_note': return po.long_note;
case 'ordered': return product.ordered;
case 'received': return totalReceived;
case 'unfulfilled': return remainingToFulfill;
case 'excess_received': return Math.max(0, totalReceived - product.ordered);
case 'received_date': return formatDate(firstFulfillmentReceiving?.received_date);
case 'last_received_date': return formatDate(lastFulfillmentReceiving?.received_date);
case 'received_by': return firstFulfillmentReceiving?.received_by_name || null;
case 'receiving_status': return receiving_status;
case 'receiving_history': return JSON.stringify({
fulfillment: fulfillmentTracking,
ordered_qty: product.ordered,
total_received: totalReceived,
remaining_unfulfilled: remainingToFulfill,
excess_received: Math.max(0, totalReceived - product.ordered),
po_cost: product.cost_each,
actual_cost: actualCost || product.cost_each
});
default: return null;
}
});
if (existingPOMap.has(key)) {
const existing = existingPOMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
const newVal = rowValues[columnNames.indexOf(col)];
const oldVal = existing[col] ?? null;
// Special handling for numbers to avoid type coercion issues
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
// Special handling for receiving_history - parse and compare
if (col === 'receiving_history') {
const newHistory = JSON.parse(newVal || '{}');
const oldHistory = JSON.parse(oldVal || '{}');
return JSON.stringify(newHistory) !== JSON.stringify(oldHistory);
}
return newVal !== oldVal;
});
if (hasChanges) {
insertsAndUpdates.updates.push({
po_id: po.po_id,
pid: product.pid,
values: rowValues
});
}
} else {
insertsAndUpdates.inserts.push({
po_id: po.po_id,
pid: product.pid,
values: rowValues
});
}
batchProcessed++;
}
}
// Handle inserts
if (insertsAndUpdates.inserts.length > 0) {
const insertPlaceholders = insertsAndUpdates.inserts
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
.join(",");
const insertResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${insertPlaceholders}
`, insertsAndUpdates.inserts.map(i => i.values).flat());
const affectedRows = insertResult[0].affectedRows;
// For an upsert, MySQL counts rows twice for updates
// So if affectedRows is odd, we have (updates * 2 + inserts)
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
recordsAdded += inserts;
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
processed += batchProcessed;
}
// Handle updates - now we know these actually have changes
if (insertsAndUpdates.updates.length > 0) {
const updatePlaceholders = insertsAndUpdates.updates
.map(() => `(${Array(columnNames.length).fill("?").join(",")})`)
.join(",");
const updateResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${updatePlaceholders}
ON DUPLICATE KEY UPDATE ${columnNames
.filter((col) => col !== "po_id" && col !== "pid")
.map((col) => `${col} = VALUES(${col})`)
.join(",")};
`, insertsAndUpdates.updates.map(u => u.values).flat());
const affectedRows = updateResult[0].affectedRows;
// For an upsert, MySQL counts rows twice for updates
// So if affectedRows is odd, we have (updates * 2 + inserts)
const updates = Math.floor(affectedRows / 2);
const inserts = affectedRows - (updates * 2);
recordsUpdated += Math.floor(updates); // Ensure we never have fractional updates
processed += batchProcessed;
}
// Update progress based on time interval
const now = Date.now();
if (now - lastProgressUpdate >= PROGRESS_INTERVAL || processed === totalItems) {
outputProgress({
status: "running",
operation: "Purchase orders import",
current: processed,
total: totalItems,
elapsed: formatElapsedTime((Date.now() - startTime) / 1000),
remaining: estimateRemaining(startTime, processed, totalItems),
rate: calculateRate(startTime, processed)
});
lastProgressUpdate = now;
}
}
}
// Only update sync status if we get here (no errors thrown)
await localConnection.query(`
INSERT INTO sync_status (table_name, last_sync_timestamp)
VALUES ('purchase_orders', NOW())
ON DUPLICATE KEY UPDATE
last_sync_timestamp = NOW(),
last_sync_id = LAST_INSERT_ID(last_sync_id)
`);
return {
status: "complete",
totalImported: totalItems,
recordsAdded: recordsAdded || 0,
recordsUpdated: recordsUpdated || 0,
incrementalUpdate,
lastSyncTime
};
} catch (error) {
outputProgress({
operation: `${incrementalUpdate ? 'Incremental' : 'Full'} purchase orders import failed`,
status: "error",
error: error.message,
});
throw error;
}
}
// Sole export: the purchase-orders importer, invoked by the orchestrating import script.
module.exports = importPurchaseOrders;

View File

@@ -0,0 +1,82 @@
// Classify this batch of PO line rows: rows not yet in purchase_orders become
// inserts; existing rows become updates only when at least one column value
// actually differs (so unchanged rows are skipped entirely).
// Split into inserts and updates
const insertsAndUpdates = batch.reduce((acc, po) => {
// Composite key matching the purchase_orders (po_id, pid) unique key.
const key = `${po.po_id}-${po.pid}`;
if (existingPOMap.has(key)) {
const existing = existingPOMap.get(key);
// Check if any values are different
const hasChanges = columnNames.some(col => {
// Normalize undefined to null so missing fields compare equal to SQL NULL.
const newVal = po[col] ?? null;
const oldVal = existing[col] ?? null;
// Special handling for numbers to avoid type coercion issues
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001; // Allow for tiny floating point differences
}
// Special handling for receiving_history JSON
if (col === 'receiving_history') {
return JSON.stringify(newVal) !== JSON.stringify(oldVal);
}
return newVal !== oldVal;
});
if (hasChanges) {
// Debug trace: logs which columns triggered the update (same comparison
// as hasChanges above, re-run as a filter to list the changed columns).
console.log(`PO line changed: ${key}`, {
po_id: po.po_id,
pid: po.pid,
changes: columnNames.filter(col => {
const newVal = po[col] ?? null;
const oldVal = existing[col] ?? null;
if (typeof newVal === 'number' && typeof oldVal === 'number') {
return Math.abs(newVal - oldVal) > 0.00001;
}
if (col === 'receiving_history') {
return JSON.stringify(newVal) !== JSON.stringify(oldVal);
}
return newVal !== oldVal;
})
});
acc.updates.push({
po_id: po.po_id,
pid: po.pid,
// Values in columnNames order, ready for the bulk placeholder groups below.
values: columnNames.map(col => po[col] ?? null)
});
}
} else {
console.log(`New PO line: ${key}`);
acc.inserts.push({
po_id: po.po_id,
pid: po.pid,
values: columnNames.map(col => po[col] ?? null)
});
}
return acc;
}, { inserts: [], updates: [] });
// Flush brand-new rows with a single multi-row INSERT.
// Handle inserts
if (insertsAndUpdates.inserts.length > 0) {
const insertPlaceholders = Array(insertsAndUpdates.inserts.length).fill(placeholderGroup).join(",");
const insertResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${insertPlaceholders}
`, insertsAndUpdates.inserts.map(i => i.values).flat());
// Plain INSERT: affectedRows equals the number of rows inserted.
recordsAdded += insertResult[0].affectedRows;
}
// Flush changed rows via an upsert; every row here is known to differ.
// Handle updates
if (insertsAndUpdates.updates.length > 0) {
const updatePlaceholders = Array(insertsAndUpdates.updates.length).fill(placeholderGroup).join(",");
const updateResult = await localConnection.query(`
INSERT INTO purchase_orders (${columnNames.join(",")})
VALUES ${updatePlaceholders}
ON DUPLICATE KEY UPDATE
${columnNames
.filter(col => col !== "po_id" && col !== "pid")
.map(col => `${col} = VALUES(${col})`)
.join(",")};
`, insertsAndUpdates.updates.map(u => u.values).flat());
// MySQL reports affectedRows = 2 per row changed by ON DUPLICATE KEY UPDATE,
// so we count from our own pre-computed update list instead of the driver's number.
recordsUpdated += insertsAndUpdates.updates.length;
}

View File

@@ -0,0 +1,88 @@
const mysql = require("mysql2/promise");
const { Client } = require("ssh2");
const dotenv = require("dotenv");
const path = require("path");
// Helper function to setup SSH tunnel.
// Connects an ssh2 Client using sshConfig.ssh, then forwards a local ephemeral
// port to the production DB host/port from sshConfig.prodDbConfig.
// Resolves with { ssh, stream } where `stream` can be handed to mysql2 as the
// connection transport; rejects if the SSH connection or port-forward fails.
async function setupSshTunnel(sshConfig) {
  return new Promise((resolve, reject) => {
    const ssh = new Client();
    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      // Bug fix: a connection failure before 'ready' previously left this
      // promise unsettled forever. reject() is a no-op once resolve() has run,
      // so post-tunnel errors are still only logged.
      reject(err);
    });
    ssh.on('end', () => {
      console.log('SSH connection ended normally');
    });
    ssh.on('close', () => {
      console.log('SSH connection closed');
    });
    ssh
      .on("ready", () => {
        ssh.forwardOut(
          "127.0.0.1",
          0,
          sshConfig.prodDbConfig.host,
          sshConfig.prodDbConfig.port,
          (err, stream) => {
            if (err) {
              reject(err);
              // Bug fix: previously fell through and resolved with an
              // undefined stream after rejecting.
              return;
            }
            resolve({ ssh, stream });
          }
        );
      })
      .connect(sshConfig.ssh);
  });
}
// Helper function to setup database connections.
// Opens the SSH tunnel, then a single production connection over the forwarded
// stream and a local connection pool. Returns { ssh, prodConnection, localConnection }
// for later cleanup via closeConnections().
async function setupConnections(sshConfig) {
  const tunnel = await setupSshTunnel(sshConfig);
  try {
    const prodConnection = await mysql.createConnection({
      ...sshConfig.prodDbConfig,
      stream: tunnel.stream,
    });
    // createPool is synchronous in mysql2/promise — no await needed.
    const localConnection = mysql.createPool({
      ...sshConfig.localDbConfig,
      waitForConnections: true,
      connectionLimit: 10,
      queueLimit: 0
    });
    return {
      ssh: tunnel.ssh,
      prodConnection,
      localConnection
    };
  } catch (err) {
    // Bug fix: don't leak the SSH tunnel when connection setup fails.
    tunnel.ssh.end();
    throw err;
  }
}
// Tear down both database connections, then the SSH tunnel that backs them.
// Cleanup failures are logged rather than rethrown so callers always finish.
async function closeConnections(connections) {
  const { ssh, prodConnection, localConnection } = connections;
  try {
    // Close the single prod connection first, then drain the local pool.
    for (const conn of [prodConnection, localConnection]) {
      if (conn) await conn.end();
    }
    // Wait a bit for any pending data to be written before closing SSH
    await new Promise((done) => setTimeout(done, 100));
    if (ssh) {
      ssh.on('close', () => console.log('SSH connection closed cleanly'));
      ssh.end();
    }
  } catch (err) {
    console.error('Error during cleanup:', err);
  }
}
// Public API: shared connection-lifecycle helpers used by the import scripts.
module.exports = {
  setupConnections,
  closeConnections
};

View File

@@ -1,21 +1,35 @@
const { outputProgress } = require('./utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateBrandMetrics(startTime, totalProducts, processedCount) { async function calculateBrandMetrics(startTime, totalProducts, processedCount, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Brand metrics calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Calculating brand metrics', operation: 'Starting brand metrics calculation',
current: Math.floor(totalProducts * 0.95), current: processedCount,
total: totalProducts, total: totalProducts,
elapsed: formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.95), totalProducts), remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.95)), rate: calculateRate(startTime, processedCount),
percentage: '95' percentage: ((processedCount / totalProducts) * 100).toFixed(1)
}); });
// Calculate brand metrics // Calculate brand metrics with optimized queries
await connection.query(` await connection.query(`
INSERT INTO brand_metrics ( INSERT INTO brand_metrics (
brand, brand,
@@ -28,59 +42,77 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
avg_margin, avg_margin,
growth_rate growth_rate
) )
WITH brand_data AS ( WITH filtered_products AS (
SELECT
p.*,
CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
CASE
WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
ELSE p.stock_quantity
END as valid_stock
FROM products p
WHERE p.brand IS NOT NULL
),
sales_periods AS (
SELECT SELECT
p.brand, p.brand,
COUNT(DISTINCT p.product_id) as product_count, SUM(o.quantity * o.price) as period_revenue,
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.product_id END) as active_products,
SUM(p.stock_quantity) as total_stock_units,
SUM(p.stock_quantity * p.cost_price) as total_stock_cost,
SUM(p.stock_quantity * p.price) as total_stock_retail,
SUM(o.price * o.quantity) as total_revenue,
CASE CASE
WHEN SUM(o.price * o.quantity) > 0 THEN WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH) THEN 'current'
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) THEN 'previous'
END as period_type
FROM filtered_products p
JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
GROUP BY p.brand, period_type
),
brand_data AS (
SELECT
p.brand,
COUNT(DISTINCT p.valid_pid) as product_count,
COUNT(DISTINCT p.active_pid) as active_products,
SUM(p.valid_stock) as total_stock_units,
SUM(p.valid_stock * p.cost_price) as total_stock_cost,
SUM(p.valid_stock * p.price) as total_stock_retail,
COALESCE(SUM(o.quantity * o.price), 0) as total_revenue,
CASE
WHEN SUM(o.quantity * o.price) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity) (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0 ELSE 0
END as avg_margin, END as avg_margin
-- Current period (last 3 months) FROM filtered_products p
SUM(CASE LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
THEN COALESCE(o.quantity * o.price, 0)
ELSE 0
END) as current_period_sales,
-- Previous year same period
SUM(CASE
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
THEN COALESCE(o.quantity * o.price, 0)
ELSE 0
END) as previous_year_period_sales
FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id AND o.canceled = false
WHERE p.brand IS NOT NULL
GROUP BY p.brand GROUP BY p.brand
) )
SELECT SELECT
brand, bd.brand,
product_count, bd.product_count,
active_products, bd.active_products,
total_stock_units, bd.total_stock_units,
total_stock_cost, bd.total_stock_cost,
total_stock_retail, bd.total_stock_retail,
total_revenue, bd.total_revenue,
avg_margin, bd.avg_margin,
CASE CASE
WHEN previous_year_period_sales = 0 AND current_period_sales > 0 THEN 100.0 WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0
WHEN previous_year_period_sales = 0 THEN 0.0 AND MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) > 0 THEN 100.0
WHEN MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END) = 0 THEN 0.0
ELSE LEAST( ELSE LEAST(
GREATEST( GREATEST(
((current_period_sales - previous_year_period_sales) / ((MAX(CASE WHEN sp.period_type = 'current' THEN sp.period_revenue END) -
NULLIF(previous_year_period_sales, 0)) * 100.0, MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END)) /
NULLIF(MAX(CASE WHEN sp.period_type = 'previous' THEN sp.period_revenue END), 0)) * 100.0,
-100.0 -100.0
), ),
999.99 999.99
) )
END as growth_rate END as growth_rate
FROM brand_data FROM brand_data bd
LEFT JOIN sales_periods sp ON bd.brand = sp.brand
GROUP BY bd.brand, bd.product_count, bd.active_products, bd.total_stock_units,
bd.total_stock_cost, bd.total_stock_retail, bd.total_revenue, bd.avg_margin
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
product_count = VALUES(product_count), product_count = VALUES(product_count),
active_products = VALUES(active_products), active_products = VALUES(active_products),
@@ -93,7 +125,21 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
last_calculated_at = CURRENT_TIMESTAMP last_calculated_at = CURRENT_TIMESTAMP
`); `);
// Calculate brand time-based metrics processedCount = Math.floor(totalProducts * 0.97);
outputProgress({
status: 'running',
operation: 'Brand metrics calculated, starting time-based metrics',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Calculate brand time-based metrics with optimized query
await connection.query(` await connection.query(`
INSERT INTO brand_time_metrics ( INSERT INTO brand_time_metrics (
brand, brand,
@@ -107,26 +153,41 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
total_revenue, total_revenue,
avg_margin avg_margin
) )
SELECT WITH filtered_products AS (
p.brand, SELECT
YEAR(o.date) as year, p.*,
MONTH(o.date) as month, CASE WHEN p.stock_quantity <= 5000 THEN p.pid END as valid_pid,
COUNT(DISTINCT p.product_id) as product_count, CASE WHEN p.visible = true AND p.stock_quantity <= 5000 THEN p.pid END as active_pid,
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.product_id END) as active_products, CASE
SUM(p.stock_quantity) as total_stock_units, WHEN p.stock_quantity IS NULL OR p.stock_quantity < 0 OR p.stock_quantity > 5000 THEN 0
SUM(p.stock_quantity * p.cost_price) as total_stock_cost, ELSE p.stock_quantity
SUM(p.stock_quantity * p.price) as total_stock_retail, END as valid_stock
SUM(o.price * o.quantity) as total_revenue, FROM products p
CASE WHERE p.brand IS NOT NULL
WHEN SUM(o.price * o.quantity) > 0 THEN ),
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity) monthly_metrics AS (
ELSE 0 SELECT
END as avg_margin p.brand,
FROM products p YEAR(o.date) as year,
LEFT JOIN orders o ON p.product_id = o.product_id AND o.canceled = false MONTH(o.date) as month,
WHERE p.brand IS NOT NULL COUNT(DISTINCT p.valid_pid) as product_count,
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) COUNT(DISTINCT p.active_pid) as active_products,
GROUP BY p.brand, YEAR(o.date), MONTH(o.date) SUM(p.valid_stock) as total_stock_units,
SUM(p.valid_stock * p.cost_price) as total_stock_cost,
SUM(p.valid_stock * p.price) as total_stock_retail,
SUM(o.quantity * o.price) as total_revenue,
CASE
WHEN SUM(o.quantity * o.price) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin
FROM filtered_products p
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY p.brand, YEAR(o.date), MONTH(o.date)
)
SELECT *
FROM monthly_metrics
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
product_count = VALUES(product_count), product_count = VALUES(product_count),
active_products = VALUES(active_products), active_products = VALUES(active_products),
@@ -137,9 +198,26 @@ async function calculateBrandMetrics(startTime, totalProducts, processedCount) {
avg_margin = VALUES(avg_margin) avg_margin = VALUES(avg_margin)
`); `);
return Math.floor(totalProducts * 0.98); processedCount = Math.floor(totalProducts * 0.99);
outputProgress({
status: 'running',
operation: 'Brand time-based metrics calculated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
} catch (error) {
logError(error, 'Error calculating brand metrics');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }

View File

@@ -1,112 +1,174 @@
const { outputProgress } = require('./utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateCategoryMetrics(startTime, totalProducts, processedCount) { async function calculateCategoryMetrics(startTime, totalProducts, processedCount, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Category metrics calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Calculating category metrics', operation: 'Starting category metrics calculation',
current: Math.floor(totalProducts * 0.85), current: processedCount,
total: totalProducts, total: totalProducts,
elapsed: formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.85), totalProducts), remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.85)), rate: calculateRate(startTime, processedCount),
percentage: '85' percentage: ((processedCount / totalProducts) * 100).toFixed(1)
}); });
// Calculate category performance metrics // First, calculate base category metrics
await connection.query(` await connection.query(`
INSERT INTO category_metrics ( INSERT INTO category_metrics (
category_id, category_id,
product_count, product_count,
active_products, active_products,
total_value, total_value,
avg_margin, status,
turnover_rate, last_calculated_at
growth_rate,
status
)
WITH category_sales AS (
SELECT
c.id as category_id,
COUNT(DISTINCT p.product_id) as product_count,
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.product_id END) as active_products,
SUM(p.stock_quantity * p.cost_price) as total_value,
CASE
WHEN SUM(o.price * o.quantity) > 0
THEN (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin,
CASE
WHEN AVG(GREATEST(p.stock_quantity, 0)) >= 0.01
THEN LEAST(
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
THEN COALESCE(o.quantity, 0)
ELSE 0
END) /
GREATEST(
AVG(GREATEST(p.stock_quantity, 0)),
1.0
),
999.99
)
ELSE 0
END as turnover_rate,
-- Current period (last 3 months)
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
THEN COALESCE(o.quantity * o.price, 0)
ELSE 0
END) as current_period_sales,
-- Previous year same period
SUM(CASE
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH) AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
THEN COALESCE(o.quantity * o.price, 0)
ELSE 0
END) as previous_year_period_sales,
c.status
FROM categories c
LEFT JOIN product_categories pc ON c.id = pc.category_id
LEFT JOIN products p ON pc.product_id = p.product_id
LEFT JOIN orders o ON p.product_id = o.product_id AND o.canceled = false
GROUP BY c.id, c.status
) )
SELECT SELECT
category_id, c.cat_id,
product_count, COUNT(DISTINCT p.pid) as product_count,
active_products, COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
total_value, COALESCE(SUM(p.stock_quantity * p.cost_price), 0) as total_value,
COALESCE(avg_margin, 0) as avg_margin, c.status,
COALESCE(turnover_rate, 0) as turnover_rate, NOW() as last_calculated_at
-- Enhanced YoY growth rate calculation FROM categories c
CASE LEFT JOIN product_categories pc ON c.cat_id = pc.cat_id
WHEN previous_year_period_sales = 0 AND current_period_sales > 0 THEN 100.0 LEFT JOIN products p ON pc.pid = p.pid
WHEN previous_year_period_sales = 0 THEN 0.0 GROUP BY c.cat_id, c.status
ELSE LEAST(
GREATEST(
((current_period_sales - previous_year_period_sales) /
NULLIF(previous_year_period_sales, 0)) * 100.0,
-100.0
),
999.99
)
END as growth_rate,
status
FROM category_sales
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
product_count = VALUES(product_count), product_count = VALUES(product_count),
active_products = VALUES(active_products), active_products = VALUES(active_products),
total_value = VALUES(total_value), total_value = VALUES(total_value),
avg_margin = VALUES(avg_margin),
turnover_rate = VALUES(turnover_rate),
growth_rate = VALUES(growth_rate),
status = VALUES(status), status = VALUES(status),
last_calculated_at = CURRENT_TIMESTAMP last_calculated_at = VALUES(last_calculated_at)
`); `);
// Calculate category time-based metrics processedCount = Math.floor(totalProducts * 0.90);
outputProgress({
status: 'running',
operation: 'Base category metrics calculated, updating with margin data',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Then update with margin and turnover data
await connection.query(`
WITH category_sales AS (
SELECT
pc.cat_id,
SUM(o.quantity * o.price) as total_sales,
SUM(o.quantity * (o.price - p.cost_price)) as total_margin,
SUM(o.quantity) as units_sold,
AVG(GREATEST(p.stock_quantity, 0)) as avg_stock
FROM product_categories pc
JOIN products p ON pc.pid = p.pid
JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 1 YEAR)
GROUP BY pc.cat_id
)
UPDATE category_metrics cm
JOIN category_sales cs ON cm.category_id = cs.cat_id
SET
cm.avg_margin = COALESCE(cs.total_margin * 100.0 / NULLIF(cs.total_sales, 0), 0),
cm.turnover_rate = LEAST(COALESCE(cs.units_sold / NULLIF(cs.avg_stock, 0), 0), 999.99),
cm.last_calculated_at = NOW()
`);
processedCount = Math.floor(totalProducts * 0.95);
outputProgress({
status: 'running',
operation: 'Margin data updated, calculating growth rates',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Finally update growth rates
await connection.query(`
WITH current_period AS (
SELECT
pc.cat_id,
SUM(o.quantity * o.price) as revenue
FROM product_categories pc
JOIN products p ON pc.pid = p.pid
JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 3 MONTH)
GROUP BY pc.cat_id
),
previous_period AS (
SELECT
pc.cat_id,
SUM(o.quantity * o.price) as revenue
FROM product_categories pc
JOIN products p ON pc.pid = p.pid
JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
AND o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 15 MONTH)
AND DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY pc.cat_id
)
UPDATE category_metrics cm
LEFT JOIN current_period cp ON cm.category_id = cp.cat_id
LEFT JOIN previous_period pp ON cm.category_id = pp.cat_id
SET
cm.growth_rate = CASE
WHEN pp.revenue = 0 AND COALESCE(cp.revenue, 0) > 0 THEN 100.0
WHEN pp.revenue = 0 THEN 0.0
ELSE LEAST(
GREATEST(
((COALESCE(cp.revenue, 0) - pp.revenue) / pp.revenue) * 100.0,
-100.0
),
999.99
)
END,
cm.last_calculated_at = NOW()
WHERE cp.cat_id IS NOT NULL OR pp.cat_id IS NOT NULL
`);
processedCount = Math.floor(totalProducts * 0.97);
outputProgress({
status: 'running',
operation: 'Growth rates calculated, updating time-based metrics',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Calculate time-based metrics
await connection.query(` await connection.query(`
INSERT INTO category_time_metrics ( INSERT INTO category_time_metrics (
category_id, category_id,
@@ -120,29 +182,28 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
turnover_rate turnover_rate
) )
SELECT SELECT
c.id as category_id, pc.cat_id,
YEAR(o.date) as year, YEAR(o.date) as year,
MONTH(o.date) as month, MONTH(o.date) as month,
COUNT(DISTINCT p.product_id) as product_count, COUNT(DISTINCT p.pid) as product_count,
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.product_id END) as active_products, COUNT(DISTINCT CASE WHEN p.visible = true THEN p.pid END) as active_products,
SUM(p.stock_quantity * p.cost_price) as total_value, SUM(p.stock_quantity * p.cost_price) as total_value,
SUM(o.price * o.quantity) as total_revenue, SUM(o.quantity * o.price) as total_revenue,
CASE COALESCE(
WHEN SUM(o.price * o.quantity) > 0 SUM(o.quantity * (o.price - p.cost_price)) * 100.0 /
THEN (SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity) NULLIF(SUM(o.quantity * o.price), 0),
ELSE 0 0
END as avg_margin, ) as avg_margin,
CASE COALESCE(
WHEN AVG(p.stock_quantity) > 0 SUM(o.quantity) / NULLIF(AVG(GREATEST(p.stock_quantity, 0)), 0),
THEN SUM(o.quantity) / AVG(p.stock_quantity) 0
ELSE 0 ) as turnover_rate
END as turnover_rate FROM product_categories pc
FROM categories c JOIN products p ON pc.pid = p.pid
LEFT JOIN product_categories pc ON c.id = pc.category_id JOIN orders o ON p.pid = o.pid
LEFT JOIN products p ON pc.product_id = p.product_id WHERE o.canceled = false
LEFT JOIN orders o ON p.product_id = o.product_id AND o.canceled = false AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH) GROUP BY pc.cat_id, YEAR(o.date), MONTH(o.date)
GROUP BY c.id, YEAR(o.date), MONTH(o.date)
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
product_count = VALUES(product_count), product_count = VALUES(product_count),
active_products = VALUES(active_products), active_products = VALUES(active_products),
@@ -152,77 +213,26 @@ async function calculateCategoryMetrics(startTime, totalProducts, processedCount
turnover_rate = VALUES(turnover_rate) turnover_rate = VALUES(turnover_rate)
`); `);
// Calculate category sales metrics processedCount = Math.floor(totalProducts * 0.99);
await connection.query(` outputProgress({
INSERT INTO category_sales_metrics ( status: 'running',
category_id, operation: 'Time-based metrics calculated',
brand, current: processedCount,
period_start, total: totalProducts,
period_end, elapsed: formatElapsedTime(startTime),
avg_daily_sales, remaining: estimateRemaining(startTime, processedCount, totalProducts),
total_sold, rate: calculateRate(startTime, processedCount),
num_products, percentage: ((processedCount / totalProducts) * 100).toFixed(1)
avg_price, });
last_calculated_at
)
WITH date_ranges AS (
SELECT
DATE_SUB(CURDATE(), INTERVAL 30 DAY) as period_start,
CURDATE() as period_end
UNION ALL
SELECT
DATE_SUB(CURDATE(), INTERVAL 90 DAY),
CURDATE()
UNION ALL
SELECT
DATE_SUB(CURDATE(), INTERVAL 180 DAY),
CURDATE()
UNION ALL
SELECT
DATE_SUB(CURDATE(), INTERVAL 365 DAY),
CURDATE()
),
category_metrics AS (
SELECT
c.id as category_id,
p.brand,
dr.period_start,
dr.period_end,
COUNT(DISTINCT p.product_id) as num_products,
COALESCE(SUM(o.quantity), 0) / DATEDIFF(dr.period_end, dr.period_start) as avg_daily_sales,
COALESCE(SUM(o.quantity), 0) as total_sold,
COALESCE(AVG(o.price), 0) as avg_price
FROM categories c
JOIN product_categories pc ON c.id = pc.category_id
JOIN products p ON pc.product_id = p.product_id
CROSS JOIN date_ranges dr
LEFT JOIN orders o ON p.product_id = o.product_id
AND o.date BETWEEN dr.period_start AND dr.period_end
AND o.canceled = false
GROUP BY c.id, p.brand, dr.period_start, dr.period_end
)
SELECT
category_id,
brand,
period_start,
period_end,
avg_daily_sales,
total_sold,
num_products,
avg_price,
NOW() as last_calculated_at
FROM category_metrics
ON DUPLICATE KEY UPDATE
avg_daily_sales = VALUES(avg_daily_sales),
total_sold = VALUES(total_sold),
num_products = VALUES(num_products),
avg_price = VALUES(avg_price),
last_calculated_at = NOW()
`);
return Math.floor(totalProducts * 0.9); return processedCount;
} catch (error) {
logError(error, 'Error calculating category metrics');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }

View File

@@ -1,80 +1,131 @@
const { outputProgress } = require('./utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateFinancialMetrics(startTime, totalProducts, processedCount) { async function calculateFinancialMetrics(startTime, totalProducts, processedCount, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Financial metrics calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Calculating financial metrics', operation: 'Starting financial metrics calculation',
current: Math.floor(totalProducts * 0.6), current: processedCount,
total: totalProducts, total: totalProducts,
elapsed: formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.6), totalProducts), remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.6)), rate: calculateRate(startTime, processedCount),
percentage: '60' percentage: ((processedCount / totalProducts) * 100).toFixed(1)
}); });
// Calculate financial metrics with optimized query
await connection.query(` await connection.query(`
UPDATE product_metrics pm WITH product_financials AS (
JOIN (
SELECT SELECT
p.product_id, p.pid,
p.cost_price * p.stock_quantity as inventory_value, p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * o.price) as total_revenue, SUM(o.quantity * o.price) as total_revenue,
SUM(o.quantity * p.cost_price) as cost_of_goods_sold, SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit, SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
MIN(o.date) as first_sale_date, MIN(o.date) as first_sale_date,
MAX(o.date) as last_sale_date, MAX(o.date) as last_sale_date,
DATEDIFF(MAX(o.date), MIN(o.date)) + 1 as calculation_period_days DATEDIFF(MAX(o.date), MIN(o.date)) + 1 as calculation_period_days,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false WHERE o.canceled = false
AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH) AND DATE(o.date) >= DATE_SUB(CURDATE(), INTERVAL 12 MONTH)
GROUP BY p.product_id GROUP BY p.pid
) fin ON pm.product_id = fin.product_id )
UPDATE product_metrics pm
JOIN product_financials pf ON pm.pid = pf.pid
SET SET
pm.inventory_value = COALESCE(fin.inventory_value, 0), pm.inventory_value = COALESCE(pf.inventory_value, 0),
pm.total_revenue = COALESCE(fin.total_revenue, 0), pm.total_revenue = COALESCE(pf.total_revenue, 0),
pm.cost_of_goods_sold = COALESCE(fin.cost_of_goods_sold, 0), pm.cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0),
pm.gross_profit = COALESCE(fin.gross_profit, 0), pm.gross_profit = COALESCE(pf.gross_profit, 0),
pm.gmroi = CASE pm.gmroi = CASE
WHEN COALESCE(fin.inventory_value, 0) > 0 AND fin.calculation_period_days > 0 THEN WHEN COALESCE(pf.inventory_value, 0) > 0 AND pf.active_days > 0 THEN
(COALESCE(fin.gross_profit, 0) * (365.0 / fin.calculation_period_days)) / COALESCE(fin.inventory_value, 0) (COALESCE(pf.gross_profit, 0) * (365.0 / pf.active_days)) / COALESCE(pf.inventory_value, 0)
ELSE 0 ELSE 0
END END
`); `);
// Update time-based aggregates with financial metrics processedCount = Math.floor(totalProducts * 0.65);
outputProgress({
status: 'running',
operation: 'Base financial metrics calculated, updating time aggregates',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Update time-based aggregates with optimized query
await connection.query(` await connection.query(`
UPDATE product_time_aggregates pta WITH monthly_financials AS (
JOIN (
SELECT SELECT
p.product_id, p.pid,
YEAR(o.date) as year, YEAR(o.date) as year,
MONTH(o.date) as month, MONTH(o.date) as month,
p.cost_price * p.stock_quantity as inventory_value, p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit, SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as days_in_period COUNT(DISTINCT DATE(o.date)) as active_days,
MIN(o.date) as period_start,
MAX(o.date) as period_end
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false WHERE o.canceled = false
GROUP BY p.product_id, YEAR(o.date), MONTH(o.date) GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
) fin ON pta.product_id = fin.product_id )
AND pta.year = fin.year UPDATE product_time_aggregates pta
AND pta.month = fin.month JOIN monthly_financials mf ON pta.pid = mf.pid
AND pta.year = mf.year
AND pta.month = mf.month
SET SET
pta.inventory_value = COALESCE(fin.inventory_value, 0), pta.inventory_value = COALESCE(mf.inventory_value, 0),
pta.gmroi = CASE pta.gmroi = CASE
WHEN COALESCE(fin.inventory_value, 0) > 0 AND fin.days_in_period > 0 THEN WHEN COALESCE(mf.inventory_value, 0) > 0 AND mf.active_days > 0 THEN
(COALESCE(fin.gross_profit, 0) * (365.0 / fin.days_in_period)) / COALESCE(fin.inventory_value, 0) (COALESCE(mf.gross_profit, 0) * (365.0 / mf.active_days)) / COALESCE(mf.inventory_value, 0)
ELSE 0 ELSE 0
END END
`); `);
return Math.floor(totalProducts * 0.7); processedCount = Math.floor(totalProducts * 0.70);
outputProgress({
status: 'running',
operation: 'Time-based aggregates updated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
} catch (error) {
logError(error, 'Error calculating financial metrics');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }

View File

@@ -1,23 +1,40 @@
const { outputProgress, logError } = require('./utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateProductMetrics(startTime, totalProducts, processedCount = 0) { // Helper function to handle NaN and undefined values
function sanitizeValue(value) {
if (value === undefined || value === null || Number.isNaN(value)) {
return null;
}
return value;
}
async function calculateProductMetrics(startTime, totalProducts, processedCount = 0, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
// Process in batches of 250 // Skip flags are inherited from the parent scope
const batchSize = 250; const SKIP_PRODUCT_BASE_METRICS = 0;
for (let offset = 0; offset < totalProducts; offset += batchSize) { const SKIP_PRODUCT_TIME_AGGREGATES = 0;
const [products] = await connection.query('SELECT product_id, vendor FROM products LIMIT ? OFFSET ?', [batchSize, offset])
.catch(err => {
logError(err, `Failed to fetch products batch at offset ${offset}`);
throw err;
});
processedCount += products.length;
// Update progress after each batch if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Product metrics calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
// Calculate base product metrics
if (!SKIP_PRODUCT_BASE_METRICS) {
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Processing products', operation: 'Starting base product metrics calculation',
current: processedCount, current: processedCount,
total: totalProducts, total: totalProducts,
elapsed: formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
@@ -26,473 +43,239 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
percentage: ((processedCount / totalProducts) * 100).toFixed(1) percentage: ((processedCount / totalProducts) * 100).toFixed(1)
}); });
// Process the batch // Calculate base metrics
const metricsUpdates = []; await connection.query(`
for (const product of products) { UPDATE product_metrics pm
try { JOIN (
// Get configuration values for this product SELECT
const [configs] = await connection.query(` p.pid,
WITH product_info AS ( p.cost_price * p.stock_quantity as inventory_value,
SELECT SUM(o.quantity) as total_quantity,
p.product_id, COUNT(DISTINCT o.order_number) as number_of_orders,
p.vendor, SUM(o.quantity * o.price) as total_revenue,
pc.category_id SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
FROM products p AVG(o.price) as avg_price,
LEFT JOIN product_categories pc ON p.product_id = pc.product_id STDDEV(o.price) as price_std,
WHERE p.product_id = ? MIN(o.date) as first_sale_date,
), MAX(o.date) as last_sale_date,
threshold_options AS ( COUNT(DISTINCT DATE(o.date)) as active_days
SELECT FROM products p
st.*, LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
CASE GROUP BY p.pid
WHEN st.category_id = pi.category_id AND st.vendor = pi.vendor THEN 1 ) stats ON pm.pid = stats.pid
WHEN st.category_id = pi.category_id AND st.vendor IS NULL THEN 2 SET
WHEN st.category_id IS NULL AND st.vendor = pi.vendor THEN 3 pm.inventory_value = COALESCE(stats.inventory_value, 0),
WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 4 pm.avg_quantity_per_order = COALESCE(stats.total_quantity / NULLIF(stats.number_of_orders, 0), 0),
ELSE 5 pm.number_of_orders = COALESCE(stats.number_of_orders, 0),
END as priority pm.total_revenue = COALESCE(stats.total_revenue, 0),
FROM product_info pi pm.cost_of_goods_sold = COALESCE(stats.cost_of_goods_sold, 0),
CROSS JOIN stock_thresholds st pm.gross_profit = COALESCE(stats.total_revenue - stats.cost_of_goods_sold, 0),
WHERE (st.category_id = pi.category_id OR st.category_id IS NULL) pm.avg_margin_percent = CASE
AND (st.vendor = pi.vendor OR st.vendor IS NULL) WHEN COALESCE(stats.total_revenue, 0) > 0
), THEN ((stats.total_revenue - stats.cost_of_goods_sold) / stats.total_revenue) * 100
velocity_options AS ( ELSE 0
SELECT END,
sv.*, pm.first_sale_date = stats.first_sale_date,
CASE pm.last_sale_date = stats.last_sale_date,
WHEN sv.category_id = pi.category_id AND sv.vendor = pi.vendor THEN 1 pm.gmroi = CASE
WHEN sv.category_id = pi.category_id AND sv.vendor IS NULL THEN 2 WHEN COALESCE(stats.inventory_value, 0) > 0
WHEN sv.category_id IS NULL AND sv.vendor = pi.vendor THEN 3 THEN (stats.total_revenue - stats.cost_of_goods_sold) / stats.inventory_value
WHEN sv.category_id IS NULL AND sv.vendor IS NULL THEN 4 ELSE 0
ELSE 5 END,
END as priority pm.last_calculated_at = NOW()
FROM product_info pi `);
CROSS JOIN sales_velocity_config sv
WHERE (sv.category_id = pi.category_id OR sv.category_id IS NULL)
AND (sv.vendor = pi.vendor OR sv.vendor IS NULL)
),
safety_options AS (
SELECT
ss.*,
CASE
WHEN ss.category_id = pi.category_id AND ss.vendor = pi.vendor THEN 1
WHEN ss.category_id = pi.category_id AND ss.vendor IS NULL THEN 2
WHEN ss.category_id IS NULL AND ss.vendor = pi.vendor THEN 3
WHEN ss.category_id IS NULL AND ss.vendor IS NULL THEN 4
ELSE 5
END as priority
FROM product_info pi
CROSS JOIN safety_stock_config ss
WHERE (ss.category_id = pi.category_id OR ss.category_id IS NULL)
AND (ss.vendor = pi.vendor OR ss.vendor IS NULL)
)
SELECT
COALESCE(
(SELECT critical_days
FROM threshold_options
ORDER BY priority LIMIT 1),
7
) as critical_days,
COALESCE(
(SELECT reorder_days
FROM threshold_options
ORDER BY priority LIMIT 1),
14
) as reorder_days,
COALESCE(
(SELECT overstock_days
FROM threshold_options
ORDER BY priority LIMIT 1),
90
) as overstock_days,
COALESCE(
(SELECT low_stock_threshold
FROM threshold_options
ORDER BY priority LIMIT 1),
5
) as low_stock_threshold,
COALESCE(
(SELECT daily_window_days
FROM velocity_options
ORDER BY priority LIMIT 1),
30
) as daily_window_days,
COALESCE(
(SELECT weekly_window_days
FROM velocity_options
ORDER BY priority LIMIT 1),
7
) as weekly_window_days,
COALESCE(
(SELECT monthly_window_days
FROM velocity_options
ORDER BY priority LIMIT 1),
90
) as monthly_window_days,
COALESCE(
(SELECT coverage_days
FROM safety_options
ORDER BY priority LIMIT 1),
14
) as safety_stock_days,
COALESCE(
(SELECT service_level
FROM safety_options
ORDER BY priority LIMIT 1),
95.0
) as service_level
`, [product.product_id]);
const config = configs[0]; processedCount = Math.floor(totalProducts * 0.4);
outputProgress({
status: 'running',
operation: 'Base product metrics calculated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
} else {
processedCount = Math.floor(totalProducts * 0.4);
outputProgress({
status: 'running',
operation: 'Skipping base product metrics calculation',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
}
// Calculate sales metrics if (isCancelled) return processedCount;
const [salesMetrics] = await connection.query(`
WITH sales_summary AS (
SELECT
SUM(o.quantity) as total_quantity_sold,
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
MAX(o.date) as last_sale_date,
MIN(o.date) as first_sale_date,
COUNT(DISTINCT o.order_number) as number_of_orders,
AVG(o.quantity) as avg_quantity_per_order,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_30_days_qty,
CASE
WHEN SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) IS NULL THEN 0
ELSE SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END)
END as rolling_weekly_avg,
SUM(CASE WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) THEN o.quantity ELSE 0 END) as last_month_qty
FROM orders o
JOIN products p ON o.product_id = p.product_id
WHERE o.canceled = 0 AND o.product_id = ?
GROUP BY o.product_id
)
SELECT
total_quantity_sold,
total_revenue,
total_cost,
last_sale_date,
first_sale_date,
number_of_orders,
avg_quantity_per_order,
last_30_days_qty / ? as rolling_daily_avg,
rolling_weekly_avg / ? as rolling_weekly_avg,
last_month_qty / ? as rolling_monthly_avg
FROM sales_summary
`, [
config.daily_window_days,
config.weekly_window_days,
config.weekly_window_days,
config.monthly_window_days,
product.product_id,
config.daily_window_days,
config.weekly_window_days,
config.monthly_window_days
]);
// Calculate purchase metrics // Calculate product time aggregates
const [purchaseMetrics] = await connection.query(` if (!SKIP_PRODUCT_TIME_AGGREGATES) {
WITH recent_orders AS ( outputProgress({
SELECT status: 'running',
date, operation: 'Starting product time aggregates calculation',
received_date, current: processedCount,
received, total: totalProducts,
cost_price, elapsed: formatElapsedTime(startTime),
DATEDIFF(received_date, date) as lead_time_days, remaining: estimateRemaining(startTime, processedCount, totalProducts),
ROW_NUMBER() OVER (ORDER BY date DESC) as order_rank rate: calculateRate(startTime, processedCount),
FROM purchase_orders percentage: ((processedCount / totalProducts) * 100).toFixed(1)
WHERE status = 'closed' });
AND product_id = ?
AND received > 0
AND received_date IS NOT NULL
),
lead_time_orders AS (
SELECT *
FROM recent_orders
WHERE order_rank <= 5
OR date >= DATE_SUB(CURDATE(), INTERVAL 90 DAY)
)
SELECT
SUM(CASE WHEN received >= 0 THEN received ELSE 0 END) as total_quantity_purchased,
SUM(CASE WHEN received >= 0 THEN cost_price * received ELSE 0 END) as total_cost,
MAX(date) as last_purchase_date,
MIN(received_date) as first_received_date,
MAX(received_date) as last_received_date,
AVG(lead_time_days) as avg_lead_time_days
FROM lead_time_orders
`, [product.product_id]);
// Get stock info // Calculate time-based aggregates
const [stockInfo] = await connection.query(` await connection.query(`
SELECT INSERT INTO product_time_aggregates (
p.stock_quantity, pid,
p.cost_price, year,
p.created_at, month,
p.replenishable, total_quantity_sold,
p.moq, total_revenue,
DATEDIFF(CURDATE(), MIN(po.received_date)) as days_since_first_stock, total_cost,
DATEDIFF(CURDATE(), COALESCE( order_count,
(SELECT MAX(o2.date) avg_price,
FROM orders o2 profit_margin,
WHERE o2.product_id = p.product_id inventory_value,
AND o2.canceled = false), gmroi
CURDATE() )
)) as days_since_last_sale, SELECT
(SELECT SUM(quantity) p.pid,
FROM orders o3 YEAR(o.date) as year,
WHERE o3.product_id = p.product_id MONTH(o.date) as month,
AND o3.canceled = false) as total_quantity_sold, SUM(o.quantity) as total_quantity_sold,
CASE SUM(o.quantity * o.price) as total_revenue,
WHEN EXISTS ( SUM(o.quantity * p.cost_price) as total_cost,
SELECT 1 FROM orders o COUNT(DISTINCT o.order_number) as order_count,
WHERE o.product_id = p.product_id AVG(o.price) as avg_price,
AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) CASE
AND o.canceled = false WHEN SUM(o.quantity * o.price) > 0
AND (SELECT SUM(quantity) FROM orders o2 THEN ((SUM(o.quantity * o.price) - SUM(o.quantity * p.cost_price)) / SUM(o.quantity * o.price)) * 100
WHERE o2.product_id = p.product_id ELSE 0
AND o2.date >= o.date END as profit_margin,
AND o2.canceled = false) = 0 p.cost_price * p.stock_quantity as inventory_value,
) THEN true CASE
ELSE false WHEN p.cost_price * p.stock_quantity > 0
END as had_recent_stockout THEN (SUM(o.quantity * (o.price - p.cost_price))) / (p.cost_price * p.stock_quantity)
FROM products p ELSE 0
LEFT JOIN purchase_orders po ON p.product_id = po.product_id END as gmroi
AND po.status = 'closed' FROM products p
AND po.received > 0 LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHERE p.product_id = ? WHERE o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY p.product_id GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
`, [product.product_id]); ON DUPLICATE KEY UPDATE
total_quantity_sold = VALUES(total_quantity_sold),
total_revenue = VALUES(total_revenue),
total_cost = VALUES(total_cost),
order_count = VALUES(order_count),
avg_price = VALUES(avg_price),
profit_margin = VALUES(profit_margin),
inventory_value = VALUES(inventory_value),
gmroi = VALUES(gmroi)
`);
// Calculate metrics processedCount = Math.floor(totalProducts * 0.6);
const metrics = salesMetrics[0] || {}; outputProgress({
const purchases = purchaseMetrics[0] || {}; status: 'running',
const stock = stockInfo[0] || {}; operation: 'Product time aggregates calculated',
current: processedCount,
const daily_sales_avg = metrics.rolling_daily_avg || 0; total: totalProducts,
const weekly_sales_avg = metrics.rolling_weekly_avg || 0; elapsed: formatElapsedTime(startTime),
const monthly_sales_avg = metrics.total_quantity_sold ? metrics.total_quantity_sold / 30 : 0; remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
// Calculate days of inventory percentage: ((processedCount / totalProducts) * 100).toFixed(1)
const days_of_inventory = daily_sales_avg > 0 ? });
Math.ceil( } else {
(stock.stock_quantity / daily_sales_avg) + processedCount = Math.floor(totalProducts * 0.6);
(purchases.avg_lead_time_days || config.reorder_days) * outputProgress({
(1 + (config.service_level / 100)) status: 'running',
) : null; operation: 'Skipping product time aggregates calculation',
current: processedCount,
const weeks_of_inventory = days_of_inventory ? Math.ceil(days_of_inventory / 7) : null; total: totalProducts,
elapsed: formatElapsedTime(startTime),
// Calculate margin percent remaining: estimateRemaining(startTime, processedCount, totalProducts),
const margin_percent = metrics.total_revenue > 0 ? rate: calculateRate(startTime, processedCount),
((metrics.total_revenue - metrics.total_cost) / metrics.total_revenue) * 100 : percentage: ((processedCount / totalProducts) * 100).toFixed(1)
null; });
// Calculate inventory value
const inventory_value = (stock.stock_quantity || 0) * (stock.cost_price || 0);
// Calculate stock status
const stock_status = calculateStockStatus(stock, config, daily_sales_avg, weekly_sales_avg, monthly_sales_avg);
// Calculate reorder quantity and overstocked amount
const { reorder_qty, overstocked_amt } = calculateReorderQuantities(
stock,
stock_status,
daily_sales_avg,
purchases.avg_lead_time_days,
config
);
// Add to batch update
metricsUpdates.push([
product.product_id,
daily_sales_avg || null,
weekly_sales_avg || null,
monthly_sales_avg || null,
metrics.avg_quantity_per_order || null,
metrics.number_of_orders || 0,
metrics.first_sale_date || null,
metrics.last_sale_date || null,
days_of_inventory,
weeks_of_inventory,
daily_sales_avg > 0 ? Math.max(1, Math.ceil(daily_sales_avg * config.reorder_days)) : null,
margin_percent,
metrics.total_revenue || 0,
inventory_value || 0,
purchases.avg_lead_time_days || null,
purchases.last_purchase_date || null,
purchases.first_received_date || null,
purchases.last_received_date || null,
stock_status,
reorder_qty,
overstocked_amt
]);
} catch (err) {
logError(err, `Failed processing product ${product.product_id}`);
continue;
}
}
// Batch update metrics
if (metricsUpdates.length > 0) {
await connection.query(`
INSERT INTO product_metrics (
product_id,
daily_sales_avg,
weekly_sales_avg,
monthly_sales_avg,
avg_quantity_per_order,
number_of_orders,
first_sale_date,
last_sale_date,
days_of_inventory,
weeks_of_inventory,
reorder_point,
avg_margin_percent,
total_revenue,
inventory_value,
avg_lead_time_days,
last_purchase_date,
first_received_date,
last_received_date,
stock_status,
reorder_qty,
overstocked_amt
) VALUES ?
ON DUPLICATE KEY UPDATE
last_calculated_at = NOW(),
daily_sales_avg = VALUES(daily_sales_avg),
weekly_sales_avg = VALUES(weekly_sales_avg),
monthly_sales_avg = VALUES(monthly_sales_avg),
avg_quantity_per_order = VALUES(avg_quantity_per_order),
number_of_orders = VALUES(number_of_orders),
first_sale_date = VALUES(first_sale_date),
last_sale_date = VALUES(last_sale_date),
days_of_inventory = VALUES(days_of_inventory),
weeks_of_inventory = VALUES(weeks_of_inventory),
reorder_point = VALUES(reorder_point),
avg_margin_percent = VALUES(avg_margin_percent),
total_revenue = VALUES(total_revenue),
inventory_value = VALUES(inventory_value),
avg_lead_time_days = VALUES(avg_lead_time_days),
last_purchase_date = VALUES(last_purchase_date),
first_received_date = VALUES(first_received_date),
last_received_date = VALUES(last_received_date),
stock_status = VALUES(stock_status),
reorder_qty = VALUES(reorder_qty),
overstocked_amt = VALUES(overstocked_amt)
`, [metricsUpdates]);
}
} }
return processedCount; return processedCount;
} catch (error) {
logError(error, 'Error calculating product metrics');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }
function calculateStockStatus(stock, config, daily_sales_avg, weekly_sales_avg, monthly_sales_avg) { function calculateStockStatus(stock, config, daily_sales_avg, weekly_sales_avg, monthly_sales_avg) {
const days_since_first_stock = stock.days_since_first_stock || 0; if (stock <= 0) {
const days_since_last_sale = stock.days_since_last_sale || 9999; return 'Out of Stock';
const total_quantity_sold = stock.total_quantity_sold || 0;
const had_recent_stockout = stock.had_recent_stockout || false;
const dq = stock.stock_quantity || 0;
const ds = daily_sales_avg || 0;
const ws = weekly_sales_avg || 0;
const ms = monthly_sales_avg || 0;
// If no stock, return immediately
if (dq === 0) {
return had_recent_stockout ? 'Critical' : 'Out of Stock';
} }
// 1. Check if truly "New" (≤30 days and no sales) // Use the most appropriate sales average based on data quality
if (days_since_first_stock <= 30 && total_quantity_sold === 0) { let sales_avg = daily_sales_avg;
return 'New'; if (sales_avg === 0) {
sales_avg = weekly_sales_avg / 7;
}
if (sales_avg === 0) {
sales_avg = monthly_sales_avg / 30;
} }
// 2. Handle zero or very low sales velocity cases if (sales_avg === 0) {
if (ds === 0 || (ds < 0.1 && ws < 0.5)) { return stock <= config.low_stock_threshold ? 'Low Stock' : 'In Stock';
if (days_since_first_stock > config.overstock_days) {
return 'Overstocked';
}
if (days_since_first_stock > 30) {
return 'At Risk';
}
} }
// 3. Calculate days of supply and check velocity trends const days_of_stock = stock / sales_avg;
const days_of_supply = ds > 0 ? dq / ds : 999;
const velocity_trend = ds > 0 ? (ds / (ms || ds) - 1) * 100 : 0;
// Critical stock level if (days_of_stock <= config.critical_days) {
if (days_of_supply <= config.critical_days) {
return 'Critical'; return 'Critical';
} } else if (days_of_stock <= config.reorder_days) {
// Reorder cases
if (days_of_supply <= config.reorder_days ||
(had_recent_stockout && days_of_supply <= config.reorder_days * 1.5)) {
return 'Reorder'; return 'Reorder';
} } else if (days_of_stock > config.overstock_days) {
// At Risk cases
if (
(days_of_supply >= config.overstock_days * 0.8) ||
(velocity_trend <= -50 && days_of_supply > config.reorder_days * 2) ||
(days_since_last_sale > 45 && dq > 0) ||
(ds > 0 && ds < 0.2 && dq > ds * config.overstock_days * 0.5)
) {
return 'At Risk';
}
// Overstock cases
if (days_of_supply >= config.overstock_days) {
return 'Overstocked'; return 'Overstocked';
} }
// If none of the above conditions are met
return 'Healthy'; return 'Healthy';
} }
function calculateReorderQuantities(stock, stock_status, daily_sales_avg, avg_lead_time, config) { function calculateReorderQuantities(stock, stock_status, daily_sales_avg, avg_lead_time, config) {
// Calculate safety stock based on service level and lead time
const z_score = 1.96; // 95% service level
const lead_time = avg_lead_time || config.target_days;
const safety_stock = Math.ceil(daily_sales_avg * Math.sqrt(lead_time) * z_score);
// Calculate reorder point
const lead_time_demand = daily_sales_avg * lead_time;
const reorder_point = Math.ceil(lead_time_demand + safety_stock);
// Calculate reorder quantity using EOQ formula if we have the necessary data
let reorder_qty = 0; let reorder_qty = 0;
let overstocked_amt = 0; if (daily_sales_avg > 0) {
const annual_demand = daily_sales_avg * 365;
// Only calculate reorder quantity for replenishable products const order_cost = 25; // Fixed cost per order
if (stock.replenishable && (stock_status === 'Critical' || stock_status === 'Reorder')) { const holding_cost_percent = 0.25; // 25% annual holding cost
const ds = daily_sales_avg || 0;
const lt = avg_lead_time || 14;
const sc = config.safety_stock_days || 14;
const ss = config.safety_stock_days || 14;
const dq = stock.stock_quantity || 0;
const moq = stock.moq || 1;
// Calculate desired stock level
const desired_stock = (ds * (lt + sc)) + ss;
// Calculate raw reorder amount reorder_qty = Math.ceil(Math.sqrt((2 * annual_demand * order_cost) / holding_cost_percent));
const raw_reorder = Math.max(0, desired_stock - dq); } else {
// If no sales data, use a basic calculation
// Round up to nearest MOQ reorder_qty = Math.max(safety_stock, config.low_stock_threshold);
reorder_qty = Math.ceil(raw_reorder / moq) * moq;
} }
// Calculate overstocked amount for overstocked products // Calculate overstocked amount
if (stock_status === 'Overstocked') { const overstocked_amt = stock_status === 'Overstocked' ?
const ds = daily_sales_avg || 0; stock - Math.ceil(daily_sales_avg * config.overstock_days) :
const dq = stock.stock_quantity || 0; 0;
const lt = avg_lead_time || 14;
const sc = config.safety_stock_days || 14;
const ss = config.safety_stock_days || 14;
// Calculate maximum desired stock return {
const max_desired_stock = (ds * config.overstock_days) + ss; safety_stock,
reorder_point,
// Calculate excess inventory reorder_qty,
overstocked_amt = Math.max(0, dq - max_desired_stock); overstocked_amt
} };
return { reorder_qty, overstocked_amt };
} }
module.exports = calculateProductMetrics; module.exports = calculateProductMetrics;

View File

@@ -1,116 +1,170 @@
const { outputProgress } = require('./utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateSalesForecasts(startTime, totalProducts, processedCount) { async function calculateSalesForecasts(startTime, totalProducts, processedCount, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Sales forecasts calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Calculating sales forecasts', operation: 'Starting sales forecasts calculation',
current: Math.floor(totalProducts * 0.98), current: processedCount,
total: totalProducts, total: totalProducts,
elapsed: formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.98), totalProducts), remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.98)), rate: calculateRate(startTime, processedCount),
percentage: '98' percentage: ((processedCount / totalProducts) * 100).toFixed(1)
}); });
// First, create a temporary table for forecast dates
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_forecast_dates (
forecast_date DATE,
day_of_week INT,
month INT,
PRIMARY KEY (forecast_date)
)
`);
await connection.query(`
INSERT INTO temp_forecast_dates
SELECT
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date,
DAYOFWEEK(DATE_ADD(CURRENT_DATE, INTERVAL n DAY)) as day_of_week,
MONTH(DATE_ADD(CURRENT_DATE, INTERVAL n DAY)) as month
FROM (
SELECT a.N + b.N * 10 as n
FROM
(SELECT 0 as N UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) a,
(SELECT 0 as N UNION SELECT 1 UNION SELECT 2) b
ORDER BY n
LIMIT 31
) numbers
`);
processedCount = Math.floor(totalProducts * 0.92);
outputProgress({
status: 'running',
operation: 'Forecast dates prepared, calculating daily sales stats',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Create temporary table for daily sales stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_daily_sales AS
SELECT
o.pid,
DAYOFWEEK(o.date) as day_of_week,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue,
COUNT(DISTINCT DATE(o.date)) as day_count
FROM orders o
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY o.pid, DAYOFWEEK(o.date)
`);
processedCount = Math.floor(totalProducts * 0.94);
outputProgress({
status: 'running',
operation: 'Daily sales stats calculated, preparing product stats',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Create temporary table for product stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_product_stats AS
SELECT
pid,
AVG(daily_revenue) as overall_avg_revenue,
SUM(day_count) as total_days
FROM temp_daily_sales
GROUP BY pid
`);
processedCount = Math.floor(totalProducts * 0.96);
outputProgress({
status: 'running',
operation: 'Product stats prepared, calculating product-level forecasts',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Calculate product-level forecasts // Calculate product-level forecasts
await connection.query(` await connection.query(`
INSERT INTO sales_forecasts ( INSERT INTO sales_forecasts (
product_id, pid,
forecast_date, forecast_date,
forecast_units, forecast_units,
forecast_revenue, forecast_revenue,
confidence_level, confidence_level,
last_calculated_at last_calculated_at
) )
WITH daily_sales AS (
SELECT
o.product_id,
DATE(o.date) as sale_date,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue
FROM orders o
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY o.product_id, DATE(o.date)
),
forecast_dates AS (
SELECT
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date
FROM (
SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION
SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION
SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION
SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION
SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION
SELECT 30
) numbers
),
product_stats AS (
SELECT
ds.product_id,
AVG(ds.daily_quantity) as avg_daily_quantity,
STDDEV_SAMP(ds.daily_quantity) as std_daily_quantity,
AVG(ds.daily_revenue) as avg_daily_revenue,
STDDEV_SAMP(ds.daily_revenue) as std_daily_revenue,
COUNT(*) as data_points,
-- Calculate day-of-week averages
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 1 THEN ds.daily_revenue END) as sunday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 2 THEN ds.daily_revenue END) as monday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 3 THEN ds.daily_revenue END) as tuesday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 4 THEN ds.daily_revenue END) as wednesday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 5 THEN ds.daily_revenue END) as thursday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 6 THEN ds.daily_revenue END) as friday_avg,
AVG(CASE WHEN DAYOFWEEK(ds.sale_date) = 7 THEN ds.daily_revenue END) as saturday_avg
FROM daily_sales ds
GROUP BY ds.product_id
)
SELECT SELECT
ps.product_id, ds.pid,
fd.forecast_date, fd.forecast_date,
GREATEST(0, GREATEST(0,
ps.avg_daily_quantity * AVG(ds.daily_quantity) *
(1 + COALESCE( (1 + COALESCE(sf.seasonality_factor, 0))
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
0
))
) as forecast_units, ) as forecast_units,
GREATEST(0, GREATEST(0,
CASE DAYOFWEEK(fd.forecast_date) COALESCE(
WHEN 1 THEN COALESCE(ps.sunday_avg, ps.avg_daily_revenue) CASE
WHEN 2 THEN COALESCE(ps.monday_avg, ps.avg_daily_revenue) WHEN SUM(ds.day_count) >= 4 THEN AVG(ds.daily_revenue)
WHEN 3 THEN COALESCE(ps.tuesday_avg, ps.avg_daily_revenue) ELSE ps.overall_avg_revenue
WHEN 4 THEN COALESCE(ps.wednesday_avg, ps.avg_daily_revenue) END *
WHEN 5 THEN COALESCE(ps.thursday_avg, ps.avg_daily_revenue) (1 + COALESCE(sf.seasonality_factor, 0)) *
WHEN 6 THEN COALESCE(ps.friday_avg, ps.avg_daily_revenue) (0.95 + (RAND() * 0.1)),
WHEN 7 THEN COALESCE(ps.saturday_avg, ps.avg_daily_revenue)
END *
(1 + COALESCE(
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
0 0
)) * )
-- Add some randomness within a small range (±5%)
(0.95 + (RAND() * 0.1))
) as forecast_revenue, ) as forecast_revenue,
CASE CASE
WHEN ps.data_points >= 60 THEN 90 WHEN ps.total_days >= 60 THEN 90
WHEN ps.data_points >= 30 THEN 80 WHEN ps.total_days >= 30 THEN 80
WHEN ps.data_points >= 14 THEN 70 WHEN ps.total_days >= 14 THEN 70
ELSE 60 ELSE 60
END as confidence_level, END as confidence_level,
NOW() as last_calculated_at NOW() as last_calculated_at
FROM product_stats ps FROM temp_daily_sales ds
CROSS JOIN forecast_dates fd JOIN temp_product_stats ps ON ds.pid = ps.pid
WHERE ps.avg_daily_quantity > 0 CROSS JOIN temp_forecast_dates fd
LEFT JOIN sales_seasonality sf ON fd.month = sf.month
GROUP BY ds.pid, fd.forecast_date, ps.overall_avg_revenue, ps.total_days, sf.seasonality_factor
HAVING AVG(ds.daily_quantity) > 0
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
forecast_units = VALUES(forecast_units), forecast_units = VALUES(forecast_units),
forecast_revenue = VALUES(forecast_revenue), forecast_revenue = VALUES(forecast_revenue),
@@ -118,6 +172,60 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
last_calculated_at = NOW() last_calculated_at = NOW()
`); `);
processedCount = Math.floor(totalProducts * 0.98);
outputProgress({
status: 'running',
operation: 'Product forecasts calculated, preparing category stats',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Create temporary table for category stats
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_sales AS
SELECT
pc.cat_id,
DAYOFWEEK(o.date) as day_of_week,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue,
COUNT(DISTINCT DATE(o.date)) as day_count
FROM orders o
JOIN product_categories pc ON o.pid = pc.pid
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY pc.cat_id, DAYOFWEEK(o.date)
`);
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_category_stats AS
SELECT
cat_id,
AVG(daily_revenue) as overall_avg_revenue,
SUM(day_count) as total_days
FROM temp_category_sales
GROUP BY cat_id
`);
processedCount = Math.floor(totalProducts * 0.99);
outputProgress({
status: 'running',
operation: 'Category stats prepared, calculating category-level forecasts',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Calculate category-level forecasts // Calculate category-level forecasts
await connection.query(` await connection.query(`
INSERT INTO category_forecasts ( INSERT INTO category_forecasts (
@@ -128,93 +236,37 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
confidence_level, confidence_level,
last_calculated_at last_calculated_at
) )
WITH category_daily_sales AS (
SELECT
pc.category_id,
DATE(o.date) as sale_date,
SUM(o.quantity) as daily_quantity,
SUM(o.price * o.quantity) as daily_revenue
FROM orders o
JOIN product_categories pc ON o.product_id = pc.product_id
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 90 DAY)
GROUP BY pc.category_id, DATE(o.date)
),
forecast_dates AS (
SELECT
DATE_ADD(CURRENT_DATE, INTERVAL n DAY) as forecast_date
FROM (
SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION
SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION
SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION
SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION
SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION
SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION
SELECT 30
) numbers
),
category_stats AS (
SELECT
cds.category_id,
AVG(cds.daily_quantity) as avg_daily_quantity,
STDDEV_SAMP(cds.daily_quantity) as std_daily_quantity,
AVG(cds.daily_revenue) as avg_daily_revenue,
STDDEV_SAMP(cds.daily_revenue) as std_daily_revenue,
COUNT(*) as data_points,
-- Calculate day-of-week averages
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 1 THEN cds.daily_revenue END) as sunday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 2 THEN cds.daily_revenue END) as monday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 3 THEN cds.daily_revenue END) as tuesday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 4 THEN cds.daily_revenue END) as wednesday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 5 THEN cds.daily_revenue END) as thursday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 6 THEN cds.daily_revenue END) as friday_avg,
AVG(CASE WHEN DAYOFWEEK(cds.sale_date) = 7 THEN cds.daily_revenue END) as saturday_avg
FROM category_daily_sales cds
GROUP BY cds.category_id
)
SELECT SELECT
cs.category_id, cs.cat_id as category_id,
fd.forecast_date, fd.forecast_date,
GREATEST(0, GREATEST(0,
cs.avg_daily_quantity * AVG(cs.daily_quantity) *
(1 + COALESCE( (1 + COALESCE(sf.seasonality_factor, 0))
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
0
))
) as forecast_units, ) as forecast_units,
GREATEST(0, GREATEST(0,
CASE DAYOFWEEK(fd.forecast_date) COALESCE(
WHEN 1 THEN COALESCE(cs.sunday_avg, cs.avg_daily_revenue) CASE
WHEN 2 THEN COALESCE(cs.monday_avg, cs.avg_daily_revenue) WHEN SUM(cs.day_count) >= 4 THEN AVG(cs.daily_revenue)
WHEN 3 THEN COALESCE(cs.tuesday_avg, cs.avg_daily_revenue) ELSE ct.overall_avg_revenue
WHEN 4 THEN COALESCE(cs.wednesday_avg, cs.avg_daily_revenue) END *
WHEN 5 THEN COALESCE(cs.thursday_avg, cs.avg_daily_revenue) (1 + COALESCE(sf.seasonality_factor, 0)) *
WHEN 6 THEN COALESCE(cs.friday_avg, cs.avg_daily_revenue) (0.95 + (RAND() * 0.1)),
WHEN 7 THEN COALESCE(cs.saturday_avg, cs.avg_daily_revenue)
END *
(1 + COALESCE(
(SELECT seasonality_factor
FROM sales_seasonality
WHERE MONTH(fd.forecast_date) = month
LIMIT 1),
0 0
)) * )
-- Add some randomness within a small range (±5%)
(0.95 + (RAND() * 0.1))
) as forecast_revenue, ) as forecast_revenue,
CASE CASE
WHEN cs.data_points >= 60 THEN 90 WHEN ct.total_days >= 60 THEN 90
WHEN cs.data_points >= 30 THEN 80 WHEN ct.total_days >= 30 THEN 80
WHEN cs.data_points >= 14 THEN 70 WHEN ct.total_days >= 14 THEN 70
ELSE 60 ELSE 60
END as confidence_level, END as confidence_level,
NOW() as last_calculated_at NOW() as last_calculated_at
FROM category_stats cs FROM temp_category_sales cs
CROSS JOIN forecast_dates fd JOIN temp_category_stats ct ON cs.cat_id = ct.cat_id
WHERE cs.avg_daily_quantity > 0 CROSS JOIN temp_forecast_dates fd
LEFT JOIN sales_seasonality sf ON fd.month = sf.month
GROUP BY cs.cat_id, fd.forecast_date, ct.overall_avg_revenue, ct.total_days, sf.seasonality_factor
HAVING AVG(cs.daily_quantity) > 0
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
forecast_units = VALUES(forecast_units), forecast_units = VALUES(forecast_units),
forecast_revenue = VALUES(forecast_revenue), forecast_revenue = VALUES(forecast_revenue),
@@ -222,9 +274,35 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount)
last_calculated_at = NOW() last_calculated_at = NOW()
`); `);
return Math.floor(totalProducts * 1.0); // Clean up temporary tables
await connection.query(`
DROP TEMPORARY TABLE IF EXISTS temp_forecast_dates;
DROP TEMPORARY TABLE IF EXISTS temp_daily_sales;
DROP TEMPORARY TABLE IF EXISTS temp_product_stats;
DROP TEMPORARY TABLE IF EXISTS temp_category_sales;
DROP TEMPORARY TABLE IF EXISTS temp_category_stats;
`);
processedCount = Math.floor(totalProducts * 1.0);
outputProgress({
status: 'running',
operation: 'Category forecasts calculated and temporary tables cleaned up',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
} catch (error) {
logError(error, 'Error calculating sales forecasts');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }

View File

@@ -1,12 +1,38 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateTimeAggregates(startTime, totalProducts, processedCount) { async function calculateTimeAggregates(startTime, totalProducts, processedCount, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Time aggregates calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
outputProgress({
status: 'running',
operation: 'Starting time aggregates calculation',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
// Initial insert of time-based aggregates // Initial insert of time-based aggregates
await connection.query(` await connection.query(`
INSERT INTO product_time_aggregates ( INSERT INTO product_time_aggregates (
product_id, pid,
year, year,
month, month,
total_quantity_sold, total_quantity_sold,
@@ -20,7 +46,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
) )
WITH sales_data AS ( WITH sales_data AS (
SELECT SELECT
o.product_id, o.pid,
YEAR(o.date) as year, YEAR(o.date) as year,
MONTH(o.date) as month, MONTH(o.date) as month,
SUM(o.quantity) as total_quantity_sold, SUM(o.quantity) as total_quantity_sold,
@@ -35,23 +61,23 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100 SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100
END as profit_margin END as profit_margin
FROM orders o FROM orders o
JOIN products p ON o.product_id = p.product_id JOIN products p ON o.pid = p.pid
WHERE o.canceled = 0 WHERE o.canceled = 0
GROUP BY o.product_id, YEAR(o.date), MONTH(o.date) GROUP BY o.pid, YEAR(o.date), MONTH(o.date)
), ),
purchase_data AS ( purchase_data AS (
SELECT SELECT
product_id, pid,
YEAR(date) as year, YEAR(date) as year,
MONTH(date) as month, MONTH(date) as month,
SUM(received) as stock_received, SUM(received) as stock_received,
SUM(ordered) as stock_ordered SUM(ordered) as stock_ordered
FROM purchase_orders FROM purchase_orders
WHERE status = 'closed' WHERE status = 50
GROUP BY product_id, YEAR(date), MONTH(date) GROUP BY pid, YEAR(date), MONTH(date)
) )
SELECT SELECT
s.product_id, s.pid,
s.year, s.year,
s.month, s.month,
s.total_quantity_sold, s.total_quantity_sold,
@@ -64,12 +90,12 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
s.profit_margin s.profit_margin
FROM sales_data s FROM sales_data s
LEFT JOIN purchase_data p LEFT JOIN purchase_data p
ON s.product_id = p.product_id ON s.pid = p.pid
AND s.year = p.year AND s.year = p.year
AND s.month = p.month AND s.month = p.month
UNION UNION
SELECT SELECT
p.product_id, p.pid,
p.year, p.year,
p.month, p.month,
0 as total_quantity_sold, 0 as total_quantity_sold,
@@ -82,10 +108,10 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
0 as profit_margin 0 as profit_margin
FROM purchase_data p FROM purchase_data p
LEFT JOIN sales_data s LEFT JOIN sales_data s
ON p.product_id = s.product_id ON p.pid = s.pid
AND p.year = s.year AND p.year = s.year
AND p.month = s.month AND p.month = s.month
WHERE s.product_id IS NULL WHERE s.pid IS NULL
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
total_quantity_sold = VALUES(total_quantity_sold), total_quantity_sold = VALUES(total_quantity_sold),
total_revenue = VALUES(total_revenue), total_revenue = VALUES(total_revenue),
@@ -97,22 +123,36 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
profit_margin = VALUES(profit_margin) profit_margin = VALUES(profit_margin)
`); `);
processedCount = Math.floor(totalProducts * 0.60);
outputProgress({
status: 'running',
operation: 'Base time aggregates calculated, updating financial metrics',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Update with financial metrics // Update with financial metrics
await connection.query(` await connection.query(`
UPDATE product_time_aggregates pta UPDATE product_time_aggregates pta
JOIN ( JOIN (
SELECT SELECT
p.product_id, p.pid,
YEAR(o.date) as year, YEAR(o.date) as year,
MONTH(o.date) as month, MONTH(o.date) as month,
p.cost_price * p.stock_quantity as inventory_value, p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit, SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as days_in_period COUNT(DISTINCT DATE(o.date)) as days_in_period
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false WHERE o.canceled = false
GROUP BY p.product_id, YEAR(o.date), MONTH(o.date) GROUP BY p.pid, YEAR(o.date), MONTH(o.date)
) fin ON pta.product_id = fin.product_id ) fin ON pta.pid = fin.pid
AND pta.year = fin.year AND pta.year = fin.year
AND pta.month = fin.month AND pta.month = fin.month
SET SET
@@ -124,9 +164,26 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount)
END END
`); `);
return Math.floor(totalProducts * 0.65); processedCount = Math.floor(totalProducts * 0.65);
outputProgress({
status: 'running',
operation: 'Financial metrics updated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
} catch (error) {
logError(error, 'Error calculating time aggregates');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }

View File

@@ -2,8 +2,15 @@ const fs = require('fs');
const path = require('path'); const path = require('path');
// Helper function to format elapsed time // Helper function to format elapsed time
function formatElapsedTime(startTime) { function formatElapsedTime(elapsed) {
const elapsed = Date.now() - startTime; // If elapsed is a timestamp, convert to elapsed milliseconds
if (elapsed instanceof Date || elapsed > 1000000000000) {
elapsed = Date.now() - elapsed;
} else {
// If elapsed is in seconds, convert to milliseconds
elapsed = elapsed * 1000;
}
const seconds = Math.floor(elapsed / 1000); const seconds = Math.floor(elapsed / 1000);
const minutes = Math.floor(seconds / 60); const minutes = Math.floor(seconds / 60);
const hours = Math.floor(minutes / 60); const hours = Math.floor(minutes / 60);

View File

@@ -1,174 +1,173 @@
const { outputProgress } = require('./utils/progress'); const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate, logError } = require('./utils/progress');
const { getConnection } = require('./utils/db'); const { getConnection } = require('./utils/db');
async function calculateVendorMetrics(startTime, totalProducts, processedCount) { async function calculateVendorMetrics(startTime, totalProducts, processedCount, isCancelled = false) {
const connection = await getConnection(); const connection = await getConnection();
try { try {
if (isCancelled) {
outputProgress({
status: 'cancelled',
operation: 'Vendor metrics calculation cancelled',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: null,
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
return processedCount;
}
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation: 'Calculating vendor metrics', operation: 'Starting vendor metrics calculation',
current: Math.floor(totalProducts * 0.7), current: processedCount,
total: totalProducts, total: totalProducts,
elapsed: formatElapsedTime(startTime), elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, Math.floor(totalProducts * 0.7), totalProducts), remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, Math.floor(totalProducts * 0.7)), rate: calculateRate(startTime, processedCount),
percentage: '70' percentage: ((processedCount / totalProducts) * 100).toFixed(1)
}); });
// First, ensure all vendors exist in vendor_details // First ensure all vendors exist in vendor_details
await connection.query(` await connection.query(`
INSERT IGNORE INTO vendor_details (vendor, status) INSERT IGNORE INTO vendor_details (vendor, status, created_at, updated_at)
SELECT DISTINCT vendor, 'active' as status SELECT DISTINCT
vendor,
'active' as status,
NOW() as created_at,
NOW() as updated_at
FROM products FROM products
WHERE vendor IS NOT NULL WHERE vendor IS NOT NULL
AND vendor NOT IN (SELECT vendor FROM vendor_details)
`); `);
// Calculate vendor performance metrics processedCount = Math.floor(totalProducts * 0.8);
outputProgress({
status: 'running',
operation: 'Vendor details updated, calculating metrics',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1)
});
if (isCancelled) return processedCount;
// Now calculate vendor metrics
await connection.query(` await connection.query(`
INSERT INTO vendor_metrics ( INSERT INTO vendor_metrics (
vendor, vendor,
total_revenue,
total_orders,
total_late_orders,
avg_lead_time_days, avg_lead_time_days,
on_time_delivery_rate, on_time_delivery_rate,
order_fill_rate, order_fill_rate,
total_orders,
total_late_orders,
total_purchase_value,
avg_order_value, avg_order_value,
active_products, active_products,
total_products, total_products,
total_revenue, status,
avg_margin_percent, last_calculated_at
status
) )
WITH vendor_orders AS ( WITH vendor_sales AS (
SELECT SELECT
po.vendor, p.vendor,
AVG(DATEDIFF(po.received_date, po.date)) as avg_lead_time_days, SUM(o.quantity * o.price) as total_revenue,
COUNT(*) as total_orders, COUNT(DISTINCT o.id) as total_orders,
COUNT(CASE WHEN po.received_date > po.expected_date THEN 1 END) as total_late_orders, COUNT(DISTINCT p.pid) as active_products
SUM(po.cost_price * po.ordered) as total_purchase_value, FROM products p
AVG(po.cost_price * po.ordered) as avg_order_value, JOIN orders o ON p.pid = o.pid
CASE WHERE o.canceled = false
WHEN COUNT(*) > 0 THEN AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
(COUNT(CASE WHEN po.received = po.ordered THEN 1 END) * 100.0) / COUNT(*) GROUP BY p.vendor
ELSE 0 ),
END as order_fill_rate vendor_po AS (
FROM purchase_orders po SELECT
WHERE po.status = 'closed' p.vendor,
GROUP BY po.vendor COUNT(DISTINCT CASE WHEN po.receiving_status = 40 THEN po.id END) as received_orders,
COUNT(DISTINCT po.id) as total_orders,
AVG(CASE
WHEN po.receiving_status = 40
THEN DATEDIFF(po.received_date, po.date)
END) as avg_lead_time_days
FROM products p
JOIN purchase_orders po ON p.pid = po.pid
WHERE po.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY p.vendor
), ),
vendor_products AS ( vendor_products AS (
SELECT SELECT
p.vendor, vendor,
COUNT(DISTINCT p.product_id) as total_products, COUNT(DISTINCT pid) as total_products
COUNT(DISTINCT CASE WHEN p.visible = true THEN p.product_id END) as active_products, FROM products
SUM(o.price * o.quantity) as total_revenue, GROUP BY vendor
CASE
WHEN SUM(o.price * o.quantity) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin_percent
FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id AND o.canceled = false
GROUP BY p.vendor
) )
SELECT SELECT
vd.vendor, vs.vendor,
COALESCE(vo.avg_lead_time_days, 0) as avg_lead_time_days, COALESCE(vs.total_revenue, 0) as total_revenue,
COALESCE(vp.total_orders, 0) as total_orders,
COALESCE(vp.total_orders - vp.received_orders, 0) as total_late_orders,
COALESCE(vp.avg_lead_time_days, 0) as avg_lead_time_days,
CASE CASE
WHEN COALESCE(vo.total_orders, 0) > 0 THEN WHEN vp.total_orders > 0
((COALESCE(vo.total_orders, 0) - COALESCE(vo.total_late_orders, 0)) * 100.0) / COALESCE(vo.total_orders, 1) THEN (vp.received_orders / vp.total_orders) * 100
ELSE 0 ELSE 0
END as on_time_delivery_rate, END as on_time_delivery_rate,
COALESCE(vo.order_fill_rate, 0) as order_fill_rate, CASE
COALESCE(vo.total_orders, 0) as total_orders, WHEN vp.total_orders > 0
COALESCE(vo.total_late_orders, 0) as total_late_orders, THEN (vp.received_orders / vp.total_orders) * 100
COALESCE(vo.total_purchase_value, 0) as total_purchase_value, ELSE 0
COALESCE(vo.avg_order_value, 0) as avg_order_value, END as order_fill_rate,
COALESCE(vp.active_products, 0) as active_products, CASE
COALESCE(vp.total_products, 0) as total_products, WHEN vs.total_orders > 0
COALESCE(vp.total_revenue, 0) as total_revenue, THEN vs.total_revenue / vs.total_orders
COALESCE(vp.avg_margin_percent, 0) as avg_margin_percent, ELSE 0
vd.status END as avg_order_value,
FROM vendor_details vd COALESCE(vs.active_products, 0) as active_products,
LEFT JOIN vendor_orders vo ON vd.vendor = vo.vendor COALESCE(vpr.total_products, 0) as total_products,
LEFT JOIN vendor_products vp ON vd.vendor = vp.vendor 'active' as status,
NOW() as last_calculated_at
FROM vendor_sales vs
LEFT JOIN vendor_po vp ON vs.vendor = vp.vendor
LEFT JOIN vendor_products vpr ON vs.vendor = vpr.vendor
WHERE vs.vendor IS NOT NULL
ON DUPLICATE KEY UPDATE ON DUPLICATE KEY UPDATE
total_revenue = VALUES(total_revenue),
total_orders = VALUES(total_orders),
total_late_orders = VALUES(total_late_orders),
avg_lead_time_days = VALUES(avg_lead_time_days), avg_lead_time_days = VALUES(avg_lead_time_days),
on_time_delivery_rate = VALUES(on_time_delivery_rate), on_time_delivery_rate = VALUES(on_time_delivery_rate),
order_fill_rate = VALUES(order_fill_rate), order_fill_rate = VALUES(order_fill_rate),
total_orders = VALUES(total_orders),
total_late_orders = VALUES(total_late_orders),
total_purchase_value = VALUES(total_purchase_value),
avg_order_value = VALUES(avg_order_value), avg_order_value = VALUES(avg_order_value),
active_products = VALUES(active_products), active_products = VALUES(active_products),
total_products = VALUES(total_products), total_products = VALUES(total_products),
total_revenue = VALUES(total_revenue),
avg_margin_percent = VALUES(avg_margin_percent),
status = VALUES(status), status = VALUES(status),
last_calculated_at = CURRENT_TIMESTAMP last_calculated_at = VALUES(last_calculated_at)
`); `);
// Calculate vendor time-based metrics processedCount = Math.floor(totalProducts * 0.9);
await connection.query(` outputProgress({
INSERT INTO vendor_time_metrics ( status: 'running',
vendor, operation: 'Vendor metrics calculated',
year, current: processedCount,
month, total: totalProducts,
total_orders, elapsed: formatElapsedTime(startTime),
late_orders, remaining: estimateRemaining(startTime, processedCount, totalProducts),
avg_lead_time_days, rate: calculateRate(startTime, processedCount),
total_purchase_value, percentage: ((processedCount / totalProducts) * 100).toFixed(1)
total_revenue, });
avg_margin_percent
)
WITH vendor_time_data AS (
SELECT
vd.vendor,
YEAR(po.date) as year,
MONTH(po.date) as month,
COUNT(DISTINCT po.po_id) as total_orders,
COUNT(DISTINCT CASE WHEN po.received_date > po.expected_date THEN po.po_id END) as late_orders,
AVG(DATEDIFF(po.received_date, po.date)) as avg_lead_time_days,
SUM(po.cost_price * po.ordered) as total_purchase_value,
SUM(o.price * o.quantity) as total_revenue,
CASE
WHEN SUM(o.price * o.quantity) > 0 THEN
(SUM((o.price - p.cost_price) * o.quantity) * 100.0) / SUM(o.price * o.quantity)
ELSE 0
END as avg_margin_percent
FROM vendor_details vd
LEFT JOIN products p ON vd.vendor = p.vendor
LEFT JOIN purchase_orders po ON p.product_id = po.product_id
LEFT JOIN orders o ON p.product_id = o.product_id AND o.canceled = false
WHERE po.date >= DATE_SUB(CURRENT_DATE, INTERVAL 12 MONTH)
GROUP BY vd.vendor, YEAR(po.date), MONTH(po.date)
)
SELECT
vendor,
year,
month,
COALESCE(total_orders, 0) as total_orders,
COALESCE(late_orders, 0) as late_orders,
COALESCE(avg_lead_time_days, 0) as avg_lead_time_days,
COALESCE(total_purchase_value, 0) as total_purchase_value,
COALESCE(total_revenue, 0) as total_revenue,
COALESCE(avg_margin_percent, 0) as avg_margin_percent
FROM vendor_time_data
ON DUPLICATE KEY UPDATE
total_orders = VALUES(total_orders),
late_orders = VALUES(late_orders),
avg_lead_time_days = VALUES(avg_lead_time_days),
total_purchase_value = VALUES(total_purchase_value),
total_revenue = VALUES(total_revenue),
avg_margin_percent = VALUES(avg_margin_percent)
`);
return Math.floor(totalProducts * 0.75); return processedCount;
} catch (error) {
logError(error, 'Error calculating vendor metrics');
throw error;
} finally { } finally {
connection.release(); if (connection) {
connection.release();
}
} }
} }
module.exports = calculateVendorMetrics; module.exports = calculateVendorMetrics;

View File

@@ -3,6 +3,7 @@ const path = require('path');
const csv = require('csv-parse'); const csv = require('csv-parse');
const mysql = require('mysql2/promise'); const mysql = require('mysql2/promise');
const dotenv = require('dotenv'); const dotenv = require('dotenv');
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
// Get test limits from environment variables // Get test limits from environment variables
const PRODUCTS_TEST_LIMIT = parseInt(process.env.PRODUCTS_TEST_LIMIT || '0'); const PRODUCTS_TEST_LIMIT = parseInt(process.env.PRODUCTS_TEST_LIMIT || '0');
@@ -106,20 +107,19 @@ async function countRows(filePath) {
} }
// Helper function to update progress with time estimate // Helper function to update progress with time estimate
function updateProgress(current, total, operation, startTime) { function updateProgress(current, total, operation, startTime, added = 0, updated = 0, skipped = 0) {
const elapsed = (Date.now() - startTime) / 1000;
const rate = current / elapsed; // rows per second
const remaining = (total - current) / rate;
outputProgress({ outputProgress({
status: 'running', status: 'running',
operation, operation,
current, current,
total, total,
rate, rate: calculateRate(startTime, current),
elapsed: formatDuration(elapsed), elapsed: formatElapsedTime(startTime),
remaining: formatDuration(remaining), remaining: estimateRemaining(startTime, current, total),
percentage: ((current / total) * 100).toFixed(1) percentage: ((current / total) * 100).toFixed(1),
added,
updated,
skipped
}); });
} }
@@ -474,7 +474,7 @@ async function importProducts(pool, filePath) {
// Update progress every 100ms to avoid console flooding // Update progress every 100ms to avoid console flooding
const now = Date.now(); const now = Date.now();
if (now - lastUpdate > 100) { if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Products import', startTime); updateProgress(rowCount, totalRows, 'Products import', startTime, added, updated, 0);
lastUpdate = now; lastUpdate = now;
} }
@@ -678,7 +678,7 @@ async function importOrders(pool, filePath) {
// Update progress every 100ms // Update progress every 100ms
const now = Date.now(); const now = Date.now();
if (now - lastUpdate > 100) { if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Orders import', startTime); updateProgress(rowCount, totalRows, 'Orders import', startTime, added, updated, skipped);
lastUpdate = now; lastUpdate = now;
} }
@@ -845,7 +845,7 @@ async function importPurchaseOrders(pool, filePath) {
// Update progress every 100ms // Update progress every 100ms
const now = Date.now(); const now = Date.now();
if (now - lastUpdate > 100) { if (now - lastUpdate > 100) {
updateProgress(rowCount, totalRows, 'Purchase orders import', startTime); updateProgress(rowCount, totalRows, 'Purchase orders import', startTime, added, updated, skipped);
lastUpdate = now; lastUpdate = now;
} }

View File

@@ -0,0 +1,180 @@
const path = require('path');
const fs = require('fs');
const axios = require('axios');
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
// Change working directory to script directory
process.chdir(path.dirname(__filename));
require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });
// Feeds to mirror locally: local filename -> source URL. URLs come from the
// environment (.env) so production endpoints are not hard-coded here.
const FILES = [
    {
        name: '39f2x83-products.csv',
        url: process.env.PRODUCTS_CSV_URL
    },
    {
        name: '39f2x83-orders.csv',
        url: process.env.ORDERS_CSV_URL
    },
    {
        name: '39f2x83-purchase_orders.csv',
        url: process.env.PURCHASE_ORDERS_CSV_URL
    }
];
// Cooperative cancellation flag, flipped by cancelUpdate() and checked
// between (and during) downloads.
let isCancelled = false;
// Request cancellation of any in-flight CSV update and emit a final
// 'cancelled' progress event so consumers stop waiting.
function cancelUpdate() {
    isCancelled = true;
    const payload = {
        status: 'cancelled',
        operation: 'CSV update cancelled',
        current: 0,
        total: FILES.length,
        elapsed: null,
        remaining: null,
        rate: 0
    };
    outputProgress(payload);
}
// Download one CSV feed into ../csv/<file.name>, emitting throttled JSON
// progress events while streaming.
//
// file      - { name, url } entry from FILES
// index     - position of this file within FILES (drives overall progress)
// startTime - Date.now() timestamp when the whole update began
//
// Resolves once the file is fully written; resolves early (undefined) when
// cancellation was requested before the download started; rejects on HTTP,
// network, or filesystem errors after removing any partial file.
async function downloadFile(file, index, startTime) {
    if (isCancelled) return;

    const csvDir = path.join(__dirname, '../csv');
    if (!fs.existsSync(csvDir)) {
        fs.mkdirSync(csvDir, { recursive: true });
    }

    const filePath = path.join(csvDir, file.name);
    const writer = fs.createWriteStream(filePath);

    try {
        const response = await axios({
            url: file.url,
            method: 'GET',
            responseType: 'stream'
        });

        // content-length may be missing (chunked transfer); coerce so we can
        // skip percentage math instead of emitting NaN.
        const totalLength = Number(response.headers['content-length']) || 0;
        let downloadedLength = 0;
        let lastProgressUpdate = Date.now();
        const PROGRESS_INTERVAL = 1000; // Update progress every second

        response.data.on('data', (chunk) => {
            if (isCancelled) {
                // Stop writing; 'finish' fires after end() and resolves below.
                writer.end();
                return;
            }
            downloadedLength += chunk.length;
            // Update progress based on time interval
            const now = Date.now();
            if (now - lastProgressUpdate >= PROGRESS_INTERVAL && totalLength > 0) {
                const fileFraction = downloadedLength / totalLength;
                const progress = fileFraction * 100;
                outputProgress({
                    status: 'running',
                    operation: `Downloading ${file.name}`,
                    current: index + fileFraction,
                    total: FILES.length,
                    elapsed: formatElapsedTime(startTime),
                    remaining: estimateRemaining(startTime, index + fileFraction, FILES.length),
                    rate: calculateRate(startTime, index + fileFraction),
                    percentage: progress.toFixed(1),
                    file_progress: {
                        name: file.name,
                        downloaded: downloadedLength,
                        total: totalLength,
                        percentage: progress.toFixed(1)
                    }
                });
                lastProgressUpdate = now;
            }
        });

        response.data.pipe(writer);

        return await new Promise((resolve, reject) => {
            writer.on('finish', resolve);
            writer.on('error', reject);
            // Without this, a network error mid-stream leaves the writer
            // waiting forever and the promise never settles.
            response.data.on('error', reject);
        });
    } catch (error) {
        // Best-effort cleanup of the partial file; never let cleanup failure
        // (e.g. file never created) mask the original download error.
        try {
            if (fs.existsSync(filePath)) fs.unlinkSync(filePath);
        } catch (_) { /* ignore cleanup failure */ }
        throw error;
    }
}
// Download every configured CSV feed in sequence, emitting start / per-file /
// completion progress events. Rethrows the first failure after reporting it.
async function updateFiles() {
    const startTime = Date.now();

    outputProgress({
        status: 'running',
        operation: 'Starting CSV update',
        current: 0,
        total: FILES.length,
        elapsed: '0s',
        remaining: null,
        rate: 0,
        percentage: '0'
    });

    try {
        let completed = 0;
        for (const file of FILES) {
            if (isCancelled) {
                return;
            }
            await downloadFile(file, completed, startTime);
            completed += 1;
            outputProgress({
                status: 'running',
                operation: 'CSV update in progress',
                current: completed,
                total: FILES.length,
                elapsed: formatElapsedTime(startTime),
                remaining: estimateRemaining(startTime, completed, FILES.length),
                rate: calculateRate(startTime, completed),
                percentage: ((completed / FILES.length) * 100).toFixed(1)
            });
        }

        outputProgress({
            status: 'complete',
            operation: 'CSV update complete',
            current: FILES.length,
            total: FILES.length,
            elapsed: formatElapsedTime(startTime),
            remaining: '0s',
            rate: calculateRate(startTime, FILES.length),
            percentage: '100'
        });
    } catch (error) {
        outputProgress({
            status: 'error',
            operation: 'CSV update failed',
            error: error.message,
            current: 0,
            total: FILES.length,
            elapsed: formatElapsedTime(startTime),
            remaining: null,
            rate: 0
        });
        throw error;
    }
}
// When executed directly (node <script>) kick off the update immediately and
// exit non-zero on failure; when required by the API route, do nothing here.
if (require.main === module) {
    updateFiles().catch((error) => {
        console.error('Error updating CSV files:', error);
        process.exit(1);
    });
}
// Export the functions needed by the route: start the update and request a
// cooperative cancel of an in-flight one.
module.exports = {
    updateFiles,
    cancelUpdate
};

View File

@@ -40,6 +40,7 @@ const CONFIG_TABLES = [
'sales_velocity_config', 'sales_velocity_config',
'abc_classification_config', 'abc_classification_config',
'safety_stock_config', 'safety_stock_config',
'sales_seasonality',
'turnover_config' 'turnover_config'
]; ];
@@ -155,7 +156,7 @@ async function resetDatabase() {
SELECT GROUP_CONCAT(table_name) as tables SELECT GROUP_CONCAT(table_name) as tables
FROM information_schema.tables FROM information_schema.tables
WHERE table_schema = DATABASE() WHERE table_schema = DATABASE()
AND table_name != 'users' AND table_name NOT IN ('users', 'import_history')
`); `);
if (!tables[0].tables) { if (!tables[0].tables) {

View File

@@ -12,10 +12,16 @@ const dbConfig = {
}; };
function outputProgress(data) { function outputProgress(data) {
if (!data.status) {
data = {
status: 'running',
...data
};
}
console.log(JSON.stringify(data)); console.log(JSON.stringify(data));
} }
// Explicitly define all metrics-related tables // Explicitly define all metrics-related tables in dependency order
const METRICS_TABLES = [ const METRICS_TABLES = [
'brand_metrics', 'brand_metrics',
'brand_time_metrics', 'brand_time_metrics',
@@ -26,7 +32,6 @@ const METRICS_TABLES = [
'product_metrics', 'product_metrics',
'product_time_aggregates', 'product_time_aggregates',
'sales_forecasts', 'sales_forecasts',
'sales_seasonality',
'temp_purchase_metrics', 'temp_purchase_metrics',
'temp_sales_metrics', 'temp_sales_metrics',
'vendor_metrics', //before vendor_details for foreign key 'vendor_metrics', //before vendor_details for foreign key
@@ -34,56 +39,279 @@ const METRICS_TABLES = [
'vendor_details' 'vendor_details'
]; ];
// Config tables that must exist // Split SQL into individual statements
const CONFIG_TABLES = [ function splitSQLStatements(sql) {
'stock_thresholds', sql = sql.replace(/\r\n/g, '\n');
'lead_time_thresholds', let statements = [];
'sales_velocity_config', let currentStatement = '';
'abc_classification_config', let inString = false;
'safety_stock_config', let stringChar = '';
'turnover_config'
]; for (let i = 0; i < sql.length; i++) {
const char = sql[i];
// Core tables that must exist const nextChar = sql[i + 1] || '';
const REQUIRED_CORE_TABLES = [
'products', if ((char === "'" || char === '"') && sql[i - 1] !== '\\') {
'orders', if (!inString) {
'purchase_orders' inString = true;
]; stringChar = char;
} else if (char === stringChar) {
inString = false;
}
}
if (!inString && char === '-' && nextChar === '-') {
while (i < sql.length && sql[i] !== '\n') i++;
continue;
}
if (!inString && char === '/' && nextChar === '*') {
i += 2;
while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
i++;
continue;
}
if (!inString && char === ';') {
if (currentStatement.trim()) {
statements.push(currentStatement.trim());
}
currentStatement = '';
} else {
currentStatement += char;
}
}
if (currentStatement.trim()) {
statements.push(currentStatement.trim());
}
return statements;
}
async function resetMetrics() { async function resetMetrics() {
let connection; let connection;
try { try {
outputProgress({
operation: 'Starting metrics reset',
message: 'Connecting to database...'
});
connection = await mysql.createConnection(dbConfig); connection = await mysql.createConnection(dbConfig);
await connection.beginTransaction(); await connection.beginTransaction();
// Drop all metrics tables // First verify current state
for (const table of METRICS_TABLES) { const [initialTables] = await connection.query(`
console.log(`Dropping table: ${table}`); SELECT TABLE_NAME as name
FROM information_schema.tables
WHERE TABLE_SCHEMA = DATABASE()
AND TABLE_NAME IN (?)
`, [METRICS_TABLES]);
outputProgress({
operation: 'Initial state',
message: `Found ${initialTables.length} existing metrics tables: ${initialTables.map(t => t.name).join(', ')}`
});
// Disable foreign key checks at the start
await connection.query('SET FOREIGN_KEY_CHECKS = 0');
// Drop all metrics tables in reverse order to handle dependencies
outputProgress({
operation: 'Dropping metrics tables',
message: 'Removing existing metrics tables...'
});
for (const table of [...METRICS_TABLES].reverse()) {
try { try {
await connection.query(`DROP TABLE IF EXISTS ${table}`); await connection.query(`DROP TABLE IF EXISTS ${table}`);
console.log(`Successfully dropped: ${table}`);
// Verify the table was actually dropped
const [checkDrop] = await connection.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE TABLE_SCHEMA = DATABASE()
AND TABLE_NAME = ?
`, [table]);
if (checkDrop[0].count > 0) {
throw new Error(`Failed to drop table ${table} - table still exists`);
}
outputProgress({
operation: 'Table dropped',
message: `Successfully dropped table: ${table}`
});
} catch (err) { } catch (err) {
console.error(`Error dropping ${table}:`, err.message); outputProgress({
status: 'error',
operation: 'Drop table error',
message: `Error dropping table ${table}: ${err.message}`
});
throw err; throw err;
} }
} }
// Recreate all metrics tables from schema // Verify all tables were dropped
const schemaSQL = fs.readFileSync(path.resolve(__dirname, '../db/metrics-schema.sql'), 'utf8'); const [afterDrop] = await connection.query(`
await connection.query(schemaSQL); SELECT TABLE_NAME as name
console.log('All metrics tables recreated successfully'); FROM information_schema.tables
WHERE TABLE_SCHEMA = DATABASE()
AND TABLE_NAME IN (?)
`, [METRICS_TABLES]);
if (afterDrop.length > 0) {
throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.map(t => t.name).join(', ')}`);
}
// Read metrics schema
outputProgress({
operation: 'Reading schema',
message: 'Loading metrics schema file...'
});
const schemaPath = path.resolve(__dirname, '../db/metrics-schema.sql');
if (!fs.existsSync(schemaPath)) {
throw new Error(`Schema file not found at: ${schemaPath}`);
}
const schemaSQL = fs.readFileSync(schemaPath, 'utf8');
const statements = splitSQLStatements(schemaSQL);
outputProgress({
operation: 'Schema loaded',
message: `Found ${statements.length} SQL statements to execute`
});
// Execute schema statements
for (let i = 0; i < statements.length; i++) {
const stmt = statements[i];
try {
await connection.query(stmt);
// Check for warnings
const [warnings] = await connection.query('SHOW WARNINGS');
if (warnings && warnings.length > 0) {
outputProgress({
status: 'warning',
operation: 'SQL Warning',
message: {
statement: i + 1,
warnings: warnings
}
});
}
// If this is a CREATE TABLE statement, verify the table was created
if (stmt.trim().toLowerCase().startsWith('create table')) {
const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?`?(\w+)`?/i)?.[1];
if (tableName) {
const [checkCreate] = await connection.query(`
SELECT TABLE_NAME as name, CREATE_TIME as created
FROM information_schema.tables
WHERE TABLE_SCHEMA = DATABASE()
AND TABLE_NAME = ?
`, [tableName]);
if (checkCreate.length === 0) {
throw new Error(`Failed to create table ${tableName} - table does not exist after CREATE statement`);
}
outputProgress({
operation: 'Table created',
message: `Successfully created table: ${tableName} at ${checkCreate[0].created}`
});
}
}
outputProgress({
operation: 'SQL Progress',
message: {
statement: i + 1,
total: statements.length,
preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : '')
}
});
} catch (sqlError) {
outputProgress({
status: 'error',
operation: 'SQL Error',
message: {
error: sqlError.message,
sqlState: sqlError.sqlState,
errno: sqlError.errno,
statement: stmt,
statementNumber: i + 1
}
});
throw sqlError;
}
}
// Re-enable foreign key checks after all tables are created
await connection.query('SET FOREIGN_KEY_CHECKS = 1');
// Verify metrics tables were created
outputProgress({
operation: 'Verifying metrics tables',
message: 'Checking all metrics tables were created...'
});
const [metricsTablesResult] = await connection.query(`
SELECT
TABLE_NAME as name,
TABLE_ROWS as \`rows\`,
CREATE_TIME as created
FROM information_schema.tables
WHERE TABLE_SCHEMA = DATABASE()
AND TABLE_NAME IN (?)
`, [METRICS_TABLES]);
outputProgress({
operation: 'Tables found',
message: `Found ${metricsTablesResult.length} tables: ${metricsTablesResult.map(t =>
`${t.name} (created: ${t.created})`
).join(', ')}`
});
const existingMetricsTables = metricsTablesResult.map(t => t.name);
const missingMetricsTables = METRICS_TABLES.filter(t => !existingMetricsTables.includes(t));
if (missingMetricsTables.length > 0) {
// Do one final check of the actual tables
const [finalCheck] = await connection.query('SHOW TABLES');
outputProgress({
operation: 'Final table check',
message: `All database tables: ${finalCheck.map(t => Object.values(t)[0]).join(', ')}`
});
throw new Error(`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`);
}
await connection.commit(); await connection.commit();
console.log('All metrics tables reset successfully');
outputProgress({
status: 'complete',
operation: 'Reset complete',
message: 'All metrics tables have been reset successfully'
});
} catch (error) { } catch (error) {
outputProgress({
status: 'error',
operation: 'Reset failed',
message: error.message,
stack: error.stack
});
if (connection) { if (connection) {
await connection.rollback(); await connection.rollback();
// Make sure to re-enable foreign key checks even if there's an error
await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {});
} }
console.error('Error resetting metrics:', error);
throw error; throw error;
} finally { } finally {
if (connection) { if (connection) {
// One final attempt to ensure foreign key checks are enabled
await connection.query('SET FOREIGN_KEY_CHECKS = 1').catch(() => {});
await connection.end(); await connection.end();
} }
} }

View File

@@ -0,0 +1,180 @@
const readline = require('readline');
// Shared readline interface used by every prompt in this interactive runner.
const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
});
// Promise-based wrapper around rl.question so menus can use async/await.
const question = (query) => new Promise((resolve) => rl.question(query, resolve));
// Resolve a script module by name. Returns the module's export, or null
// (after logging the failure) when the module cannot be loaded.
async function loadScript(name) {
    try {
        const mod = await require(name);
        return mod;
    } catch (error) {
        console.error(`Failed to load script ${name}:`, error);
        return null;
    }
}
// Run a script in a forked child process, resolving when it exits cleanly.
//
// scriptPath - path to the script to execute (runScript passes the result of
//              require.resolve; the old `fn.toString()` call was a no-op on
//              a string and has been dropped in favor of String()).
// timeoutMs  - optional; when set, the child is killed and the promise
//              rejected if it has not exited within this many milliseconds.
//              Omitted by existing callers, preserving the historical
//              "no timeout" behavior despite the function's name.
async function runWithTimeout(scriptPath, timeoutMs) {
    return new Promise((resolve, reject) => {
        // Create a child process for the script
        const child = require('child_process').fork(String(scriptPath), [], {
            stdio: 'inherit'
        });

        let timer = null;
        if (timeoutMs) {
            timer = setTimeout(() => {
                child.kill();
                reject(new Error(`Script timed out after ${timeoutMs}ms`));
            }, timeoutMs);
        }

        child.on('exit', (code) => {
            if (timer) clearTimeout(timer);
            if (code === 0) {
                resolve();
            } else {
                reject(new Error(`Script exited with code ${code}`));
            }
        });
        child.on('error', (err) => {
            if (timer) clearTimeout(timer);
            reject(err);
        });
    });
}
// Clear the terminal ('\x1Bc' is the full terminal reset escape sequence).
function clearScreen() {
    process.stdout.write('\x1Bc');
}
// Menu definition: top-level category -> keyed entries. An entry either runs
// a script directly ({ name, path }) or opens a nested menu ({ name, submenu }).
// NOTE(review): the 'key' fields below appear unused by the visible runner
// code (runScript only forks entry.path) — presumably the named export to
// call for a partial import; confirm before removing.
const scripts = {
    'Import Scripts': {
        '1': { name: 'Full Import From Production', path: './import-from-prod' },
        '2': { name: 'Individual Import Scripts ▸', submenu: {
            '1': { name: 'Import Orders', path: './import/orders', key: 'importOrders' },
            '2': { name: 'Import Products', path: './import/products', key: 'importProducts' },
            '3': { name: 'Import Purchase Orders', path: './import/purchase-orders' },
            '4': { name: 'Import Categories', path: './import/categories' },
            'b': { name: 'Back to Main Menu' }
        }}
    },
    'Metrics': {
        '3': { name: 'Calculate All Metrics', path: './calculate-metrics' },
        '4': { name: 'Individual Metric Scripts ▸', submenu: {
            '1': { name: 'Brand Metrics', path: './metrics/brand-metrics' },
            '2': { name: 'Category Metrics', path: './metrics/category-metrics' },
            '3': { name: 'Financial Metrics', path: './metrics/financial-metrics' },
            '4': { name: 'Product Metrics', path: './metrics/product-metrics' },
            '5': { name: 'Sales Forecasts', path: './metrics/sales-forecasts' },
            '6': { name: 'Time Aggregates', path: './metrics/time-aggregates' },
            '7': { name: 'Vendor Metrics', path: './metrics/vendor-metrics' },
            'b': { name: 'Back to Main Menu' }
        }}
    },
    'Database Management': {
        '5': { name: 'Test Production Connection', path: './test-prod-connection' }
    },
    'Reset Scripts': {
        '6': { name: 'Reset Database', path: './reset-db' },
        '7': { name: 'Reset Metrics', path: './reset-metrics' }
    }
};
// Most recently executed script entry, offered as the 'r' (repeat) option.
let lastRun = null;
// Render the menu onto a freshly cleared screen: each category with its keyed
// entries, an optional repeat-last shortcut, and the quit option.
async function displayMenu(menuItems, title = 'Inventory Management Script Runner') {
    clearScreen();
    console.log(`\n${title}\n`);

    for (const [category, items] of Object.entries(menuItems)) {
        console.log(`\n${category}:`);
        for (const [key, entry] of Object.entries(items)) {
            console.log(`${key}. ${entry.name}`);
        }
    }

    if (lastRun) {
        console.log('\nQuick Access:');
        console.log(`r. Repeat Last Script (${lastRun.name})`);
    }

    console.log('\nq. Quit\n');
}
// Prompt within a submenu until the user picks a valid entry or backs out.
// Returns the chosen entry object, or null when the user presses 'b'.
async function handleSubmenu(submenu, title) {
    for (;;) {
        await displayMenu({ "Individual Scripts": submenu }, title);
        const choice = await question('Select an option (or b to go back): ');

        if (choice.toLowerCase() === 'b') {
            return null;
        }
        const selected = submenu[choice];
        if (selected) {
            return selected;
        }
        console.log('Invalid selection. Please try again.');
        await new Promise(resolve => setTimeout(resolve, 1000));
    }
}
// Execute a menu entry's script in a forked child process, record it as the
// last-run entry on success, and pause until the user acknowledges.
async function runScript(script) {
    console.log(`\nRunning: ${script.name}`);
    try {
        const resolvedPath = require.resolve(script.path);
        await runWithTimeout(resolvedPath);
        console.log('\nScript completed successfully');
        lastRun = script;
    } catch (error) {
        console.error('\nError running script:', error);
    }
    await question('\nPress Enter to continue...');
}
// Interactive loop: display the menu, dispatch the selection (quit, repeat,
// submenu, or direct script), and repeat until the user quits.
async function main() {
    let running = true;
    while (running) {
        await displayMenu(scripts);
        const choice = await question('Select an option: ');
        const lowered = choice.toLowerCase();

        if (lowered === 'q') {
            running = false;
            continue;
        }
        if (lowered === 'r' && lastRun) {
            await runScript(lastRun);
            continue;
        }

        // Search every category for an entry under the pressed key.
        let selectedScript = null;
        for (const category of Object.values(scripts)) {
            const entry = category[choice];
            if (entry) {
                selectedScript = entry;
                break;
            }
        }

        if (!selectedScript) {
            console.log('Invalid selection. Please try again.');
            await new Promise(resolve => setTimeout(resolve, 1000));
            continue;
        }

        if (selectedScript.submenu) {
            const submenuChoice = await handleSubmenu(
                selectedScript.submenu,
                selectedScript.name
            );
            if (submenuChoice && submenuChoice.path) {
                await runScript(submenuChoice);
            }
        } else if (selectedScript.path) {
            await runScript(selectedScript);
        }
    }

    rl.close();
    process.exit(0);
}
// Entry point when launched directly; any uncaught failure exits non-zero.
if (require.main === module) {
    main().catch(error => {
        console.error('Fatal error:', error);
        process.exit(1);
    });
}

View File

@@ -1,167 +0,0 @@
const fs = require('fs');
const path = require('path');
const https = require('https');
// Configuration
// Feed files to mirror locally: each entry maps a local filename to the
// production feed URL it is downloaded from. (Older variant of the updater;
// URLs are hard-coded rather than read from the environment.)
const FILES = [
    {
        name: '39f2x83-products.csv',
        url: 'https://feeds.acherryontop.com/39f2x83-products.csv'
    },
    {
        name: '39f2x83-orders.csv',
        url: 'https://feeds.acherryontop.com/39f2x83-orders.csv'
    },
    {
        name: '39f2x83-purchase_orders.csv',
        url: 'https://feeds.acherryontop.com/39f2x83-purchase_orders.csv'
    }
];
// Local destination directory for the downloaded feeds (../csv).
const CSV_DIR = path.join(__dirname, '..', 'csv');
// Ensure CSV directory exists
if (!fs.existsSync(CSV_DIR)) {
    fs.mkdirSync(CSV_DIR, { recursive: true });
}
// Download `url` to `filePath` over HTTPS, streaming throttled JSON progress
// lines to stdout. Resolves when the file is fully written; rejects (after
// removing any partial file) on HTTP status, network, or stream errors.
// NOTE(review): https.get does not follow redirects; the feed URLs are
// assumed to answer 200 directly — confirm if the host ever starts
// redirecting.
function downloadFile(url, filePath) {
    return new Promise((resolve, reject) => {
        const file = fs.createWriteStream(filePath);
        https.get(url, response => {
            if (response.statusCode !== 200) {
                // Close the write stream and discard the empty placeholder so
                // a bad status does not leak a zero-byte CSV or an open fd.
                file.close();
                fs.unlink(filePath, () => {});
                response.resume(); // drain so the socket is released
                reject(new Error(`Failed to download: ${response.statusCode} ${response.statusMessage}`));
                return;
            }
            const totalSize = parseInt(response.headers['content-length'], 10);
            let downloadedSize = 0;
            let lastProgressUpdate = Date.now();
            const startTime = Date.now();
            response.on('data', chunk => {
                downloadedSize += chunk.length;
                const now = Date.now();
                // Update progress at most every 100ms to avoid console
                // flooding; skip entirely when content-length is missing so
                // we never print NaN totals/percentages.
                if (now - lastProgressUpdate > 100 && Number.isFinite(totalSize)) {
                    const elapsed = (now - startTime) / 1000;
                    const rate = downloadedSize / elapsed;
                    const remaining = (totalSize - downloadedSize) / rate;
                    console.log(JSON.stringify({
                        status: 'running',
                        operation: `Downloading ${path.basename(filePath)}`,
                        current: downloadedSize,
                        total: totalSize,
                        rate: (rate / 1024 / 1024).toFixed(2), // MB/s
                        elapsed: formatDuration(elapsed),
                        remaining: formatDuration(remaining),
                        percentage: ((downloadedSize / totalSize) * 100).toFixed(1)
                    }));
                    lastProgressUpdate = now;
                }
            });
            response.pipe(file);
            // A network failure mid-stream previously left the promise
            // pending forever; clean up and reject instead.
            response.on('error', error => {
                fs.unlink(filePath, () => {});
                reject(error);
            });
            file.on('finish', () => {
                console.log(JSON.stringify({
                    status: 'running',
                    operation: `Completed ${path.basename(filePath)}`,
                    current: totalSize,
                    total: totalSize,
                    percentage: '100'
                }));
                file.close();
                resolve();
            });
        }).on('error', error => {
            fs.unlink(filePath, () => {}); // Delete the file if download failed
            reject(error);
        });
        file.on('error', error => {
            fs.unlink(filePath, () => {}); // Delete the file if there was an error
            reject(error);
        });
    });
}
// Format a duration in seconds as "Ns" or "Nm Ns" for progress output.
// Rounds once up front so 119.7s renders as "2m 0s" — the old per-part
// rounding could produce the impossible "1m 60s".
function formatDuration(seconds) {
    const totalSeconds = Math.round(seconds);
    if (totalSeconds < 60) return `${totalSeconds}s`;
    const minutes = Math.floor(totalSeconds / 60);
    const secs = totalSeconds % 60;
    return `${minutes}m ${secs}s`;
}
// Refresh every configured feed in order: delete any stale local copy,
// download the replacement, and emit JSON progress lines throughout.
// Aborts (rethrows) on the first failure.
async function updateFiles() {
    console.log(JSON.stringify({
        status: 'running',
        operation: 'Starting CSV file updates',
        total: FILES.length,
        current: 0
    }));

    let index = 0;
    for (const file of FILES) {
        const filePath = path.join(CSV_DIR, file.name);
        const startPct = ((index / FILES.length) * 100).toFixed(1);
        try {
            // Delete existing file if it exists
            if (fs.existsSync(filePath)) {
                console.log(JSON.stringify({
                    status: 'running',
                    operation: `Removing existing file: ${file.name}`,
                    current: index,
                    total: FILES.length,
                    percentage: startPct
                }));
                fs.unlinkSync(filePath);
            }
            // Download new file
            console.log(JSON.stringify({
                status: 'running',
                operation: `Starting download: ${file.name}`,
                current: index,
                total: FILES.length,
                percentage: startPct
            }));
            await downloadFile(file.url, filePath);
            console.log(JSON.stringify({
                status: 'running',
                operation: `Successfully updated ${file.name}`,
                current: index + 1,
                total: FILES.length,
                percentage: (((index + 1) / FILES.length) * 100).toFixed(1)
            }));
        } catch (error) {
            console.error(JSON.stringify({
                status: 'error',
                operation: `Error updating ${file.name}`,
                error: error.message
            }));
            throw error;
        }
        index += 1;
    }

    console.log(JSON.stringify({
        status: 'complete',
        operation: 'CSV file update complete',
        current: FILES.length,
        total: FILES.length,
        percentage: '100'
    }));
}
// Run the update
// Kicks off immediately on load (this script has no main-module guard);
// failures surface as a JSON error line plus a non-zero exit code so cron or
// a calling route can detect them.
updateFiles().catch(error => {
    console.error(JSON.stringify({
        error: `Update failed: ${error.message}`
    }));
    process.exit(1);
});

View File

@@ -36,7 +36,7 @@ router.get('/stats', async (req, res) => {
0 0
) as averageOrderValue ) as averageOrderValue
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
`); `);
@@ -62,22 +62,43 @@ router.get('/profit', async (req, res) => {
try { try {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
// Get profit margins by category // Get profit margins by category with full path
const [byCategory] = await pool.query(` const [byCategory] = await pool.query(`
WITH RECURSIVE category_path AS (
SELECT
c.cat_id,
c.name,
c.parent_id,
CAST(c.name AS CHAR(1000)) as path
FROM categories c
WHERE c.parent_id IS NULL
UNION ALL
SELECT
c.cat_id,
c.name,
c.parent_id,
CONCAT(cp.path, ' > ', c.name)
FROM categories c
JOIN category_path cp ON c.parent_id = cp.cat_id
)
SELECT SELECT
c.name as category, c.name as category,
cp.path as categoryPath,
ROUND( ROUND(
(SUM(o.price * o.quantity - p.cost_price * o.quantity) / (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
) as profitMargin, ) as profitMargin,
SUM(o.price * o.quantity) as revenue, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
SUM(p.cost_price * o.quantity) as cost CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.product_id = pc.product_id JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.category_id = c.id JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY c.name GROUP BY c.name, cp.path
ORDER BY profitMargin DESC ORDER BY profitMargin DESC
LIMIT 10 LIMIT 10
`); `);
@@ -90,10 +111,10 @@ router.get('/profit', async (req, res) => {
(SUM(o.price * o.quantity - p.cost_price * o.quantity) / (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
) as profitMargin, ) as profitMargin,
SUM(o.price * o.quantity) as revenue, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
SUM(p.cost_price * o.quantity) as cost CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
CROSS JOIN ( CROSS JOIN (
SELECT DATE_FORMAT(o.date, '%Y-%m-%d') as formatted_date SELECT DATE_FORMAT(o.date, '%Y-%m-%d') as formatted_date
FROM orders o FROM orders o
@@ -106,20 +127,44 @@ router.get('/profit', async (req, res) => {
ORDER BY formatted_date ORDER BY formatted_date
`); `);
// Get top performing products // Get top performing products with category paths
const [topProducts] = await pool.query(` const [topProducts] = await pool.query(`
WITH RECURSIVE category_path AS (
SELECT
c.cat_id,
c.name,
c.parent_id,
CAST(c.name AS CHAR(1000)) as path
FROM categories c
WHERE c.parent_id IS NULL
UNION ALL
SELECT
c.cat_id,
c.name,
c.parent_id,
CONCAT(cp.path, ' > ', c.name)
FROM categories c
JOIN category_path cp ON c.parent_id = cp.cat_id
)
SELECT SELECT
p.title as product, p.title as product,
c.name as category,
cp.path as categoryPath,
ROUND( ROUND(
(SUM(o.price * o.quantity - p.cost_price * o.quantity) / (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
) as profitMargin, ) as profitMargin,
SUM(o.price * o.quantity) as revenue, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
SUM(p.cost_price * o.quantity) as cost CAST(SUM(p.cost_price * o.quantity) AS DECIMAL(15,3)) as cost
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY p.product_id, p.title GROUP BY p.pid, p.title, c.name, cp.path
HAVING revenue > 0 HAVING revenue > 0
ORDER BY profitMargin DESC ORDER BY profitMargin DESC
LIMIT 10 LIMIT 10
@@ -144,7 +189,7 @@ router.get('/vendors', async (req, res) => {
SELECT COUNT(DISTINCT p.vendor) as vendor_count, SELECT COUNT(DISTINCT p.vendor) as vendor_count,
COUNT(DISTINCT o.order_number) as order_count COUNT(DISTINCT o.order_number) as order_count
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE p.vendor IS NOT NULL WHERE p.vendor IS NOT NULL
`); `);
@@ -155,26 +200,26 @@ router.get('/vendors', async (req, res) => {
WITH monthly_sales AS ( WITH monthly_sales AS (
SELECT SELECT
p.vendor, p.vendor,
SUM(CASE CAST(SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity THEN o.price * o.quantity
ELSE 0 ELSE 0
END) as current_month, END) AS DECIMAL(15,3)) as current_month,
SUM(CASE CAST(SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY) WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
AND o.date < DATE_SUB(CURDATE(), INTERVAL 30 DAY) AND o.date < DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity THEN o.price * o.quantity
ELSE 0 ELSE 0
END) as previous_month END) AS DECIMAL(15,3)) as previous_month
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE p.vendor IS NOT NULL WHERE p.vendor IS NOT NULL
AND o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
GROUP BY p.vendor GROUP BY p.vendor
) )
SELECT SELECT
p.vendor, p.vendor,
SUM(o.price * o.quantity) as salesVolume, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as salesVolume,
COALESCE(ROUND( COALESCE(ROUND(
(SUM(o.price * o.quantity - p.cost_price * o.quantity) / (SUM(o.price * o.quantity - p.cost_price * o.quantity) /
NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1 NULLIF(SUM(o.price * o.quantity), 0)) * 100, 1
@@ -182,13 +227,13 @@ router.get('/vendors', async (req, res) => {
COALESCE(ROUND( COALESCE(ROUND(
SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0), 1 SUM(o.quantity) / NULLIF(AVG(p.stock_quantity), 0), 1
), 0) as stockTurnover, ), 0) as stockTurnover,
COUNT(DISTINCT p.product_id) as productCount, COUNT(DISTINCT p.pid) as productCount,
ROUND( ROUND(
((ms.current_month / NULLIF(ms.previous_month, 0)) - 1) * 100, ((ms.current_month / NULLIF(ms.previous_month, 0)) - 1) * 100,
1 1
) as growth ) as growth
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
LEFT JOIN monthly_sales ms ON p.vendor = ms.vendor LEFT JOIN monthly_sales ms ON p.vendor = ms.vendor
WHERE p.vendor IS NOT NULL WHERE p.vendor IS NOT NULL
AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
@@ -203,11 +248,11 @@ router.get('/vendors', async (req, res) => {
const [comparison] = await pool.query(` const [comparison] = await pool.query(`
SELECT SELECT
p.vendor, p.vendor,
COALESCE(ROUND(SUM(o.price * o.quantity) / NULLIF(COUNT(DISTINCT p.product_id), 0), 2), 0) as salesPerProduct, CAST(COALESCE(ROUND(SUM(o.price * o.quantity) / NULLIF(COUNT(DISTINCT p.pid), 0), 2), 0) AS DECIMAL(15,3)) as salesPerProduct,
COALESCE(ROUND(AVG((o.price - p.cost_price) / NULLIF(o.price, 0) * 100), 1), 0) as averageMargin, COALESCE(ROUND(AVG((o.price - p.cost_price) / NULLIF(o.price, 0) * 100), 1), 0) as averageMargin,
COUNT(DISTINCT p.product_id) as size COUNT(DISTINCT p.pid) as size
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) LEFT JOIN orders o ON p.pid = o.pid AND o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
WHERE p.vendor IS NOT NULL WHERE p.vendor IS NOT NULL
GROUP BY p.vendor GROUP BY p.vendor
ORDER BY salesPerProduct DESC ORDER BY salesPerProduct DESC
@@ -221,9 +266,9 @@ router.get('/vendors', async (req, res) => {
SELECT SELECT
p.vendor, p.vendor,
DATE_FORMAT(o.date, '%b %Y') as month, DATE_FORMAT(o.date, '%b %Y') as month,
COALESCE(SUM(o.price * o.quantity), 0) as sales CAST(COALESCE(SUM(o.price * o.quantity), 0) AS DECIMAL(15,3)) as sales
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE p.vendor IS NOT NULL WHERE p.vendor IS NOT NULL
AND o.date >= DATE_SUB(CURDATE(), INTERVAL 6 MONTH) AND o.date >= DATE_SUB(CURDATE(), INTERVAL 6 MONTH)
GROUP BY GROUP BY
@@ -272,9 +317,9 @@ router.get('/stock', async (req, res) => {
ROUND(AVG(p.stock_quantity), 0) as averageStock, ROUND(AVG(p.stock_quantity), 0) as averageStock,
SUM(o.quantity) as totalSales SUM(o.quantity) as totalSales
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.product_id = pc.product_id JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.category_id = c.id JOIN categories c ON pc.cat_id = c.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
GROUP BY c.name GROUP BY c.name
HAVING turnoverRate > 0 HAVING turnoverRate > 0
@@ -290,7 +335,7 @@ router.get('/stock', async (req, res) => {
SUM(CASE WHEN p.stock_quantity <= ? AND p.stock_quantity > 0 THEN 1 ELSE 0 END) as lowStock, SUM(CASE WHEN p.stock_quantity <= ? AND p.stock_quantity > 0 THEN 1 ELSE 0 END) as lowStock,
SUM(CASE WHEN p.stock_quantity = 0 THEN 1 ELSE 0 END) as outOfStock SUM(CASE WHEN p.stock_quantity = 0 THEN 1 ELSE 0 END) as outOfStock
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d') GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d')
ORDER BY date ORDER BY date
@@ -304,26 +349,14 @@ router.get('/stock', async (req, res) => {
const [criticalItems] = await pool.query(` const [criticalItems] = await pool.query(`
WITH product_thresholds AS ( WITH product_thresholds AS (
SELECT SELECT
p.product_id, p.pid,
COALESCE( COALESCE(
(SELECT reorder_days (SELECT reorder_days
FROM stock_thresholds st FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id WHERE st.vendor = p.vendor LIMIT 1),
WHERE pc.product_id = p.product_id
AND st.vendor = p.vendor LIMIT 1),
(SELECT reorder_days (SELECT reorder_days
FROM stock_thresholds st FROM stock_thresholds st
JOIN product_categories pc ON st.category_id = pc.category_id WHERE st.vendor IS NULL LIMIT 1),
WHERE pc.product_id = p.product_id
AND st.vendor IS NULL LIMIT 1),
(SELECT reorder_days
FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor = p.vendor LIMIT 1),
(SELECT reorder_days
FROM stock_thresholds st
WHERE st.category_id IS NULL
AND st.vendor IS NULL LIMIT 1),
14 14
) as reorder_days ) as reorder_days
FROM products p FROM products p
@@ -339,11 +372,11 @@ router.get('/stock', async (req, res) => {
ELSE ROUND(p.stock_quantity / NULLIF((SUM(o.quantity) / ?), 0)) ELSE ROUND(p.stock_quantity / NULLIF((SUM(o.quantity) / ?), 0))
END as daysUntilStockout END as daysUntilStockout
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_thresholds pt ON p.product_id = pt.product_id JOIN product_thresholds pt ON p.pid = pt.pid
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
AND p.managing_stock = true AND p.managing_stock = true
GROUP BY p.product_id GROUP BY p.pid
HAVING daysUntilStockout < ? AND daysUntilStockout >= 0 HAVING daysUntilStockout < ? AND daysUntilStockout >= 0
ORDER BY daysUntilStockout ORDER BY daysUntilStockout
LIMIT 10 LIMIT 10
@@ -369,14 +402,16 @@ router.get('/pricing', async (req, res) => {
// Get price points analysis // Get price points analysis
const [pricePoints] = await pool.query(` const [pricePoints] = await pool.query(`
SELECT SELECT
p.price, CAST(p.price AS DECIMAL(15,3)) as price,
SUM(o.quantity) as salesVolume, CAST(SUM(o.quantity) AS DECIMAL(15,3)) as salesVolume,
SUM(o.price * o.quantity) as revenue, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
p.categories as category c.name as category
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.cat_id = c.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY p.price, p.categories GROUP BY p.price, c.name
HAVING salesVolume > 0 HAVING salesVolume > 0
ORDER BY revenue DESC ORDER BY revenue DESC
LIMIT 50 LIMIT 50
@@ -386,8 +421,8 @@ router.get('/pricing', async (req, res) => {
const [elasticity] = await pool.query(` const [elasticity] = await pool.query(`
SELECT SELECT
DATE_FORMAT(o.date, '%Y-%m-%d') as date, DATE_FORMAT(o.date, '%Y-%m-%d') as date,
AVG(o.price) as price, CAST(AVG(o.price) AS DECIMAL(15,3)) as price,
SUM(o.quantity) as demand CAST(SUM(o.quantity) AS DECIMAL(15,3)) as demand
FROM orders o FROM orders o
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d') GROUP BY DATE_FORMAT(o.date, '%Y-%m-%d')
@@ -398,21 +433,25 @@ router.get('/pricing', async (req, res) => {
const [recommendations] = await pool.query(` const [recommendations] = await pool.query(`
SELECT SELECT
p.title as product, p.title as product,
p.price as currentPrice, CAST(p.price AS DECIMAL(15,3)) as currentPrice,
ROUND( CAST(
CASE ROUND(
WHEN AVG(o.quantity) > 10 THEN p.price * 1.1 CASE
WHEN AVG(o.quantity) < 2 THEN p.price * 0.9 WHEN AVG(o.quantity) > 10 THEN p.price * 1.1
ELSE p.price WHEN AVG(o.quantity) < 2 THEN p.price * 0.9
END, 2 ELSE p.price
END, 2
) AS DECIMAL(15,3)
) as recommendedPrice, ) as recommendedPrice,
ROUND( CAST(
SUM(o.price * o.quantity) * ROUND(
CASE SUM(o.price * o.quantity) *
WHEN AVG(o.quantity) > 10 THEN 1.15 CASE
WHEN AVG(o.quantity) < 2 THEN 0.95 WHEN AVG(o.quantity) > 10 THEN 1.15
ELSE 1 WHEN AVG(o.quantity) < 2 THEN 0.95
END, 2 ELSE 1
END, 2
) AS DECIMAL(15,3)
) as potentialRevenue, ) as potentialRevenue,
CASE CASE
WHEN AVG(o.quantity) > 10 THEN 85 WHEN AVG(o.quantity) > 10 THEN 85
@@ -420,11 +459,11 @@ router.get('/pricing', async (req, res) => {
ELSE 65 ELSE 65
END as confidence END as confidence
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY p.product_id GROUP BY p.pid, p.price
HAVING ABS(recommendedPrice - currentPrice) > 0 HAVING ABS(recommendedPrice - currentPrice) > 0
ORDER BY potentialRevenue - SUM(o.price * o.quantity) DESC ORDER BY potentialRevenue - CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) DESC
LIMIT 10 LIMIT 10
`); `);
@@ -440,11 +479,36 @@ router.get('/categories', async (req, res) => {
try { try {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
// Get category performance metrics // Common CTE for category paths
const categoryPathCTE = `
WITH RECURSIVE category_path AS (
SELECT
c.cat_id,
c.name,
c.parent_id,
CAST(c.name AS CHAR(1000)) as path
FROM categories c
WHERE c.parent_id IS NULL
UNION ALL
SELECT
c.cat_id,
c.name,
c.parent_id,
CONCAT(cp.path, ' > ', c.name)
FROM categories c
JOIN category_path cp ON c.parent_id = cp.cat_id
)
`;
// Get category performance metrics with full path
const [performance] = await pool.query(` const [performance] = await pool.query(`
WITH monthly_sales AS ( ${categoryPathCTE},
monthly_sales AS (
SELECT SELECT
c.name, c.name,
cp.path,
SUM(CASE SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity THEN o.price * o.quantity
@@ -457,62 +521,72 @@ router.get('/categories', async (req, res) => {
ELSE 0 ELSE 0
END) as previous_month END) as previous_month
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.product_id = pc.product_id JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.category_id = c.id JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
GROUP BY c.name GROUP BY c.name, cp.path
) )
SELECT SELECT
c.name as category, c.name as category,
cp.path as categoryPath,
SUM(o.price * o.quantity) as revenue, SUM(o.price * o.quantity) as revenue,
SUM(o.price * o.quantity - p.cost_price * o.quantity) as profit, SUM(o.price * o.quantity - p.cost_price * o.quantity) as profit,
ROUND( ROUND(
((ms.current_month / NULLIF(ms.previous_month, 0)) - 1) * 100, ((ms.current_month / NULLIF(ms.previous_month, 0)) - 1) * 100,
1 1
) as growth, ) as growth,
COUNT(DISTINCT p.product_id) as productCount COUNT(DISTINCT p.pid) as productCount
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.product_id = pc.product_id JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.category_id = c.id JOIN categories c ON pc.cat_id = c.cat_id
LEFT JOIN monthly_sales ms ON c.name = ms.name JOIN category_path cp ON c.cat_id = cp.cat_id
LEFT JOIN monthly_sales ms ON c.name = ms.name AND cp.path = ms.path
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
GROUP BY c.name, ms.current_month, ms.previous_month GROUP BY c.name, cp.path, ms.current_month, ms.previous_month
HAVING revenue > 0 HAVING revenue > 0
ORDER BY revenue DESC ORDER BY revenue DESC
LIMIT 10 LIMIT 10
`); `);
// Get category revenue distribution // Get category revenue distribution with full path
const [distribution] = await pool.query(` const [distribution] = await pool.query(`
${categoryPathCTE}
SELECT SELECT
c.name as category, c.name as category,
cp.path as categoryPath,
SUM(o.price * o.quantity) as value SUM(o.price * o.quantity) as value
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.product_id = pc.product_id JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.category_id = c.id JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
GROUP BY c.name GROUP BY c.name, cp.path
HAVING value > 0 HAVING value > 0
ORDER BY value DESC ORDER BY value DESC
LIMIT 6 LIMIT 6
`); `);
// Get category sales trends // Get category sales trends with full path
const [trends] = await pool.query(` const [trends] = await pool.query(`
${categoryPathCTE}
SELECT SELECT
c.name as category, c.name as category,
cp.path as categoryPath,
DATE_FORMAT(o.date, '%b %Y') as month, DATE_FORMAT(o.date, '%b %Y') as month,
SUM(o.price * o.quantity) as sales SUM(o.price * o.quantity) as sales
FROM products p FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.product_id = pc.product_id JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.category_id = c.id JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 6 MONTH) WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 6 MONTH)
GROUP BY GROUP BY
c.name, c.name,
cp.path,
DATE_FORMAT(o.date, '%b %Y'), DATE_FORMAT(o.date, '%b %Y'),
DATE_FORMAT(o.date, '%Y-%m') DATE_FORMAT(o.date, '%Y-%m')
ORDER BY ORDER BY
@@ -529,76 +603,97 @@ router.get('/categories', async (req, res) => {
// Forecast endpoint // Forecast endpoint
router.get('/forecast', async (req, res) => { router.get('/forecast', async (req, res) => {
try { try {
const { brand, startDate, endDate } = req.query; const { brand, startDate, endDate } = req.query;
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
const [results] = await pool.query(` const [results] = await pool.query(`
WITH category_metrics AS ( WITH RECURSIVE category_path AS (
SELECT SELECT
c.id as category_id, c.cat_id,
c.name as category_name, c.name,
p.brand, c.parent_id,
COUNT(DISTINCT p.product_id) as num_products, CAST(c.name AS CHAR(1000)) as path
COALESCE(ROUND(SUM(o.quantity) / DATEDIFF(?, ?), 2), 0) as avg_daily_sales, FROM categories c
COALESCE(SUM(o.quantity), 0) as total_sold, WHERE c.parent_id IS NULL
COALESCE(ROUND(SUM(o.quantity) / COUNT(DISTINCT p.product_id), 2), 0) as avgTotalSold,
COALESCE(ROUND(AVG(o.price), 2), 0) as avg_price UNION ALL
FROM categories c
JOIN product_categories pc ON c.id = pc.category_id SELECT
JOIN products p ON pc.product_id = p.product_id c.cat_id,
LEFT JOIN product_metrics pm ON p.product_id = pm.product_id c.name,
LEFT JOIN orders o ON p.product_id = o.product_id c.parent_id,
AND o.date BETWEEN ? AND ? CONCAT(cp.path, ' > ', c.name)
AND o.canceled = false FROM categories c
WHERE p.brand = ? JOIN category_path cp ON c.parent_id = cp.cat_id
AND pm.first_received_date BETWEEN ? AND ? ),
GROUP BY c.id, c.name, p.brand category_metrics AS (
), SELECT
product_metrics AS ( c.cat_id,
SELECT c.name as category_name,
p.product_id, cp.path,
p.title, p.brand,
p.sku, COUNT(DISTINCT p.pid) as num_products,
p.stock_quantity, CAST(COALESCE(ROUND(SUM(o.quantity) / DATEDIFF(?, ?), 2), 0) AS DECIMAL(15,3)) as avg_daily_sales,
pc.category_id, COALESCE(SUM(o.quantity), 0) as total_sold,
pm.first_received_date, CAST(COALESCE(ROUND(SUM(o.quantity) / COUNT(DISTINCT p.pid), 2), 0) AS DECIMAL(15,3)) as avgTotalSold,
COALESCE(SUM(o.quantity), 0) as total_sold, CAST(COALESCE(ROUND(AVG(o.price), 2), 0) AS DECIMAL(15,3)) as avg_price
COALESCE(ROUND(AVG(o.price), 2), 0) as avg_price FROM categories c
FROM products p JOIN product_categories pc ON c.cat_id = pc.cat_id
JOIN product_categories pc ON p.product_id = pc.product_id JOIN products p ON pc.pid = p.pid
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN category_path cp ON c.cat_id = cp.cat_id
LEFT JOIN orders o ON p.product_id = o.product_id LEFT JOIN product_metrics pmet ON p.pid = pmet.pid
AND o.date BETWEEN ? AND ? LEFT JOIN orders o ON p.pid = o.pid
AND o.canceled = false AND o.date BETWEEN ? AND ?
WHERE p.brand = ? AND o.canceled = false
AND pm.first_received_date BETWEEN ? AND ? WHERE p.brand = ?
GROUP BY p.product_id, p.title, p.sku, p.stock_quantity, pc.category_id, pm.first_received_date AND pmet.first_received_date BETWEEN ? AND ?
) GROUP BY c.cat_id, c.name, cp.path, p.brand
SELECT ),
cm.*, product_details AS (
JSON_ARRAYAGG( SELECT
JSON_OBJECT( p.pid,
'product_id', pm.product_id, p.title,
'title', pm.title, p.SKU,
'sku', pm.sku, p.stock_quantity,
'stock_quantity', pm.stock_quantity, pc.cat_id,
'total_sold', pm.total_sold, pmet.first_received_date,
'avg_price', pm.avg_price, COALESCE(SUM(o.quantity), 0) as total_sold,
'first_received_date', DATE_FORMAT(pm.first_received_date, '%Y-%m-%d') CAST(COALESCE(ROUND(AVG(o.price), 2), 0) AS DECIMAL(15,3)) as avg_price
) FROM products p
) as products JOIN product_categories pc ON p.pid = pc.pid
FROM category_metrics cm JOIN product_metrics pmet ON p.pid = pmet.pid
JOIN product_metrics pm ON cm.category_id = pm.category_id LEFT JOIN orders o ON p.pid = o.pid
GROUP BY cm.category_id, cm.category_name, cm.brand, cm.num_products, cm.avg_daily_sales, cm.total_sold, cm.avgTotalSold, cm.avg_price AND o.date BETWEEN ? AND ?
ORDER BY cm.total_sold DESC AND o.canceled = false
`, [startDate, endDate, startDate, endDate, brand, startDate, endDate, startDate, endDate, brand, startDate, endDate]); WHERE p.brand = ?
AND pmet.first_received_date BETWEEN ? AND ?
GROUP BY p.pid, p.title, p.SKU, p.stock_quantity, pc.cat_id, pmet.first_received_date
)
SELECT
cm.*,
JSON_ARRAYAGG(
JSON_OBJECT(
'pid', pd.pid,
'title', pd.title,
'SKU', pd.SKU,
'stock_quantity', pd.stock_quantity,
'total_sold', pd.total_sold,
'avg_price', pd.avg_price,
'first_received_date', DATE_FORMAT(pd.first_received_date, '%Y-%m-%d')
)
) as products
FROM category_metrics cm
JOIN product_details pd ON cm.cat_id = pd.cat_id
GROUP BY cm.cat_id, cm.category_name, cm.path, cm.brand, cm.num_products, cm.avg_daily_sales, cm.total_sold, cm.avgTotalSold, cm.avg_price
ORDER BY cm.total_sold DESC
`, [endDate, startDate, startDate, endDate, brand, startDate, endDate, startDate, endDate, brand, startDate, endDate]);
res.json(results); res.json(results);
} catch (error) { } catch (error) {
console.error('Error fetching forecast data:', error); console.error('Error fetching forecast data:', error);
res.status(500).json({ error: 'Failed to fetch forecast data' }); res.status(500).json({ error: 'Failed to fetch forecast data' });
} }
}); });
module.exports = router; module.exports = router;

View File

@@ -5,62 +5,90 @@ const router = express.Router();
router.get('/', async (req, res) => { router.get('/', async (req, res) => {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
try { try {
// Get parent categories for filter dropdown // Get all categories with metrics and hierarchy info
const [parentCategories] = await pool.query(`
SELECT DISTINCT c2.name as parent_name
FROM categories c1
JOIN categories c2 ON c1.parent_id = c2.id
WHERE c1.parent_id IS NOT NULL
ORDER BY c2.name
`);
// Get all categories with metrics
const [categories] = await pool.query(` const [categories] = await pool.query(`
SELECT SELECT
c.id as category_id, c.cat_id,
c.name, c.name,
c.type,
c.parent_id,
c.description, c.description,
COALESCE(p.name, '') as parent_name, c.status,
cm.product_count, p.name as parent_name,
cm.total_value, p.type as parent_type,
cm.avg_margin, COALESCE(cm.product_count, 0) as product_count,
cm.turnover_rate, COALESCE(cm.active_products, 0) as active_products,
cm.growth_rate, CAST(COALESCE(cm.total_value, 0) AS DECIMAL(15,3)) as total_value,
cm.status COALESCE(cm.avg_margin, 0) as avg_margin,
COALESCE(cm.turnover_rate, 0) as turnover_rate,
COALESCE(cm.growth_rate, 0) as growth_rate
FROM categories c FROM categories c
LEFT JOIN categories p ON c.parent_id = p.id LEFT JOIN categories p ON c.parent_id = p.cat_id
LEFT JOIN category_metrics cm ON c.id = cm.category_id LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
ORDER BY c.name ASC ORDER BY
CASE
WHEN c.type = 10 THEN 1 -- sections first
WHEN c.type = 11 THEN 2 -- categories second
WHEN c.type = 12 THEN 3 -- subcategories third
WHEN c.type = 13 THEN 4 -- subsubcategories fourth
WHEN c.type = 20 THEN 5 -- themes fifth
WHEN c.type = 21 THEN 6 -- subthemes last
ELSE 7
END,
c.name ASC
`); `);
// Get overall stats // Get overall stats
const [stats] = await pool.query(` const [stats] = await pool.query(`
SELECT SELECT
COUNT(DISTINCT c.id) as totalCategories, COUNT(DISTINCT c.cat_id) as totalCategories,
COUNT(DISTINCT CASE WHEN cm.status = 'active' THEN c.id END) as activeCategories, COUNT(DISTINCT CASE WHEN c.status = 'active' THEN c.cat_id END) as activeCategories,
COALESCE(SUM(cm.total_value), 0) as totalValue, CAST(COALESCE(SUM(cm.total_value), 0) AS DECIMAL(15,3)) as totalValue,
COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0)), 1), 0) as avgMargin, COALESCE(ROUND(AVG(NULLIF(cm.avg_margin, 0)), 1), 0) as avgMargin,
COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0)), 1), 0) as avgGrowth COALESCE(ROUND(AVG(NULLIF(cm.growth_rate, 0)), 1), 0) as avgGrowth
FROM categories c FROM categories c
LEFT JOIN category_metrics cm ON c.id = cm.category_id LEFT JOIN category_metrics cm ON c.cat_id = cm.category_id
`);
// Get type counts for filtering
const [typeCounts] = await pool.query(`
SELECT
type,
COUNT(*) as count
FROM categories
GROUP BY type
ORDER BY type
`); `);
res.json({ res.json({
categories: categories.map(cat => ({ categories: categories.map(cat => ({
...cat, cat_id: cat.cat_id,
parent_category: cat.parent_name, // Map parent_name to parent_category for frontend compatibility name: cat.name,
product_count: parseInt(cat.product_count || 0), type: cat.type,
total_value: parseFloat(cat.total_value || 0), parent_id: cat.parent_id,
avg_margin: parseFloat(cat.avg_margin || 0), parent_name: cat.parent_name,
turnover_rate: parseFloat(cat.turnover_rate || 0), parent_type: cat.parent_type,
growth_rate: parseFloat(cat.growth_rate || 0) description: cat.description,
status: cat.status,
metrics: {
product_count: parseInt(cat.product_count),
active_products: parseInt(cat.active_products),
total_value: parseFloat(cat.total_value),
avg_margin: parseFloat(cat.avg_margin),
turnover_rate: parseFloat(cat.turnover_rate),
growth_rate: parseFloat(cat.growth_rate)
}
})),
typeCounts: typeCounts.map(tc => ({
type: tc.type,
count: parseInt(tc.count)
})), })),
parentCategories: parentCategories.map(p => p.parent_name),
stats: { stats: {
...stats[0], totalCategories: parseInt(stats[0].totalCategories),
totalValue: parseFloat(stats[0].totalValue || 0), activeCategories: parseInt(stats[0].activeCategories),
avgMargin: parseFloat(stats[0].avgMargin || 0), totalValue: parseFloat(stats[0].totalValue),
avgGrowth: parseFloat(stats[0].avgGrowth || 0) avgMargin: parseFloat(stats[0].avgMargin),
avgGrowth: parseFloat(stats[0].avgGrowth)
} }
}); });
} catch (error) { } catch (error) {

View File

@@ -2,6 +2,9 @@ const express = require('express');
const router = express.Router(); const router = express.Router();
const db = require('../utils/db'); const db = require('../utils/db');
// Import status codes
const { ReceivingStatus } = require('../types/status-codes');
// Helper function to execute queries using the connection pool // Helper function to execute queries using the connection pool
async function executeQuery(sql, params = []) { async function executeQuery(sql, params = []) {
const pool = db.getPool(); const pool = db.getPool();
@@ -38,15 +41,14 @@ router.get('/stock/metrics', async (req, res) => {
const [brandValues] = await executeQuery(` const [brandValues] = await executeQuery(`
WITH brand_totals AS ( WITH brand_totals AS (
SELECT SELECT
brand, COALESCE(brand, 'Unbranded') as brand,
COUNT(DISTINCT product_id) as variant_count, COUNT(DISTINCT pid) as variant_count,
COALESCE(SUM(stock_quantity), 0) as stock_units, COALESCE(SUM(stock_quantity), 0) as stock_units,
COALESCE(SUM(stock_quantity * cost_price), 0) as stock_cost, CAST(COALESCE(SUM(stock_quantity * cost_price), 0) AS DECIMAL(15,3)) as stock_cost,
COALESCE(SUM(stock_quantity * price), 0) as stock_retail CAST(COALESCE(SUM(stock_quantity * price), 0) AS DECIMAL(15,3)) as stock_retail
FROM products FROM products
WHERE brand IS NOT NULL WHERE stock_quantity > 0
AND stock_quantity > 0 GROUP BY COALESCE(brand, 'Unbranded')
GROUP BY brand
HAVING stock_cost > 0 HAVING stock_cost > 0
), ),
other_brands AS ( other_brands AS (
@@ -54,8 +56,8 @@ router.get('/stock/metrics', async (req, res) => {
'Other' as brand, 'Other' as brand,
SUM(variant_count) as variant_count, SUM(variant_count) as variant_count,
SUM(stock_units) as stock_units, SUM(stock_units) as stock_units,
SUM(stock_cost) as stock_cost, CAST(SUM(stock_cost) AS DECIMAL(15,3)) as stock_cost,
SUM(stock_retail) as stock_retail CAST(SUM(stock_retail) AS DECIMAL(15,3)) as stock_retail
FROM brand_totals FROM brand_totals
WHERE stock_cost <= 5000 WHERE stock_cost <= 5000
), ),
@@ -101,49 +103,51 @@ router.get('/purchase/metrics', async (req, res) => {
try { try {
const [rows] = await executeQuery(` const [rows] = await executeQuery(`
SELECT SELECT
COALESCE(COUNT(DISTINCT CASE WHEN po.status = 'open' THEN po.po_id END), 0) as active_pos,
COALESCE(COUNT(DISTINCT CASE COALESCE(COUNT(DISTINCT CASE
WHEN po.status = 'open' AND po.expected_date < CURDATE() WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
THEN po.po_id
END), 0) as active_pos,
COALESCE(COUNT(DISTINCT CASE
WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
AND po.expected_date < CURDATE()
THEN po.po_id THEN po.po_id
END), 0) as overdue_pos, END), 0) as overdue_pos,
COALESCE(SUM(CASE WHEN po.status = 'open' THEN po.ordered ELSE 0 END), 0) as total_units,
COALESCE(SUM(CASE COALESCE(SUM(CASE
WHEN po.status = 'open' WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
THEN po.ordered
ELSE 0
END), 0) as total_units,
CAST(COALESCE(SUM(CASE
WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
THEN po.ordered * po.cost_price THEN po.ordered * po.cost_price
ELSE 0 ELSE 0
END), 0) as total_cost, END), 0) AS DECIMAL(15,3)) as total_cost,
COALESCE(SUM(CASE CAST(COALESCE(SUM(CASE
WHEN po.status = 'open' WHEN po.receiving_status < ${ReceivingStatus.PartialReceived}
THEN po.ordered * p.price THEN po.ordered * p.price
ELSE 0 ELSE 0
END), 0) as total_retail END), 0) AS DECIMAL(15,3)) as total_retail
FROM purchase_orders po FROM purchase_orders po
JOIN products p ON po.product_id = p.product_id JOIN products p ON po.pid = p.pid
`); `);
const poMetrics = rows[0]; const poMetrics = rows[0];
console.log('Raw poMetrics from database:', poMetrics);
console.log('poMetrics.active_pos:', poMetrics.active_pos);
console.log('poMetrics.overdue_pos:', poMetrics.overdue_pos);
console.log('poMetrics.total_units:', poMetrics.total_units);
console.log('poMetrics.total_cost:', poMetrics.total_cost);
console.log('poMetrics.total_retail:', poMetrics.total_retail);
const [vendorOrders] = await executeQuery(` const [vendorOrders] = await executeQuery(`
SELECT SELECT
po.vendor, po.vendor,
COUNT(DISTINCT po.po_id) as order_count, COUNT(DISTINCT po.po_id) as orders,
COALESCE(SUM(po.ordered), 0) as ordered_units, COALESCE(SUM(po.ordered), 0) as units,
COALESCE(SUM(po.ordered * po.cost_price), 0) as order_cost, CAST(COALESCE(SUM(po.ordered * po.cost_price), 0) AS DECIMAL(15,3)) as cost,
COALESCE(SUM(po.ordered * p.price), 0) as order_retail CAST(COALESCE(SUM(po.ordered * p.price), 0) AS DECIMAL(15,3)) as retail
FROM purchase_orders po FROM purchase_orders po
JOIN products p ON po.product_id = p.product_id JOIN products p ON po.pid = p.pid
WHERE po.status = 'open' WHERE po.receiving_status < ${ReceivingStatus.PartialReceived}
GROUP BY po.vendor GROUP BY po.vendor
HAVING order_cost > 0 HAVING cost > 0
ORDER BY order_cost DESC ORDER BY cost DESC
`); `);
// Format response to match PurchaseMetricsData interface
const response = { const response = {
activePurchaseOrders: parseInt(poMetrics.active_pos) || 0, activePurchaseOrders: parseInt(poMetrics.active_pos) || 0,
overduePurchaseOrders: parseInt(poMetrics.overdue_pos) || 0, overduePurchaseOrders: parseInt(poMetrics.overdue_pos) || 0,
@@ -152,10 +156,10 @@ router.get('/purchase/metrics', async (req, res) => {
onOrderRetail: parseFloat(poMetrics.total_retail) || 0, onOrderRetail: parseFloat(poMetrics.total_retail) || 0,
vendorOrders: vendorOrders.map(v => ({ vendorOrders: vendorOrders.map(v => ({
vendor: v.vendor, vendor: v.vendor,
orders: parseInt(v.order_count) || 0, orders: parseInt(v.orders) || 0,
units: parseInt(v.ordered_units) || 0, units: parseInt(v.units) || 0,
cost: parseFloat(v.order_cost) || 0, cost: parseFloat(v.cost) || 0,
retail: parseFloat(v.order_retail) || 0 retail: parseFloat(v.retail) || 0
})) }))
}; };
@@ -173,21 +177,21 @@ router.get('/replenishment/metrics', async (req, res) => {
// Get summary metrics // Get summary metrics
const [metrics] = await executeQuery(` const [metrics] = await executeQuery(`
SELECT SELECT
COUNT(DISTINCT p.product_id) as products_to_replenish, COUNT(DISTINCT p.pid) as products_to_replenish,
COALESCE(SUM(CASE COALESCE(SUM(CASE
WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty
ELSE pm.reorder_qty ELSE pm.reorder_qty
END), 0) as total_units_needed, END), 0) as total_units_needed,
COALESCE(SUM(CASE CAST(COALESCE(SUM(CASE
WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price
ELSE pm.reorder_qty * p.cost_price ELSE pm.reorder_qty * p.cost_price
END), 0) as total_cost, END), 0) AS DECIMAL(15,3)) as total_cost,
COALESCE(SUM(CASE CAST(COALESCE(SUM(CASE
WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price
ELSE pm.reorder_qty * p.price ELSE pm.reorder_qty * p.price
END), 0) as total_retail END), 0) AS DECIMAL(15,3)) as total_retail
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
WHERE p.replenishable = true WHERE p.replenishable = true
AND (pm.stock_status IN ('Critical', 'Reorder') AND (pm.stock_status IN ('Critical', 'Reorder')
OR p.stock_quantity < 0) OR p.stock_quantity < 0)
@@ -197,24 +201,24 @@ router.get('/replenishment/metrics', async (req, res) => {
// Get top variants to replenish // Get top variants to replenish
const [variants] = await executeQuery(` const [variants] = await executeQuery(`
SELECT SELECT
p.product_id, p.pid,
p.title, p.title,
p.stock_quantity as current_stock, p.stock_quantity as current_stock,
CASE CASE
WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty WHEN p.stock_quantity < 0 THEN ABS(p.stock_quantity) + pm.reorder_qty
ELSE pm.reorder_qty ELSE pm.reorder_qty
END as replenish_qty, END as replenish_qty,
CASE CAST(CASE
WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.cost_price
ELSE pm.reorder_qty * p.cost_price ELSE pm.reorder_qty * p.cost_price
END as replenish_cost, END AS DECIMAL(15,3)) as replenish_cost,
CASE CAST(CASE
WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price WHEN p.stock_quantity < 0 THEN (ABS(p.stock_quantity) + pm.reorder_qty) * p.price
ELSE pm.reorder_qty * p.price ELSE pm.reorder_qty * p.price
END as replenish_retail, END AS DECIMAL(15,3)) as replenish_retail,
pm.stock_status pm.stock_status
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
WHERE p.replenishable = true WHERE p.replenishable = true
AND (pm.stock_status IN ('Critical', 'Reorder') AND (pm.stock_status IN ('Critical', 'Reorder')
OR p.stock_quantity < 0) OR p.stock_quantity < 0)
@@ -235,7 +239,7 @@ router.get('/replenishment/metrics', async (req, res) => {
replenishmentCost: parseFloat(metrics[0].total_cost) || 0, replenishmentCost: parseFloat(metrics[0].total_cost) || 0,
replenishmentRetail: parseFloat(metrics[0].total_retail) || 0, replenishmentRetail: parseFloat(metrics[0].total_retail) || 0,
topVariants: variants.map(v => ({ topVariants: variants.map(v => ({
id: v.product_id, id: v.pid,
title: v.title, title: v.title,
currentStock: parseInt(v.current_stock) || 0, currentStock: parseInt(v.current_stock) || 0,
replenishQty: parseInt(v.replenish_qty) || 0, replenishQty: parseInt(v.replenish_qty) || 0,
@@ -287,9 +291,9 @@ router.get('/forecast/metrics', async (req, res) => {
COALESCE(SUM(cf.forecast_revenue), 0) as revenue, COALESCE(SUM(cf.forecast_revenue), 0) as revenue,
COALESCE(AVG(cf.confidence_level), 0) as confidence COALESCE(AVG(cf.confidence_level), 0) as confidence
FROM category_forecasts cf FROM category_forecasts cf
JOIN categories c ON cf.category_id = c.id JOIN categories c ON cf.category_id = c.cat_id
WHERE cf.forecast_date BETWEEN ? AND ? WHERE cf.forecast_date BETWEEN ? AND ?
GROUP BY c.id, c.name GROUP BY c.cat_id, c.name
ORDER BY revenue DESC ORDER BY revenue DESC
`, [startDate, endDate]); `, [startDate, endDate]);
@@ -325,11 +329,11 @@ router.get('/overstock/metrics', async (req, res) => {
const [rows] = await executeQuery(` const [rows] = await executeQuery(`
WITH category_overstock AS ( WITH category_overstock AS (
SELECT SELECT
c.id as category_id, c.cat_id,
c.name as category_name, c.name as category_name,
COUNT(DISTINCT CASE COUNT(DISTINCT CASE
WHEN pm.stock_status = 'Overstocked' WHEN pm.stock_status = 'Overstocked'
THEN p.product_id THEN p.pid
END) as overstocked_products, END) as overstocked_products,
SUM(CASE SUM(CASE
WHEN pm.stock_status = 'Overstocked' WHEN pm.stock_status = 'Overstocked'
@@ -347,10 +351,10 @@ router.get('/overstock/metrics', async (req, res) => {
ELSE 0 ELSE 0
END) as total_excess_retail END) as total_excess_retail
FROM categories c FROM categories c
JOIN product_categories pc ON c.id = pc.category_id JOIN product_categories pc ON c.cat_id = pc.cat_id
JOIN products p ON pc.product_id = p.product_id JOIN products p ON pc.pid = p.pid
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
GROUP BY c.id, c.name GROUP BY c.cat_id, c.name
) )
SELECT SELECT
SUM(overstocked_products) as total_overstocked, SUM(overstocked_products) as total_overstocked,
@@ -405,7 +409,7 @@ router.get('/overstock/products', async (req, res) => {
try { try {
const [rows] = await executeQuery(` const [rows] = await executeQuery(`
SELECT SELECT
p.product_id, p.pid,
p.SKU, p.SKU,
p.title, p.title,
p.brand, p.brand,
@@ -420,11 +424,11 @@ router.get('/overstock/products', async (req, res) => {
(pm.overstocked_amt * p.price) as excess_retail, (pm.overstocked_amt * p.price) as excess_retail,
GROUP_CONCAT(c.name) as categories GROUP_CONCAT(c.name) as categories
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN product_categories pc ON p.product_id = pc.product_id LEFT JOIN product_categories pc ON p.pid = pc.pid
LEFT JOIN categories c ON pc.category_id = c.id LEFT JOIN categories c ON pc.cat_id = c.cat_id
WHERE pm.stock_status = 'Overstocked' WHERE pm.stock_status = 'Overstocked'
GROUP BY p.product_id GROUP BY p.pid
ORDER BY excess_cost DESC ORDER BY excess_cost DESC
LIMIT ? LIMIT ?
`, [limit]); `, [limit]);
@@ -439,196 +443,116 @@ router.get('/overstock/products', async (req, res) => {
// Returns best-selling products, vendors, and categories // Returns best-selling products, vendors, and categories
router.get('/best-sellers', async (req, res) => { router.get('/best-sellers', async (req, res) => {
try { try {
const [products] = await executeQuery(` const pool = req.app.locals.pool;
WITH product_sales AS (
SELECT
p.product_id,
p.SKU as sku,
p.title,
-- Current period (last 30 days)
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.quantity
ELSE 0
END) as units_sold,
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) as revenue,
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN (o.price - p.cost_price) * o.quantity
ELSE 0
END) as profit,
-- Previous period (30-60 days ago)
SUM(CASE
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 60 DAY) AND DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) as previous_revenue
FROM products p
JOIN orders o ON p.product_id = o.product_id
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 60 DAY)
GROUP BY p.product_id, p.SKU, p.title
)
SELECT
product_id,
sku,
title,
units_sold,
revenue,
profit,
CASE
WHEN previous_revenue > 0
THEN ((revenue - previous_revenue) / previous_revenue * 100)
WHEN revenue > 0
THEN 100
ELSE 0
END as growth_rate
FROM product_sales
WHERE units_sold > 0
ORDER BY revenue DESC
LIMIT 50
`);
const [brands] = await executeQuery(` // Common CTE for category paths
WITH brand_sales AS ( const categoryPathCTE = `
WITH RECURSIVE category_path AS (
SELECT SELECT
p.brand, c.cat_id,
-- Current period (last 30 days)
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.quantity
ELSE 0
END) as units_sold,
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) as revenue,
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN (o.price - p.cost_price) * o.quantity
ELSE 0
END) as profit,
-- Previous period (30-60 days ago)
SUM(CASE
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 60 DAY) AND DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) as previous_revenue
FROM products p
JOIN orders o ON p.product_id = o.product_id
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 60 DAY)
AND p.brand IS NOT NULL
GROUP BY p.brand
)
SELECT
brand,
units_sold,
revenue,
profit,
CASE
WHEN previous_revenue > 0
THEN ((revenue - previous_revenue) / previous_revenue * 100)
WHEN revenue > 0
THEN 100
ELSE 0
END as growth_rate
FROM brand_sales
WHERE units_sold > 0
ORDER BY revenue DESC
LIMIT 50
`);
const [categories] = await executeQuery(`
WITH category_sales AS (
SELECT
c.id as category_id,
c.name, c.name,
-- Current period (last 30 days) c.parent_id,
SUM(CASE CAST(c.name AS CHAR(1000)) as path
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.quantity
ELSE 0
END) as units_sold,
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) as revenue,
SUM(CASE
WHEN o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN (o.price - p.cost_price) * o.quantity
ELSE 0
END) as profit,
-- Previous period (30-60 days ago)
SUM(CASE
WHEN o.date BETWEEN DATE_SUB(CURRENT_DATE, INTERVAL 60 DAY) AND DATE_SUB(CURRENT_DATE, INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) as previous_revenue
FROM categories c FROM categories c
JOIN product_categories pc ON c.id = pc.category_id WHERE c.parent_id IS NULL
JOIN products p ON pc.product_id = p.product_id
JOIN orders o ON p.product_id = o.product_id UNION ALL
WHERE o.canceled = false
AND o.date >= DATE_SUB(CURRENT_DATE, INTERVAL 60 DAY) SELECT
GROUP BY c.id, c.name c.cat_id,
c.name,
c.parent_id,
CONCAT(cp.path, ' > ', c.name)
FROM categories c
JOIN category_path cp ON c.parent_id = cp.cat_id
) )
`;
// Get best selling products
const [products] = await pool.query(`
SELECT SELECT
category_id, p.pid,
name, p.SKU as sku,
units_sold, p.title,
revenue, SUM(o.quantity) as units_sold,
profit, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
CASE CAST(SUM(o.price * o.quantity - p.cost_price * o.quantity) AS DECIMAL(15,3)) as profit
WHEN previous_revenue > 0 FROM products p
THEN ((revenue - previous_revenue) / previous_revenue * 100) JOIN orders o ON p.pid = o.pid
WHEN revenue > 0 WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN 100 AND o.canceled = false
ELSE 0 GROUP BY p.pid
END as growth_rate ORDER BY units_sold DESC
FROM category_sales LIMIT 10
WHERE units_sold > 0
ORDER BY revenue DESC
LIMIT 50
`); `);
// Format response with explicit type conversion // Get best selling brands
const formattedProducts = products.map(p => ({ const [brands] = await pool.query(`
...p, SELECT
units_sold: parseInt(p.units_sold) || 0, p.brand,
revenue: parseFloat(p.revenue) || 0, SUM(o.quantity) as units_sold,
profit: parseFloat(p.profit) || 0, CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
growth_rate: parseFloat(p.growth_rate) || 0 CAST(SUM(o.price * o.quantity - p.cost_price * o.quantity) AS DECIMAL(15,3)) as profit,
})); ROUND(
((SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) /
NULLIF(SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
AND o.date < DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END), 0)) - 1) * 100,
1
) as growth_rate
FROM products p
JOIN orders o ON p.pid = o.pid
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
AND o.canceled = false
GROUP BY p.brand
ORDER BY units_sold DESC
LIMIT 10
`);
const formattedBrands = brands.map(b => ({ // Get best selling categories with full path
brand: b.brand, const [categories] = await pool.query(`
units_sold: parseInt(b.units_sold) || 0, ${categoryPathCTE}
revenue: parseFloat(b.revenue) || 0, SELECT
profit: parseFloat(b.profit) || 0, c.cat_id,
growth_rate: parseFloat(b.growth_rate) || 0 c.name,
})); cp.path as categoryPath,
SUM(o.quantity) as units_sold,
CAST(SUM(o.price * o.quantity) AS DECIMAL(15,3)) as revenue,
CAST(SUM(o.price * o.quantity - p.cost_price * o.quantity) AS DECIMAL(15,3)) as profit,
ROUND(
((SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END) /
NULLIF(SUM(CASE
WHEN o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
AND o.date < DATE_SUB(CURDATE(), INTERVAL 30 DAY)
THEN o.price * o.quantity
ELSE 0
END), 0)) - 1) * 100,
1
) as growth_rate
FROM products p
JOIN orders o ON p.pid = o.pid
JOIN product_categories pc ON p.pid = pc.pid
JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 60 DAY)
AND o.canceled = false
GROUP BY c.cat_id, c.name, cp.path
ORDER BY units_sold DESC
LIMIT 10
`);
const formattedCategories = categories.map(c => ({ res.json({ products, brands, categories });
category_id: c.category_id,
name: c.name,
units_sold: parseInt(c.units_sold) || 0,
revenue: parseFloat(c.revenue) || 0,
profit: parseFloat(c.profit) || 0,
growth_rate: parseFloat(c.growth_rate) || 0
}));
res.json({
products: formattedProducts,
brands: formattedBrands,
categories: formattedCategories
});
} catch (err) { } catch (err) {
console.error('Error fetching best sellers:', err); console.error('Error fetching best sellers:', err);
res.status(500).json({ error: 'Failed to fetch best sellers' }); res.status(500).json({ error: 'Failed to fetch best sellers' });
@@ -650,7 +574,7 @@ router.get('/sales/metrics', async (req, res) => {
SUM(p.cost_price * o.quantity) as total_cogs, SUM(p.cost_price * o.quantity) as total_cogs,
SUM((o.price - p.cost_price) * o.quantity) as total_profit SUM((o.price - p.cost_price) * o.quantity) as total_profit
FROM orders o FROM orders o
JOIN products p ON o.product_id = p.product_id JOIN products p ON o.pid = p.pid
WHERE o.canceled = false WHERE o.canceled = false
AND o.date BETWEEN ? AND ? AND o.date BETWEEN ? AND ?
GROUP BY DATE(o.date) GROUP BY DATE(o.date)
@@ -666,7 +590,7 @@ router.get('/sales/metrics', async (req, res) => {
SUM(p.cost_price * o.quantity) as total_cogs, SUM(p.cost_price * o.quantity) as total_cogs,
SUM((o.price - p.cost_price) * o.quantity) as total_profit SUM((o.price - p.cost_price) * o.quantity) as total_profit
FROM orders o FROM orders o
JOIN products p ON o.product_id = p.product_id JOIN products p ON o.pid = p.pid
WHERE o.canceled = false WHERE o.canceled = false
AND o.date BETWEEN ? AND ? AND o.date BETWEEN ? AND ?
`, [startDate, endDate]); `, [startDate, endDate]);
@@ -698,7 +622,7 @@ router.get('/low-stock/products', async (req, res) => {
try { try {
const [rows] = await executeQuery(` const [rows] = await executeQuery(`
SELECT SELECT
p.product_id, p.pid,
p.SKU, p.SKU,
p.title, p.title,
p.brand, p.brand,
@@ -712,12 +636,12 @@ router.get('/low-stock/products', async (req, res) => {
(pm.reorder_qty * p.cost_price) as reorder_cost, (pm.reorder_qty * p.cost_price) as reorder_cost,
GROUP_CONCAT(c.name) as categories GROUP_CONCAT(c.name) as categories
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN product_categories pc ON p.product_id = pc.product_id LEFT JOIN product_categories pc ON p.pid = pc.pid
LEFT JOIN categories c ON pc.category_id = c.id LEFT JOIN categories c ON pc.cat_id = c.cat_id
WHERE pm.stock_status IN ('Critical', 'Reorder') WHERE pm.stock_status IN ('Critical', 'Reorder')
AND p.replenishable = true AND p.replenishable = true
GROUP BY p.product_id GROUP BY p.pid
ORDER BY ORDER BY
CASE pm.stock_status CASE pm.stock_status
WHEN 'Critical' THEN 1 WHEN 'Critical' THEN 1
@@ -742,17 +666,17 @@ router.get('/trending/products', async (req, res) => {
const [rows] = await executeQuery(` const [rows] = await executeQuery(`
WITH recent_sales AS ( WITH recent_sales AS (
SELECT SELECT
o.product_id, o.pid,
COUNT(DISTINCT o.order_number) as recent_orders, COUNT(DISTINCT o.order_number) as recent_orders,
SUM(o.quantity) as recent_units, SUM(o.quantity) as recent_units,
SUM(o.price * o.quantity) as recent_revenue SUM(o.price * o.quantity) as recent_revenue
FROM orders o FROM orders o
WHERE o.canceled = false WHERE o.canceled = false
AND o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY) AND o.date >= DATE_SUB(CURDATE(), INTERVAL ? DAY)
GROUP BY o.product_id GROUP BY o.pid
) )
SELECT SELECT
p.product_id, p.pid,
p.SKU, p.SKU,
p.title, p.title,
p.brand, p.brand,
@@ -767,15 +691,15 @@ router.get('/trending/products', async (req, res) => {
((rs.recent_units / ?) - pm.daily_sales_avg) / pm.daily_sales_avg * 100 as velocity_change, ((rs.recent_units / ?) - pm.daily_sales_avg) / pm.daily_sales_avg * 100 as velocity_change,
GROUP_CONCAT(c.name) as categories GROUP_CONCAT(c.name) as categories
FROM recent_sales rs FROM recent_sales rs
JOIN products p ON rs.product_id = p.product_id JOIN products p ON rs.pid = p.pid
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN product_categories pc ON p.product_id = pc.product_id LEFT JOIN product_categories pc ON p.pid = pc.pid
LEFT JOIN categories c ON pc.category_id = c.id LEFT JOIN categories c ON pc.cat_id = c.cat_id
GROUP BY p.product_id GROUP BY p.pid
HAVING velocity_change > 0 HAVING velocity_change > 0
ORDER BY velocity_change DESC ORDER BY velocity_change DESC
LIMIT ? LIMIT ?
`, [days, days, days, limit]); `, [days, days, limit]);
res.json(rows); res.json(rows);
} catch (err) { } catch (err) {
console.error('Error fetching trending products:', err); console.error('Error fetching trending products:', err);
@@ -859,7 +783,7 @@ router.get('/key-metrics', async (req, res) => {
COUNT(CASE WHEN pm.stock_status = 'Critical' THEN 1 END) as critical_stock_count, COUNT(CASE WHEN pm.stock_status = 'Critical' THEN 1 END) as critical_stock_count,
COUNT(CASE WHEN pm.stock_status = 'Overstocked' THEN 1 END) as overstock_count COUNT(CASE WHEN pm.stock_status = 'Overstocked' THEN 1 END) as overstock_count
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
), ),
sales_summary AS ( sales_summary AS (
SELECT SELECT
@@ -909,7 +833,7 @@ router.get('/inventory-health', async (req, res) => {
AVG(pm.turnover_rate) as avg_turnover_rate, AVG(pm.turnover_rate) as avg_turnover_rate,
AVG(pm.days_of_inventory) as avg_days_inventory AVG(pm.days_of_inventory) as avg_days_inventory
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
WHERE p.replenishable = true WHERE p.replenishable = true
), ),
value_distribution AS ( value_distribution AS (
@@ -931,7 +855,7 @@ router.get('/inventory-health', async (req, res) => {
ELSE 0 ELSE 0
END) * 100.0 / SUM(p.stock_quantity * p.cost_price) as overstock_value_percent END) * 100.0 / SUM(p.stock_quantity * p.cost_price) as overstock_value_percent
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
), ),
category_health AS ( category_health AS (
SELECT SELECT
@@ -940,11 +864,11 @@ router.get('/inventory-health', async (req, res) => {
SUM(CASE WHEN pm.stock_status = 'Healthy' THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as category_healthy_percent, SUM(CASE WHEN pm.stock_status = 'Healthy' THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as category_healthy_percent,
AVG(pm.turnover_rate) as category_turnover_rate AVG(pm.turnover_rate) as category_turnover_rate
FROM categories c FROM categories c
JOIN product_categories pc ON c.id = pc.category_id JOIN product_categories pc ON c.cat_id = pc.cat_id
JOIN products p ON pc.product_id = p.product_id JOIN products p ON pc.pid = p.pid
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
WHERE p.replenishable = true WHERE p.replenishable = true
GROUP BY c.id, c.name GROUP BY c.cat_id, c.name
) )
SELECT SELECT
sd.*, sd.*,
@@ -975,20 +899,15 @@ router.get('/replenish/products', async (req, res) => {
try { try {
const [products] = await executeQuery(` const [products] = await executeQuery(`
SELECT SELECT
p.product_id, p.pid,
p.SKU, p.SKU as sku,
p.title, p.title,
p.stock_quantity as current_stock, p.stock_quantity,
pm.reorder_qty as replenish_qty, pm.daily_sales_avg,
(pm.reorder_qty * p.cost_price) as replenish_cost, pm.reorder_qty,
(pm.reorder_qty * p.price) as replenish_retail, pm.last_purchase_date
CASE
WHEN pm.daily_sales_avg > 0
THEN FLOOR(p.stock_quantity / pm.daily_sales_avg)
ELSE NULL
END as days_until_stockout
FROM products p FROM products p
JOIN product_metrics pm ON p.product_id = pm.product_id JOIN product_metrics pm ON p.pid = pm.pid
WHERE p.replenishable = true WHERE p.replenishable = true
AND pm.stock_status IN ('Critical', 'Reorder') AND pm.stock_status IN ('Critical', 'Reorder')
AND pm.reorder_qty > 0 AND pm.reorder_qty > 0
@@ -997,23 +916,16 @@ router.get('/replenish/products', async (req, res) => {
WHEN 'Critical' THEN 1 WHEN 'Critical' THEN 1
WHEN 'Reorder' THEN 2 WHEN 'Reorder' THEN 2
END, END,
replenish_cost DESC pm.reorder_qty * p.cost_price DESC
LIMIT ? LIMIT ?
`, [limit]); `, [limit]);
// Format response res.json(products.map(p => ({
const response = products.map(p => ({ ...p,
product_id: p.product_id, stock_quantity: parseInt(p.stock_quantity) || 0,
SKU: p.SKU, daily_sales_avg: parseFloat(p.daily_sales_avg) || 0,
title: p.title, reorder_qty: parseInt(p.reorder_qty) || 0
current_stock: parseInt(p.current_stock) || 0, })));
replenish_qty: parseInt(p.replenish_qty) || 0,
replenish_cost: parseFloat(p.replenish_cost) || 0,
replenish_retail: parseFloat(p.replenish_retail) || 0,
days_until_stockout: p.days_until_stockout
}));
res.json(response);
} catch (err) { } catch (err) {
console.error('Error fetching products to replenish:', err); console.error('Error fetching products to replenish:', err);
res.status(500).json({ error: 'Failed to fetch products to replenish' }); res.status(500).json({ error: 'Failed to fetch products to replenish' });

View File

@@ -9,25 +9,25 @@ router.get('/trends', async (req, res) => {
WITH MonthlyMetrics AS ( WITH MonthlyMetrics AS (
SELECT SELECT
DATE(CONCAT(pta.year, '-', LPAD(pta.month, 2, '0'), '-01')) as date, DATE(CONCAT(pta.year, '-', LPAD(pta.month, 2, '0'), '-01')) as date,
SUM(pta.total_revenue) as revenue, CAST(COALESCE(SUM(pta.total_revenue), 0) AS DECIMAL(15,3)) as revenue,
SUM(pta.total_cost) as cost, CAST(COALESCE(SUM(pta.total_cost), 0) AS DECIMAL(15,3)) as cost,
SUM(pm.inventory_value) as inventory_value, CAST(COALESCE(SUM(pm.inventory_value), 0) AS DECIMAL(15,3)) as inventory_value,
CASE CASE
WHEN SUM(pm.inventory_value) > 0 WHEN SUM(pm.inventory_value) > 0
THEN (SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value)) * 100 THEN CAST((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value)) * 100 AS DECIMAL(15,3))
ELSE 0 ELSE 0
END as gmroi END as gmroi
FROM product_time_aggregates pta FROM product_time_aggregates pta
JOIN product_metrics pm ON pta.product_id = pm.product_id JOIN product_metrics pm ON pta.pid = pm.pid
WHERE (pta.year * 100 + pta.month) >= DATE_FORMAT(DATE_SUB(CURDATE(), INTERVAL 12 MONTH), '%Y%m') WHERE (pta.year * 100 + pta.month) >= DATE_FORMAT(DATE_SUB(CURDATE(), INTERVAL 12 MONTH), '%Y%m')
GROUP BY pta.year, pta.month GROUP BY pta.year, pta.month
ORDER BY date ASC ORDER BY date ASC
) )
SELECT SELECT
DATE_FORMAT(date, '%b %y') as date, DATE_FORMAT(date, '%b %y') as date,
ROUND(revenue, 2) as revenue, revenue,
ROUND(inventory_value, 2) as inventory_value, inventory_value,
ROUND(gmroi, 2) as gmroi gmroi
FROM MonthlyMetrics FROM MonthlyMetrics
`); `);
@@ -37,15 +37,15 @@ router.get('/trends', async (req, res) => {
const transformedData = { const transformedData = {
revenue: rows.map(row => ({ revenue: rows.map(row => ({
date: row.date, date: row.date,
value: parseFloat(row.revenue || 0) value: parseFloat(row.revenue)
})), })),
inventory_value: rows.map(row => ({ inventory_value: rows.map(row => ({
date: row.date, date: row.date,
value: parseFloat(row.inventory_value || 0) value: parseFloat(row.inventory_value)
})), })),
gmroi: rows.map(row => ({ gmroi: rows.map(row => ({
date: row.date, date: row.date,
value: parseFloat(row.gmroi || 0) value: parseFloat(row.gmroi)
})) }))
}; };

View File

@@ -74,8 +74,8 @@ router.get('/', async (req, res) => {
o1.status, o1.status,
o1.payment_method, o1.payment_method,
o1.shipping_method, o1.shipping_method,
COUNT(o2.product_id) as items_count, COUNT(o2.pid) as items_count,
SUM(o2.price * o2.quantity) as total_amount CAST(SUM(o2.price * o2.quantity) AS DECIMAL(15,3)) as total_amount
FROM orders o1 FROM orders o1
JOIN orders o2 ON o1.order_number = o2.order_number JOIN orders o2 ON o1.order_number = o2.order_number
WHERE ${conditions.join(' AND ')} WHERE ${conditions.join(' AND ')}
@@ -101,7 +101,7 @@ router.get('/', async (req, res) => {
WITH CurrentStats AS ( WITH CurrentStats AS (
SELECT SELECT
COUNT(DISTINCT order_number) as total_orders, COUNT(DISTINCT order_number) as total_orders,
SUM(price * quantity) as total_revenue CAST(SUM(price * quantity) AS DECIMAL(15,3)) as total_revenue
FROM orders FROM orders
WHERE canceled = false WHERE canceled = false
AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
@@ -109,7 +109,7 @@ router.get('/', async (req, res) => {
PreviousStats AS ( PreviousStats AS (
SELECT SELECT
COUNT(DISTINCT order_number) as prev_orders, COUNT(DISTINCT order_number) as prev_orders,
SUM(price * quantity) as prev_revenue CAST(SUM(price * quantity) AS DECIMAL(15,3)) as prev_revenue
FROM orders FROM orders
WHERE canceled = false WHERE canceled = false
AND DATE(date) BETWEEN DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND DATE_SUB(CURDATE(), INTERVAL 30 DAY) AND DATE(date) BETWEEN DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND DATE_SUB(CURDATE(), INTERVAL 30 DAY)
@@ -117,7 +117,7 @@ router.get('/', async (req, res) => {
OrderValues AS ( OrderValues AS (
SELECT SELECT
order_number, order_number,
SUM(price * quantity) as order_value CAST(SUM(price * quantity) AS DECIMAL(15,3)) as order_value
FROM orders FROM orders
WHERE canceled = false WHERE canceled = false
AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY) AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
@@ -138,12 +138,12 @@ router.get('/', async (req, res) => {
END as revenue_growth, END as revenue_growth,
CASE CASE
WHEN cs.total_orders > 0 WHEN cs.total_orders > 0
THEN (cs.total_revenue / cs.total_orders) THEN CAST((cs.total_revenue / cs.total_orders) AS DECIMAL(15,3))
ELSE 0 ELSE 0
END as average_order_value, END as average_order_value,
CASE CASE
WHEN ps.prev_orders > 0 WHEN ps.prev_orders > 0
THEN (ps.prev_revenue / ps.prev_orders) THEN CAST((ps.prev_revenue / ps.prev_orders) AS DECIMAL(15,3))
ELSE 0 ELSE 0
END as prev_average_order_value END as prev_average_order_value
FROM CurrentStats cs FROM CurrentStats cs
@@ -199,8 +199,8 @@ router.get('/:orderNumber', async (req, res) => {
o1.shipping_method, o1.shipping_method,
o1.shipping_address, o1.shipping_address,
o1.billing_address, o1.billing_address,
COUNT(o2.product_id) as items_count, COUNT(o2.pid) as items_count,
SUM(o2.price * o2.quantity) as total_amount CAST(SUM(o2.price * o2.quantity) AS DECIMAL(15,3)) as total_amount
FROM orders o1 FROM orders o1
JOIN orders o2 ON o1.order_number = o2.order_number JOIN orders o2 ON o1.order_number = o2.order_number
WHERE o1.order_number = ? AND o1.canceled = false WHERE o1.order_number = ? AND o1.canceled = false
@@ -222,14 +222,14 @@ router.get('/:orderNumber', async (req, res) => {
// Get order items // Get order items
const [itemRows] = await pool.query(` const [itemRows] = await pool.query(`
SELECT SELECT
o.product_id, o.pid,
p.title, p.title,
p.sku, p.SKU,
o.quantity, o.quantity,
o.price, o.price,
(o.price * o.quantity) as total CAST((o.price * o.quantity) AS DECIMAL(15,3)) as total
FROM orders o FROM orders o
JOIN products p ON o.product_id = p.product_id JOIN products p ON o.pid = p.pid
WHERE o.order_number = ? AND o.canceled = false WHERE o.order_number = ? AND o.canceled = false
`, [req.params.orderNumber]); `, [req.params.orderNumber]);

View File

@@ -2,6 +2,7 @@ const express = require('express');
const router = express.Router(); const router = express.Router();
const multer = require('multer'); const multer = require('multer');
const { importProductsFromCSV } = require('../utils/csvImporter'); const { importProductsFromCSV } = require('../utils/csvImporter');
const { PurchaseOrderStatus, ReceivingStatus } = require('../types/status-codes');
// Configure multer for file uploads // Configure multer for file uploads
const upload = multer({ dest: 'uploads/' }); const upload = multer({ dest: 'uploads/' });
@@ -20,15 +21,13 @@ router.get('/brands', async (req, res) => {
console.log('Fetching brands from database...'); console.log('Fetching brands from database...');
const [results] = await pool.query(` const [results] = await pool.query(`
SELECT DISTINCT p.brand SELECT DISTINCT COALESCE(p.brand, 'Unbranded') as brand
FROM products p FROM products p
JOIN purchase_orders po ON p.product_id = po.product_id JOIN purchase_orders po ON p.pid = po.pid
WHERE p.brand IS NOT NULL WHERE p.visible = true
AND p.brand != '' GROUP BY COALESCE(p.brand, 'Unbranded')
AND p.visible = true
GROUP BY p.brand
HAVING SUM(po.cost_price * po.received) >= 500 HAVING SUM(po.cost_price * po.received) >= 500
ORDER BY p.brand ORDER BY COALESCE(p.brand, 'Unbranded')
`); `);
console.log(`Found ${results.length} brands:`, results.slice(0, 3)); console.log(`Found ${results.length} brands:`, results.slice(0, 3));
@@ -147,9 +146,9 @@ router.get('/', async (req, res) => {
// Get total count for pagination // Get total count for pagination
const countQuery = ` const countQuery = `
SELECT COUNT(DISTINCT p.product_id) as total SELECT COUNT(DISTINCT p.pid) as total
FROM products p FROM products p
LEFT JOIN product_metrics pm ON p.product_id = pm.product_id LEFT JOIN product_metrics pm ON p.pid = pm.pid
${whereClause} ${whereClause}
`; `;
const [countResult] = await pool.query(countQuery, params); const [countResult] = await pool.query(countQuery, params);
@@ -163,36 +162,69 @@ router.get('/', async (req, res) => {
'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != "" ORDER BY vendor' 'SELECT DISTINCT vendor FROM products WHERE visible = true AND vendor IS NOT NULL AND vendor != "" ORDER BY vendor'
); );
const [brands] = await pool.query( const [brands] = await pool.query(
'SELECT DISTINCT brand FROM products WHERE visible = true AND brand IS NOT NULL AND brand != "" ORDER BY brand' 'SELECT DISTINCT COALESCE(brand, \'Unbranded\') as brand FROM products WHERE visible = true ORDER BY brand'
); );
// Main query with all fields // Main query with all fields
const query = ` const query = `
WITH product_thresholds AS ( WITH RECURSIVE
SELECT category_path AS (
p.product_id, SELECT
COALESCE( c.cat_id,
(SELECT overstock_days FROM stock_thresholds st c.name,
WHERE st.category_id IN ( c.parent_id,
SELECT pc.category_id CAST(c.name AS CHAR(1000)) as path
FROM product_categories pc FROM categories c
WHERE pc.product_id = p.product_id WHERE c.parent_id IS NULL
)
AND (st.vendor = p.vendor OR st.vendor IS NULL) UNION ALL
ORDER BY st.vendor IS NULL
LIMIT 1), SELECT
(SELECT overstock_days FROM stock_thresholds st c.cat_id,
WHERE st.category_id IS NULL c.name,
AND (st.vendor = p.vendor OR st.vendor IS NULL) c.parent_id,
ORDER BY st.vendor IS NULL CONCAT(cp.path, ' > ', c.name)
LIMIT 1), FROM categories c
90 JOIN category_path cp ON c.parent_id = cp.cat_id
) as target_days ),
FROM products p product_thresholds AS (
) SELECT
p.pid,
COALESCE(
(SELECT overstock_days FROM stock_thresholds st
WHERE st.category_id IN (
SELECT pc.cat_id
FROM product_categories pc
WHERE pc.pid = p.pid
)
AND (st.vendor = p.vendor OR st.vendor IS NULL)
ORDER BY st.vendor IS NULL
LIMIT 1),
(SELECT overstock_days FROM stock_thresholds st
WHERE st.category_id IS NULL
AND (st.vendor = p.vendor OR st.vendor IS NULL)
ORDER BY st.vendor IS NULL
LIMIT 1),
90
) as target_days
FROM products p
),
product_leaf_categories AS (
-- Find categories that aren't parents to other categories for this product
SELECT DISTINCT pc.cat_id
FROM product_categories pc
WHERE NOT EXISTS (
SELECT 1
FROM categories child
JOIN product_categories child_pc ON child.cat_id = child_pc.cat_id
WHERE child.parent_id = pc.cat_id
AND child_pc.pid = pc.pid
)
)
SELECT SELECT
p.*, p.*,
GROUP_CONCAT(DISTINCT c.name) as categories, COALESCE(p.brand, 'Unbranded') as brand,
GROUP_CONCAT(DISTINCT CONCAT(c.cat_id, ':', c.name)) as categories,
pm.daily_sales_avg, pm.daily_sales_avg,
pm.weekly_sales_avg, pm.weekly_sales_avg,
pm.monthly_sales_avg, pm.monthly_sales_avg,
@@ -205,10 +237,10 @@ router.get('/', async (req, res) => {
pm.reorder_point, pm.reorder_point,
pm.safety_stock, pm.safety_stock,
pm.avg_margin_percent, pm.avg_margin_percent,
pm.total_revenue, CAST(pm.total_revenue AS DECIMAL(15,3)) as total_revenue,
pm.inventory_value, CAST(pm.inventory_value AS DECIMAL(15,3)) as inventory_value,
pm.cost_of_goods_sold, CAST(pm.cost_of_goods_sold AS DECIMAL(15,3)) as cost_of_goods_sold,
pm.gross_profit, CAST(pm.gross_profit AS DECIMAL(15,3)) as gross_profit,
pm.gmroi, pm.gmroi,
pm.avg_lead_time_days, pm.avg_lead_time_days,
pm.last_purchase_date, pm.last_purchase_date,
@@ -223,12 +255,13 @@ router.get('/', async (req, res) => {
pm.overstocked_amt, pm.overstocked_amt,
COALESCE(pm.days_of_inventory / NULLIF(pt.target_days, 0), 0) as stock_coverage_ratio COALESCE(pm.days_of_inventory / NULLIF(pt.target_days, 0), 0) as stock_coverage_ratio
FROM products p FROM products p
LEFT JOIN product_metrics pm ON p.product_id = pm.product_id LEFT JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN product_categories pc ON p.product_id = pc.product_id LEFT JOIN product_categories pc ON p.pid = pc.pid
LEFT JOIN categories c ON pc.category_id = c.id LEFT JOIN categories c ON pc.cat_id = c.cat_id
LEFT JOIN product_thresholds pt ON p.product_id = pt.product_id LEFT JOIN product_thresholds pt ON p.pid = pt.pid
${whereClause} JOIN product_leaf_categories plc ON c.cat_id = plc.cat_id
GROUP BY p.product_id ${whereClause ? 'WHERE ' + whereClause.substring(6) : ''}
GROUP BY p.pid
ORDER BY ${sortColumn} ${sortDirection} ORDER BY ${sortColumn} ${sortDirection}
LIMIT ? OFFSET ? LIMIT ? OFFSET ?
`; `;
@@ -308,7 +341,7 @@ router.get('/trending', async (req, res) => {
SELECT COUNT(*) as count, SELECT COUNT(*) as count,
MAX(total_revenue) as max_revenue, MAX(total_revenue) as max_revenue,
MAX(daily_sales_avg) as max_daily_sales, MAX(daily_sales_avg) as max_daily_sales,
COUNT(DISTINCT product_id) as products_with_metrics COUNT(DISTINCT pid) as products_with_metrics
FROM product_metrics FROM product_metrics
WHERE total_revenue > 0 OR daily_sales_avg > 0 WHERE total_revenue > 0 OR daily_sales_avg > 0
`); `);
@@ -322,7 +355,7 @@ router.get('/trending', async (req, res) => {
// Get trending products // Get trending products
const [rows] = await pool.query(` const [rows] = await pool.query(`
SELECT SELECT
p.product_id, p.pid,
p.sku, p.sku,
p.title, p.title,
COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg, COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
@@ -334,7 +367,7 @@ router.get('/trending', async (req, res) => {
END as growth_rate, END as growth_rate,
COALESCE(pm.total_revenue, 0) as total_revenue COALESCE(pm.total_revenue, 0) as total_revenue
FROM products p FROM products p
INNER JOIN product_metrics pm ON p.product_id = pm.product_id INNER JOIN product_metrics pm ON p.pid = pm.pid
WHERE (pm.total_revenue > 0 OR pm.daily_sales_avg > 0) WHERE (pm.total_revenue > 0 OR pm.daily_sales_avg > 0)
AND p.visible = true AND p.visible = true
ORDER BY growth_rate DESC ORDER BY growth_rate DESC
@@ -351,130 +384,160 @@ router.get('/trending', async (req, res) => {
// Get a single product // Get a single product
router.get('/:id', async (req, res) => { router.get('/:id', async (req, res) => {
const pool = req.app.locals.pool;
try { try {
// Get basic product data with metrics const pool = req.app.locals.pool;
const [rows] = await pool.query( const id = parseInt(req.params.id);
`SELECT
// Common CTE for category paths
const categoryPathCTE = `
WITH RECURSIVE category_path AS (
SELECT
c.cat_id,
c.name,
c.parent_id,
CAST(c.name AS CHAR(1000)) as path
FROM categories c
WHERE c.parent_id IS NULL
UNION ALL
SELECT
c.cat_id,
c.name,
c.parent_id,
CONCAT(cp.path, ' > ', c.name)
FROM categories c
JOIN category_path cp ON c.parent_id = cp.cat_id
)
`;
// Get product details with category paths
const [productRows] = await pool.query(`
SELECT
p.*, p.*,
GROUP_CONCAT(DISTINCT c.name) as categories,
pm.daily_sales_avg, pm.daily_sales_avg,
pm.weekly_sales_avg, pm.weekly_sales_avg,
pm.monthly_sales_avg, pm.monthly_sales_avg,
pm.days_of_inventory, pm.days_of_inventory,
pm.reorder_point, pm.reorder_point,
pm.safety_stock, pm.safety_stock,
pm.stock_status,
pm.abc_class,
pm.avg_margin_percent, pm.avg_margin_percent,
pm.total_revenue, pm.total_revenue,
pm.inventory_value, pm.inventory_value,
pm.turnover_rate, pm.turnover_rate,
pm.abc_class, pm.gmroi,
pm.stock_status, pm.cost_of_goods_sold,
pm.gross_profit,
pm.avg_lead_time_days, pm.avg_lead_time_days,
pm.current_lead_time, pm.current_lead_time,
pm.target_lead_time, pm.target_lead_time,
pm.lead_time_status, pm.lead_time_status,
pm.gmroi, pm.reorder_qty,
pm.cost_of_goods_sold, pm.overstocked_amt
pm.gross_profit
FROM products p FROM products p
LEFT JOIN product_metrics pm ON p.product_id = pm.product_id LEFT JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN product_categories pc ON p.product_id = pc.product_id WHERE p.pid = ?
LEFT JOIN categories c ON pc.category_id = c.id `, [id]);
WHERE p.product_id = ? AND p.visible = true
GROUP BY p.product_id`, if (!productRows.length) {
[req.params.id]
);
if (rows.length === 0) {
return res.status(404).json({ error: 'Product not found' }); return res.status(404).json({ error: 'Product not found' });
} }
// Get vendor performance metrics // Get categories and their paths separately to avoid GROUP BY issues
const [vendorMetrics] = await pool.query( const [categoryRows] = await pool.query(`
`SELECT * FROM vendor_metrics WHERE vendor = ?`, WITH RECURSIVE
[rows[0].vendor] category_path AS (
); SELECT
c.cat_id,
c.name,
c.parent_id,
CAST(c.name AS CHAR(1000)) as path
FROM categories c
WHERE c.parent_id IS NULL
UNION ALL
SELECT
c.cat_id,
c.name,
c.parent_id,
CONCAT(cp.path, ' > ', c.name)
FROM categories c
JOIN category_path cp ON c.parent_id = cp.cat_id
),
product_leaf_categories AS (
-- Find categories assigned to this product that aren't parents
-- of other categories assigned to this product
SELECT pc.cat_id
FROM product_categories pc
WHERE pc.pid = ?
AND NOT EXISTS (
-- Check if there are any child categories also assigned to this product
SELECT 1
FROM categories child
JOIN product_categories child_pc ON child.cat_id = child_pc.cat_id
WHERE child.parent_id = pc.cat_id
AND child_pc.pid = pc.pid
)
)
SELECT
c.cat_id,
c.name as category_name,
cp.path as full_path
FROM product_categories pc
JOIN categories c ON pc.cat_id = c.cat_id
JOIN category_path cp ON c.cat_id = cp.cat_id
JOIN product_leaf_categories plc ON c.cat_id = plc.cat_id
WHERE pc.pid = ?
ORDER BY cp.path
`, [id, id]);
// Transform the results
const categoryPathMap = categoryRows.reduce((acc, row) => {
// Use cat_id in the key to differentiate categories with the same name
acc[`${row.cat_id}:${row.category_name}`] = row.full_path;
return acc;
}, {});
// Transform the data to match frontend expectations
const product = { const product = {
// Basic product info ...productRows[0],
product_id: rows[0].product_id, // Include cat_id in categories array to match the keys in categoryPathMap
title: rows[0].title, categories: categoryRows.map(row => `${row.cat_id}:${row.category_name}`),
SKU: rows[0].SKU, category_paths: categoryPathMap,
barcode: rows[0].barcode, price: parseFloat(productRows[0].price),
created_at: rows[0].created_at, regular_price: parseFloat(productRows[0].regular_price),
updated_at: rows[0].updated_at, cost_price: parseFloat(productRows[0].cost_price),
landing_cost_price: parseFloat(productRows[0].landing_cost_price),
// Inventory fields stock_quantity: parseInt(productRows[0].stock_quantity),
stock_quantity: parseInt(rows[0].stock_quantity), moq: parseInt(productRows[0].moq),
moq: parseInt(rows[0].moq), uom: parseInt(productRows[0].uom),
uom: parseInt(rows[0].uom), managing_stock: Boolean(productRows[0].managing_stock),
managing_stock: Boolean(rows[0].managing_stock), replenishable: Boolean(productRows[0].replenishable),
replenishable: Boolean(rows[0].replenishable), daily_sales_avg: parseFloat(productRows[0].daily_sales_avg) || 0,
weekly_sales_avg: parseFloat(productRows[0].weekly_sales_avg) || 0,
// Pricing fields monthly_sales_avg: parseFloat(productRows[0].monthly_sales_avg) || 0,
price: parseFloat(rows[0].price), avg_quantity_per_order: parseFloat(productRows[0].avg_quantity_per_order) || 0,
regular_price: parseFloat(rows[0].regular_price), number_of_orders: parseInt(productRows[0].number_of_orders) || 0,
cost_price: parseFloat(rows[0].cost_price), first_sale_date: productRows[0].first_sale_date || null,
landing_cost_price: parseFloat(rows[0].landing_cost_price), last_sale_date: productRows[0].last_sale_date || null,
days_of_inventory: parseFloat(productRows[0].days_of_inventory) || 0,
// Categorization weeks_of_inventory: parseFloat(productRows[0].weeks_of_inventory) || 0,
categories: rows[0].categories ? rows[0].categories.split(',') : [], reorder_point: parseFloat(productRows[0].reorder_point) || 0,
tags: rows[0].tags ? rows[0].tags.split(',') : [], safety_stock: parseFloat(productRows[0].safety_stock) || 0,
options: rows[0].options ? JSON.parse(rows[0].options) : {}, avg_margin_percent: parseFloat(productRows[0].avg_margin_percent) || 0,
total_revenue: parseFloat(productRows[0].total_revenue) || 0,
// Vendor info inventory_value: parseFloat(productRows[0].inventory_value) || 0,
vendor: rows[0].vendor, cost_of_goods_sold: parseFloat(productRows[0].cost_of_goods_sold) || 0,
vendor_reference: rows[0].vendor_reference, gross_profit: parseFloat(productRows[0].gross_profit) || 0,
brand: rows[0].brand, gmroi: parseFloat(productRows[0].gmroi) || 0,
avg_lead_time_days: parseFloat(productRows[0].avg_lead_time_days) || 0,
// URLs current_lead_time: parseFloat(productRows[0].current_lead_time) || 0,
permalink: rows[0].permalink, target_lead_time: parseFloat(productRows[0].target_lead_time) || 0,
image: rows[0].image, lead_time_status: productRows[0].lead_time_status || null,
reorder_qty: parseInt(productRows[0].reorder_qty) || 0,
// Metrics overstocked_amt: parseInt(productRows[0].overstocked_amt) || 0
metrics: {
// Sales metrics
daily_sales_avg: parseFloat(rows[0].daily_sales_avg) || 0,
weekly_sales_avg: parseFloat(rows[0].weekly_sales_avg) || 0,
monthly_sales_avg: parseFloat(rows[0].monthly_sales_avg) || 0,
// Inventory metrics
days_of_inventory: parseInt(rows[0].days_of_inventory) || 0,
reorder_point: parseInt(rows[0].reorder_point) || 0,
safety_stock: parseInt(rows[0].safety_stock) || 0,
stock_status: rows[0].stock_status || 'Unknown',
abc_class: rows[0].abc_class || 'C',
// Financial metrics
avg_margin_percent: parseFloat(rows[0].avg_margin_percent) || 0,
total_revenue: parseFloat(rows[0].total_revenue) || 0,
inventory_value: parseFloat(rows[0].inventory_value) || 0,
turnover_rate: parseFloat(rows[0].turnover_rate) || 0,
gmroi: parseFloat(rows[0].gmroi) || 0,
cost_of_goods_sold: parseFloat(rows[0].cost_of_goods_sold) || 0,
gross_profit: parseFloat(rows[0].gross_profit) || 0,
// Lead time metrics
avg_lead_time_days: parseInt(rows[0].avg_lead_time_days) || 0,
current_lead_time: parseInt(rows[0].current_lead_time) || 0,
target_lead_time: parseInt(rows[0].target_lead_time) || 14,
lead_time_status: rows[0].lead_time_status || 'Unknown',
reorder_qty: parseInt(rows[0].reorder_qty) || 0,
overstocked_amt: parseInt(rows[0].overstocked_amt) || 0
},
// Vendor performance (if available)
vendor_performance: vendorMetrics.length ? {
avg_lead_time_days: parseFloat(vendorMetrics[0].avg_lead_time_days) || 0,
on_time_delivery_rate: parseFloat(vendorMetrics[0].on_time_delivery_rate) || 0,
order_fill_rate: parseFloat(vendorMetrics[0].order_fill_rate) || 0,
total_orders: parseInt(vendorMetrics[0].total_orders) || 0,
total_late_orders: parseInt(vendorMetrics[0].total_late_orders) || 0,
total_purchase_value: parseFloat(vendorMetrics[0].total_purchase_value) || 0,
avg_order_value: parseFloat(vendorMetrics[0].avg_order_value) || 0
} : null
}; };
res.json(product); res.json(product);
@@ -532,7 +595,7 @@ router.put('/:id', async (req, res) => {
categories = ?, categories = ?,
visible = ?, visible = ?,
managing_stock = ? managing_stock = ?
WHERE product_id = ?`, WHERE pid = ?`,
[ [
title, title,
sku, sku,
@@ -570,7 +633,7 @@ router.get('/:id/metrics', async (req, res) => {
const [metrics] = await pool.query(` const [metrics] = await pool.query(`
WITH inventory_status AS ( WITH inventory_status AS (
SELECT SELECT
p.product_id, p.pid,
CASE CASE
WHEN pm.daily_sales_avg = 0 THEN 'New' WHEN pm.daily_sales_avg = 0 THEN 'New'
WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * 7) THEN 'Critical' WHEN p.stock_quantity <= CEIL(pm.daily_sales_avg * 7) THEN 'Critical'
@@ -579,8 +642,8 @@ router.get('/:id/metrics', async (req, res) => {
ELSE 'Healthy' ELSE 'Healthy'
END as calculated_status END as calculated_status
FROM products p FROM products p
LEFT JOIN product_metrics pm ON p.product_id = pm.product_id LEFT JOIN product_metrics pm ON p.pid = pm.pid
WHERE p.product_id = ? WHERE p.pid = ?
) )
SELECT SELECT
COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg, COALESCE(pm.daily_sales_avg, 0) as daily_sales_avg,
@@ -604,9 +667,9 @@ router.get('/:id/metrics', async (req, res) => {
COALESCE(pm.reorder_qty, 0) as reorder_qty, COALESCE(pm.reorder_qty, 0) as reorder_qty,
COALESCE(pm.overstocked_amt, 0) as overstocked_amt COALESCE(pm.overstocked_amt, 0) as overstocked_amt
FROM products p FROM products p
LEFT JOIN product_metrics pm ON p.product_id = pm.product_id LEFT JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN inventory_status is ON p.product_id = is.product_id LEFT JOIN inventory_status is ON p.pid = is.pid
WHERE p.product_id = ? WHERE p.pid = ?
`, [id]); `, [id]);
if (!metrics.length) { if (!metrics.length) {
@@ -643,57 +706,35 @@ router.get('/:id/metrics', async (req, res) => {
// Get product time series data // Get product time series data
router.get('/:id/time-series', async (req, res) => { router.get('/:id/time-series', async (req, res) => {
const pool = req.app.locals.pool; const { id } = req.params;
try { try {
const { id } = req.params; const pool = req.app.locals.pool;
const months = parseInt(req.query.months) || 12;
// Get monthly sales data with running totals and growth rates
const [monthlySales] = await pool.query(`
WITH monthly_data AS (
SELECT
CONCAT(year, '-', LPAD(month, 2, '0')) as month,
total_quantity_sold as quantity,
total_revenue as revenue,
total_cost as cost,
avg_price,
profit_margin,
inventory_value
FROM product_time_aggregates
WHERE product_id = ?
ORDER BY year DESC, month DESC
LIMIT ?
)
SELECT
month,
quantity,
revenue,
cost,
avg_price,
profit_margin,
inventory_value,
LAG(quantity) OVER (ORDER BY month) as prev_month_quantity,
LAG(revenue) OVER (ORDER BY month) as prev_month_revenue
FROM monthly_data
ORDER BY month ASC
`, [id, months]);
// Calculate growth rates and format data // Get monthly sales data
const formattedMonthlySales = monthlySales.map(row => ({ const [monthlySales] = await pool.query(`
month: row.month, SELECT
quantity: parseInt(row.quantity) || 0, DATE_FORMAT(date, '%Y-%m') as month,
revenue: parseFloat(row.revenue) || 0, COUNT(DISTINCT order_number) as order_count,
cost: parseFloat(row.cost) || 0, SUM(quantity) as units_sold,
avg_price: parseFloat(row.avg_price) || 0, CAST(SUM(price * quantity) AS DECIMAL(15,3)) as revenue
profit_margin: parseFloat(row.profit_margin) || 0, FROM orders
inventory_value: parseFloat(row.inventory_value) || 0, WHERE pid = ?
quantity_growth: row.prev_month_quantity ? AND canceled = false
((row.quantity - row.prev_month_quantity) / row.prev_month_quantity) * 100 : 0, GROUP BY DATE_FORMAT(date, '%Y-%m')
revenue_growth: row.prev_month_revenue ? ORDER BY month DESC
((row.revenue - row.prev_month_revenue) / row.prev_month_revenue) * 100 : 0 LIMIT 12
`, [id]);
// Format monthly sales data
const formattedMonthlySales = monthlySales.map(month => ({
month: month.month,
order_count: parseInt(month.order_count),
units_sold: parseInt(month.units_sold),
revenue: parseFloat(month.revenue),
profit: 0 // Set to 0 since we don't have cost data in orders table
})); }));
// Get recent orders with customer info and status // Get recent orders
const [recentOrders] = await pool.query(` const [recentOrders] = await pool.query(`
SELECT SELECT
DATE_FORMAT(date, '%Y-%m-%d') as date, DATE_FORMAT(date, '%Y-%m-%d') as date,
@@ -703,11 +744,10 @@ router.get('/:id/time-series', async (req, res) => {
discount, discount,
tax, tax,
shipping, shipping,
customer, customer_name as customer,
status, status
payment_method
FROM orders FROM orders
WHERE product_id = ? WHERE pid = ?
AND canceled = false AND canceled = false
ORDER BY date DESC ORDER BY date DESC
LIMIT 10 LIMIT 10
@@ -723,17 +763,19 @@ router.get('/:id/time-series', async (req, res) => {
ordered, ordered,
received, received,
status, status,
receiving_status,
cost_price, cost_price,
notes, notes,
CASE CASE
WHEN received_date IS NOT NULL THEN WHEN received_date IS NOT NULL THEN
DATEDIFF(received_date, date) DATEDIFF(received_date, date)
WHEN expected_date < CURDATE() AND status != 'received' THEN WHEN expected_date < CURDATE() AND status < ${PurchaseOrderStatus.ReceivingStarted} THEN
DATEDIFF(CURDATE(), expected_date) DATEDIFF(CURDATE(), expected_date)
ELSE NULL ELSE NULL
END as lead_time_days END as lead_time_days
FROM purchase_orders FROM purchase_orders
WHERE product_id = ? WHERE pid = ?
AND status != ${PurchaseOrderStatus.Canceled}
ORDER BY date DESC ORDER BY date DESC
LIMIT 10 LIMIT 10
`, [id]); `, [id]);
@@ -752,6 +794,8 @@ router.get('/:id/time-series', async (req, res) => {
...po, ...po,
ordered: parseInt(po.ordered), ordered: parseInt(po.ordered),
received: parseInt(po.received), received: parseInt(po.received),
status: parseInt(po.status),
receiving_status: parseInt(po.receiving_status),
cost_price: parseFloat(po.cost_price), cost_price: parseFloat(po.cost_price),
lead_time_days: po.lead_time_days ? parseInt(po.lead_time_days) : null lead_time_days: po.lead_time_days ? parseInt(po.lead_time_days) : null
})) }))

View File

@@ -1,6 +1,26 @@
const express = require('express'); const express = require('express');
const router = express.Router(); const router = express.Router();
// Status code constants
// NOTE(review): these values duplicate the shared PurchaseOrderStatus /
// ReceivingStatus constants module introduced in this same changeset —
// presumably this router should import from there instead of redeclaring;
// confirm and dedupe.
//
// Purchase order lifecycle codes. Gaps in the numbering leave room for
// future states, and the SQL in this router compares with >=
// (e.g. status >= RECEIVING_STARTED), so the numeric ordering matters.
const STATUS = {
CANCELED: 0,
CREATED: 1,
ELECTRONICALLY_READY_SEND: 10,
ORDERED: 11,
PREORDERED: 12,
ELECTRONICALLY_SENT: 13,
RECEIVING_STARTED: 15,
DONE: 50
};
// Receiving workflow codes for a purchase order. As above, queries rely on
// the ordering (PARTIAL < FULL < PAID) when bucketing orders.
const RECEIVING_STATUS = {
CANCELED: 0,
CREATED: 1,
PARTIAL_RECEIVED: 30,
FULL_RECEIVED: 40,
PAID: 50
};
// Get all purchase orders with summary metrics // Get all purchase orders with summary metrics
router.get('/', async (req, res) => { router.get('/', async (req, res) => {
try { try {
@@ -11,13 +31,13 @@ router.get('/', async (req, res) => {
const params = []; const params = [];
if (search) { if (search) {
whereClause += ' AND (po.po_id LIKE ? OR po.vendor LIKE ? OR po.status LIKE ?)'; whereClause += ' AND (po.po_id LIKE ? OR po.vendor LIKE ?)';
params.push(`%${search}%`, `%${search}%`, `%${search}%`); params.push(`%${search}%`, `%${search}%`);
} }
if (status && status !== 'all') { if (status && status !== 'all') {
whereClause += ' AND po.status = ?'; whereClause += ' AND po.status = ?';
params.push(status); params.push(Number(status));
} }
if (vendor && vendor !== 'all') { if (vendor && vendor !== 'all') {
@@ -42,7 +62,7 @@ router.get('/', async (req, res) => {
po_id, po_id,
SUM(ordered) as total_ordered, SUM(ordered) as total_ordered,
SUM(received) as total_received, SUM(received) as total_received,
SUM(ordered * cost_price) as total_cost CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost
FROM purchase_orders po FROM purchase_orders po
WHERE ${whereClause} WHERE ${whereClause}
GROUP BY po_id GROUP BY po_id
@@ -54,8 +74,8 @@ router.get('/', async (req, res) => {
ROUND( ROUND(
SUM(total_received) / NULLIF(SUM(total_ordered), 0), 3 SUM(total_received) / NULLIF(SUM(total_ordered), 0), 3
) as fulfillment_rate, ) as fulfillment_rate,
SUM(total_cost) as total_value, CAST(SUM(total_cost) AS DECIMAL(15,3)) as total_value,
ROUND(AVG(total_cost), 2) as avg_cost CAST(AVG(total_cost) AS DECIMAL(15,3)) as avg_cost
FROM po_totals FROM po_totals
`, params); `, params);
@@ -78,22 +98,24 @@ router.get('/', async (req, res) => {
vendor, vendor,
date, date,
status, status,
COUNT(DISTINCT product_id) as total_items, receiving_status,
COUNT(DISTINCT pid) as total_items,
SUM(ordered) as total_quantity, SUM(ordered) as total_quantity,
SUM(ordered * cost_price) as total_cost, CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost,
SUM(received) as total_received, SUM(received) as total_received,
ROUND( ROUND(
SUM(received) / NULLIF(SUM(ordered), 0), 3 SUM(received) / NULLIF(SUM(ordered), 0), 3
) as fulfillment_rate ) as fulfillment_rate
FROM purchase_orders po FROM purchase_orders po
WHERE ${whereClause} WHERE ${whereClause}
GROUP BY po_id, vendor, date, status GROUP BY po_id, vendor, date, status, receiving_status
) )
SELECT SELECT
po_id as id, po_id as id,
vendor as vendor_name, vendor as vendor_name,
DATE_FORMAT(date, '%Y-%m-%d') as order_date, DATE_FORMAT(date, '%Y-%m-%d') as order_date,
status, status,
receiving_status,
total_items, total_items,
total_quantity, total_quantity,
total_cost, total_cost,
@@ -104,8 +126,8 @@ router.get('/', async (req, res) => {
CASE CASE
WHEN ? = 'order_date' THEN date WHEN ? = 'order_date' THEN date
WHEN ? = 'vendor_name' THEN vendor WHEN ? = 'vendor_name' THEN vendor
WHEN ? = 'total_cost' THEN CAST(total_cost AS DECIMAL(15,2)) WHEN ? = 'total_cost' THEN CAST(total_cost AS DECIMAL(15,3))
WHEN ? = 'total_received' THEN CAST(total_received AS DECIMAL(15,2)) WHEN ? = 'total_received' THEN CAST(total_received AS DECIMAL(15,3))
WHEN ? = 'total_items' THEN CAST(total_items AS SIGNED) WHEN ? = 'total_items' THEN CAST(total_items AS SIGNED)
WHEN ? = 'total_quantity' THEN CAST(total_quantity AS SIGNED) WHEN ? = 'total_quantity' THEN CAST(total_quantity AS SIGNED)
WHEN ? = 'fulfillment_rate' THEN CAST(fulfillment_rate AS DECIMAL(5,3)) WHEN ? = 'fulfillment_rate' THEN CAST(fulfillment_rate AS DECIMAL(5,3))
@@ -127,7 +149,7 @@ router.get('/', async (req, res) => {
const [statuses] = await pool.query(` const [statuses] = await pool.query(`
SELECT DISTINCT status SELECT DISTINCT status
FROM purchase_orders FROM purchase_orders
WHERE status IS NOT NULL AND status != '' WHERE status IS NOT NULL
ORDER BY status ORDER BY status
`); `);
@@ -136,7 +158,8 @@ router.get('/', async (req, res) => {
id: order.id, id: order.id,
vendor_name: order.vendor_name, vendor_name: order.vendor_name,
order_date: order.order_date, order_date: order.order_date,
status: order.status, status: Number(order.status),
receiving_status: Number(order.receiving_status),
total_items: Number(order.total_items) || 0, total_items: Number(order.total_items) || 0,
total_quantity: Number(order.total_quantity) || 0, total_quantity: Number(order.total_quantity) || 0,
total_cost: Number(order.total_cost) || 0, total_cost: Number(order.total_cost) || 0,
@@ -165,7 +188,7 @@ router.get('/', async (req, res) => {
}, },
filters: { filters: {
vendors: vendors.map(v => v.vendor), vendors: vendors.map(v => v.vendor),
statuses: statuses.map(s => s.status) statuses: statuses.map(s => Number(s.status))
} }
}); });
} catch (error) { } catch (error) {
@@ -188,12 +211,14 @@ router.get('/vendor-metrics', async (req, res) => {
received, received,
cost_price, cost_price,
CASE CASE
WHEN status = 'received' AND received_date IS NOT NULL AND date IS NOT NULL WHEN status >= ${STATUS.RECEIVING_STARTED} AND receiving_status >= ${RECEIVING_STATUS.PARTIAL_RECEIVED}
AND received_date IS NOT NULL AND date IS NOT NULL
THEN DATEDIFF(received_date, date) THEN DATEDIFF(received_date, date)
ELSE NULL ELSE NULL
END as delivery_days END as delivery_days
FROM purchase_orders FROM purchase_orders
WHERE vendor IS NOT NULL AND vendor != '' WHERE vendor IS NOT NULL AND vendor != ''
AND status != ${STATUS.CANCELED} -- Exclude canceled orders
) )
SELECT SELECT
vendor as vendor_name, vendor as vendor_name,
@@ -203,10 +228,10 @@ router.get('/vendor-metrics', async (req, res) => {
ROUND( ROUND(
SUM(received) / NULLIF(SUM(ordered), 0), 3 SUM(received) / NULLIF(SUM(ordered), 0), 3
) as fulfillment_rate, ) as fulfillment_rate,
ROUND( CAST(ROUND(
SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2 SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2
) as avg_unit_cost, ) AS DECIMAL(15,3)) as avg_unit_cost,
SUM(ordered * cost_price) as total_spend, CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend,
ROUND( ROUND(
AVG(NULLIF(delivery_days, 0)), 1 AVG(NULLIF(delivery_days, 0)), 1
) as avg_delivery_days ) as avg_delivery_days
@@ -242,47 +267,47 @@ router.get('/cost-analysis', async (req, res) => {
const pool = req.app.locals.pool; const pool = req.app.locals.pool;
const [analysis] = await pool.query(` const [analysis] = await pool.query(`
WITH category_costs AS (
SELECT
c.name as category,
po.pid,
po.cost_price,
po.ordered,
po.received,
po.status,
po.receiving_status
FROM purchase_orders po
JOIN product_categories pc ON po.pid = pc.pid
JOIN categories c ON pc.cat_id = c.cat_id
WHERE po.status != ${STATUS.CANCELED} -- Exclude canceled orders
)
SELECT SELECT
c.name as categories, category,
COUNT(DISTINCT po.product_id) as unique_products, COUNT(DISTINCT pid) as unique_products,
ROUND(AVG(po.cost_price), 2) as avg_cost, CAST(AVG(cost_price) AS DECIMAL(15,3)) as avg_cost,
MIN(po.cost_price) as min_cost, CAST(MIN(cost_price) AS DECIMAL(15,3)) as min_cost,
MAX(po.cost_price) as max_cost, CAST(MAX(cost_price) AS DECIMAL(15,3)) as max_cost,
ROUND( CAST(STDDEV(cost_price) AS DECIMAL(15,3)) as cost_variance,
STDDEV(po.cost_price), 2 CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend
) as cost_variance, FROM category_costs
SUM(po.ordered * po.cost_price) as total_spend GROUP BY category
FROM purchase_orders po
JOIN products p ON po.product_id = p.product_id
JOIN product_categories pc ON p.product_id = pc.product_id
JOIN categories c ON pc.category_id = c.id
GROUP BY c.name
ORDER BY total_spend DESC ORDER BY total_spend DESC
`); `);
// Parse numeric values and add ids for React keys // Parse numeric values
const parsedAnalysis = analysis.map(item => ({ const parsedAnalysis = {
id: item.categories || 'Uncategorized', categories: analysis.map(cat => ({
categories: item.categories || 'Uncategorized', category: cat.category,
unique_products: Number(item.unique_products) || 0, unique_products: Number(cat.unique_products) || 0,
avg_cost: Number(item.avg_cost) || 0, avg_cost: Number(cat.avg_cost) || 0,
min_cost: Number(item.min_cost) || 0, min_cost: Number(cat.min_cost) || 0,
max_cost: Number(item.max_cost) || 0, max_cost: Number(cat.max_cost) || 0,
cost_variance: Number(item.cost_variance) || 0, cost_variance: Number(cat.cost_variance) || 0,
total_spend: Number(item.total_spend) || 0 total_spend: Number(cat.total_spend) || 0
}));
// Transform the data with parsed values
const transformedAnalysis = {
...parsedAnalysis[0],
total_spend_by_category: parsedAnalysis.map(item => ({
id: item.categories,
category: item.categories,
total_spend: Number(item.total_spend)
})) }))
}; };
res.json(transformedAnalysis); res.json(parsedAnalysis);
} catch (error) { } catch (error) {
console.error('Error fetching cost analysis:', error); console.error('Error fetching cost analysis:', error);
res.status(500).json({ error: 'Failed to fetch cost analysis' }); res.status(500).json({ error: 'Failed to fetch cost analysis' });
@@ -298,11 +323,14 @@ router.get('/receiving-status', async (req, res) => {
WITH po_totals AS ( WITH po_totals AS (
SELECT SELECT
po_id, po_id,
status,
receiving_status,
SUM(ordered) as total_ordered, SUM(ordered) as total_ordered,
SUM(received) as total_received, SUM(received) as total_received,
SUM(ordered * cost_price) as total_cost CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_cost
FROM purchase_orders FROM purchase_orders
GROUP BY po_id WHERE status != ${STATUS.CANCELED}
GROUP BY po_id, status, receiving_status
) )
SELECT SELECT
COUNT(DISTINCT po_id) as order_count, COUNT(DISTINCT po_id) as order_count,
@@ -311,8 +339,20 @@ router.get('/receiving-status', async (req, res) => {
ROUND( ROUND(
SUM(total_received) / NULLIF(SUM(total_ordered), 0), 3 SUM(total_received) / NULLIF(SUM(total_ordered), 0), 3
) as fulfillment_rate, ) as fulfillment_rate,
SUM(total_cost) as total_value, CAST(SUM(total_cost) AS DECIMAL(15,3)) as total_value,
ROUND(AVG(total_cost), 2) as avg_cost CAST(AVG(total_cost) AS DECIMAL(15,3)) as avg_cost,
COUNT(DISTINCT CASE
WHEN receiving_status = ${RECEIVING_STATUS.CREATED} THEN po_id
END) as pending_count,
COUNT(DISTINCT CASE
WHEN receiving_status = ${RECEIVING_STATUS.PARTIAL_RECEIVED} THEN po_id
END) as partial_count,
COUNT(DISTINCT CASE
WHEN receiving_status >= ${RECEIVING_STATUS.FULL_RECEIVED} THEN po_id
END) as completed_count,
COUNT(DISTINCT CASE
WHEN receiving_status = ${RECEIVING_STATUS.CANCELED} THEN po_id
END) as canceled_count
FROM po_totals FROM po_totals
`); `);
@@ -323,7 +363,13 @@ router.get('/receiving-status', async (req, res) => {
total_received: Number(status[0].total_received) || 0, total_received: Number(status[0].total_received) || 0,
fulfillment_rate: Number(status[0].fulfillment_rate) || 0, fulfillment_rate: Number(status[0].fulfillment_rate) || 0,
total_value: Number(status[0].total_value) || 0, total_value: Number(status[0].total_value) || 0,
avg_cost: Number(status[0].avg_cost) || 0 avg_cost: Number(status[0].avg_cost) || 0,
status_breakdown: {
pending: Number(status[0].pending_count) || 0,
partial: Number(status[0].partial_count) || 0,
completed: Number(status[0].completed_count) || 0,
canceled: Number(status[0].canceled_count) || 0
}
}; };
res.json(parsedStatus); res.json(parsedStatus);

View File

@@ -29,8 +29,8 @@ router.get('/', async (req, res) => {
const [costMetrics] = await pool.query(` const [costMetrics] = await pool.query(`
SELECT SELECT
vendor, vendor,
ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) as avg_unit_cost, CAST(ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) AS DECIMAL(15,3)) as avg_unit_cost,
SUM(ordered * cost_price) as total_spend CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend
FROM purchase_orders FROM purchase_orders
WHERE status = 'closed' WHERE status = 'closed'
AND cost_price IS NOT NULL AND cost_price IS NOT NULL
@@ -56,9 +56,9 @@ router.get('/', async (req, res) => {
WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75 WHEN COALESCE(vm.total_orders, 0) > 0 AND COALESCE(vm.order_fill_rate, 0) >= 75
THEN p.vendor THEN p.vendor
END) as activeVendors, END) as activeVendors,
ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0)), 1) as avgLeadTime, COALESCE(ROUND(AVG(NULLIF(vm.avg_lead_time_days, 0)), 1), 0) as avgLeadTime,
ROUND(AVG(NULLIF(vm.order_fill_rate, 0)), 1) as avgFillRate, COALESCE(ROUND(AVG(NULLIF(vm.order_fill_rate, 0)), 1), 0) as avgFillRate,
ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0)), 1) as avgOnTimeDelivery COALESCE(ROUND(AVG(NULLIF(vm.on_time_delivery_rate, 0)), 1), 0) as avgOnTimeDelivery
FROM products p FROM products p
LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor LEFT JOIN vendor_metrics vm ON p.vendor = vm.vendor
WHERE p.vendor IS NOT NULL AND p.vendor != '' WHERE p.vendor IS NOT NULL AND p.vendor != ''
@@ -67,8 +67,8 @@ router.get('/', async (req, res) => {
// Get overall cost metrics // Get overall cost metrics
const [overallCostMetrics] = await pool.query(` const [overallCostMetrics] = await pool.query(`
SELECT SELECT
ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) as avg_unit_cost, CAST(ROUND(SUM(ordered * cost_price) / NULLIF(SUM(ordered), 0), 2) AS DECIMAL(15,3)) as avg_unit_cost,
SUM(ordered * cost_price) as total_spend CAST(SUM(ordered * cost_price) AS DECIMAL(15,3)) as total_spend
FROM purchase_orders FROM purchase_orders
WHERE status = 'closed' WHERE status = 'closed'
AND cost_price IS NOT NULL AND cost_price IS NOT NULL
@@ -78,25 +78,25 @@ router.get('/', async (req, res) => {
res.json({ res.json({
vendors: vendors.map(vendor => ({ vendors: vendors.map(vendor => ({
vendor_id: vendor.vendor_id || vendor.name, vendor_id: vendor.name,
name: vendor.name, name: vendor.name,
status: vendor.status, status: vendor.status,
avg_lead_time_days: parseFloat(vendor.avg_lead_time_days || 0), avg_lead_time_days: parseFloat(vendor.avg_lead_time_days),
on_time_delivery_rate: parseFloat(vendor.on_time_delivery_rate || 0), on_time_delivery_rate: parseFloat(vendor.on_time_delivery_rate),
order_fill_rate: parseFloat(vendor.order_fill_rate || 0), order_fill_rate: parseFloat(vendor.order_fill_rate),
total_orders: parseInt(vendor.total_orders || 0), total_orders: parseInt(vendor.total_orders),
active_products: parseInt(vendor.active_products || 0), active_products: parseInt(vendor.active_products),
avg_unit_cost: parseFloat(costMetricsMap[vendor.name]?.avg_unit_cost || 0), avg_unit_cost: parseFloat(costMetricsMap[vendor.name]?.avg_unit_cost || 0),
total_spend: parseFloat(costMetricsMap[vendor.name]?.total_spend || 0) total_spend: parseFloat(costMetricsMap[vendor.name]?.total_spend || 0)
})), })),
stats: { stats: {
totalVendors: parseInt(stats[0].totalVendors || 0), totalVendors: parseInt(stats[0].totalVendors),
activeVendors: parseInt(stats[0].activeVendors || 0), activeVendors: parseInt(stats[0].activeVendors),
avgLeadTime: parseFloat(stats[0].avgLeadTime || 0), avgLeadTime: parseFloat(stats[0].avgLeadTime),
avgFillRate: parseFloat(stats[0].avgFillRate || 0), avgFillRate: parseFloat(stats[0].avgFillRate),
avgOnTimeDelivery: parseFloat(stats[0].avgOnTimeDelivery || 0), avgOnTimeDelivery: parseFloat(stats[0].avgOnTimeDelivery),
avgUnitCost: parseFloat(overallCostMetrics[0].avg_unit_cost || 0), avgUnitCost: parseFloat(overallCostMetrics[0].avg_unit_cost),
totalSpend: parseFloat(overallCostMetrics[0].total_spend || 0) totalSpend: parseFloat(overallCostMetrics[0].total_spend)
} }
}); });
} catch (error) { } catch (error) {

View File

@@ -0,0 +1,79 @@
// Purchase Order Status Codes
const PurchaseOrderStatus = {
Canceled: 0,
Created: 1,
ElectronicallyReadySend: 10,
Ordered: 11,
Preordered: 12,
ElectronicallySent: 13,
ReceivingStarted: 15,
Done: 50
};
// Receiving Status Codes
const ReceivingStatus = {
Canceled: 0,
Created: 1,
PartialReceived: 30,
FullReceived: 40,
Paid: 50
};
// Status Code Display Names
const PurchaseOrderStatusLabels = {
[PurchaseOrderStatus.Canceled]: 'Canceled',
[PurchaseOrderStatus.Created]: 'Created',
[PurchaseOrderStatus.ElectronicallyReadySend]: 'Ready to Send',
[PurchaseOrderStatus.Ordered]: 'Ordered',
[PurchaseOrderStatus.Preordered]: 'Preordered',
[PurchaseOrderStatus.ElectronicallySent]: 'Sent',
[PurchaseOrderStatus.ReceivingStarted]: 'Receiving Started',
[PurchaseOrderStatus.Done]: 'Done'
};
const ReceivingStatusLabels = {
[ReceivingStatus.Canceled]: 'Canceled',
[ReceivingStatus.Created]: 'Created',
[ReceivingStatus.PartialReceived]: 'Partially Received',
[ReceivingStatus.FullReceived]: 'Fully Received',
[ReceivingStatus.Paid]: 'Paid'
};
// Helper functions
function getPurchaseOrderStatusLabel(status) {
return PurchaseOrderStatusLabels[status] || 'Unknown';
}
function getReceivingStatusLabel(status) {
return ReceivingStatusLabels[status] || 'Unknown';
}
// Status checks
function isReceivingComplete(status) {
return status >= ReceivingStatus.PartialReceived;
}
function isPurchaseOrderComplete(status) {
return status === PurchaseOrderStatus.Done;
}
function isPurchaseOrderCanceled(status) {
return status === PurchaseOrderStatus.Canceled;
}
function isReceivingCanceled(status) {
return status === ReceivingStatus.Canceled;
}
module.exports = {
PurchaseOrderStatus,
ReceivingStatus,
PurchaseOrderStatusLabels,
ReceivingStatusLabels,
getPurchaseOrderStatusLabel,
getReceivingStatusLabel,
isReceivingComplete,
isPurchaseOrderComplete,
isPurchaseOrderCanceled,
isReceivingCanceled
};

View File

@@ -6,6 +6,7 @@ import config from '../../config';
interface CategoryData { interface CategoryData {
performance: { performance: {
category: string; category: string;
categoryPath: string; // Full hierarchy path
revenue: number; revenue: number;
profit: number; profit: number;
growth: number; growth: number;
@@ -13,10 +14,12 @@ interface CategoryData {
}[]; }[];
distribution: { distribution: {
category: string; category: string;
categoryPath: string; // Full hierarchy path
value: number; value: number;
}[]; }[];
trends: { trends: {
category: string; category: string;
categoryPath: string; // Full hierarchy path
month: string; month: string;
sales: number; sales: number;
}[]; }[];
@@ -36,6 +39,7 @@ export function CategoryPerformance() {
return { return {
performance: rawData.performance.map((item: any) => ({ performance: rawData.performance.map((item: any) => ({
...item, ...item,
categoryPath: item.categoryPath || item.category,
revenue: Number(item.revenue) || 0, revenue: Number(item.revenue) || 0,
profit: Number(item.profit) || 0, profit: Number(item.profit) || 0,
growth: Number(item.growth) || 0, growth: Number(item.growth) || 0,
@@ -43,10 +47,12 @@ export function CategoryPerformance() {
})), })),
distribution: rawData.distribution.map((item: any) => ({ distribution: rawData.distribution.map((item: any) => ({
...item, ...item,
categoryPath: item.categoryPath || item.category,
value: Number(item.value) || 0 value: Number(item.value) || 0
})), })),
trends: rawData.trends.map((item: any) => ({ trends: rawData.trends.map((item: any) => ({
...item, ...item,
categoryPath: item.categoryPath || item.category,
sales: Number(item.sales) || 0 sales: Number(item.sales) || 0
})) }))
}; };
@@ -63,6 +69,8 @@ export function CategoryPerformance() {
return <span className={color}>{value}</span>; return <span className={color}>{value}</span>;
}; };
const getShortCategoryName = (path: string) => path.split(' > ').pop() || path;
return ( return (
<div className="grid gap-4"> <div className="grid gap-4">
<div className="grid gap-4 md:grid-cols-2"> <div className="grid gap-4 md:grid-cols-2">
@@ -76,24 +84,34 @@ export function CategoryPerformance() {
<Pie <Pie
data={data.distribution} data={data.distribution}
dataKey="value" dataKey="value"
nameKey="category" nameKey="categoryPath"
cx="50%" cx="50%"
cy="50%" cy="50%"
outerRadius={100} outerRadius={100}
fill="#8884d8" fill="#8884d8"
label={(entry) => entry.category} label={({ categoryPath }) => getShortCategoryName(categoryPath)}
> >
{data.distribution.map((entry, index) => ( {data.distribution.map((entry, index) => (
<Cell <Cell
key={entry.category} key={`${entry.category}-${entry.value}-${index}`}
fill={COLORS[index % COLORS.length]} fill={COLORS[index % COLORS.length]}
/> />
))} ))}
</Pie> </Pie>
<Tooltip <Tooltip
formatter={(value: number) => [`$${value.toLocaleString()}`, 'Revenue']} formatter={(value: number, name: string, props: any) => [
`$${value.toLocaleString()}`,
<div key="tooltip">
<div className="font-medium">Category Path:</div>
<div className="text-sm text-muted-foreground">{props.payload.categoryPath}</div>
<div className="mt-1">Revenue</div>
</div>
]}
/>
<Legend
formatter={(value) => getShortCategoryName(value)}
wrapperStyle={{ fontSize: '12px' }}
/> />
<Legend />
</PieChart> </PieChart>
</ResponsiveContainer> </ResponsiveContainer>
</CardContent> </CardContent>
@@ -106,10 +124,33 @@ export function CategoryPerformance() {
<CardContent> <CardContent>
<ResponsiveContainer width="100%" height={300}> <ResponsiveContainer width="100%" height={300}>
<BarChart data={data.performance}> <BarChart data={data.performance}>
<XAxis dataKey="category" /> <XAxis
dataKey="categoryPath"
tick={({ x, y, payload }) => (
<g transform={`translate(${x},${y})`}>
<text
x={0}
y={0}
dy={16}
textAnchor="end"
fill="#888888"
transform="rotate(-35)"
>
{getShortCategoryName(payload.value)}
</text>
</g>
)}
/>
<YAxis tickFormatter={(value) => `${value}%`} /> <YAxis tickFormatter={(value) => `${value}%`} />
<Tooltip <Tooltip
formatter={(value: number) => [`${value.toFixed(1)}%`, 'Growth Rate']} formatter={(value: number, name: string, props: any) => [
`${value.toFixed(1)}%`,
<div key="tooltip">
<div className="font-medium">Category Path:</div>
<div className="text-sm text-muted-foreground">{props.payload.categoryPath}</div>
<div className="mt-1">Growth Rate</div>
</div>
]}
/> />
<Bar <Bar
dataKey="growth" dataKey="growth"
@@ -129,10 +170,13 @@ export function CategoryPerformance() {
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
{data.performance.map((category) => ( {data.performance.map((category) => (
<div key={category.category} className="flex items-center"> <div key={`${category.category}-${category.revenue}`} className="flex items-center">
<div className="flex-1"> <div className="flex-1">
<p className="text-sm font-medium">{category.category}</p> <div className="space-y-1">
<p className="text-sm text-muted-foreground"> <p className="text-sm font-medium">{getShortCategoryName(category.categoryPath)}</p>
<p className="text-xs text-muted-foreground">{category.categoryPath}</p>
</div>
<p className="text-sm text-muted-foreground mt-1">
{category.productCount} products {category.productCount} products
</p> </p>
</div> </div>

View File

@@ -154,7 +154,7 @@ export function PriceAnalysis() {
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
{data.recommendations.map((item) => ( {data.recommendations.map((item) => (
<div key={item.product} className="flex items-center"> <div key={`${item.product}-${item.currentPrice}`} className="flex items-center">
<div className="flex-1"> <div className="flex-1">
<p className="text-sm font-medium">{item.product}</p> <p className="text-sm font-medium">{item.product}</p>
<p className="text-sm text-muted-foreground"> <p className="text-sm text-muted-foreground">

View File

@@ -6,6 +6,7 @@ import config from '../../config';
interface ProfitData { interface ProfitData {
byCategory: { byCategory: {
category: string; category: string;
categoryPath: string; // Full hierarchy path
profitMargin: number; profitMargin: number;
revenue: number; revenue: number;
cost: number; cost: number;
@@ -18,6 +19,8 @@ interface ProfitData {
}[]; }[];
topProducts: { topProducts: {
product: string; product: string;
category: string;
categoryPath: string; // Full hierarchy path
profitMargin: number; profitMargin: number;
revenue: number; revenue: number;
cost: number; cost: number;
@@ -36,6 +39,7 @@ export function ProfitAnalysis() {
return { return {
byCategory: rawData.byCategory.map((item: any) => ({ byCategory: rawData.byCategory.map((item: any) => ({
...item, ...item,
categoryPath: item.categoryPath || item.category,
profitMargin: Number(item.profitMargin) || 0, profitMargin: Number(item.profitMargin) || 0,
revenue: Number(item.revenue) || 0, revenue: Number(item.revenue) || 0,
cost: Number(item.cost) || 0 cost: Number(item.cost) || 0
@@ -48,6 +52,7 @@ export function ProfitAnalysis() {
})), })),
topProducts: rawData.topProducts.map((item: any) => ({ topProducts: rawData.topProducts.map((item: any) => ({
...item, ...item,
categoryPath: item.categoryPath || item.category,
profitMargin: Number(item.profitMargin) || 0, profitMargin: Number(item.profitMargin) || 0,
revenue: Number(item.revenue) || 0, revenue: Number(item.revenue) || 0,
cost: Number(item.cost) || 0 cost: Number(item.cost) || 0
@@ -60,6 +65,8 @@ export function ProfitAnalysis() {
return <div>Loading profit analysis...</div>; return <div>Loading profit analysis...</div>;
} }
const getShortCategoryName = (path: string) => path.split(' > ').pop() || path;
return ( return (
<div className="grid gap-4"> <div className="grid gap-4">
<div className="grid gap-4 md:grid-cols-2"> <div className="grid gap-4 md:grid-cols-2">
@@ -70,10 +77,33 @@ export function ProfitAnalysis() {
<CardContent> <CardContent>
<ResponsiveContainer width="100%" height={300}> <ResponsiveContainer width="100%" height={300}>
<BarChart data={data.byCategory}> <BarChart data={data.byCategory}>
<XAxis dataKey="category" /> <XAxis
dataKey="categoryPath"
tick={({ x, y, payload }) => (
<g transform={`translate(${x},${y})`}>
<text
x={0}
y={0}
dy={16}
textAnchor="end"
fill="#888888"
transform="rotate(-35)"
>
{getShortCategoryName(payload.value)}
</text>
</g>
)}
/>
<YAxis tickFormatter={(value) => `${value}%`} /> <YAxis tickFormatter={(value) => `${value}%`} />
<Tooltip <Tooltip
formatter={(value: number) => [`${value.toFixed(1)}%`, 'Profit Margin']} formatter={(value: number, name: string, props: any) => [
`${value.toFixed(1)}%`,
<div key="tooltip">
<div className="font-medium">Category Path:</div>
<div className="text-sm text-muted-foreground">{props.payload.categoryPath}</div>
<div className="mt-1">Profit Margin</div>
</div>
]}
/> />
<Bar <Bar
dataKey="profitMargin" dataKey="profitMargin"
@@ -120,10 +150,14 @@ export function ProfitAnalysis() {
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
{data.topProducts.map((product) => ( {data.topProducts.map((product) => (
<div key={product.product} className="flex items-center"> <div key={`${product.product}-${product.category}`} className="flex items-center">
<div className="flex-1"> <div className="flex-1">
<p className="text-sm font-medium">{product.product}</p> <p className="text-sm font-medium">{product.product}</p>
<p className="text-sm text-muted-foreground"> <div className="text-xs text-muted-foreground space-y-1">
<p className="font-medium">Category:</p>
<p>{product.categoryPath}</p>
</div>
<p className="text-sm text-muted-foreground mt-1">
Revenue: ${product.revenue.toLocaleString()} Revenue: ${product.revenue.toLocaleString()}
</p> </p>
</div> </div>

View File

@@ -145,7 +145,7 @@ export function StockAnalysis() {
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
{data.criticalItems.map((item) => ( {data.criticalItems.map((item) => (
<div key={item.sku} className="flex items-center"> <div key={`${item.sku}-${item.product}`} className="flex items-center">
<div className="flex-1"> <div className="flex-1">
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
<p className="text-sm font-medium">{item.product}</p> <p className="text-sm font-medium">{item.product}</p>

View File

@@ -131,7 +131,7 @@ export function VendorPerformance() {
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
{data.performance.map((vendor) => ( {data.performance.map((vendor) => (
<div key={vendor.vendor} className="flex items-center"> <div key={`${vendor.vendor}-${vendor.salesVolume}`} className="flex items-center">
<div className="flex-1"> <div className="flex-1">
<p className="text-sm font-medium">{vendor.vendor}</p> <p className="text-sm font-medium">{vendor.vendor}</p>
<p className="text-sm text-muted-foreground"> <p className="text-sm text-muted-foreground">

View File

@@ -6,37 +6,46 @@ import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"
import config from "@/config" import config from "@/config"
import { formatCurrency } from "@/lib/utils" import { formatCurrency } from "@/lib/utils"
interface BestSellerProduct { interface Product {
product_id: number pid: number;
sku: string sku: string;
title: string title: string;
units_sold: number units_sold: number;
revenue: number revenue: string;
profit: number profit: string;
growth_rate: number }
interface Category {
cat_id: number;
name: string;
categoryPath: string;
units_sold: number;
revenue: string;
profit: string;
growth_rate: string;
} }
interface BestSellerBrand { interface BestSellerBrand {
brand: string brand: string
units_sold: number units_sold: number
revenue: number revenue: string
profit: number profit: string
growth_rate: number growth_rate: string
} }
interface BestSellerCategory { interface BestSellerCategory {
category_id: number cat_id: number;
name: string name: string;
units_sold: number units_sold: number;
revenue: number revenue: string;
profit: number profit: string;
growth_rate: number growth_rate: string;
} }
interface BestSellersData { interface BestSellersData {
products: BestSellerProduct[] products: Product[]
brands: BestSellerBrand[] brands: BestSellerBrand[]
categories: BestSellerCategory[] categories: Category[]
} }
export function BestSellers() { export function BestSellers() {
@@ -70,41 +79,29 @@ export function BestSellers() {
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead className="w-[40%]">Product</TableHead> <TableHead>Product</TableHead>
<TableHead className="w-[15%] text-right">Sales</TableHead> <TableHead className="text-right">Units Sold</TableHead>
<TableHead className="w-[15%] text-right">Revenue</TableHead> <TableHead className="text-right">Revenue</TableHead>
<TableHead className="w-[15%] text-right">Profit</TableHead> <TableHead className="text-right">Profit</TableHead>
<TableHead className="w-[15%] text-right">Growth</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{data?.products.map((product) => ( {data?.products.map((product) => (
<TableRow key={product.product_id}> <TableRow key={product.pid}>
<TableCell className="w-[40%]"> <TableCell>
<div> <a
<a href={`https://backend.acherryontop.com/product/${product.pid}`}
href={`https://backend.acherryontop.com/product/${product.product_id}`} target="_blank"
target="_blank" rel="noopener noreferrer"
rel="noopener noreferrer" className="hover:underline"
className="font-medium hover:underline" >
> {product.title}
{product.title} </a>
</a> <div className="text-sm text-muted-foreground">{product.sku}</div>
<p className="text-sm text-muted-foreground">{product.sku}</p>
</div>
</TableCell>
<TableCell className="w-[15%] text-right">
{product.units_sold.toLocaleString()}
</TableCell>
<TableCell className="w-[15%] text-right">
{formatCurrency(product.revenue)}
</TableCell>
<TableCell className="w-[15%] text-right">
{formatCurrency(product.profit)}
</TableCell>
<TableCell className="w-[15%] text-right">
{product.growth_rate > 0 ? '+' : ''}{product.growth_rate.toFixed(1)}%
</TableCell> </TableCell>
<TableCell className="text-right">{product.units_sold}</TableCell>
<TableCell className="text-right">{formatCurrency(Number(product.revenue))}</TableCell>
<TableCell className="text-right">{formatCurrency(Number(product.profit))}</TableCell>
</TableRow> </TableRow>
))} ))}
</TableBody> </TableBody>
@@ -134,13 +131,13 @@ export function BestSellers() {
{brand.units_sold.toLocaleString()} {brand.units_sold.toLocaleString()}
</TableCell> </TableCell>
<TableCell className="w-[15%] text-right"> <TableCell className="w-[15%] text-right">
{formatCurrency(brand.revenue)} {formatCurrency(Number(brand.revenue))}
</TableCell> </TableCell>
<TableCell className="w-[15%] text-right"> <TableCell className="w-[15%] text-right">
{formatCurrency(brand.profit)} {formatCurrency(Number(brand.profit))}
</TableCell> </TableCell>
<TableCell className="w-[15%] text-right"> <TableCell className="w-[15%] text-right">
{brand.growth_rate > 0 ? '+' : ''}{brand.growth_rate.toFixed(1)}% {Number(brand.growth_rate) > 0 ? '+' : ''}{Number(brand.growth_rate).toFixed(1)}%
</TableCell> </TableCell>
</TableRow> </TableRow>
))} ))}
@@ -154,31 +151,26 @@ export function BestSellers() {
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead className="w-[40%]">Category</TableHead> <TableHead>Category</TableHead>
<TableHead className="w-[15%] text-right">Sales</TableHead> <TableHead className="text-right">Units Sold</TableHead>
<TableHead className="w-[15%] text-right">Revenue</TableHead> <TableHead className="text-right">Revenue</TableHead>
<TableHead className="w-[15%] text-right">Profit</TableHead> <TableHead className="text-right">Profit</TableHead>
<TableHead className="w-[15%] text-right">Growth</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{data?.categories.map((category) => ( {data?.categories.map((category) => (
<TableRow key={category.category_id}> <TableRow key={category.cat_id}>
<TableCell className="w-[40%]"> <TableCell>
<p className="font-medium">{category.name}</p> <div className="font-medium">{category.name}</div>
</TableCell> {category.categoryPath && (
<TableCell className="w-[15%] text-right"> <div className="text-sm text-muted-foreground">
{category.units_sold.toLocaleString()} {category.categoryPath}
</TableCell> </div>
<TableCell className="w-[15%] text-right"> )}
{formatCurrency(category.revenue)}
</TableCell>
<TableCell className="w-[15%] text-right">
{formatCurrency(category.profit)}
</TableCell>
<TableCell className="w-[15%] text-right">
{category.growth_rate > 0 ? '+' : ''}{category.growth_rate.toFixed(1)}%
</TableCell> </TableCell>
<TableCell className="text-right">{category.units_sold}</TableCell>
<TableCell className="text-right">{formatCurrency(Number(category.revenue))}</TableCell>
<TableCell className="text-right">{formatCurrency(Number(category.profit))}</TableCell>
</TableRow> </TableRow>
))} ))}
</TableBody> </TableBody>

View File

@@ -11,18 +11,18 @@ import { DateRangePicker } from "@/components/ui/date-range-picker-narrow"
interface ForecastData { interface ForecastData {
forecastSales: number forecastSales: number
forecastRevenue: number forecastRevenue: string
confidenceLevel: number confidenceLevel: number
dailyForecasts: { dailyForecasts: {
date: string date: string
units: number units: number
revenue: number revenue: string
confidence: number confidence: number
}[] }[]
categoryForecasts: { categoryForecasts: {
category: string category: string
units: number units: number
revenue: number revenue: string
confidence: number confidence: number
}[] }[]
} }
@@ -86,7 +86,7 @@ export function ForecastMetrics() {
<DollarSign className="h-4 w-4 text-muted-foreground" /> <DollarSign className="h-4 w-4 text-muted-foreground" />
<p className="text-sm font-medium text-muted-foreground">Forecast Revenue</p> <p className="text-sm font-medium text-muted-foreground">Forecast Revenue</p>
</div> </div>
<p className="text-lg font-bold">{formatCurrency(data?.forecastRevenue || 0)}</p> <p className="text-lg font-bold">{formatCurrency(Number(data?.forecastRevenue) || 0)}</p>
</div> </div>
</div> </div>
@@ -108,7 +108,7 @@ export function ForecastMetrics() {
tick={false} tick={false}
/> />
<Tooltip <Tooltip
formatter={(value: number) => [formatCurrency(value), "Revenue"]} formatter={(value: string) => [formatCurrency(Number(value)), "Revenue"]}
labelFormatter={(date) => format(new Date(date), 'MMM d, yyyy')} labelFormatter={(date) => format(new Date(date), 'MMM d, yyyy')}
/> />
<Area <Area

View File

@@ -13,11 +13,11 @@ interface InventoryMetrics {
topVendors: { topVendors: {
vendor: string; vendor: string;
productCount: number; productCount: number;
averageStockLevel: number; averageStockLevel: string;
}[]; }[];
stockTurnover: { stockTurnover: {
category: string; category: string;
rate: number; rate: string;
}[]; }[];
} }
@@ -70,7 +70,7 @@ export function InventoryStats() {
<BarChart data={data?.stockTurnover}> <BarChart data={data?.stockTurnover}>
<XAxis dataKey="category" /> <XAxis dataKey="category" />
<YAxis /> <YAxis />
<Tooltip /> <Tooltip formatter={(value: string) => [Number(value).toFixed(2), "Rate"]} />
<Bar dataKey="rate" name="Turnover Rate" fill="#60a5fa" /> <Bar dataKey="rate" name="Turnover Rate" fill="#60a5fa" />
</BarChart> </BarChart>
</ResponsiveContainer> </ResponsiveContainer>
@@ -93,7 +93,7 @@ export function InventoryStats() {
</div> </div>
<div className="ml-4 text-right"> <div className="ml-4 text-right">
<p className="text-sm font-medium"> <p className="text-sm font-medium">
Avg. Stock: {vendor.averageStockLevel.toFixed(0)} Avg. Stock: {Number(vendor.averageStockLevel).toFixed(0)}
</p> </p>
</div> </div>
</div> </div>

View File

@@ -12,19 +12,20 @@ import { Badge } from "@/components/ui/badge"
import { AlertCircle, AlertTriangle } from "lucide-react" import { AlertCircle, AlertTriangle } from "lucide-react"
import config from "@/config" import config from "@/config"
interface LowStockProduct { interface Product {
product_id: number pid: number;
SKU: string sku: string;
title: string title: string;
stock_quantity: number stock_quantity: number;
reorder_qty: number daily_sales_avg: string;
days_of_inventory: number days_of_inventory: string;
stock_status: "Critical" | "Reorder" reorder_qty: number;
daily_sales_avg: number last_purchase_date: string | null;
lead_time_status: string;
} }
export function LowStockAlerts() { export function LowStockAlerts() {
const { data: products } = useQuery<LowStockProduct[]>({ const { data: products } = useQuery<Product[]>({
queryKey: ["low-stock"], queryKey: ["low-stock"],
queryFn: async () => { queryFn: async () => {
const response = await fetch(`${config.apiUrl}/dashboard/low-stock/products`) const response = await fetch(`${config.apiUrl}/dashboard/low-stock/products`)
@@ -45,35 +46,37 @@ export function LowStockAlerts() {
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead>SKU</TableHead>
<TableHead>Product</TableHead> <TableHead>Product</TableHead>
<TableHead className="text-right">Stock</TableHead> <TableHead className="text-right">Stock</TableHead>
<TableHead className="text-right">Status</TableHead> <TableHead className="text-right">Daily Sales</TableHead>
<TableHead className="text-right">Days Left</TableHead>
<TableHead className="text-right">Reorder Qty</TableHead>
<TableHead>Last Purchase</TableHead>
<TableHead>Lead Time</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{products?.map((product) => ( {products?.map((product) => (
<TableRow key={product.product_id}> <TableRow key={product.pid}>
<TableCell className="font-medium">{product.SKU}</TableCell> <TableCell>
<TableCell>{product.title}</TableCell> <a
<TableCell className="text-right"> href={`https://backend.acherryontop.com/product/${product.pid}`}
{product.stock_quantity} / {product.reorder_qty} target="_blank"
</TableCell> rel="noopener noreferrer"
<TableCell className="text-right"> className="hover:underline"
<Badge
variant="outline"
className={
product.stock_status === "Critical"
? "border-destructive text-destructive"
: "border-warning text-warning"
}
> >
{product.stock_status === "Critical" ? ( {product.title}
<AlertCircle className="mr-1 h-3 w-3" /> </a>
) : ( <div className="text-sm text-muted-foreground">{product.sku}</div>
<AlertTriangle className="mr-1 h-3 w-3" /> </TableCell>
)} <TableCell className="text-right">{product.stock_quantity}</TableCell>
{product.stock_status} <TableCell className="text-right">{Number(product.daily_sales_avg).toFixed(1)}</TableCell>
<TableCell className="text-right">{Number(product.days_of_inventory).toFixed(1)}</TableCell>
<TableCell className="text-right">{product.reorder_qty}</TableCell>
<TableCell>{product.last_purchase_date ? formatDate(product.last_purchase_date) : '-'}</TableCell>
<TableCell>
<Badge variant={getLeadTimeVariant(product.lead_time_status)}>
{product.lead_time_status}
</Badge> </Badge>
</TableCell> </TableCell>
</TableRow> </TableRow>

View File

@@ -5,13 +5,14 @@ import config from "@/config"
import { formatCurrency } from "@/lib/utils" import { formatCurrency } from "@/lib/utils"
import { ClipboardList, AlertCircle, Layers, DollarSign, ShoppingCart } from "lucide-react" // Importing icons import { ClipboardList, AlertCircle, Layers, DollarSign, ShoppingCart } from "lucide-react" // Importing icons
import { useState } from "react" import { useState } from "react"
import { PurchaseOrderStatus, ReceivingStatus } from "@/types/status-codes"
interface PurchaseMetricsData { interface PurchaseMetricsData {
activePurchaseOrders: number activePurchaseOrders: number // Orders that are not canceled, done, or fully received
overduePurchaseOrders: number overduePurchaseOrders: number // Orders past their expected delivery date
onOrderUnits: number onOrderUnits: number // Total units across all active orders
onOrderCost: number onOrderCost: number // Total cost across all active orders
onOrderRetail: number onOrderRetail: number // Total retail value across all active orders
vendorOrders: { vendorOrders: {
vendor: string vendor: string
orders: number orders: number

View File

@@ -12,13 +12,13 @@ import { DateRangePicker } from "@/components/ui/date-range-picker-narrow"
interface SalesData { interface SalesData {
totalOrders: number totalOrders: number
totalUnitsSold: number totalUnitsSold: number
totalCogs: number totalCogs: string
totalRevenue: number totalRevenue: string
dailySales: { dailySales: {
date: string date: string
units: number units: number
revenue: number revenue: string
cogs: number cogs: string
}[] }[]
} }
@@ -78,14 +78,14 @@ export function SalesMetrics() {
<DollarSign className="h-4 w-4 text-muted-foreground" /> <DollarSign className="h-4 w-4 text-muted-foreground" />
<p className="text-sm font-medium text-muted-foreground">Cost of Goods</p> <p className="text-sm font-medium text-muted-foreground">Cost of Goods</p>
</div> </div>
<p className="text-lg font-bold">{formatCurrency(data?.totalCogs || 0)}</p> <p className="text-lg font-bold">{formatCurrency(Number(data?.totalCogs) || 0)}</p>
</div> </div>
<div className="flex items-baseline justify-between"> <div className="flex items-baseline justify-between">
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
<ShoppingCart className="h-4 w-4 text-muted-foreground" /> <ShoppingCart className="h-4 w-4 text-muted-foreground" />
<p className="text-sm font-medium text-muted-foreground">Revenue</p> <p className="text-sm font-medium text-muted-foreground">Revenue</p>
</div> </div>
<p className="text-lg font-bold">{formatCurrency(data?.totalRevenue || 0)}</p> <p className="text-lg font-bold">{formatCurrency(Number(data?.totalRevenue) || 0)}</p>
</div> </div>
</div> </div>
@@ -107,7 +107,7 @@ export function SalesMetrics() {
tick={false} tick={false}
/> />
<Tooltip <Tooltip
formatter={(value: number) => [formatCurrency(value), "Revenue"]} formatter={(value: string) => [formatCurrency(Number(value)), "Revenue"]}
labelFormatter={(date) => format(new Date(date), 'MMM d, yyyy')} labelFormatter={(date) => format(new Date(date), 'MMM d, yyyy')}
/> />
<Area <Area

View File

@@ -10,14 +10,14 @@ interface StockMetricsData {
totalProducts: number totalProducts: number
productsInStock: number productsInStock: number
totalStockUnits: number totalStockUnits: number
totalStockCost: number totalStockCost: string
totalStockRetail: number totalStockRetail: string
brandStock: { brandStock: {
brand: string brand: string
variants: number variants: number
units: number units: number
cost: number cost: string
retail: number retail: string
}[] }[]
} }
@@ -91,7 +91,7 @@ const renderActiveShape = (props: any) => {
fill="#000000" fill="#000000"
className="text-base font-medium" className="text-base font-medium"
> >
{formatCurrency(retail)} {formatCurrency(Number(retail))}
</text> </text>
</g> </g>
); );
@@ -154,14 +154,14 @@ export function StockMetrics() {
<DollarSign className="h-4 w-4 text-muted-foreground" /> <DollarSign className="h-4 w-4 text-muted-foreground" />
<p className="text-sm font-medium text-muted-foreground">Stock Cost</p> <p className="text-sm font-medium text-muted-foreground">Stock Cost</p>
</div> </div>
<p className="text-lg font-bold">{formatCurrency(data?.totalStockCost || 0)}</p> <p className="text-lg font-bold">{formatCurrency(Number(data?.totalStockCost) || 0)}</p>
</div> </div>
<div className="flex items-baseline justify-between"> <div className="flex items-baseline justify-between">
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
<ShoppingCart className="h-4 w-4 text-muted-foreground" /> <ShoppingCart className="h-4 w-4 text-muted-foreground" />
<p className="text-sm font-medium text-muted-foreground">Stock Retail</p> <p className="text-sm font-medium text-muted-foreground">Stock Retail</p>
</div> </div>
<p className="text-lg font-bold">{formatCurrency(data?.totalStockRetail || 0)}</p> <p className="text-lg font-bold">{formatCurrency(Number(data?.totalStockRetail) || 0)}</p>
</div> </div>
</div> </div>
</div> </div>

View File

@@ -5,18 +5,18 @@ import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@
import config from "@/config" import config from "@/config"
import { formatCurrency } from "@/lib/utils" import { formatCurrency } from "@/lib/utils"
interface OverstockedProduct { interface Product {
product_id: number pid: number;
SKU: string sku: string;
title: string title: string;
stock_quantity: number stock_quantity: number;
overstocked_amt: number overstocked_amt: number;
excess_cost: number excess_cost: number;
excess_retail: number excess_retail: number;
} }
export function TopOverstockedProducts() { export function TopOverstockedProducts() {
const { data } = useQuery<OverstockedProduct[]>({ const { data } = useQuery<Product[]>({
queryKey: ["top-overstocked-products"], queryKey: ["top-overstocked-products"],
queryFn: async () => { queryFn: async () => {
const response = await fetch(`${config.apiUrl}/dashboard/overstock/products?limit=50`) const response = await fetch(`${config.apiUrl}/dashboard/overstock/products?limit=50`)
@@ -38,40 +38,30 @@ export function TopOverstockedProducts() {
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead>Product</TableHead> <TableHead>Product</TableHead>
<TableHead className="text-right">Current Stock</TableHead> <TableHead className="text-right">Stock</TableHead>
<TableHead className="text-right">Overstock Amt</TableHead> <TableHead className="text-right">Excess</TableHead>
<TableHead className="text-right">Overstock Cost</TableHead> <TableHead className="text-right">Cost</TableHead>
<TableHead className="text-right">Overstock Retail</TableHead> <TableHead className="text-right">Retail</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{data?.map((product) => ( {data?.map((product) => (
<TableRow key={product.product_id}> <TableRow key={product.pid}>
<TableCell> <TableCell>
<div> <a
<a href={`https://backend.acherryontop.com/product/${product.pid}`}
href={`https://backend.acherryontop.com/product/${product.product_id}`} target="_blank"
target="_blank" rel="noopener noreferrer"
rel="noopener noreferrer" className="hover:underline"
className="font-medium hover:underline" >
> {product.title}
{product.title} </a>
</a> <div className="text-sm text-muted-foreground">{product.sku}</div>
<p className="text-sm text-muted-foreground">{product.SKU}</p>
</div>
</TableCell>
<TableCell className="text-right">
{product.stock_quantity.toLocaleString()}
</TableCell>
<TableCell className="text-right">
{product.overstocked_amt.toLocaleString()}
</TableCell>
<TableCell className="text-right">
{formatCurrency(product.excess_cost)}
</TableCell>
<TableCell className="text-right">
{formatCurrency(product.excess_retail)}
</TableCell> </TableCell>
<TableCell className="text-right">{product.stock_quantity}</TableCell>
<TableCell className="text-right">{product.overstocked_amt}</TableCell>
<TableCell className="text-right">{formatCurrency(product.excess_cost)}</TableCell>
<TableCell className="text-right">{formatCurrency(product.excess_retail)}</TableCell>
</TableRow> </TableRow>
))} ))}
</TableBody> </TableBody>

View File

@@ -3,20 +3,19 @@ import { CardHeader, CardTitle, CardContent } from "@/components/ui/card"
import { ScrollArea } from "@/components/ui/scroll-area" import { ScrollArea } from "@/components/ui/scroll-area"
import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table" import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table"
import config from "@/config" import config from "@/config"
import { formatCurrency } from "@/lib/utils"
interface ReplenishProduct { interface Product {
product_id: number pid: number;
SKU: string sku: string;
title: string title: string;
current_stock: number stock_quantity: number;
replenish_qty: number daily_sales_avg: string;
replenish_cost: number reorder_qty: number;
replenish_retail: number last_purchase_date: string | null;
} }
export function TopReplenishProducts() { export function TopReplenishProducts() {
const { data } = useQuery<ReplenishProduct[]>({ const { data } = useQuery<Product[]>({
queryKey: ["top-replenish-products"], queryKey: ["top-replenish-products"],
queryFn: async () => { queryFn: async () => {
const response = await fetch(`${config.apiUrl}/dashboard/replenish/products?limit=50`) const response = await fetch(`${config.apiUrl}/dashboard/replenish/products?limit=50`)
@@ -39,39 +38,29 @@ export function TopReplenishProducts() {
<TableRow> <TableRow>
<TableHead>Product</TableHead> <TableHead>Product</TableHead>
<TableHead className="text-right">Stock</TableHead> <TableHead className="text-right">Stock</TableHead>
<TableHead className="text-right">Replenish</TableHead> <TableHead className="text-right">Daily Sales</TableHead>
<TableHead className="text-right">Cost</TableHead> <TableHead className="text-right">Reorder Qty</TableHead>
<TableHead className="text-right">Retail</TableHead> <TableHead>Last Purchase</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{data?.map((product) => ( {data?.map((product) => (
<TableRow key={product.product_id}> <TableRow key={product.pid}>
<TableCell> <TableCell>
<div> <a
<a href={`https://backend.acherryontop.com/product/${product.pid}`}
href={`https://backend.acherryontop.com/product/${product.product_id}`} target="_blank"
target="_blank" rel="noopener noreferrer"
rel="noopener noreferrer" className="hover:underline"
className="font-medium hover:underline" >
> {product.title}
{product.title} </a>
</a> <div className="text-sm text-muted-foreground">{product.sku}</div>
<p className="text-sm text-muted-foreground">{product.SKU}</p>
</div>
</TableCell>
<TableCell className="text-right">
{product.current_stock.toLocaleString()}
</TableCell>
<TableCell className="text-right">
{product.replenish_qty.toLocaleString()}
</TableCell>
<TableCell className="text-right">
{formatCurrency(product.replenish_cost)}
</TableCell>
<TableCell className="text-right">
{formatCurrency(product.replenish_retail)}
</TableCell> </TableCell>
<TableCell className="text-right">{product.stock_quantity}</TableCell>
<TableCell className="text-right">{Number(product.daily_sales_avg).toFixed(1)}</TableCell>
<TableCell className="text-right">{product.reorder_qty}</TableCell>
<TableCell>{product.last_purchase_date ? product.last_purchase_date : '-'}</TableCell>
</TableRow> </TableRow>
))} ))}
</TableBody> </TableBody>

View File

@@ -11,18 +11,18 @@ import {
import { TrendingUp, TrendingDown } from "lucide-react" import { TrendingUp, TrendingDown } from "lucide-react"
import config from "@/config" import config from "@/config"
interface TrendingProduct { interface Product {
product_id: number pid: number;
sku: string sku: string;
title: string title: string;
daily_sales_avg: string daily_sales_avg: string;
weekly_sales_avg: string weekly_sales_avg: string;
growth_rate: string growth_rate: string;
total_revenue: string total_revenue: string;
} }
export function TrendingProducts() { export function TrendingProducts() {
const { data: products } = useQuery<TrendingProduct[]>({ const { data: products } = useQuery<Product[]>({
queryKey: ["trending-products"], queryKey: ["trending-products"],
queryFn: async () => { queryFn: async () => {
const response = await fetch(`${config.apiUrl}/products/trending`) const response = await fetch(`${config.apiUrl}/products/trending`)
@@ -33,7 +33,6 @@ export function TrendingProducts() {
}, },
}) })
const formatPercent = (value: number) => const formatPercent = (value: number) =>
new Intl.NumberFormat("en-US", { new Intl.NumberFormat("en-US", {
style: "percent", style: "percent",
@@ -42,6 +41,14 @@ export function TrendingProducts() {
signDisplay: "exceptZero", signDisplay: "exceptZero",
}).format(value / 100) }).format(value / 100)
const formatCurrency = (value: number) =>
new Intl.NumberFormat("en-US", {
style: "currency",
currency: "USD",
minimumFractionDigits: 2,
maximumFractionDigits: 2,
}).format(value)
return ( return (
<> <>
<CardHeader> <CardHeader>
@@ -59,7 +66,7 @@ export function TrendingProducts() {
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{products?.map((product) => ( {products?.map((product) => (
<TableRow key={product.product_id}> <TableRow key={product.pid}>
<TableCell className="font-medium"> <TableCell className="font-medium">
<div className="flex flex-col"> <div className="flex flex-col">
<span className="font-medium">{product.title}</span> <span className="font-medium">{product.title}</span>
@@ -68,20 +75,20 @@ export function TrendingProducts() {
</span> </span>
</div> </div>
</TableCell> </TableCell>
<TableCell>{parseFloat(product.daily_sales_avg).toFixed(1)}</TableCell> <TableCell>{Number(product.daily_sales_avg).toFixed(1)}</TableCell>
<TableCell className="text-right"> <TableCell className="text-right">
<div className="flex items-center justify-end gap-1"> <div className="flex items-center justify-end gap-1">
{parseFloat(product.growth_rate) > 0 ? ( {Number(product.growth_rate) > 0 ? (
<TrendingUp className="h-4 w-4 text-success" /> <TrendingUp className="h-4 w-4 text-success" />
) : ( ) : (
<TrendingDown className="h-4 w-4 text-destructive" /> <TrendingDown className="h-4 w-4 text-destructive" />
)} )}
<span <span
className={ className={
parseFloat(product.growth_rate) > 0 ? "text-success" : "text-destructive" Number(product.growth_rate) > 0 ? "text-success" : "text-destructive"
} }
> >
{formatPercent(parseFloat(product.growth_rate))} {formatPercent(Number(product.growth_rate))}
</span> </span>
</div> </div>
</TableCell> </TableCell>

View File

@@ -3,24 +3,27 @@ import { ArrowUpDown, ChevronDown, ChevronRight } from "lucide-react";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { ScrollArea } from "@/components/ui/scroll-area"; import { ScrollArea } from "@/components/ui/scroll-area";
import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table"; import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table";
interface ProductDetail {
product_id: string; interface Product {
name: string; pid: string;
sku: string; sku: string;
title: string;
stock_quantity: number; stock_quantity: number;
total_sold: number; daily_sales_avg: number;
avg_price: number; forecast_units: number;
first_received_date: string; forecast_revenue: number;
confidence_level: number;
} }
export interface ForecastItem { export interface ForecastItem {
category: string; category: string;
categoryPath: string;
avgDailySales: number; avgDailySales: number;
totalSold: number; totalSold: number;
numProducts: number; numProducts: number;
avgPrice: number; avgPrice: number;
avgTotalSold: number; avgTotalSold: number;
products?: ProductDetail[]; products?: Product[];
} }
export const columns: ColumnDef<ForecastItem>[] = [ export const columns: ColumnDef<ForecastItem>[] = [
@@ -42,6 +45,16 @@ export const columns: ColumnDef<ForecastItem>[] = [
{ {
accessorKey: "category", accessorKey: "category",
header: "Category", header: "Category",
cell: ({ row }) => (
<div>
<div className="font-medium">{row.original.category}</div>
{row.original.categoryPath && (
<div className="text-sm text-muted-foreground">
{row.original.categoryPath}
</div>
)}
</div>
),
}, },
{ {
accessorKey: "avgDailySales", accessorKey: "avgDailySales",
@@ -147,23 +160,33 @@ export const renderSubComponent = ({ row }: { row: any }) => {
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead>Product Name</TableHead> <TableHead>Product</TableHead>
<TableHead>SKU</TableHead> <TableHead className="text-right">Stock</TableHead>
<TableHead>First Received</TableHead> <TableHead className="text-right">Daily Sales</TableHead>
<TableHead>Stock Quantity</TableHead> <TableHead className="text-right">Forecast Units</TableHead>
<TableHead>Total Sold</TableHead> <TableHead className="text-right">Forecast Revenue</TableHead>
<TableHead>Average Price</TableHead> <TableHead className="text-right">Confidence</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{products.map((product: ProductDetail) => ( {products.map((product) => (
<TableRow key={product.product_id}> <TableRow key={product.pid}>
<TableCell>{product.name}</TableCell> <TableCell>
<TableCell>{product.sku}</TableCell> <a
<TableCell>{product.first_received_date}</TableCell> href={`https://backend.acherryontop.com/product/${product.pid}`}
<TableCell>{product.stock_quantity.toLocaleString()}</TableCell> target="_blank"
<TableCell>{product.total_sold.toLocaleString()}</TableCell> rel="noopener noreferrer"
<TableCell>${product.avg_price.toFixed(2)}</TableCell> className="hover:underline"
>
{product.title}
</a>
<div className="text-sm text-muted-foreground">{product.sku}</div>
</TableCell>
<TableCell className="text-right">{product.stock_quantity}</TableCell>
<TableCell className="text-right">{product.daily_sales_avg.toFixed(1)}</TableCell>
<TableCell className="text-right">{product.forecast_units.toFixed(1)}</TableCell>
<TableCell className="text-right">{product.forecast_revenue.toFixed(2)}</TableCell>
<TableCell className="text-right">{product.confidence_level.toFixed(1)}%</TableCell>
</TableRow> </TableRow>
))} ))}
</TableBody> </TableBody>

View File

@@ -10,7 +10,7 @@ import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContai
import config from "@/config"; import config from "@/config";
interface Product { interface Product {
product_id: number; pid: number;
title: string; title: string;
SKU: string; SKU: string;
barcode: string; barcode: string;
@@ -38,7 +38,7 @@ interface Product {
// Vendor info // Vendor info
vendor: string; vendor: string;
vendor_reference: string; vendor_reference: string;
brand: string; brand: string | 'Unbranded';
// URLs // URLs
permalink: string; permalink: string;
@@ -123,6 +123,8 @@ interface Product {
notes: string; notes: string;
lead_time_days: number | null; lead_time_days: number | null;
}>; }>;
category_paths?: Record<string, string>;
} }
interface ProductDetailProps { interface ProductDetailProps {
@@ -205,8 +207,8 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
</div> </div>
)} )}
<div> <div>
<h2 className="text-xl font-semibold">{product?.title || 'Loading...'}</h2> <VaulDrawer.Title className="text-xl font-semibold">{product?.title || 'Loading...'}</VaulDrawer.Title>
<p className="text-sm text-muted-foreground">{product?.SKU || ''}</p> <VaulDrawer.Description className="text-sm text-muted-foreground">{product?.SKU || ''}</VaulDrawer.Description>
</div> </div>
</div> </div>
<Button variant="ghost" size="icon" onClick={onClose}> <Button variant="ghost" size="icon" onClick={onClose}>
@@ -255,22 +257,28 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Categories</dt> <dt className="text-sm text-muted-foreground">Categories</dt>
<dd className="flex flex-wrap gap-2"> <dd className="flex flex-col gap-2">
{product?.categories?.map(category => ( {product?.category_paths ?
<span key={category} className="inline-flex items-center rounded-md bg-muted px-2 py-1 text-xs font-medium ring-1 ring-inset ring-muted"> Object.entries(product.category_paths).map(([key, fullPath], index) => {
{category} const [, leafCategory] = key.split(':');
</span> return (
)) || "N/A"} <div key={key} className="flex flex-col">
<span className="inline-flex items-center rounded-md bg-muted px-2 py-1 text-xs font-medium ring-1 ring-inset ring-muted">
{leafCategory}
</span>
<span className="text-xs text-muted-foreground ml-2 mt-1">
{fullPath}
</span>
</div>
);
})
: "N/A"}
</dd> </dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Tags</dt> <dt className="text-sm text-muted-foreground">Tags</dt>
<dd className="flex flex-wrap gap-2"> <dd className="flex flex-wrap gap-2">
{product?.tags?.map(tag => ( N/A
<span key={tag} className="inline-flex items-center rounded-md bg-muted px-2 py-1 text-xs font-medium ring-1 ring-inset ring-muted">
{tag}
</span>
)) || "N/A"}
</dd> </dd>
</div> </div>
</dl> </dl>
@@ -307,11 +315,11 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Status</dt> <dt className="text-sm text-muted-foreground">Status</dt>
<dd>{product?.metrics?.stock_status}</dd> <dd>{product?.stock_status || "N/A"}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Days of Stock</dt> <dt className="text-sm text-muted-foreground">Days of Stock</dt>
<dd>{product?.metrics?.days_of_inventory} days</dd> <dd>{product?.days_of_inventory || 0} days</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -321,15 +329,15 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dl className="space-y-2"> <dl className="space-y-2">
<div> <div>
<dt className="text-sm text-muted-foreground">Daily Sales</dt> <dt className="text-sm text-muted-foreground">Daily Sales</dt>
<dd>{product?.metrics?.daily_sales_avg?.toFixed(1)} units</dd> <dd>{product?.daily_sales_avg?.toFixed(1) || "0.0"} units</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Weekly Sales</dt> <dt className="text-sm text-muted-foreground">Weekly Sales</dt>
<dd>{product?.metrics?.weekly_sales_avg?.toFixed(1)} units</dd> <dd>{product?.weekly_sales_avg?.toFixed(1) || "0.0"} units</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Monthly Sales</dt> <dt className="text-sm text-muted-foreground">Monthly Sales</dt>
<dd>{product?.metrics?.monthly_sales_avg?.toFixed(1)} units</dd> <dd>{product?.monthly_sales_avg?.toFixed(1) || "0.0"} units</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -356,19 +364,19 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dl className="space-y-2"> <dl className="space-y-2">
<div> <div>
<dt className="text-sm text-muted-foreground">Total Revenue</dt> <dt className="text-sm text-muted-foreground">Total Revenue</dt>
<dd>${formatPrice(product?.metrics.total_revenue)}</dd> <dd>${formatPrice(product?.total_revenue)}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Gross Profit</dt> <dt className="text-sm text-muted-foreground">Gross Profit</dt>
<dd>${formatPrice(product?.metrics.gross_profit)}</dd> <dd>${formatPrice(product?.gross_profit)}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Margin</dt> <dt className="text-sm text-muted-foreground">Margin</dt>
<dd>{product?.metrics.avg_margin_percent.toFixed(2)}%</dd> <dd>{product?.avg_margin_percent?.toFixed(2) || "0.00"}%</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">GMROI</dt> <dt className="text-sm text-muted-foreground">GMROI</dt>
<dd>{product?.metrics.gmroi.toFixed(2)}</dd> <dd>{product?.gmroi?.toFixed(2) || "0.00"}</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -378,15 +386,15 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dl className="space-y-2"> <dl className="space-y-2">
<div> <div>
<dt className="text-sm text-muted-foreground">Current Lead Time</dt> <dt className="text-sm text-muted-foreground">Current Lead Time</dt>
<dd>{product?.metrics.current_lead_time}</dd> <dd>{product?.current_lead_time || "N/A"}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Target Lead Time</dt> <dt className="text-sm text-muted-foreground">Target Lead Time</dt>
<dd>{product?.metrics.target_lead_time}</dd> <dd>{product?.target_lead_time || "N/A"}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Lead Time Status</dt> <dt className="text-sm text-muted-foreground">Lead Time Status</dt>
<dd>{product?.metrics.lead_time_status}</dd> <dd>{product?.lead_time_status || "N/A"}</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -408,11 +416,11 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Days of Inventory</dt> <dt className="text-sm text-muted-foreground">Days of Inventory</dt>
<dd className="text-2xl font-semibold">{product?.metrics?.days_of_inventory || 0}</dd> <dd className="text-2xl font-semibold">{product?.days_of_inventory || 0}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Status</dt> <dt className="text-sm text-muted-foreground">Status</dt>
<dd className="text-2xl font-semibold">{product?.metrics?.stock_status || "N/A"}</dd> <dd className="text-2xl font-semibold">{product?.stock_status || "N/A"}</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -422,15 +430,15 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dl className="grid grid-cols-3 gap-4"> <dl className="grid grid-cols-3 gap-4">
<div> <div>
<dt className="text-sm text-muted-foreground">Reorder Point</dt> <dt className="text-sm text-muted-foreground">Reorder Point</dt>
<dd>{product?.metrics?.reorder_point || 0}</dd> <dd>{product?.reorder_point || 0}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Safety Stock</dt> <dt className="text-sm text-muted-foreground">Safety Stock</dt>
<dd>{product?.metrics?.safety_stock || 0}</dd> <dd>{product?.safety_stock || 0}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">ABC Class</dt> <dt className="text-sm text-muted-foreground">ABC Class</dt>
<dd>{product?.metrics?.abc_class || "N/A"}</dd> <dd>{product?.abc_class || "N/A"}</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -551,15 +559,15 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dl className="grid grid-cols-3 gap-4"> <dl className="grid grid-cols-3 gap-4">
<div> <div>
<dt className="text-sm text-muted-foreground">Gross Profit</dt> <dt className="text-sm text-muted-foreground">Gross Profit</dt>
<dd className="text-2xl font-semibold">${formatPrice(product?.metrics.gross_profit)}</dd> <dd className="text-2xl font-semibold">${formatPrice(product?.gross_profit)}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">GMROI</dt> <dt className="text-sm text-muted-foreground">GMROI</dt>
<dd className="text-2xl font-semibold">{product?.metrics.gmroi.toFixed(2)}</dd> <dd className="text-2xl font-semibold">{product?.gmroi?.toFixed(2) || "0.00"}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Margin %</dt> <dt className="text-sm text-muted-foreground">Margin %</dt>
<dd className="text-2xl font-semibold">{product?.metrics.avg_margin_percent.toFixed(2)}%</dd> <dd className="text-2xl font-semibold">{product?.avg_margin_percent?.toFixed(2) || "0.00"}%</dd>
</div> </div>
</dl> </dl>
</Card> </Card>
@@ -569,7 +577,7 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dl className="grid grid-cols-2 gap-4"> <dl className="grid grid-cols-2 gap-4">
<div> <div>
<dt className="text-sm text-muted-foreground">Cost of Goods Sold</dt> <dt className="text-sm text-muted-foreground">Cost of Goods Sold</dt>
<dd>${formatPrice(product?.metrics.cost_of_goods_sold)}</dd> <dd>${formatPrice(product?.cost_of_goods_sold)}</dd>
</div> </div>
<div> <div>
<dt className="text-sm text-muted-foreground">Landing Cost</dt> <dt className="text-sm text-muted-foreground">Landing Cost</dt>

View File

@@ -24,7 +24,7 @@ type FilterValue = string | number | boolean;
type ComparisonOperator = "=" | ">" | ">=" | "<" | "<=" | "between"; type ComparisonOperator = "=" | ">" | ">=" | "<" | "<=" | "between";
interface FilterValueWithOperator { interface FilterValueWithOperator {
value: FilterValue | [number, number]; value: FilterValue | [string, string];
operator: ComparisonOperator; operator: ComparisonOperator;
} }
@@ -317,18 +317,32 @@ export function ProductFilters({
}); });
}, []); }, []);
const handleApplyFilter = (value: FilterValue | [number, number]) => { const handleApplyFilter = (value: FilterValue | [string, string]) => {
if (!selectedFilter) return; if (!selectedFilter) return;
const newFilters = { let filterValue: ActiveFilterValue;
...activeFilters,
[selectedFilter.id]: { if (selectedFilter.type === "number") {
value, if (selectedOperator === "between" && Array.isArray(value)) {
operator: selectedOperator, filterValue = {
}, value: [value[0].toString(), value[1].toString()],
}; operator: selectedOperator,
};
} else {
filterValue = {
value: value.toString(),
operator: selectedOperator,
};
}
} else {
filterValue = value;
}
onFilterChange({
...activeFilters,
[selectedFilter.id]: filterValue,
});
onFilterChange(newFilters as Record<string, ActiveFilterValue>);
handlePopoverClose(); handlePopoverClose();
}; };
@@ -394,38 +408,14 @@ export function ProductFilters({
const getFilterDisplayValue = (filter: ActiveFilter) => { const getFilterDisplayValue = (filter: ActiveFilter) => {
const filterValue = activeFilters[filter.id]; if (typeof filter.value === "object" && "operator" in filter.value) {
const filterOption = filterOptions.find((opt) => opt.id === filter.id); const { operator, value } = filter.value;
if (Array.isArray(value)) {
// For between ranges return `${operator} ${value[0]} and ${value[1]}`;
if (Array.isArray(filterValue)) { }
return `${filter.label} between ${filterValue[0]} and ${filterValue[1]}`; return `${operator} ${value}`;
} }
return filter.value.toString();
// For direct selections (select type) or text search
if (
filterOption?.type === "select" ||
filterOption?.type === "text" ||
typeof filterValue !== "object"
) {
const value =
typeof filterValue === "object" ? filterValue.value : filterValue;
return `${filter.label}: ${value}`;
}
// For numeric filters with operators
const operator = filterValue.operator;
const value = filterValue.value;
const operatorDisplay = {
"=": "=",
">": ">",
">=": "≥",
"<": "<",
"<=": "≤",
between: "between",
}[operator];
return `${filter.label} ${operatorDisplay} ${value}`;
}; };
return ( return (

View File

@@ -230,7 +230,7 @@ export function ProductTable({
return ( return (
<div className="flex flex-wrap gap-1"> <div className="flex flex-wrap gap-1">
{Array.from(new Set(value as string[])).map((category) => ( {Array.from(new Set(value as string[])).map((category) => (
<Badge key={`${product.product_id}-${category}`} variant="outline">{category}</Badge> <Badge key={`${product.pid}-${category}`} variant="outline">{category}</Badge>
)) || '-'} )) || '-'}
</div> </div>
); );
@@ -261,6 +261,11 @@ export function ProductTable({
return columnDef.format(num); return columnDef.format(num);
} }
} }
// If the value is already a number, format it directly
if (typeof value === 'number') {
return columnDef.format(value);
}
// For other formats (e.g., date formatting), pass the value as is
return columnDef.format(value); return columnDef.format(value);
} }
return value ?? '-'; return value ?? '-';
@@ -297,12 +302,12 @@ export function ProductTable({
<TableBody> <TableBody>
{products.map((product) => ( {products.map((product) => (
<TableRow <TableRow
key={product.product_id} key={product.pid}
onClick={() => onRowClick?.(product)} onClick={() => onRowClick?.(product)}
className="cursor-pointer" className="cursor-pointer"
> >
{orderedColumns.map((column) => ( {orderedColumns.map((column) => (
<TableCell key={`${product.product_id}-${column}`}> <TableCell key={`${product.pid}-${column}`}>
{formatColumnValue(product, column)} {formatColumnValue(product, column)}
</TableCell> </TableCell>
))} ))}

View File

@@ -8,7 +8,7 @@ import config from '../../config';
interface SalesVelocityConfig { interface SalesVelocityConfig {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
daily_window_days: number; daily_window_days: number;
weekly_window_days: number; weekly_window_days: number;
@@ -18,7 +18,7 @@ interface SalesVelocityConfig {
export function CalculationSettings() { export function CalculationSettings() {
const [salesVelocityConfig, setSalesVelocityConfig] = useState<SalesVelocityConfig>({ const [salesVelocityConfig, setSalesVelocityConfig] = useState<SalesVelocityConfig>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
daily_window_days: 30, daily_window_days: 30,
weekly_window_days: 7, weekly_window_days: 7,

View File

@@ -6,10 +6,11 @@ import { Label } from "@/components/ui/label";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { toast } from "sonner"; import { toast } from "sonner";
import config from '../../config'; import config from '../../config';
import { Table, TableBody, TableCell, TableHeader, TableRow } from "@/components/ui/table";
interface StockThreshold { interface StockThreshold {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
critical_days: number; critical_days: number;
reorder_days: number; reorder_days: number;
@@ -22,7 +23,7 @@ interface StockThreshold {
interface LeadTimeThreshold { interface LeadTimeThreshold {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
target_days: number; target_days: number;
warning_days: number; warning_days: number;
@@ -31,7 +32,7 @@ interface LeadTimeThreshold {
interface SalesVelocityConfig { interface SalesVelocityConfig {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
daily_window_days: number; daily_window_days: number;
weekly_window_days: number; weekly_window_days: number;
@@ -47,7 +48,7 @@ interface ABCClassificationConfig {
interface SafetyStockConfig { interface SafetyStockConfig {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
coverage_days: number; coverage_days: number;
service_level: number; service_level: number;
@@ -55,7 +56,7 @@ interface SafetyStockConfig {
interface TurnoverConfig { interface TurnoverConfig {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
calculation_period_days: number; calculation_period_days: number;
target_rate: number; target_rate: number;
@@ -64,7 +65,7 @@ interface TurnoverConfig {
export function Configuration() { export function Configuration() {
const [stockThresholds, setStockThresholds] = useState<StockThreshold>({ const [stockThresholds, setStockThresholds] = useState<StockThreshold>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
critical_days: 7, critical_days: 7,
reorder_days: 14, reorder_days: 14,
@@ -75,7 +76,7 @@ export function Configuration() {
const [leadTimeThresholds, setLeadTimeThresholds] = useState<LeadTimeThreshold>({ const [leadTimeThresholds, setLeadTimeThresholds] = useState<LeadTimeThreshold>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
target_days: 14, target_days: 14,
warning_days: 21, warning_days: 21,
@@ -84,7 +85,7 @@ export function Configuration() {
const [salesVelocityConfig, setSalesVelocityConfig] = useState<SalesVelocityConfig>({ const [salesVelocityConfig, setSalesVelocityConfig] = useState<SalesVelocityConfig>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
daily_window_days: 30, daily_window_days: 30,
weekly_window_days: 7, weekly_window_days: 7,
@@ -100,7 +101,7 @@ export function Configuration() {
const [safetyStockConfig, setSafetyStockConfig] = useState<SafetyStockConfig>({ const [safetyStockConfig, setSafetyStockConfig] = useState<SafetyStockConfig>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
coverage_days: 14, coverage_days: 14,
service_level: 95.0 service_level: 95.0
@@ -108,7 +109,7 @@ export function Configuration() {
const [turnoverConfig, setTurnoverConfig] = useState<TurnoverConfig>({ const [turnoverConfig, setTurnoverConfig] = useState<TurnoverConfig>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
calculation_period_days: 30, calculation_period_days: 30,
target_rate: 1.0 target_rate: 1.0

View File

@@ -13,7 +13,7 @@ import {
AlertDialogTitle, AlertDialogTitle,
AlertDialogTrigger, AlertDialogTrigger,
} from "@/components/ui/alert-dialog"; } from "@/components/ui/alert-dialog";
import { Loader2, RefreshCw, Upload, X, Database } from "lucide-react"; import { Loader2, RefreshCw, X, Database } from "lucide-react";
import config from '../../config'; import config from '../../config';
import { toast } from "sonner"; import { toast } from "sonner";
@@ -36,34 +36,20 @@ interface ImportProgress {
duration?: string; duration?: string;
} }
interface ImportLimits {
products: number;
orders: number;
purchaseOrders: number;
}
export function DataManagement() { export function DataManagement() {
const [isUpdating, setIsUpdating] = useState(false);
const [isImportingCSV, setIsImportingCSV] = useState(false);
const [isImportingProd, setIsImportingProd] = useState(false); const [isImportingProd, setIsImportingProd] = useState(false);
const [isResetting, setIsResetting] = useState(false); const [isResetting, setIsResetting] = useState(false);
const [updateProgress, setUpdateProgress] = useState<ImportProgress | null>(null);
const [importProgress, setImportProgress] = useState<ImportProgress | null>(null); const [importProgress, setImportProgress] = useState<ImportProgress | null>(null);
const [purchaseOrdersProgress, setPurchaseOrdersProgress] = useState<ImportProgress | null>(null); const [purchaseOrdersProgress, setPurchaseOrdersProgress] = useState<ImportProgress | null>(null);
const [resetProgress, setResetProgress] = useState<ImportProgress | null>(null); const [resetProgress, setResetProgress] = useState<ImportProgress | null>(null);
const [eventSource, setEventSource] = useState<EventSource | null>(null); const [eventSource, setEventSource] = useState<EventSource | null>(null);
const [] = useState<ImportLimits>({
products: 0,
orders: 0,
purchaseOrders: 0
});
const [isResettingMetrics, setIsResettingMetrics] = useState(false); const [isResettingMetrics, setIsResettingMetrics] = useState(false);
const [resetMetricsProgress, setResetMetricsProgress] = useState<ImportProgress | null>(null); const [resetMetricsProgress, setResetMetricsProgress] = useState<ImportProgress | null>(null);
const [isCalculatingMetrics, setIsCalculatingMetrics] = useState(false); const [isCalculatingMetrics, setIsCalculatingMetrics] = useState(false);
const [metricsProgress, setMetricsProgress] = useState<ImportProgress | null>(null); const [metricsProgress, setMetricsProgress] = useState<ImportProgress | null>(null);
// Add states for completed operations // Add states for completed operations
const [lastUpdateStatus, setLastUpdateStatus] = useState<ImportProgress | null>(null);
const [lastImportStatus, setLastImportStatus] = useState<ImportProgress | null>(null); const [lastImportStatus, setLastImportStatus] = useState<ImportProgress | null>(null);
const [lastResetStatus, setLastResetStatus] = useState<ImportProgress | null>(null); const [lastResetStatus, setLastResetStatus] = useState<ImportProgress | null>(null);
const [lastMetricsStatus, setLastMetricsStatus] = useState<ImportProgress | null>(null); const [lastMetricsStatus, setLastMetricsStatus] = useState<ImportProgress | null>(null);
@@ -77,7 +63,7 @@ export function DataManagement() {
// Helper to check if any operation is running // Helper to check if any operation is running
const isAnyOperationRunning = () => { const isAnyOperationRunning = () => {
return isUpdating || isImportingCSV || isImportingProd || isTestingConnection || isResetting || isCalculatingMetrics; return isImportingProd || isTestingConnection || isResetting || isCalculatingMetrics || isResettingMetrics;
}; };
// Helper function to get progress bar color based on status // Helper function to get progress bar color based on status
@@ -132,7 +118,7 @@ export function DataManagement() {
}; };
// Helper function to render progress // Helper function to render progress
const renderProgress = (progress: any, operationType: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => { const renderProgress = (progress: any, operationType: 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => {
if (!progress) return null; if (!progress) return null;
const status = progress.status?.toLowerCase(); const status = progress.status?.toLowerCase();
@@ -218,7 +204,7 @@ export function DataManagement() {
}; };
// Helper to connect to event source // Helper to connect to event source
const connectToEventSource = (type: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => { const connectToEventSource = (type: 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => {
console.log(`Setting up EventSource for ${type}...`); console.log(`Setting up EventSource for ${type}...`);
// Clean up existing connection first // Clean up existing connection first
@@ -257,8 +243,7 @@ export function DataManagement() {
// Try to reconnect via status check if the operation might still be running // Try to reconnect via status check if the operation might still be running
if ( if (
(type === 'calculate-metrics' && isCalculatingMetrics) || (type === 'calculate-metrics' && isCalculatingMetrics) ||
(type === 'import' && isImportingCSV) || (type === 'import' && isImportingProd) ||
(type === 'update' && isUpdating) ||
(type === 'reset' && isResetting) || (type === 'reset' && isResetting) ||
(type === 'reset-metrics' && isResettingMetrics) (type === 'reset-metrics' && isResettingMetrics)
) { ) {
@@ -295,7 +280,7 @@ export function DataManagement() {
}; };
const handleProgressUpdate = ( const handleProgressUpdate = (
type: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics', type: 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics',
progressData: any, progressData: any,
source: EventSource source: EventSource
) => { ) => {
@@ -342,7 +327,6 @@ export function DataManagement() {
if (!otherProgress || otherProgress.status === 'complete' || otherProgress.status === 'error' || otherProgress.status === 'cancelled') { if (!otherProgress || otherProgress.status === 'complete' || otherProgress.status === 'error' || otherProgress.status === 'cancelled') {
source.close(); source.close();
setEventSource(null); setEventSource(null);
setIsImportingCSV(false);
setIsImportingProd(false); setIsImportingProd(false);
// Show appropriate toast based on final status // Show appropriate toast based on final status
@@ -374,12 +358,6 @@ export function DataManagement() {
let operationName; let operationName;
switch (type) { switch (type) {
case 'update':
setProgress = setUpdateProgress;
setLastStatus = setLastUpdateStatus;
setIsRunning = setIsUpdating;
operationName = 'Update';
break;
case 'reset': case 'reset':
setProgress = setResetProgress; setProgress = setResetProgress;
setLastStatus = setLastResetStatus; setLastStatus = setLastResetStatus;
@@ -435,7 +413,7 @@ export function DataManagement() {
} }
}; };
const handleCancel = async (operation: 'update' | 'import' | 'reset' | 'calculate-metrics') => { const handleCancel = async (operation: 'import' | 'reset' | 'calculate-metrics') => {
try { try {
const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${operation}`, { const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${operation}`, {
method: 'POST', method: 'POST',
@@ -448,13 +426,9 @@ export function DataManagement() {
// Reset the appropriate state // Reset the appropriate state
if (operation === 'import') { if (operation === 'import') {
setIsImportingCSV(false);
setIsImportingProd(false); setIsImportingProd(false);
setImportProgress(null); setImportProgress(null);
setPurchaseOrdersProgress(null); setPurchaseOrdersProgress(null);
} else if (operation === 'update') {
setIsUpdating(false);
setUpdateProgress(null);
} }
// ... other operation states ... // ... other operation states ...
} catch (error) { } catch (error) {
@@ -511,7 +485,7 @@ export function DataManagement() {
if (operation.includes('import')) { if (operation.includes('import')) {
console.log('Import is running'); console.log('Import is running');
setIsImportingCSV(true); setIsImportingProd(true);
if (operation.includes('purchase orders')) { if (operation.includes('purchase orders')) {
setPurchaseOrdersProgress(importData.progress || importData); setPurchaseOrdersProgress(importData.progress || importData);
} else { } else {
@@ -520,13 +494,6 @@ export function DataManagement() {
if (!eventSource) { if (!eventSource) {
connectToEventSource('import'); connectToEventSource('import');
} }
} else if (operation.includes('update')) {
console.log('Update is running');
setIsUpdating(true);
setUpdateProgress(importData.progress || importData);
if (!eventSource) {
connectToEventSource('update');
}
} else if (operation.includes('reset')) { } else if (operation.includes('reset')) {
if (operation.includes('metrics')) { if (operation.includes('metrics')) {
console.log('Reset metrics is running'); console.log('Reset metrics is running');
@@ -549,8 +516,6 @@ export function DataManagement() {
const operation = (importData.lastStatus?.operation || '').toLowerCase(); const operation = (importData.lastStatus?.operation || '').toLowerCase();
if (operation.includes('import')) { if (operation.includes('import')) {
setLastImportStatus(importData.lastStatus); setLastImportStatus(importData.lastStatus);
} else if (operation.includes('update')) {
setLastUpdateStatus(importData.lastStatus);
} else if (operation.includes('reset')) { } else if (operation.includes('reset')) {
if (operation.includes('metrics')) { if (operation.includes('metrics')) {
setLastResetMetricsStatus(importData.lastStatus); setLastResetMetricsStatus(importData.lastStatus);
@@ -569,39 +534,30 @@ export function DataManagement() {
checkStatus(); checkStatus();
}, []); }, []);
const handleUpdateCSV = async () => { const handleTestConnection = async () => {
setIsUpdating(true); setIsTestingConnection(true);
setUpdateProgress({ status: 'running', operation: 'Starting CSV update' });
try { try {
connectToEventSource('update'); const response = await fetch(`${config.apiUrl}/test-prod-connection`, {
const response = await fetch(`${config.apiUrl}/csv/update`, {
method: 'POST',
credentials: 'include' credentials: 'include'
}); });
if (!response.ok) { const data = await response.json();
const data = await response.json().catch(() => ({}));
if (data.error === 'Import already in progress') { if (response.ok) {
return; toast.success(`Successfully connected to production database. Found ${data.productCount.toLocaleString()} products.`);
} } else {
throw new Error(data.error || `Failed to update CSV files: ${response.status} ${response.statusText}`); throw new Error(data.error || 'Failed to connect to production database');
} }
} catch (error) { } catch (error) {
if (eventSource) { toast.error(`Connection test failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
eventSource.close(); } finally {
setEventSource(null); setIsTestingConnection(false);
}
setIsUpdating(false);
setUpdateProgress(null);
toast.error(`CSV update failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
} }
}; };
const handleImportCSV = async () => { const handleImportFromProd = async () => {
setIsImportingCSV(true); setIsImportingProd(true);
setImportProgress({ status: 'running', operation: 'Starting CSV import' }); setImportProgress({ status: 'running', operation: 'Starting import from production' });
try { try {
connectToEventSource('import'); connectToEventSource('import');
@@ -620,20 +576,93 @@ export function DataManagement() {
} }
// Start new import // Start new import
const response = await fetch(`${config.apiUrl}/csv/import`, { const response = await fetch(`${config.apiUrl}/csv/import-from-prod`, {
method: 'POST', method: 'POST',
credentials: 'include' credentials: 'include'
}).catch(error => {
console.log('Import request error (may be timeout):', error);
return null;
}); });
const data = await response.json(); // If we got no response but have progress, assume it's still running
if (!response.ok) { if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) {
throw new Error(data.error || 'Failed to start CSV import'); console.log('No response but import appears to be running, continuing...');
return;
}
// If we got a response, check if it indicates an actual error
if (response) {
const data = await response.json().catch(() => null);
if (!response.ok && data?.error && !data.error.includes('already in progress')) {
throw new Error(data.error || 'Failed to start production import');
}
} }
} catch (error) { } catch (error) {
toast.error(`CSV import failed: ${error instanceof Error ? error.message : 'Unknown error'}`); // Only handle actual errors, not timeouts or connection issues
setIsImportingCSV(false); if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
setImportProgress(null); toast.error(`Production import failed: ${error.message}`);
setPurchaseOrdersProgress(null); setIsImportingProd(false);
setImportProgress(null);
setPurchaseOrdersProgress(null);
} else {
console.log('Ignoring network error, import may still be running:', error);
}
}
};
const handleCalculateMetrics = async () => {
setIsCalculatingMetrics(true);
setMetricsProgress({ status: 'running', operation: 'Starting metrics calculation' });
try {
connectToEventSource('calculate-metrics');
// First check if metrics calculation is already running
const statusResponse = await fetch(`${config.apiUrl}/csv/calculate-metrics/status`, {
credentials: 'include'
}).catch(() => null);
if (statusResponse) {
const statusData = await statusResponse.json().catch(() => null);
if (statusData?.active && statusData?.progress) {
console.log('Metrics calculation already running, connecting to existing process');
setMetricsProgress(statusData.progress);
return;
}
}
// Start new metrics calculation
const response = await fetch(`${config.apiUrl}/csv/calculate-metrics`, {
method: 'POST',
credentials: 'include'
}).catch(error => {
// Ignore network errors as the calculation might still be running
console.log('Metrics calculation request error (may be timeout):', error);
return null;
});
// If we got no response but have progress, assume it's still running
if (!response && metricsProgress?.current) {
console.log('No response but metrics calculation appears to be running, continuing...');
return;
}
// If we got a response, check if it indicates an actual error
if (response) {
const data = await response.json().catch(() => null);
if (!response.ok && data?.error && !data.error.includes('already in progress')) {
throw new Error(data.error || 'Failed to calculate metrics');
}
}
} catch (error) {
// Only handle actual errors, not timeouts or connection issues
if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
toast.error(`Metrics calculation failed: ${error.message}`);
setIsCalculatingMetrics(false);
setMetricsProgress(null);
} else {
console.log('Ignoring network error, metrics calculation may still be running:', error);
}
} }
}; };
@@ -726,138 +755,6 @@ export function DataManagement() {
} }
}; };
const handleCalculateMetrics = async () => {
setIsCalculatingMetrics(true);
setMetricsProgress({ status: 'running', operation: 'Starting metrics calculation' });
try {
connectToEventSource('calculate-metrics');
// First check if metrics calculation is already running
const statusResponse = await fetch(`${config.apiUrl}/csv/calculate-metrics/status`, {
credentials: 'include'
}).catch(() => null);
if (statusResponse) {
const statusData = await statusResponse.json().catch(() => null);
if (statusData?.active && statusData?.progress) {
console.log('Metrics calculation already running, connecting to existing process');
setMetricsProgress(statusData.progress);
return;
}
}
// Start new metrics calculation
const response = await fetch(`${config.apiUrl}/csv/calculate-metrics`, {
method: 'POST',
credentials: 'include'
}).catch(error => {
// Ignore network errors as the calculation might still be running
console.log('Metrics calculation request error (may be timeout):', error);
return null;
});
// If we got no response but have progress, assume it's still running
if (!response && metricsProgress?.current) {
console.log('No response but metrics calculation appears to be running, continuing...');
return;
}
// If we got a response, check if it indicates an actual error
if (response) {
const data = await response.json().catch(() => null);
if (!response.ok && data?.error && !data.error.includes('already in progress')) {
throw new Error(data.error || 'Failed to calculate metrics');
}
}
} catch (error) {
// Only handle actual errors, not timeouts or connection issues
if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
toast.error(`Metrics calculation failed: ${error.message}`);
setIsCalculatingMetrics(false);
setMetricsProgress(null);
} else {
console.log('Ignoring network error, metrics calculation may still be running:', error);
}
}
};
const handleTestConnection = async () => {
setIsTestingConnection(true);
try {
const response = await fetch(`${config.apiUrl}/test-prod-connection`, {
credentials: 'include'
});
const data = await response.json();
if (response.ok) {
toast.success(`Successfully connected to production database. Found ${data.productCount.toLocaleString()} products.`);
} else {
throw new Error(data.error || 'Failed to connect to production database');
}
} catch (error) {
toast.error(`Connection test failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
} finally {
setIsTestingConnection(false);
}
};
const handleImportFromProd = async () => {
setIsImportingProd(true);
setImportProgress({ status: 'running', operation: 'Starting import from production' });
try {
connectToEventSource('import');
// First check if import is already running
const statusResponse = await fetch(`${config.apiUrl}/csv/status`, {
credentials: 'include'
}).catch(() => null);
if (statusResponse) {
const statusData = await statusResponse.json().catch(() => null);
if (statusData?.active && statusData?.progress) {
console.log('Import already running, connecting to existing process');
return;
}
}
// Start new import
const response = await fetch(`${config.apiUrl}/csv/import-from-prod`, {
method: 'POST',
credentials: 'include'
}).catch(error => {
console.log('Import request error (may be timeout):', error);
return null;
});
// If we got no response but have progress, assume it's still running
if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) {
console.log('No response but import appears to be running, continuing...');
return;
}
// If we got a response, check if it indicates an actual error
if (response) {
const data = await response.json().catch(() => null);
if (!response.ok && data?.error && !data.error.includes('already in progress')) {
throw new Error(data.error || 'Failed to start production import');
}
}
} catch (error) {
// Only handle actual errors, not timeouts or connection issues
if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
toast.error(`Production import failed: ${error.message}`);
setIsImportingProd(false);
setImportProgress(null);
setPurchaseOrdersProgress(null);
} else {
console.log('Ignoring network error, import may still be running:', error);
}
}
};
return ( return (
<div className="max-w-[400px] space-y-4"> <div className="max-w-[400px] space-y-4">
{/* Test Production Connection Card */} {/* Test Production Connection Card */}
@@ -887,91 +784,33 @@ export function DataManagement() {
</CardContent> </CardContent>
</Card> </Card>
{/* Update CSV Card */}
<Card>
<CardHeader>
<CardTitle>Update CSV Files</CardTitle>
<CardDescription>Download the latest CSV data files</CardDescription>
</CardHeader>
<CardContent>
<div className="flex gap-2">
<Button
className="flex-1"
onClick={handleUpdateCSV}
disabled={isAnyOperationRunning()}
>
{isUpdating ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Updating CSV Files...
</>
) : (
<>
<RefreshCw className="mr-2 h-4 w-4" />
Update CSV Files
</>
)}
</Button>
{isUpdating && (
<Button
variant="destructive"
onClick={() => handleCancel('update')}
>
<X className="h-4 w-4" />
</Button>
)}
</div>
{(isUpdating || lastUpdateStatus) && renderProgress(updateProgress || lastUpdateStatus, 'update')}
</CardContent>
</Card>
{/* Import Data Card */} {/* Import Data Card */}
<Card> <Card>
<CardHeader> <CardHeader>
<CardTitle>Import Data</CardTitle> <CardTitle>Import Data</CardTitle>
<CardDescription>Import data from CSV files or production database</CardDescription> <CardDescription>Import data from production database</CardDescription>
</CardHeader> </CardHeader>
<CardContent className="space-y-6"> <CardContent className="space-y-6">
<div className="flex gap-2"> <div className="flex gap-2">
<Button <Button
className="flex-1 min-w-0" className="w-full"
onClick={handleImportCSV}
disabled={isAnyOperationRunning()}
>
{isImportingCSV ? (
<div className="flex items-center justify-center">
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
<span className="truncate">Importing CSV...</span>
</div>
) : (
<div className="flex items-center justify-center">
<Upload className="mr-2 h-4 w-4" />
<span>Import from CSV</span>
</div>
)}
</Button>
<Button
className="flex-1 min-w-0"
onClick={handleImportFromProd} onClick={handleImportFromProd}
disabled={isAnyOperationRunning()} disabled={isAnyOperationRunning()}
> >
{isImportingProd ? ( {isImportingProd ? (
<div className="flex items-center justify-center"> <div className="flex items-center justify-center">
<Loader2 className="mr-2 h-4 w-4 animate-spin" /> <Loader2 className="mr-2 h-4 w-4 animate-spin" />
<span className="truncate">Importing Prod...</span> <span className="truncate">Importing from Production...</span>
</div> </div>
) : ( ) : (
<div className="flex items-center justify-center"> <div className="flex items-center justify-center">
<Database className="mr-2 h-4 w-4" /> <Database className="mr-2 h-4 w-4" />
<span>Import from Prod</span> <span>Import from Production</span>
</div> </div>
)} )}
</Button> </Button>
{(isImportingCSV || isImportingProd) && ( {isImportingProd && (
<Button <Button
variant="destructive" variant="destructive"
onClick={() => handleCancel('import')} onClick={() => handleCancel('import')}
@@ -981,7 +820,7 @@ export function DataManagement() {
)} )}
</div> </div>
{(isImportingCSV || isImportingProd || lastImportStatus) && ( {(isImportingProd || lastImportStatus) && (
<div className="space-y-4"> <div className="space-y-4">
{renderProgress(importProgress || lastImportStatus, 'import')} {renderProgress(importProgress || lastImportStatus, 'import')}
{renderProgress(purchaseOrdersProgress, 'import')} {renderProgress(purchaseOrdersProgress, 'import')}

View File

@@ -5,10 +5,11 @@ import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label"; import { Label } from "@/components/ui/label";
import { toast } from "sonner"; import { toast } from "sonner";
import config from '../../config'; import config from '../../config';
import { Table, TableBody, TableCell, TableHeader, TableRow } from "@/components/ui/table";
interface LeadTimeThreshold { interface LeadTimeThreshold {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
target_days: number; target_days: number;
warning_days: number; warning_days: number;
@@ -17,6 +18,8 @@ interface LeadTimeThreshold {
interface ABCClassificationConfig { interface ABCClassificationConfig {
id: number; id: number;
cat_id: number | null;
vendor: string | null;
a_threshold: number; a_threshold: number;
b_threshold: number; b_threshold: number;
classification_period_days: number; classification_period_days: number;
@@ -24,7 +27,7 @@ interface ABCClassificationConfig {
interface TurnoverConfig { interface TurnoverConfig {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
calculation_period_days: number; calculation_period_days: number;
target_rate: number; target_rate: number;
@@ -33,27 +36,16 @@ interface TurnoverConfig {
export function PerformanceMetrics() { export function PerformanceMetrics() {
const [leadTimeThresholds, setLeadTimeThresholds] = useState<LeadTimeThreshold>({ const [leadTimeThresholds, setLeadTimeThresholds] = useState<LeadTimeThreshold>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
target_days: 14, target_days: 14,
warning_days: 21, warning_days: 21,
critical_days: 30 critical_days: 30
}); });
const [abcConfig, setAbcConfig] = useState<ABCClassificationConfig>({ const [abcConfigs, setAbcConfigs] = useState<ABCClassificationConfig[]>([]);
id: 1,
a_threshold: 20.0,
b_threshold: 50.0,
classification_period_days: 90
});
const [turnoverConfig, setTurnoverConfig] = useState<TurnoverConfig>({ const [turnoverConfigs, setTurnoverConfigs] = useState<TurnoverConfig[]>([]);
id: 1,
category_id: null,
vendor: null,
calculation_period_days: 30,
target_rate: 1.0
});
useEffect(() => { useEffect(() => {
const loadConfig = async () => { const loadConfig = async () => {
@@ -66,8 +58,8 @@ export function PerformanceMetrics() {
} }
const data = await response.json(); const data = await response.json();
setLeadTimeThresholds(data.leadTimeThresholds); setLeadTimeThresholds(data.leadTimeThresholds);
setAbcConfig(data.abcConfig); setAbcConfigs(data.abcConfigs);
setTurnoverConfig(data.turnoverConfig); setTurnoverConfigs(data.turnoverConfigs);
} catch (error) { } catch (error) {
toast.error(`Failed to load configuration: ${error instanceof Error ? error.message : 'Unknown error'}`); toast.error(`Failed to load configuration: ${error instanceof Error ? error.message : 'Unknown error'}`);
} }
@@ -105,7 +97,7 @@ export function PerformanceMetrics() {
'Content-Type': 'application/json' 'Content-Type': 'application/json'
}, },
credentials: 'include', credentials: 'include',
body: JSON.stringify(abcConfig) body: JSON.stringify(abcConfigs)
}); });
if (!response.ok) { if (!response.ok) {
@@ -127,7 +119,7 @@ export function PerformanceMetrics() {
'Content-Type': 'application/json' 'Content-Type': 'application/json'
}, },
credentials: 'include', credentials: 'include',
body: JSON.stringify(turnoverConfig) body: JSON.stringify(turnoverConfigs)
}); });
if (!response.ok) { if (!response.ok) {
@@ -210,54 +202,28 @@ export function PerformanceMetrics() {
</CardHeader> </CardHeader>
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
<div className="grid grid-cols-3 gap-4"> <Table>
<div> <TableHeader>
<Label htmlFor="a-threshold">A Threshold (%)</Label> <TableRow>
<Input <TableHead>Category</TableHead>
id="a-threshold" <TableHead>Vendor</TableHead>
type="number" <TableHead className="text-right">A Threshold</TableHead>
min="0" <TableHead className="text-right">B Threshold</TableHead>
max="100" <TableHead className="text-right">Period Days</TableHead>
step="0.1" </TableRow>
className="[appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none" </TableHeader>
value={abcConfig.a_threshold} <TableBody>
onChange={(e) => setAbcConfig(prev => ({ {abcConfigs.map((config) => (
...prev, <TableRow key={`${config.cat_id}-${config.vendor}`}>
a_threshold: parseFloat(e.target.value) || 0 <TableCell>{config.cat_id ? getCategoryName(config.cat_id) : 'Global'}</TableCell>
}))} <TableCell>{config.vendor || 'All Vendors'}</TableCell>
/> <TableCell className="text-right">{config.a_threshold}%</TableCell>
</div> <TableCell className="text-right">{config.b_threshold}%</TableCell>
<div> <TableCell className="text-right">{config.classification_period_days}</TableCell>
<Label htmlFor="b-threshold">B Threshold (%)</Label> </TableRow>
<Input ))}
id="b-threshold" </TableBody>
type="number" </Table>
min="0"
max="100"
step="0.1"
className="[appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none"
value={abcConfig.b_threshold}
onChange={(e) => setAbcConfig(prev => ({
...prev,
b_threshold: parseFloat(e.target.value) || 0
}))}
/>
</div>
<div>
<Label htmlFor="classification-period">Classification Period (days)</Label>
<Input
id="classification-period"
type="number"
min="1"
className="[appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none"
value={abcConfig.classification_period_days}
onChange={(e) => setAbcConfig(prev => ({
...prev,
classification_period_days: parseInt(e.target.value) || 1
}))}
/>
</div>
</div>
<Button onClick={handleUpdateABCConfig}> <Button onClick={handleUpdateABCConfig}>
Update ABC Classification Update ABC Classification
</Button> </Button>
@@ -273,37 +239,26 @@ export function PerformanceMetrics() {
</CardHeader> </CardHeader>
<CardContent> <CardContent>
<div className="space-y-4"> <div className="space-y-4">
<div className="grid grid-cols-2 gap-4"> <Table>
<div> <TableHeader>
<Label htmlFor="calculation-period">Calculation Period (days)</Label> <TableRow>
<Input <TableHead>Category</TableHead>
id="calculation-period" <TableHead>Vendor</TableHead>
type="number" <TableHead className="text-right">Period Days</TableHead>
min="1" <TableHead className="text-right">Target Rate</TableHead>
className="[appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none" </TableRow>
value={turnoverConfig.calculation_period_days} </TableHeader>
onChange={(e) => setTurnoverConfig(prev => ({ <TableBody>
...prev, {turnoverConfigs.map((config) => (
calculation_period_days: parseInt(e.target.value) || 1 <TableRow key={`${config.cat_id}-${config.vendor}`}>
}))} <TableCell>{config.cat_id ? getCategoryName(config.cat_id) : 'Global'}</TableCell>
/> <TableCell>{config.vendor || 'All Vendors'}</TableCell>
</div> <TableCell className="text-right">{config.calculation_period_days}</TableCell>
<div> <TableCell className="text-right">{config.target_rate.toFixed(2)}</TableCell>
<Label htmlFor="target-rate">Target Rate</Label> </TableRow>
<Input ))}
id="target-rate" </TableBody>
type="number" </Table>
min="0"
step="0.1"
className="[appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none"
value={turnoverConfig.target_rate}
onChange={(e) => setTurnoverConfig(prev => ({
...prev,
target_rate: parseFloat(e.target.value) || 0
}))}
/>
</div>
</div>
<Button onClick={handleUpdateTurnoverConfig}> <Button onClick={handleUpdateTurnoverConfig}>
Update Turnover Configuration Update Turnover Configuration
</Button> </Button>

View File

@@ -5,10 +5,11 @@ import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label"; import { Label } from "@/components/ui/label";
import { toast } from "sonner"; import { toast } from "sonner";
import config from '../../config'; import config from '../../config';
import { Table, TableBody, TableCell, TableHeader, TableRow } from "@/components/ui/table";
interface StockThreshold { interface StockThreshold {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
critical_days: number; critical_days: number;
reorder_days: number; reorder_days: number;
@@ -19,7 +20,7 @@ interface StockThreshold {
interface SafetyStockConfig { interface SafetyStockConfig {
id: number; id: number;
category_id: number | null; cat_id: number | null;
vendor: string | null; vendor: string | null;
coverage_days: number; coverage_days: number;
service_level: number; service_level: number;
@@ -28,7 +29,7 @@ interface SafetyStockConfig {
export function StockManagement() { export function StockManagement() {
const [stockThresholds, setStockThresholds] = useState<StockThreshold>({ const [stockThresholds, setStockThresholds] = useState<StockThreshold>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
critical_days: 7, critical_days: 7,
reorder_days: 14, reorder_days: 14,
@@ -39,7 +40,7 @@ export function StockManagement() {
const [safetyStockConfig, setSafetyStockConfig] = useState<SafetyStockConfig>({ const [safetyStockConfig, setSafetyStockConfig] = useState<SafetyStockConfig>({
id: 1, id: 1,
category_id: null, cat_id: null,
vendor: null, vendor: null,
coverage_days: 14, coverage_days: 14,
service_level: 95.0 service_level: 95.0
@@ -243,6 +244,54 @@ export function StockManagement() {
</div> </div>
</CardContent> </CardContent>
</Card> </Card>
<Table>
<TableHeader>
<TableRow>
<TableHead>Category</TableHead>
<TableHead>Vendor</TableHead>
<TableHead className="text-right">Critical Days</TableHead>
<TableHead className="text-right">Reorder Days</TableHead>
<TableHead className="text-right">Overstock Days</TableHead>
<TableHead className="text-right">Low Stock</TableHead>
<TableHead className="text-right">Min Reorder</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{stockThresholds.map((threshold) => (
<TableRow key={`${threshold.cat_id}-${threshold.vendor}`}>
<TableCell>{threshold.cat_id ? getCategoryName(threshold.cat_id) : 'Global'}</TableCell>
<TableCell>{threshold.vendor || 'All Vendors'}</TableCell>
<TableCell className="text-right">{threshold.critical_days}</TableCell>
<TableCell className="text-right">{threshold.reorder_days}</TableCell>
<TableCell className="text-right">{threshold.overstock_days}</TableCell>
<TableCell className="text-right">{threshold.low_stock_threshold}</TableCell>
<TableCell className="text-right">{threshold.min_reorder_quantity}</TableCell>
</TableRow>
))}
</TableBody>
</Table>
<Table>
<TableHeader>
<TableRow>
<TableHead>Category</TableHead>
<TableHead>Vendor</TableHead>
<TableHead className="text-right">Coverage Days</TableHead>
<TableHead className="text-right">Service Level</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{safetyStockConfigs.map((config) => (
<TableRow key={`${config.cat_id}-${config.vendor}`}>
<TableCell>{config.cat_id ? getCategoryName(config.cat_id) : 'Global'}</TableCell>
<TableCell>{config.vendor || 'All Vendors'}</TableCell>
<TableCell className="text-right">{config.coverage_days}</TableCell>
<TableCell className="text-right">{config.service_level}%</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</div> </div>
); );
} }

View File

@@ -10,37 +10,66 @@ import { motion } from "motion/react";
import config from "../config"; import config from "../config";
interface Category { interface Category {
category_id: number; cat_id: number;
name: string; name: string;
description: string; type: number;
parent_category?: string; parent_id: number | null;
product_count: number; parent_name: string | null;
total_value: number; parent_type: number | null;
avg_margin: number; description: string | null;
turnover_rate: number;
growth_rate: number;
status: string; status: string;
metrics?: {
product_count: number;
active_products: number;
total_value: number;
avg_margin: number;
turnover_rate: number;
growth_rate: number;
};
}
interface TypeCount {
type: number;
count: number;
} }
interface CategoryFilters { interface CategoryFilters {
search: string; search: string;
parent: string; type: string;
performance: string; performance: string;
} }
const TYPE_LABELS: Record<number, string> = {
10: 'Section',
11: 'Category',
12: 'Subcategory',
13: 'Sub-subcategory',
20: 'Theme',
21: 'Subtheme'
};
function getCategoryStatusVariant(status: string): "default" | "secondary" | "destructive" | "outline" {
switch (status.toLowerCase()) {
case 'active':
return 'default';
case 'inactive':
return 'secondary';
case 'archived':
return 'destructive';
default:
return 'outline';
}
}
export function Categories() { export function Categories() {
const [page, setPage] = useState(1); const [page, setPage] = useState(1);
const [sortColumn, setSortColumn] = useState<keyof Category>("name"); const [sortColumn] = useState<keyof Category>("name");
const [sortDirection, setSortDirection] = useState<"asc" | "desc">("asc"); const [sortDirection] = useState<"asc" | "desc">("asc");
const [filters, setFilters] = useState<CategoryFilters>({ const [filters, setFilters] = useState<CategoryFilters>({
search: "", search: "",
parent: "all", type: "all",
performance: "all", performance: "all",
}); });
const [] = useState({
column: 'name',
direction: 'asc'
});
const { data, isLoading } = useQuery({ const { data, isLoading } = useQuery({
queryKey: ["categories"], queryKey: ["categories"],
@@ -68,19 +97,15 @@ export function Categories() {
); );
} }
// Apply parent filter // Apply type filter
if (filters.parent !== 'all') { if (filters.type !== 'all') {
if (filters.parent === 'none') { filtered = filtered.filter(category => category.type === parseInt(filters.type));
filtered = filtered.filter(category => !category.parent_category);
} else {
filtered = filtered.filter(category => category.parent_category === filters.parent);
}
} }
// Apply performance filter // Apply performance filter
if (filters.performance !== 'all') { if (filters.performance !== 'all') {
filtered = filtered.filter(category => { filtered = filtered.filter(category => {
const growth = category.growth_rate ?? 0; const growth = category.metrics?.growth_rate ?? 0;
switch (filters.performance) { switch (filters.performance) {
case 'high_growth': return growth >= 20; case 'high_growth': return growth >= 20;
case 'growing': return growth >= 5 && growth < 20; case 'growing': return growth >= 5 && growth < 20;
@@ -93,6 +118,19 @@ export function Categories() {
// Apply sorting // Apply sorting
filtered.sort((a, b) => { filtered.sort((a, b) => {
// First sort by type if not explicitly sorting by another column
if (sortColumn === "name") {
if (a.type !== b.type) {
return a.type - b.type;
}
// Then by parent hierarchy
if (a.parent_id !== b.parent_id) {
if (!a.parent_id) return -1;
if (!b.parent_id) return 1;
return a.parent_id - b.parent_id;
}
}
const aVal = a[sortColumn]; const aVal = a[sortColumn];
const bVal = b[sortColumn]; const bVal = b[sortColumn];
@@ -123,9 +161,9 @@ export function Categories() {
if (!filteredData.length) return data?.stats; if (!filteredData.length) return data?.stats;
const activeCategories = filteredData.filter(c => c.status === 'active').length; const activeCategories = filteredData.filter(c => c.status === 'active').length;
const totalValue = filteredData.reduce((sum, c) => sum + (c.total_value || 0), 0); const totalValue = filteredData.reduce((sum, c) => sum + (c.metrics?.total_value || 0), 0);
const margins = filteredData.map(c => c.avg_margin || 0).filter(m => m !== 0); const margins = filteredData.map(c => c.metrics?.avg_margin || 0).filter(m => m !== 0);
const growthRates = filteredData.map(c => c.growth_rate || 0).filter(g => g !== 0); const growthRates = filteredData.map(c => c.metrics?.growth_rate || 0).filter(g => g !== 0);
return { return {
totalCategories: filteredData.length, totalCategories: filteredData.length,
@@ -136,20 +174,7 @@ export function Categories() {
}; };
}, [filteredData, data?.stats]); }, [filteredData, data?.stats]);
const handleSort = (column: keyof Category) => {
setSortDirection(prev => {
if (sortColumn !== column) return "asc";
return prev === "asc" ? "desc" : "asc";
});
setSortColumn(column);
};
const getPerformanceBadge = (growth: number) => {
if (growth >= 20) return <Badge variant="default">High Growth</Badge>;
if (growth >= 5) return <Badge variant="secondary">Growing</Badge>;
if (growth >= -5) return <Badge variant="outline">Stable</Badge>;
return <Badge variant="destructive">Declining</Badge>;
};
const formatCurrency = (value: number) => { const formatCurrency = (value: number) => {
return new Intl.NumberFormat('en-US', { return new Intl.NumberFormat('en-US', {
@@ -245,17 +270,18 @@ export function Categories() {
className="h-8 w-[150px] lg:w-[250px]" className="h-8 w-[150px] lg:w-[250px]"
/> />
<Select <Select
value={filters.parent} value={filters.type}
onValueChange={(value) => setFilters(prev => ({ ...prev, parent: value }))} onValueChange={(value) => setFilters(prev => ({ ...prev, type: value }))}
> >
<SelectTrigger className="h-8 w-[180px]"> <SelectTrigger className="h-8 w-[180px]">
<SelectValue placeholder="Parent Category" /> <SelectValue placeholder="Category Type" />
</SelectTrigger> </SelectTrigger>
<SelectContent> <SelectContent>
<SelectItem value="all">All Categories</SelectItem> <SelectItem value="all">All Types</SelectItem>
<SelectItem value="none">Top Level Only</SelectItem> {data?.typeCounts?.map((tc: TypeCount) => (
{data?.parentCategories?.map((parent: string) => ( <SelectItem key={tc.type} value={tc.type.toString()}>
<SelectItem key={parent} value={parent}>{parent}</SelectItem> {TYPE_LABELS[tc.type]} ({tc.count})
</SelectItem>
))} ))}
</SelectContent> </SelectContent>
</Select> </Select>
@@ -281,48 +307,66 @@ export function Categories() {
<Table> <Table>
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead onClick={() => handleSort("name")} className="cursor-pointer">Name</TableHead> <TableHead>Type</TableHead>
<TableHead onClick={() => handleSort("parent_category")} className="cursor-pointer">Parent</TableHead> <TableHead>Name</TableHead>
<TableHead onClick={() => handleSort("product_count")} className="cursor-pointer">Products</TableHead> <TableHead>Parent</TableHead>
<TableHead onClick={() => handleSort("total_value")} className="cursor-pointer">Value</TableHead> <TableHead className="text-right">Products</TableHead>
<TableHead onClick={() => handleSort("avg_margin")} className="cursor-pointer">Margin</TableHead> <TableHead className="text-right">Active</TableHead>
<TableHead onClick={() => handleSort("turnover_rate")} className="cursor-pointer">Turnover</TableHead> <TableHead className="text-right">Value</TableHead>
<TableHead onClick={() => handleSort("growth_rate")} className="cursor-pointer">Growth</TableHead> <TableHead className="text-right">Margin</TableHead>
<TableHead onClick={() => handleSort("status")} className="cursor-pointer">Status</TableHead> <TableHead className="text-right">Turnover</TableHead>
<TableHead className="text-right">Growth</TableHead>
<TableHead>Status</TableHead>
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{isLoading ? ( {isLoading ? (
<TableRow> <TableRow>
<TableCell colSpan={8} className="text-center py-8"> <TableCell colSpan={10} className="text-center py-8">
Loading categories... Loading categories...
</TableCell> </TableCell>
</TableRow> </TableRow>
) : paginatedData.map((category: Category) => ( ) : paginatedData.map((category: Category) => (
<TableRow key={category.category_id}> <TableRow key={category.cat_id}>
<TableCell> <TableCell>
<div className="font-medium">{category.name}</div> <Badge variant="outline">
<div className="text-sm text-muted-foreground">{category.description}</div> {TYPE_LABELS[category.type]}
</Badge>
</TableCell> </TableCell>
<TableCell>{category.parent_category || "—"}</TableCell>
<TableCell>{category.product_count?.toLocaleString() ?? 0}</TableCell>
<TableCell>{formatCurrency(category.total_value ?? 0)}</TableCell>
<TableCell>{typeof category.avg_margin === 'number' ? category.avg_margin.toFixed(1) : "0.0"}%</TableCell>
<TableCell>{typeof category.turnover_rate === 'number' ? category.turnover_rate.toFixed(1) : "0.0"}x</TableCell>
<TableCell> <TableCell>
<div className="flex items-center gap-2" style={{ minWidth: '120px' }}> <div className="flex flex-col gap-1">
<div style={{ width: '50px', textAlign: 'right' }}> <div className="flex items-center gap-2">
{typeof category.growth_rate === 'number' ? category.growth_rate.toFixed(1) : "0.0"}% <span className="font-medium">{category.name}</span>
</div> </div>
{getPerformanceBadge(category.growth_rate ?? 0)} {category.description && (
<div className="text-xs text-muted-foreground">{category.description}</div>
)}
</div> </div>
</TableCell> </TableCell>
<TableCell>{category.status}</TableCell> <TableCell className="text-sm text-muted-foreground">
{category.type === 10 ? category.name : // Section
category.type === 11 ? `${category.parent_name}` : // Category
category.type === 12 ? `${category.parent_name} > ${category.name}` : // Subcategory
category.type === 13 ? `${category.parent_name} > ${category.name}` : // Sub-subcategory
category.parent_name ? `${category.parent_name} > ${category.name}` : category.name}
</TableCell>
<TableCell className="text-right">{category.metrics?.product_count || 0}</TableCell>
<TableCell className="text-right">{category.metrics?.active_products || 0}</TableCell>
<TableCell className="text-right">{formatCurrency(category.metrics?.total_value || 0)}</TableCell>
<TableCell className="text-right">{category.metrics?.avg_margin?.toFixed(1)}%</TableCell>
<TableCell className="text-right">{category.metrics?.turnover_rate?.toFixed(2)}</TableCell>
<TableCell className="text-right">{category.metrics?.growth_rate?.toFixed(1)}%</TableCell>
<TableCell>
<Badge variant={getCategoryStatusVariant(category.status)}>
{category.status}
</Badge>
</TableCell>
</TableRow> </TableRow>
))} ))}
{!isLoading && !paginatedData.length && ( {!isLoading && !paginatedData.length && (
<TableRow> <TableRow>
<TableCell colSpan={8} className="text-center py-8 text-muted-foreground"> <TableCell colSpan={10} className="text-center py-8 text-muted-foreground">
No categories found No categories found
</TableCell> </TableCell>
</TableRow> </TableRow>

View File

@@ -60,19 +60,23 @@ export default function Forecasting() {
const data = await response.json(); const data = await response.json();
return data.map((item: any) => ({ return data.map((item: any) => ({
category: item.category_name, category: item.category_name,
categoryPath: item.path,
avgDailySales: Number(item.avg_daily_sales) || 0, avgDailySales: Number(item.avg_daily_sales) || 0,
totalSold: Number(item.total_sold) || 0, totalSold: Number(item.total_sold) || 0,
numProducts: Number(item.num_products) || 0, numProducts: Number(item.num_products) || 0,
avgPrice: Number(item.avg_price) || 0, avgPrice: Number(item.avg_price) || 0,
avgTotalSold: Number(item.avgTotalSold) || 0, avgTotalSold: Number(item.avgTotalSold) || 0,
products: item.products?.map((p: any) => ({ products: item.products?.map((p: any) => ({
product_id: p.product_id, pid: p.pid,
name: p.title, title: p.title,
sku: p.sku, sku: p.sku,
stock_quantity: Number(p.stock_quantity) || 0, stock_quantity: Number(p.stock_quantity) || 0,
total_sold: Number(p.total_sold) || 0, total_sold: Number(p.total_sold) || 0,
avg_price: Number(p.avg_price) || 0, daily_sales_avg: Number(p.daily_sales_avg) || 0,
first_received_date: p.first_received_date, forecast_units: Number(p.forecast_units) || 0,
forecast_revenue: Number(p.forecast_revenue) || 0,
confidence_level: Number(p.confidence_level) || 0,
categoryPath: item.path
})) }))
})); }));
}, },

View File

@@ -503,7 +503,7 @@ export function Products() {
columnDefs={AVAILABLE_COLUMNS} columnDefs={AVAILABLE_COLUMNS}
columnOrder={columnOrder} columnOrder={columnOrder}
onColumnOrderChange={handleColumnOrderChange} onColumnOrderChange={handleColumnOrderChange}
onRowClick={(product) => setSelectedProductId(product.product_id)} onRowClick={(product) => setSelectedProductId(product.pid)}
/> />
{totalPages > 1 && ( {totalPages > 1 && (

View File

@@ -20,12 +20,21 @@ import {
PaginationPrevious, PaginationPrevious,
} from '../components/ui/pagination'; } from '../components/ui/pagination';
import { motion } from 'motion/react'; import { motion } from 'motion/react';
import {
PurchaseOrderStatus,
ReceivingStatus as ReceivingStatusCode,
getPurchaseOrderStatusLabel,
getReceivingStatusLabel,
getPurchaseOrderStatusVariant,
getReceivingStatusVariant
} from '../types/status-codes';
interface PurchaseOrder { interface PurchaseOrder {
id: number; id: number;
vendor_name: string; vendor_name: string;
order_date: string; order_date: string;
status: string; status: number;
receiving_status: number;
total_items: number; total_items: number;
total_quantity: number; total_quantity: number;
total_cost: number; total_cost: number;
@@ -113,6 +122,16 @@ export default function PurchaseOrders() {
limit: 100, limit: 100,
}); });
const STATUS_FILTER_OPTIONS = [
{ value: 'all', label: 'All Statuses' },
{ value: String(PurchaseOrderStatus.Created), label: getPurchaseOrderStatusLabel(PurchaseOrderStatus.Created) },
{ value: String(PurchaseOrderStatus.ElectronicallyReadySend), label: getPurchaseOrderStatusLabel(PurchaseOrderStatus.ElectronicallyReadySend) },
{ value: String(PurchaseOrderStatus.Ordered), label: getPurchaseOrderStatusLabel(PurchaseOrderStatus.Ordered) },
{ value: String(PurchaseOrderStatus.ReceivingStarted), label: getPurchaseOrderStatusLabel(PurchaseOrderStatus.ReceivingStarted) },
{ value: String(PurchaseOrderStatus.Done), label: getPurchaseOrderStatusLabel(PurchaseOrderStatus.Done) },
{ value: String(PurchaseOrderStatus.Canceled), label: getPurchaseOrderStatusLabel(PurchaseOrderStatus.Canceled) },
];
const fetchData = async () => { const fetchData = async () => {
try { try {
const searchParams = new URLSearchParams({ const searchParams = new URLSearchParams({
@@ -171,16 +190,25 @@ export default function PurchaseOrders() {
} }
}; };
const getStatusBadge = (status: string) => { const getStatusBadge = (status: number, receivingStatus: number) => {
const variants: Record<string, { variant: "default" | "secondary" | "destructive" | "outline"; label: string }> = { // If the PO is canceled, show that status
pending: { variant: "outline", label: "Pending" }, if (status === PurchaseOrderStatus.Canceled) {
received: { variant: "default", label: "Received" }, return <Badge variant={getPurchaseOrderStatusVariant(status)}>
partial: { variant: "secondary", label: "Partial" }, {getPurchaseOrderStatusLabel(status)}
cancelled: { variant: "destructive", label: "Cancelled" }, </Badge>;
}; }
const statusConfig = variants[status.toLowerCase()] || variants.pending; // If receiving has started, show receiving status
return <Badge variant={statusConfig.variant}>{statusConfig.label}</Badge>; if (status >= PurchaseOrderStatus.ReceivingStarted) {
return <Badge variant={getReceivingStatusVariant(receivingStatus)}>
{getReceivingStatusLabel(receivingStatus)}
</Badge>;
}
// Otherwise show PO status
return <Badge variant={getPurchaseOrderStatusVariant(status)}>
{getPurchaseOrderStatusLabel(status)}
</Badge>;
}; };
const formatNumber = (value: number) => { const formatNumber = (value: number) => {
@@ -252,45 +280,44 @@ export default function PurchaseOrders() {
</div> </div>
{/* Filters */} {/* Filters */}
<div className="mb-6 flex flex-col gap-4 md:flex-row md:items-center"> <div className="mb-4 flex items-center gap-4">
<div className="flex items-center gap-2 flex-1"> <Input
<Input placeholder="Search orders..."
placeholder="Search orders..." value={filters.search}
value={filters.search} onChange={(e) => setFilters(prev => ({ ...prev, search: e.target.value }))}
onChange={(e) => setFilters(prev => ({ ...prev, search: e.target.value }))} className="max-w-xs"
className="h-8 w-[300px]" />
/> <Select
</div> value={filters.status}
<div className="flex flex-wrap items-center gap-2"> onValueChange={(value) => setFilters(prev => ({ ...prev, status: value }))}
<Select >
value={filters.status} <SelectTrigger className="w-[180px]">
onValueChange={(value) => setFilters(prev => ({ ...prev, status: value }))} <SelectValue placeholder="Select status" />
> </SelectTrigger>
<SelectTrigger className="h-8 w-[180px]"> <SelectContent>
<SelectValue placeholder="Status" /> {STATUS_FILTER_OPTIONS.map(option => (
</SelectTrigger> <SelectItem key={option.value} value={option.value}>
<SelectContent> {option.label}
<SelectItem value="all">All Statuses</SelectItem> </SelectItem>
{filterOptions.statuses.map(status => ( ))}
<SelectItem key={status} value={status}>{status}</SelectItem> </SelectContent>
))} </Select>
</SelectContent> <Select
</Select> value={filters.vendor}
<Select onValueChange={(value) => setFilters(prev => ({ ...prev, vendor: value }))}
value={filters.vendor} >
onValueChange={(value) => setFilters(prev => ({ ...prev, vendor: value }))} <SelectTrigger className="w-[180px]">
> <SelectValue placeholder="Select vendor" />
<SelectTrigger className="h-8 w-[180px]"> </SelectTrigger>
<SelectValue placeholder="Vendor" /> <SelectContent>
</SelectTrigger> <SelectItem value="all">All Vendors</SelectItem>
<SelectContent> {filterOptions.vendors.map(vendor => (
<SelectItem value="all">All Vendors</SelectItem> <SelectItem key={vendor} value={vendor}>
{filterOptions.vendors.map(vendor => ( {vendor}
<SelectItem key={vendor} value={vendor}>{vendor}</SelectItem> </SelectItem>
))} ))}
</SelectContent> </SelectContent>
</Select> </Select>
</div>
</div> </div>
{/* Purchase Orders Table */} {/* Purchase Orders Table */}
@@ -343,7 +370,7 @@ export default function PurchaseOrders() {
<TableCell>{po.id}</TableCell> <TableCell>{po.id}</TableCell>
<TableCell>{po.vendor_name}</TableCell> <TableCell>{po.vendor_name}</TableCell>
<TableCell>{new Date(po.order_date).toLocaleDateString()}</TableCell> <TableCell>{new Date(po.order_date).toLocaleDateString()}</TableCell>
<TableCell>{getStatusBadge(po.status)}</TableCell> <TableCell>{getStatusBadge(po.status, po.receiving_status)}</TableCell>
<TableCell>{po.total_items.toLocaleString()}</TableCell> <TableCell>{po.total_items.toLocaleString()}</TableCell>
<TableCell>{po.total_quantity.toLocaleString()}</TableCell> <TableCell>{po.total_quantity.toLocaleString()}</TableCell>
<TableCell>${formatNumber(po.total_cost)}</TableCell> <TableCell>${formatNumber(po.total_cost)}</TableCell>

View File

@@ -1,16 +1,16 @@
export interface Product { export interface Product {
product_id: number; pid: number;
title: string; title: string;
SKU: string; SKU: string;
stock_quantity: number; stock_quantity: number;
price: number; price: string; // DECIMAL(15,3)
regular_price: number; regular_price: string; // DECIMAL(15,3)
cost_price: number; cost_price: string; // DECIMAL(15,3)
landing_cost_price: number | null; landing_cost_price: string | null; // DECIMAL(15,3)
barcode: string; barcode: string;
vendor: string; vendor: string;
vendor_reference: string; vendor_reference: string;
brand: string; brand: string | 'Unbranded';
categories: string[]; categories: string[];
tags: string[]; tags: string[];
options: Record<string, any>; options: Record<string, any>;
@@ -24,32 +24,32 @@ export interface Product {
updated_at: string; updated_at: string;
// Metrics // Metrics
daily_sales_avg?: number; daily_sales_avg?: string; // DECIMAL(15,3)
weekly_sales_avg?: number; weekly_sales_avg?: string; // DECIMAL(15,3)
monthly_sales_avg?: number; monthly_sales_avg?: string; // DECIMAL(15,3)
avg_quantity_per_order?: number; avg_quantity_per_order?: string; // DECIMAL(15,3)
number_of_orders?: number; number_of_orders?: number;
first_sale_date?: string; first_sale_date?: string;
last_sale_date?: string; last_sale_date?: string;
last_purchase_date?: string; last_purchase_date?: string;
days_of_inventory?: number; days_of_inventory?: string; // DECIMAL(15,3)
weeks_of_inventory?: number; weeks_of_inventory?: string; // DECIMAL(15,3)
reorder_point?: number; reorder_point?: string; // DECIMAL(15,3)
safety_stock?: number; safety_stock?: string; // DECIMAL(15,3)
avg_margin_percent?: number; avg_margin_percent?: string; // DECIMAL(15,3)
total_revenue?: number; total_revenue?: string; // DECIMAL(15,3)
inventory_value?: number; inventory_value?: string; // DECIMAL(15,3)
cost_of_goods_sold?: number; cost_of_goods_sold?: string; // DECIMAL(15,3)
gross_profit?: number; gross_profit?: string; // DECIMAL(15,3)
gmroi?: number; gmroi?: string; // DECIMAL(15,3)
avg_lead_time_days?: number; avg_lead_time_days?: string; // DECIMAL(15,3)
last_received_date?: string; last_received_date?: string;
abc_class?: string; abc_class?: string;
stock_status?: string; stock_status?: string;
turnover_rate?: number; turnover_rate?: string; // DECIMAL(15,3)
current_lead_time?: number; current_lead_time?: string; // DECIMAL(15,3)
target_lead_time?: number; target_lead_time?: string; // DECIMAL(15,3)
lead_time_status?: string; lead_time_status?: string;
reorder_qty?: number; reorder_qty?: number;
overstocked_amt?: number; overstocked_amt?: string; // DECIMAL(15,3)
} }

View File

@@ -0,0 +1,81 @@
// Purchase Order Status Codes
// NOTE(review): the non-contiguous numeric values (gaps at 10, 15, 50) look like
// codes mirrored from an upstream ordering system via the import scripts — confirm
// against the purchase-order import before adding new members.
export enum PurchaseOrderStatus {
Canceled = 0,                  // terminal: PO voided
Created = 1,                   // initial state for a new PO
ElectronicallyReadySend = 10,  // displayed as "Ready to Send" (see PurchaseOrderStatusLabels)
Ordered = 11,
Preordered = 12,
ElectronicallySent = 13,       // displayed as "Sent"
ReceivingStarted = 15,         // consumers switch to showing ReceivingStatus at/after this code
Done = 50                      // terminal: fully processed
}
// Receiving Status Codes
// Separate lifecycle for how much of a PO's ordered stock has arrived.
export enum ReceivingStatus {
Canceled = 0,          // terminal: receiving voided
Created = 1,           // receiving record exists, nothing received yet
PartialReceived = 30,  // some, but not all, of the ordered quantity received
FullReceived = 40,     // entire ordered quantity received
Paid = 50              // received and paid — final state
}
// Status Code Display Names
// Typed as Record<Enum, string> so the compiler enforces that every enum member
// has a label; the 'Unknown' fallbacks in the getter helpers below only fire for
// raw codes that are outside the enum entirely.
export const PurchaseOrderStatusLabels: Record<PurchaseOrderStatus, string> = {
[PurchaseOrderStatus.Canceled]: 'Canceled',
[PurchaseOrderStatus.Created]: 'Created',
[PurchaseOrderStatus.ElectronicallyReadySend]: 'Ready to Send',
[PurchaseOrderStatus.Ordered]: 'Ordered',
[PurchaseOrderStatus.Preordered]: 'Preordered',
[PurchaseOrderStatus.ElectronicallySent]: 'Sent',
[PurchaseOrderStatus.ReceivingStarted]: 'Receiving Started',
[PurchaseOrderStatus.Done]: 'Done'
};
// Labels for the receiving lifecycle, keyed the same way.
export const ReceivingStatusLabels: Record<ReceivingStatus, string> = {
[ReceivingStatus.Canceled]: 'Canceled',
[ReceivingStatus.Created]: 'Created',
[ReceivingStatus.PartialReceived]: 'Partially Received',
[ReceivingStatus.FullReceived]: 'Fully Received',
[ReceivingStatus.Paid]: 'Paid'
};
// Helper functions

/**
 * Human-readable label for a purchase-order status code.
 * Codes that are not members of PurchaseOrderStatus resolve to 'Unknown'.
 */
export function getPurchaseOrderStatusLabel(status: number): string {
  const label = PurchaseOrderStatusLabels[status as PurchaseOrderStatus];
  return label ?? 'Unknown';
}

/**
 * Human-readable label for a receiving status code.
 * Codes that are not members of ReceivingStatus resolve to 'Unknown'.
 */
export function getReceivingStatusLabel(status: number): string {
  const label = ReceivingStatusLabels[status as ReceivingStatus];
  return label ?? 'Unknown';
}
// Status checks

/** True once any stock has arrived — a partial receipt already counts. */
export const isReceivingComplete = (status: number): boolean =>
  status >= ReceivingStatus.PartialReceived;

/** True only for a fully closed-out purchase order. */
export const isPurchaseOrderComplete = (status: number): boolean =>
  status === PurchaseOrderStatus.Done;

/** True when the purchase order itself was voided. */
export const isPurchaseOrderCanceled = (status: number): boolean =>
  status === PurchaseOrderStatus.Canceled;

/** True when the receiving record was voided. */
export const isReceivingCanceled = (status: number): boolean =>
  status === ReceivingStatus.Canceled;
// Badge variants for different statuses
export function getPurchaseOrderStatusVariant(status: number): 'default' | 'secondary' | 'destructive' | 'outline' {
if (isPurchaseOrderCanceled(status)) return 'destructive';
if (isPurchaseOrderComplete(status)) return 'default';
if (status >= PurchaseOrderStatus.ElectronicallyReadySend) return 'secondary';
return 'outline';
}
export function getReceivingStatusVariant(status: number): 'default' | 'secondary' | 'destructive' | 'outline' {
if (isReceivingCanceled(status)) return 'destructive';
if (status === ReceivingStatus.Paid) return 'default';
if (status >= ReceivingStatus.PartialReceived) return 'secondary';
return 'outline';
}