8 Commits

43 changed files with 4004 additions and 1116 deletions

File diff suppressed because it is too large

View File

@@ -154,6 +154,24 @@ CREATE TRIGGER update_sales_seasonality_updated
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Create table for financial calculation parameters
CREATE TABLE financial_calc_config (
id INTEGER NOT NULL PRIMARY KEY,
order_cost DECIMAL(10,2) NOT NULL DEFAULT 25.00, -- The fixed cost per purchase order (used in EOQ)
holding_rate DECIMAL(10,4) NOT NULL DEFAULT 0.25, -- The annual inventory holding cost as a percentage of unit cost (used in EOQ)
service_level_z_score DECIMAL(10,4) NOT NULL DEFAULT 1.96, -- Z-score for ~95% service level (used in Safety Stock)
min_reorder_qty INTEGER NOT NULL DEFAULT 1, -- Minimum reorder quantity
default_reorder_qty INTEGER NOT NULL DEFAULT 5, -- Default reorder quantity when sales data is insufficient
default_safety_stock INTEGER NOT NULL DEFAULT 5, -- Default safety stock when sales data is insufficient
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE TRIGGER update_financial_calc_config_updated
BEFORE UPDATE ON financial_calc_config
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Insert default global thresholds
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (1, NULL, NULL, 7, 14, 90)
@@ -203,6 +221,17 @@ VALUES
ON CONFLICT (month) DO UPDATE SET
last_updated = CURRENT_TIMESTAMP;
-- Insert default values
INSERT INTO financial_calc_config (id, order_cost, holding_rate, service_level_z_score, min_reorder_qty, default_reorder_qty, default_safety_stock)
VALUES (1, 25.00, 0.25, 1.96, 1, 5, 5)
ON CONFLICT (id) DO UPDATE SET
order_cost = EXCLUDED.order_cost,
holding_rate = EXCLUDED.holding_rate,
service_level_z_score = EXCLUDED.service_level_z_score,
min_reorder_qty = EXCLUDED.min_reorder_qty,
default_reorder_qty = EXCLUDED.default_reorder_qty,
default_safety_stock = EXCLUDED.default_safety_stock;
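As a quick sanity check on these defaults, a sketch of the two formulas they parameterize, EOQ and safety stock, with illustrative demand figures that are not part of the migration:
-- Sketch only, not part of the migration: how the defaults above feed the downstream
-- formulas. Demand inputs (2.5 units/day, unit cost 10.00, 30-day lead time,
-- daily-demand stddev 1.2) are illustrative; lead-time variance is ignored here for brevity.
SELECT
    CEIL(SQRT((2 * (2.5 * 365) * c.order_cost) / (10.00 * c.holding_rate))) AS eoq_units,        -- 136
    CEIL(c.service_level_z_score * SQRT(30) * 1.2)                          AS safety_stock_est  -- 13
FROM financial_calc_config c
WHERE c.id = 1;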
-- View to show thresholds with category names
CREATE OR REPLACE VIEW stock_thresholds_view AS
SELECT
@@ -252,7 +281,7 @@ CREATE TABLE IF NOT EXISTS calculate_status (
);
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);

View File

@@ -11,15 +11,17 @@ CREATE TABLE temp_sales_metrics (
avg_margin_percent DECIMAL(10,3),
first_sale_date DATE,
last_sale_date DATE,
stddev_daily_sales DECIMAL(10,3),
PRIMARY KEY (pid)
);
CREATE TABLE temp_purchase_metrics (
pid BIGINT NOT NULL,
avg_lead_time_days INTEGER,
avg_lead_time_days DECIMAL(10,2),
last_purchase_date DATE,
first_received_date DATE,
last_received_date DATE,
stddev_lead_time_days DECIMAL(10,2),
PRIMARY KEY (pid)
);
@@ -50,7 +52,7 @@ CREATE TABLE product_metrics (
gross_profit DECIMAL(10,3),
gmroi DECIMAL(10,3),
-- Purchase metrics
avg_lead_time_days INTEGER,
avg_lead_time_days DECIMAL(10,2),
last_purchase_date DATE,
first_received_date DATE,
last_received_date DATE,

View File

@@ -0,0 +1,165 @@
-- Drop tables in reverse order of dependency
DROP TABLE IF EXISTS public.product_metrics CASCADE;
DROP TABLE IF EXISTS public.daily_product_snapshots CASCADE;
-- Table Definition: daily_product_snapshots
CREATE TABLE public.daily_product_snapshots (
snapshot_date DATE NOT NULL,
pid INT8 NOT NULL,
sku VARCHAR, -- Copied for convenience
-- Inventory Metrics (End of Day / Last Snapshot of Day)
eod_stock_quantity INT NOT NULL DEFAULT 0,
eod_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- Increased precision
eod_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
eod_stock_gross NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
stockout_flag BOOLEAN NOT NULL DEFAULT FALSE,
-- Sales Metrics (Aggregated for the snapshot_date)
units_sold INT NOT NULL DEFAULT 0,
units_returned INT NOT NULL DEFAULT 0,
gross_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
discounts NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
returns_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
net_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- gross_revenue - discounts
cogs NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
gross_regular_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
profit NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- net_revenue - cogs
-- Receiving Metrics (Aggregated for the snapshot_date)
units_received INT NOT NULL DEFAULT 0,
cost_received NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
calculation_timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (snapshot_date, pid) -- Composite primary key
-- CONSTRAINT fk_daily_snapshot_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE -- FK Optional on snapshot table
);
-- Add Indexes for daily_product_snapshots
CREATE INDEX idx_daily_snapshot_pid_date ON public.daily_product_snapshots(pid, snapshot_date); -- Useful for product-specific time series
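How a day's sales block could be rolled up into this table is sketched below, assuming the orders table defined elsewhere in this changeset as the source; the actual population logic lives in the calculation scripts, and the inventory and receiving columns are left to their defaults here.
-- Sketch only: roll one day's sales out of the orders table into a snapshot row.
-- Inventory (eod_*) and receiving columns would come from stock and purchase_orders
-- data not shown here; this covers just the sales block.
INSERT INTO public.daily_product_snapshots
    (snapshot_date, pid, units_sold, gross_revenue, discounts, net_revenue, cogs, profit)
SELECT
    DATE(o.date)                                        AS snapshot_date,
    o.pid,
    SUM(o.quantity)                                     AS units_sold,
    SUM(o.price * o.quantity)                           AS gross_revenue,
    SUM(COALESCE(o.discount, 0))                        AS discounts,
    SUM(o.price * o.quantity - COALESCE(o.discount, 0)) AS net_revenue,   -- gross_revenue - discounts
    SUM(COALESCE(o.costeach, 0) * o.quantity)           AS cogs,
    SUM(o.price * o.quantity - COALESCE(o.discount, 0)
        - COALESCE(o.costeach, 0) * o.quantity)         AS profit         -- net_revenue - cogs
FROM orders o
WHERE o.canceled = false
  AND DATE(o.date) = CURRENT_DATE - 1
GROUP BY DATE(o.date), o.pid
ON CONFLICT (snapshot_date, pid) DO NOTHING;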
-- Table Definition: product_metrics
CREATE TABLE public.product_metrics (
pid INT8 PRIMARY KEY,
last_calculated TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-- Product Info (Copied for convenience/performance)
sku VARCHAR,
title VARCHAR,
brand VARCHAR,
vendor VARCHAR,
image_url VARCHAR, -- (e.g., products.image_175)
is_visible BOOLEAN,
is_replenishable BOOLEAN,
-- Current Status (Refreshed Hourly)
current_price NUMERIC(10, 2),
current_regular_price NUMERIC(10, 2),
current_cost_price NUMERIC(10, 4), -- Increased precision for cost
current_landing_cost_price NUMERIC(10, 4), -- Increased precision for cost
current_stock INT NOT NULL DEFAULT 0,
current_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
current_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
current_stock_gross NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
on_order_qty INT NOT NULL DEFAULT 0,
on_order_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
on_order_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
earliest_expected_date DATE,
-- total_received_lifetime INT NOT NULL DEFAULT 0, -- Can calc if needed
-- Historical Dates (Calculated Once/Periodically)
date_created DATE,
date_first_received DATE,
date_last_received DATE,
date_first_sold DATE,
date_last_sold DATE,
age_days INT, -- Calculated based on LEAST(date_created, date_first_sold)
-- Rolling Period Metrics (Refreshed Hourly from daily_product_snapshots)
sales_7d INT, revenue_7d NUMERIC(14, 4),
sales_14d INT, revenue_14d NUMERIC(14, 4),
sales_30d INT, revenue_30d NUMERIC(14, 4),
cogs_30d NUMERIC(14, 4), profit_30d NUMERIC(14, 4),
returns_units_30d INT, returns_revenue_30d NUMERIC(14, 4),
discounts_30d NUMERIC(14, 4),
gross_revenue_30d NUMERIC(14, 4), gross_regular_revenue_30d NUMERIC(14, 4),
stockout_days_30d INT,
sales_365d INT, revenue_365d NUMERIC(14, 4),
avg_stock_units_30d NUMERIC(10, 2), avg_stock_cost_30d NUMERIC(14, 4),
avg_stock_retail_30d NUMERIC(14, 4), avg_stock_gross_30d NUMERIC(14, 4),
received_qty_30d INT, received_cost_30d NUMERIC(14, 4),
-- Lifetime Metrics (Recalculated Hourly/Daily from daily_product_snapshots)
lifetime_sales INT,
lifetime_revenue NUMERIC(16, 4),
-- First Period Metrics (Calculated Once/Periodically from daily_product_snapshots)
first_7_days_sales INT, first_7_days_revenue NUMERIC(14, 4),
first_30_days_sales INT, first_30_days_revenue NUMERIC(14, 4),
first_60_days_sales INT, first_60_days_revenue NUMERIC(14, 4),
first_90_days_sales INT, first_90_days_revenue NUMERIC(14, 4),
-- Calculated KPIs (Refreshed Hourly based on rolling metrics)
asp_30d NUMERIC(10, 2), -- revenue_30d / sales_30d
acp_30d NUMERIC(10, 4), -- cogs_30d / sales_30d
avg_ros_30d NUMERIC(10, 4), -- profit_30d / sales_30d
avg_sales_per_day_30d NUMERIC(10, 2), -- sales_30d / 30.0
avg_sales_per_month_30d NUMERIC(10, 2), -- sales_30d (assuming 30d = 1 month for this metric)
margin_30d NUMERIC(5, 2), -- (profit_30d / revenue_30d) * 100
markup_30d NUMERIC(5, 2), -- (profit_30d / cogs_30d) * 100
gmroi_30d NUMERIC(10, 2), -- profit_30d / avg_stock_cost_30d
stockturn_30d NUMERIC(10, 2), -- sales_30d / avg_stock_units_30d
return_rate_30d NUMERIC(5, 2), -- returns_units_30d / (sales_30d + returns_units_30d) * 100
discount_rate_30d NUMERIC(5, 2), -- discounts_30d / gross_revenue_30d * 100
stockout_rate_30d NUMERIC(5, 2), -- stockout_days_30d / 30.0 * 100
markdown_30d NUMERIC(14, 4), -- gross_regular_revenue_30d - gross_revenue_30d
markdown_rate_30d NUMERIC(5, 2), -- markdown_30d / gross_regular_revenue_30d * 100
sell_through_30d NUMERIC(5, 2), -- sales_30d / (current_stock + sales_30d) * 100
avg_lead_time_days INT, -- Calculated Periodically from purchase_orders
-- Forecasting & Replenishment (Refreshed Hourly)
abc_class CHAR(1), -- Updated Periodically (e.g., Weekly)
sales_velocity_daily NUMERIC(10, 4), -- sales_30d / (30.0 - stockout_days_30d)
config_lead_time INT, -- From settings tables
config_days_of_stock INT, -- From settings tables
config_safety_stock INT, -- From settings_product
planning_period_days INT, -- config_lead_time + config_days_of_stock
lead_time_forecast_units NUMERIC(10, 2), -- sales_velocity_daily * config_lead_time
days_of_stock_forecast_units NUMERIC(10, 2), -- sales_velocity_daily * config_days_of_stock
planning_period_forecast_units NUMERIC(10, 2), -- lead_time_forecast_units + days_of_stock_forecast_units
lead_time_closing_stock NUMERIC(10, 2), -- current_stock + on_order_qty - lead_time_forecast_units
days_of_stock_closing_stock NUMERIC(10, 2), -- lead_time_closing_stock - days_of_stock_forecast_units
replenishment_needed_raw NUMERIC(10, 2), -- planning_period_forecast_units + config_safety_stock - current_stock - on_order_qty
replenishment_units INT, -- CEILING(GREATEST(0, replenishment_needed_raw))
replenishment_cost NUMERIC(14, 4), -- replenishment_units * COALESCE(current_landing_cost_price, current_cost_price)
replenishment_retail NUMERIC(14, 4), -- replenishment_units * current_price
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - COALESCE(current_landing_cost_price, current_cost_price))
to_order_units INT, -- Apply MOQ/UOM logic to replenishment_units
forecast_lost_sales_units NUMERIC(10, 2), -- GREATEST(0, -lead_time_closing_stock)
forecast_lost_revenue NUMERIC(14, 4), -- forecast_lost_sales_units * current_price
stock_cover_in_days NUMERIC(10, 1), -- current_stock / sales_velocity_daily
po_cover_in_days NUMERIC(10, 1), -- on_order_qty / sales_velocity_daily
sells_out_in_days NUMERIC(10, 1), -- (current_stock + on_order_qty) / sales_velocity_daily
replenish_date DATE, -- Calc based on when stock hits safety stock minus lead time
overstocked_units INT, -- GREATEST(0, current_stock - config_safety_stock - planning_period_forecast_units)
overstocked_cost NUMERIC(14, 4), -- overstocked_units * COALESCE(current_landing_cost_price, current_cost_price)
overstocked_retail NUMERIC(14, 4), -- overstocked_units * current_price
is_old_stock BOOLEAN, -- Based on age, last sold, last received, on_order status
-- Yesterday's Metrics (Refreshed Hourly from daily_product_snapshots)
yesterday_sales INT,
CONSTRAINT fk_product_metrics_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE
);
-- Add Indexes for product_metrics (adjust based on common filtering/sorting in frontend)
CREATE INDEX idx_product_metrics_brand ON public.product_metrics(brand);
CREATE INDEX idx_product_metrics_vendor ON public.product_metrics(vendor);
CREATE INDEX idx_product_metrics_sku ON public.product_metrics(sku);
CREATE INDEX idx_product_metrics_abc_class ON public.product_metrics(abc_class);
CREATE INDEX idx_product_metrics_revenue_30d ON public.product_metrics(revenue_30d DESC NULLS LAST); -- Example sorting index
CREATE INDEX idx_product_metrics_sales_30d ON public.product_metrics(sales_30d DESC NULLS LAST); -- Example sorting index
CREATE INDEX idx_product_metrics_current_stock ON public.product_metrics(current_stock);
CREATE INDEX idx_product_metrics_sells_out_in_days ON public.product_metrics(sells_out_in_days ASC NULLS LAST); -- Example sorting index
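To make the comment chain above concrete, a sketch with illustrative numbers of how the velocity, forecast, and replenishment columns feed each other (the real refresh reads the rolling sums from daily_product_snapshots):
-- Sketch only: derive a few of the commented KPIs and the replenishment chain
-- for one product, using illustrative numbers in place of the hourly aggregates.
WITH m AS (
    SELECT 120 AS sales_30d,
           2   AS stockout_days_30d,
           14  AS config_lead_time,
           21  AS config_days_of_stock,
           10  AS config_safety_stock,
           35  AS current_stock,
           20  AS on_order_qty
)
SELECT
    sales_30d / (30.0 - stockout_days_30d)                                  AS sales_velocity_daily,     -- ≈ 4.29
    sales_30d / (30.0 - stockout_days_30d) * config_lead_time               AS lead_time_forecast_units, -- 60
    CEILING(GREATEST(0,
        sales_30d / (30.0 - stockout_days_30d) * (config_lead_time + config_days_of_stock)
        + config_safety_stock - current_stock - on_order_qty))              AS replenishment_units       -- 105
FROM m;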

View File

@@ -7,7 +7,7 @@ BEGIN
-- Check which table is being updated and use the appropriate column
IF TG_TABLE_NAME = 'categories' THEN
NEW.updated_at = CURRENT_TIMESTAMP;
ELSE
ELSIF TG_TABLE_NAME IN ('products', 'orders', 'purchase_orders') THEN
NEW.updated = CURRENT_TIMESTAMP;
END IF;
RETURN NEW;
@@ -17,48 +17,48 @@ $func$ language plpgsql;
-- Create tables
CREATE TABLE products (
pid BIGINT NOT NULL,
title VARCHAR(255) NOT NULL,
title TEXT NOT NULL,
description TEXT,
SKU VARCHAR(50) NOT NULL,
sku TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE,
first_received TIMESTAMP WITH TIME ZONE,
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
price DECIMAL(10, 3) NOT NULL,
regular_price DECIMAL(10, 3) NOT NULL,
cost_price DECIMAL(10, 3),
landing_cost_price DECIMAL(10, 3),
barcode VARCHAR(50),
harmonized_tariff_code VARCHAR(20),
price NUMERIC(14, 4) NOT NULL,
regular_price NUMERIC(14, 4) NOT NULL,
cost_price NUMERIC(14, 4),
landing_cost_price NUMERIC(14, 4),
barcode TEXT,
harmonized_tariff_code TEXT,
updated_at TIMESTAMP WITH TIME ZONE,
visible BOOLEAN DEFAULT true,
managing_stock BOOLEAN DEFAULT true,
replenishable BOOLEAN DEFAULT true,
vendor VARCHAR(100),
vendor_reference VARCHAR(100),
notions_reference VARCHAR(100),
permalink VARCHAR(255),
vendor TEXT,
vendor_reference TEXT,
notions_reference TEXT,
permalink TEXT,
categories TEXT,
image VARCHAR(255),
image_175 VARCHAR(255),
image_full VARCHAR(255),
brand VARCHAR(100),
line VARCHAR(100),
subline VARCHAR(100),
artist VARCHAR(100),
image TEXT,
image_175 TEXT,
image_full TEXT,
brand TEXT,
line TEXT,
subline TEXT,
artist TEXT,
options TEXT,
tags TEXT,
moq INTEGER DEFAULT 1,
uom INTEGER DEFAULT 1,
rating DECIMAL(10,2) DEFAULT 0.00,
rating NUMERIC(14, 4) DEFAULT 0.00,
reviews INTEGER DEFAULT 0,
weight DECIMAL(10,3),
length DECIMAL(10,3),
width DECIMAL(10,3),
height DECIMAL(10,3),
country_of_origin VARCHAR(5),
location VARCHAR(50),
weight NUMERIC(14, 4),
length NUMERIC(14, 4),
width NUMERIC(14, 4),
height NUMERIC(14, 4),
country_of_origin TEXT,
location TEXT,
total_sold INTEGER DEFAULT 0,
baskets INTEGER DEFAULT 0,
notifies INTEGER DEFAULT 0,
@@ -74,25 +74,25 @@ CREATE TRIGGER update_products_updated
EXECUTE FUNCTION update_updated_column();
-- Create indexes for products table
CREATE INDEX idx_products_sku ON products(SKU);
CREATE INDEX idx_products_sku ON products(sku);
CREATE INDEX idx_products_vendor ON products(vendor);
CREATE INDEX idx_products_brand ON products(brand);
CREATE INDEX idx_products_location ON products(location);
CREATE INDEX idx_products_total_sold ON products(total_sold);
CREATE INDEX idx_products_date_last_sold ON products(date_last_sold);
CREATE INDEX idx_products_visible ON products(visible);
CREATE INDEX idx_products_replenishable ON products(replenishable);
CREATE INDEX idx_products_updated ON products(updated);
-- Create categories table with hierarchy support
CREATE TABLE categories (
cat_id BIGINT PRIMARY KEY,
name VARCHAR(100) NOT NULL,
name TEXT NOT NULL,
type SMALLINT NOT NULL,
parent_id BIGINT,
description TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
status VARCHAR(20) DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
status TEXT DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id) ON DELETE SET NULL
);
-- Create trigger for categories
@@ -106,6 +106,7 @@ COMMENT ON COLUMN categories.type IS '10=section, 11=category, 12=subcategory, 1
CREATE INDEX idx_categories_parent ON categories(parent_id);
CREATE INDEX idx_categories_type ON categories(type);
CREATE INDEX idx_categories_status ON categories(status);
CREATE INDEX idx_categories_name ON categories(name);
CREATE INDEX idx_categories_name_type ON categories(name, type);
-- Create product_categories junction table
@@ -118,28 +119,28 @@ CREATE TABLE product_categories (
);
CREATE INDEX idx_product_categories_category ON product_categories(cat_id);
CREATE INDEX idx_product_categories_product ON product_categories(pid);
-- Create orders table with its indexes
CREATE TABLE orders (
id BIGSERIAL PRIMARY KEY,
order_number VARCHAR(50) NOT NULL,
order_number TEXT NOT NULL,
pid BIGINT NOT NULL,
SKU VARCHAR(50) NOT NULL,
date DATE NOT NULL,
price DECIMAL(10,3) NOT NULL,
sku TEXT NOT NULL,
date TIMESTAMP WITH TIME ZONE NOT NULL,
price NUMERIC(14, 4) NOT NULL,
quantity INTEGER NOT NULL,
discount DECIMAL(10,3) DEFAULT 0.000,
tax DECIMAL(10,3) DEFAULT 0.000,
discount NUMERIC(14, 4) DEFAULT 0.0000,
tax NUMERIC(14, 4) DEFAULT 0.0000,
tax_included BOOLEAN DEFAULT false,
shipping DECIMAL(10,3) DEFAULT 0.000,
costeach DECIMAL(10,3) DEFAULT 0.000,
customer VARCHAR(50) NOT NULL,
customer_name VARCHAR(100),
status VARCHAR(20) DEFAULT 'pending',
shipping NUMERIC(14, 4) DEFAULT 0.0000,
costeach NUMERIC(14, 4) DEFAULT 0.0000,
customer TEXT NOT NULL,
customer_name TEXT,
status TEXT DEFAULT 'pending',
canceled BOOLEAN DEFAULT false,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE (order_number, pid)
UNIQUE (order_number, pid),
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE RESTRICT
);
-- Create trigger for orders
@@ -150,36 +151,37 @@ CREATE TRIGGER update_orders_updated
CREATE INDEX idx_orders_number ON orders(order_number);
CREATE INDEX idx_orders_pid ON orders(pid);
CREATE INDEX idx_orders_sku ON orders(sku);
CREATE INDEX idx_orders_customer ON orders(customer);
CREATE INDEX idx_orders_date ON orders(date);
CREATE INDEX idx_orders_status ON orders(status);
CREATE INDEX idx_orders_metrics ON orders(pid, date, canceled);
CREATE INDEX idx_orders_pid_date ON orders(pid, date);
CREATE INDEX idx_orders_updated ON orders(updated);
-- Create purchase_orders table with its indexes
CREATE TABLE purchase_orders (
id BIGSERIAL PRIMARY KEY,
po_id VARCHAR(50) NOT NULL,
vendor VARCHAR(100) NOT NULL,
po_id TEXT NOT NULL,
vendor TEXT NOT NULL,
date DATE NOT NULL,
expected_date DATE,
pid BIGINT NOT NULL,
sku VARCHAR(50) NOT NULL,
name VARCHAR(255) NOT NULL,
cost_price DECIMAL(10, 3) NOT NULL,
po_cost_price DECIMAL(10, 3) NOT NULL,
status SMALLINT DEFAULT 1,
receiving_status SMALLINT DEFAULT 1,
sku TEXT NOT NULL,
name TEXT NOT NULL,
cost_price NUMERIC(14, 4) NOT NULL,
po_cost_price NUMERIC(14, 4) NOT NULL,
status TEXT DEFAULT 'created',
receiving_status TEXT DEFAULT 'created',
notes TEXT,
long_note TEXT,
ordered INTEGER NOT NULL,
received INTEGER DEFAULT 0,
received_date DATE,
last_received_date DATE,
received_by VARCHAR,
received_by TEXT,
receiving_history JSONB,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (pid) REFERENCES products(pid),
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
UNIQUE (po_id, pid)
);
@@ -191,18 +193,19 @@ CREATE TRIGGER update_purchase_orders_updated
COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
COMMENT ON COLUMN purchase_orders.status IS '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done';
COMMENT ON COLUMN purchase_orders.receiving_status IS '0=canceled,1=created,30=partial_received,40=full_received,50=paid';
COMMENT ON COLUMN purchase_orders.status IS 'canceled, created, electronically_ready_send, ordered, preordered, electronically_sent, receiving_started, done';
COMMENT ON COLUMN purchase_orders.receiving_status IS 'canceled, created, partial_received, full_received, paid';
COMMENT ON COLUMN purchase_orders.receiving_history IS 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag';
CREATE INDEX idx_po_id ON purchase_orders(po_id);
CREATE INDEX idx_po_sku ON purchase_orders(sku);
CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
CREATE INDEX idx_po_status ON purchase_orders(status);
CREATE INDEX idx_po_receiving_status ON purchase_orders(receiving_status);
CREATE INDEX idx_po_metrics ON purchase_orders(pid, date, status, ordered, received);
CREATE INDEX idx_po_metrics_receiving ON purchase_orders(pid, date, receiving_status, received_date);
CREATE INDEX idx_po_product_date ON purchase_orders(pid, date);
CREATE INDEX idx_po_product_status ON purchase_orders(pid, status);
CREATE INDEX idx_po_expected_date ON purchase_orders(expected_date);
CREATE INDEX idx_po_last_received_date ON purchase_orders(last_received_date);
CREATE INDEX idx_po_pid_status ON purchase_orders(pid, status);
CREATE INDEX idx_po_pid_date ON purchase_orders(pid, date);
CREATE INDEX idx_po_updated ON purchase_orders(updated);
SET session_replication_role = 'origin'; -- Re-enable foreign key checks

View File

@@ -57,25 +57,16 @@ const TEMP_TABLES = [
'temp_daily_sales',
'temp_product_stats',
'temp_category_sales',
'temp_category_stats'
'temp_category_stats',
'temp_beginning_inventory',
'temp_monthly_inventory'
];
// Add cleanup function for temporary tables
async function cleanupTemporaryTables(connection) {
// List of possible temporary tables that might exist
const tempTables = [
'temp_sales_metrics',
'temp_purchase_metrics',
'temp_forecast_dates',
'temp_daily_sales',
'temp_product_stats',
'temp_category_sales',
'temp_category_stats'
];
try {
// Drop each temporary table if it exists
for (const table of tempTables) {
for (const table of TEMP_TABLES) {
await connection.query(`DROP TABLE IF EXISTS ${table}`);
}
} catch (err) {
@@ -534,7 +525,7 @@ async function calculateMetrics() {
await connection.query(`
UPDATE calculate_history
SET
status = 'error',
status = 'failed',
end_time = NOW(),
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
error_message = $1

View File

@@ -111,8 +111,8 @@ async function main() {
// Initialize sync_status table if it doesn't exist
await localConnection.query(`
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);
@@ -169,8 +169,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Categories import result:', results.categories);
totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0) || 0;
totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0) || 0;
totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0);
}
if (IMPORT_PRODUCTS) {
@@ -178,8 +178,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Products import result:', results.products);
totalRecordsAdded += parseInt(results.products?.recordsAdded || 0) || 0;
totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0) || 0;
totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
}
if (IMPORT_ORDERS) {
@@ -187,8 +187,8 @@ async function main() {
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Orders import result:', results.orders);
totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0) || 0;
totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0) || 0;
totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
}
if (IMPORT_PURCHASE_ORDERS) {
@@ -202,8 +202,8 @@ async function main() {
if (results.purchaseOrders?.status === 'error') {
console.error('Purchase orders import had an error:', results.purchaseOrders.error);
} else {
totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0) || 0;
totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0) || 0;
totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
}
} catch (error) {
console.error('Error during purchase orders import:', error);
@@ -242,8 +242,8 @@ async function main() {
WHERE id = $12
`, [
totalElapsedSeconds,
parseInt(totalRecordsAdded) || 0,
parseInt(totalRecordsUpdated) || 0,
parseInt(totalRecordsAdded),
parseInt(totalRecordsUpdated),
IMPORT_CATEGORIES,
IMPORT_PRODUCTS,
IMPORT_ORDERS,

View File

@@ -15,6 +15,9 @@ async function importCategories(prodConnection, localConnection) {
try {
// Start a single transaction for the entire import
await localConnection.query('BEGIN');
// Temporarily disable the categories updated-timestamp trigger for the bulk import
// (re-enabled after the sync_status update below, and again in the error path)
await localConnection.query('ALTER TABLE categories DISABLE TRIGGER update_categories_updated_at');
// Process each type in order with its own savepoint
for (const type of typeOrder) {
@@ -149,6 +152,9 @@ async function importCategories(prodConnection, localConnection) {
ON CONFLICT (table_name) DO UPDATE SET
last_sync_timestamp = NOW()
`);
// Re-enable the trigger
await localConnection.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
outputProgress({
status: "complete",
@@ -178,6 +184,9 @@ async function importCategories(prodConnection, localConnection) {
// Only rollback if we haven't committed yet
try {
await localConnection.query('ROLLBACK');
// Make sure we re-enable the trigger even if there was an error
await localConnection.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
} catch (rollbackError) {
console.error("Error during rollback:", rollbackError);
}

View File

@@ -117,43 +117,43 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
CREATE TEMP TABLE temp_order_items (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
sku TEXT NOT NULL,
price NUMERIC(14, 4) NOT NULL,
quantity INTEGER NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
base_discount NUMERIC(14, 4) DEFAULT 0,
PRIMARY KEY (order_id, pid)
);
CREATE TEMP TABLE temp_order_meta (
order_id INTEGER NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INTEGER,
date TIMESTAMP WITH TIME ZONE NOT NULL,
customer TEXT NOT NULL,
customer_name TEXT NOT NULL,
status TEXT,
canceled BOOLEAN,
summary_discount DECIMAL(10,2) DEFAULT 0.00,
summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
summary_discount NUMERIC(14, 4) DEFAULT 0.0000,
summary_subtotal NUMERIC(14, 4) DEFAULT 0.0000,
PRIMARY KEY (order_id)
);
CREATE TEMP TABLE temp_order_discounts (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
discount DECIMAL(10,2) NOT NULL,
discount NUMERIC(14, 4) NOT NULL,
PRIMARY KEY (order_id, pid)
);
CREATE TEMP TABLE temp_order_taxes (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
tax DECIMAL(10,2) NOT NULL,
tax NUMERIC(14, 4) NOT NULL,
PRIMARY KEY (order_id, pid)
);
CREATE TEMP TABLE temp_order_costs (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
costeach NUMERIC(14, 4) DEFAULT 0.0000,
PRIMARY KEY (order_id, pid)
);
@@ -172,10 +172,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
]);
await localConnection.query(`
INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
INSERT INTO temp_order_items (order_id, pid, sku, price, quantity, base_discount)
VALUES ${placeholders}
ON CONFLICT (order_id, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
sku = EXCLUDED.sku,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
base_discount = EXCLUDED.base_discount
@@ -241,10 +241,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
const values = subBatch.flatMap(order => [
order.order_id,
order.date,
new Date(order.date), // Convert to TIMESTAMP WITH TIME ZONE
order.customer,
toTitleCase(order.customer_name) || '',
order.status,
order.status.toString(), // Convert status to TEXT
order.canceled,
order.summary_discount || 0,
order.summary_subtotal || 0
@@ -447,7 +447,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
SELECT
oi.order_id as order_number,
oi.pid::bigint as pid,
oi.SKU as sku,
oi.sku,
om.date,
oi.price,
oi.quantity,
@@ -457,18 +457,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
WHEN om.summary_discount > 0 AND om.summary_subtotal > 0 THEN
ROUND((om.summary_discount * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 2)
ELSE 0
END)::DECIMAL(10,2) as discount,
COALESCE(ot.total_tax, 0)::DECIMAL(10,2) as tax,
END)::NUMERIC(14, 4) as discount,
COALESCE(ot.total_tax, 0)::NUMERIC(14, 4) as tax,
false as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(ot.costeach, oi.price * 0.5)::DECIMAL(10,3) as costeach
COALESCE(ot.costeach, oi.price * 0.5)::NUMERIC(14, 4) as costeach
FROM (
SELECT DISTINCT ON (order_id, pid)
order_id, pid, SKU, price, quantity, base_discount
order_id, pid, sku, price, quantity, base_discount
FROM temp_order_items
WHERE order_id = ANY($1)
ORDER BY order_id, pid
@@ -508,7 +508,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
o.order_number,
o.pid,
o.sku || 'NO-SKU',
o.date,
o.date, // This is now a TIMESTAMP WITH TIME ZONE
o.price,
o.quantity,
o.discount,
@@ -517,7 +517,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
o.shipping,
o.customer,
o.customer_name,
o.status,
o.status.toString(), // Convert status to TEXT
o.canceled,
o.costeach
]);
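The allocation expression used for per-line discounts above spreads the order-level discount in proportion to line value; a worked case with made-up numbers:
-- Illustrative only: order subtotal 100.00, summary discount 10.00, this line's value 40.00
SELECT ROUND((10.00 * 40.00) / NULLIF(100.00, 0), 2) AS line_discount;  -- 4.00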

View File

@@ -57,50 +57,50 @@ async function setupTemporaryTables(connection) {
await connection.query(`
CREATE TEMP TABLE temp_products (
pid BIGINT NOT NULL,
title VARCHAR(255),
title TEXT,
description TEXT,
sku VARCHAR(50),
sku TEXT,
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
price DECIMAL(10,3) NOT NULL DEFAULT 0,
regular_price DECIMAL(10,3) NOT NULL DEFAULT 0,
cost_price DECIMAL(10,3),
vendor VARCHAR(100),
vendor_reference VARCHAR(100),
notions_reference VARCHAR(100),
brand VARCHAR(100),
line VARCHAR(100),
subline VARCHAR(100),
artist VARCHAR(100),
price NUMERIC(14, 4) NOT NULL DEFAULT 0,
regular_price NUMERIC(14, 4) NOT NULL DEFAULT 0,
cost_price NUMERIC(14, 4),
vendor TEXT,
vendor_reference TEXT,
notions_reference TEXT,
brand TEXT,
line TEXT,
subline TEXT,
artist TEXT,
categories TEXT,
created_at TIMESTAMP,
first_received TIMESTAMP,
landing_cost_price DECIMAL(10,3),
barcode VARCHAR(50),
harmonized_tariff_code VARCHAR(50),
updated_at TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE,
first_received TIMESTAMP WITH TIME ZONE,
landing_cost_price NUMERIC(14, 4),
barcode TEXT,
harmonized_tariff_code TEXT,
updated_at TIMESTAMP WITH TIME ZONE,
visible BOOLEAN,
managing_stock BOOLEAN DEFAULT true,
replenishable BOOLEAN,
permalink VARCHAR(255),
permalink TEXT,
moq INTEGER DEFAULT 1,
uom INTEGER DEFAULT 1,
rating DECIMAL(10,2),
rating NUMERIC(14, 4),
reviews INTEGER,
weight DECIMAL(10,3),
length DECIMAL(10,3),
width DECIMAL(10,3),
height DECIMAL(10,3),
country_of_origin VARCHAR(100),
location VARCHAR(100),
weight NUMERIC(14, 4),
length NUMERIC(14, 4),
width NUMERIC(14, 4),
height NUMERIC(14, 4),
country_of_origin TEXT,
location TEXT,
total_sold INTEGER,
baskets INTEGER,
notifies INTEGER,
date_last_sold TIMESTAMP,
image VARCHAR(255),
image_175 VARCHAR(255),
image_full VARCHAR(255),
date_last_sold TIMESTAMP WITH TIME ZONE,
image TEXT,
image_175 TEXT,
image_full TEXT,
options TEXT,
tags TEXT,
needs_update BOOLEAN DEFAULT TRUE,

View File

@@ -73,19 +73,18 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
-- Temporary table for purchase orders
CREATE TEMP TABLE temp_purchase_orders (
po_id VARCHAR(50) NOT NULL,
po_id TEXT NOT NULL,
pid BIGINT NOT NULL,
sku VARCHAR(50),
name VARCHAR(255),
vendor VARCHAR(255),
sku TEXT,
name TEXT,
vendor TEXT,
date TIMESTAMP WITH TIME ZONE,
expected_date DATE,
status INTEGER,
status_text VARCHAR(50),
status TEXT,
notes TEXT,
long_note TEXT,
ordered INTEGER,
po_cost_price DECIMAL(10,3),
po_cost_price NUMERIC(14, 4),
supplier_id INTEGER,
date_created TIMESTAMP WITH TIME ZONE,
date_ordered TIMESTAMP WITH TIME ZONE,
@@ -94,27 +93,26 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
-- Temporary table for receivings
CREATE TEMP TABLE temp_receivings (
receiving_id VARCHAR(50) NOT NULL,
po_id VARCHAR(50),
receiving_id TEXT NOT NULL,
po_id TEXT,
pid BIGINT NOT NULL,
qty_each INTEGER,
cost_each DECIMAL(10,5),
cost_each NUMERIC(14, 4),
received_by INTEGER,
received_date TIMESTAMP WITH TIME ZONE,
receiving_created_date TIMESTAMP WITH TIME ZONE,
supplier_id INTEGER,
status INTEGER,
status_text VARCHAR(50),
status TEXT,
PRIMARY KEY (receiving_id, pid)
);
-- Temporary table for tracking FIFO allocations
CREATE TEMP TABLE temp_receiving_allocations (
po_id VARCHAR(50) NOT NULL,
po_id TEXT NOT NULL,
pid BIGINT NOT NULL,
receiving_id VARCHAR(50) NOT NULL,
receiving_id TEXT NOT NULL,
allocated_qty INTEGER NOT NULL,
cost_each DECIMAL(10,5) NOT NULL,
cost_each NUMERIC(14, 4) NOT NULL,
received_date TIMESTAMP WITH TIME ZONE NOT NULL,
received_by INTEGER,
PRIMARY KEY (po_id, pid, receiving_id)
@@ -123,8 +121,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
-- Temporary table for employee names
CREATE TEMP TABLE employee_names (
employeeid INTEGER PRIMARY KEY,
firstname VARCHAR(100),
lastname VARCHAR(100)
firstname TEXT,
lastname TEXT
);
-- Create indexes for efficient joins
@@ -135,22 +133,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
// Map status codes to text values
const poStatusMap = {
0: 'Canceled',
1: 'Created',
10: 'Ready ESend',
11: 'Ordered',
12: 'Preordered',
13: 'Electronically Sent',
15: 'Receiving Started',
50: 'Done'
0: 'canceled',
1: 'created',
10: 'electronically_ready_send',
11: 'ordered',
12: 'preordered',
13: 'electronically_sent',
15: 'receiving_started',
50: 'done'
};
const receivingStatusMap = {
0: 'Canceled',
1: 'Created',
30: 'Partial Received',
40: 'Full Received',
50: 'Paid'
0: 'canceled',
1: 'created',
30: 'partial_received',
40: 'full_received',
50: 'paid'
};
// Get time window for data retrieval
@@ -281,8 +279,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
vendor: po.vendor || 'Unknown Vendor',
date: validateDate(po.date_ordered) || validateDate(po.date_created),
expected_date: validateDate(po.date_estin),
status: po.status,
status_text: poStatusMap[po.status] || '',
status: poStatusMap[po.status] || 'created',
notes: po.notes || '',
long_note: po.long_note || '',
ordered: product.qty_each,
@@ -298,8 +295,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const batch = completePOs.slice(i, i + INSERT_BATCH_SIZE);
const placeholders = batch.map((_, idx) => {
const base = idx * 16;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15}, $${base + 16})`;
const base = idx * 15;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
}).join(',');
const values = batch.flatMap(po => [
@@ -311,7 +308,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po.date,
po.expected_date,
po.status,
po.status_text,
po.notes,
po.long_note,
po.ordered,
@@ -323,8 +319,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
await localConnection.query(`
INSERT INTO temp_purchase_orders (
po_id, pid, sku, name, vendor, date, expected_date, status, status_text,
notes, long_note, ordered, po_cost_price, supplier_id, date_created, date_ordered
po_id, pid, sku, name, vendor, date, expected_date, status, notes, long_note,
ordered, po_cost_price, supplier_id, date_created, date_ordered
)
VALUES ${placeholders}
ON CONFLICT (po_id, pid) DO UPDATE SET
@@ -334,7 +330,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
date = EXCLUDED.date,
expected_date = EXCLUDED.expected_date,
status = EXCLUDED.status,
status_text = EXCLUDED.status_text,
notes = EXCLUDED.notes,
long_note = EXCLUDED.long_note,
ordered = EXCLUDED.ordered,
@@ -448,9 +443,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
received_date: validateDate(product.received_date) || validateDate(product.receiving_created_date),
receiving_created_date: validateDate(product.receiving_created_date),
supplier_id: receiving.supplier_id,
status: receiving.status,
status_text: receivingStatusMap[receiving.status] || '',
receiving_created_date: validateDate(product.receiving_created_date)
status: receivingStatusMap[receiving.status] || 'created'
});
}
@@ -459,8 +452,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const batch = completeReceivings.slice(i, i + INSERT_BATCH_SIZE);
const placeholders = batch.map((_, idx) => {
const base = idx * 11;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11})`;
const base = idx * 10;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10})`;
}).join(',');
const values = batch.flatMap(r => [
@@ -473,14 +466,13 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
r.received_date,
r.receiving_created_date,
r.supplier_id,
r.status,
r.status_text
r.status
]);
await localConnection.query(`
INSERT INTO temp_receivings (
receiving_id, po_id, pid, qty_each, cost_each, received_by,
received_date, receiving_created_date, supplier_id, status, status_text
received_date, receiving_created_date, supplier_id, status
)
VALUES ${placeholders}
ON CONFLICT (receiving_id, pid) DO UPDATE SET
@@ -491,8 +483,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
received_date = EXCLUDED.received_date,
receiving_created_date = EXCLUDED.receiving_created_date,
supplier_id = EXCLUDED.supplier_id,
status = EXCLUDED.status,
status_text = EXCLUDED.status_text
status = EXCLUDED.status
`, values);
}
@@ -586,11 +577,11 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
name: "Handling standalone receivings",
query: `
INSERT INTO temp_purchase_orders (
po_id, pid, sku, name, vendor, date, status, status_text,
po_id, pid, sku, name, vendor, date, status,
ordered, po_cost_price, supplier_id, date_created, date_ordered
)
SELECT
'R' || r.receiving_id as po_id,
r.receiving_id::text as po_id,
r.pid,
COALESCE(p.sku, 'NO-SKU') as sku,
COALESCE(p.name, 'Unknown Product') as name,
@@ -600,8 +591,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
'Unknown Vendor'
) as vendor,
COALESCE(r.received_date, r.receiving_created_date) as date,
NULL as status,
NULL as status_text,
'created' as status,
NULL as ordered,
r.cost_each as po_cost_price,
r.supplier_id,
@@ -626,7 +616,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po_id, pid, receiving_id, allocated_qty, cost_each, received_date, received_by
)
SELECT
'R' || r.receiving_id as po_id,
r.receiving_id::text as po_id,
r.pid,
r.receiving_id,
r.qty_each as allocated_qty,
@@ -872,13 +862,13 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po.name,
COALESCE(ca.avg_cost, po.po_cost_price) as cost_price,
po.po_cost_price,
CASE WHEN po.status IS NULL THEN 1 ELSE po.status END as status,
COALESCE(po.status, 'created'),
CASE
WHEN rs.total_received IS NULL THEN 1
WHEN rs.total_received = 0 THEN 1
WHEN rs.total_received < po.ordered THEN 30
WHEN rs.total_received >= po.ordered THEN 40
ELSE 1
WHEN rs.total_received IS NULL THEN 'created'
WHEN rs.total_received = 0 THEN 'created'
WHEN rs.total_received < po.ordered THEN 'partial_received'
WHEN rs.total_received >= po.ordered THEN 'full_received'
ELSE 'created'
END as receiving_status,
po.notes,
po.long_note,

View File

@@ -56,36 +56,94 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
}
});
// Calculate financial metrics with optimized query
// First, calculate beginning inventory values (12 months ago)
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_beginning_inventory AS
WITH beginning_inventory_calc AS (
SELECT
p.pid,
p.stock_quantity as current_quantity,
COALESCE(SUM(o.quantity), 0) as sold_quantity,
COALESCE(SUM(po.received), 0) as received_quantity,
GREATEST(0, (p.stock_quantity + COALESCE(SUM(o.quantity), 0) - COALESCE(SUM(po.received), 0))) as beginning_quantity,
p.cost_price
FROM
products p
LEFT JOIN
orders o ON p.pid = o.pid
AND o.canceled = false
AND o.date >= CURRENT_DATE - INTERVAL '12 months'::interval
LEFT JOIN
purchase_orders po ON p.pid = po.pid
AND po.received_date IS NOT NULL
AND po.received_date >= CURRENT_DATE - INTERVAL '12 months'::interval
GROUP BY
p.pid, p.stock_quantity, p.cost_price
)
SELECT
pid,
beginning_quantity,
beginning_quantity * cost_price as beginning_value,
current_quantity * cost_price as current_value,
((beginning_quantity * cost_price) + (current_quantity * cost_price)) / 2 as average_inventory_value
FROM
beginning_inventory_calc
`);
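In plain terms, the temp table works stock backwards twelve months: beginning quantity is today's stock plus units sold minus units received over the window, floored at zero, and the average inventory value is the midpoint of the beginning and current values. A worked case with illustrative numbers (stock 40, sold 300, received 290, cost 8.00):
-- Illustrative only, not part of the calculation script.
SELECT
    GREATEST(0, 40 + 300 - 290)                          AS beginning_quantity,      -- 50
    GREATEST(0, 40 + 300 - 290) * 8.00                   AS beginning_value,         -- 400.00
    40 * 8.00                                            AS current_value,           -- 320.00
    (GREATEST(0, 40 + 300 - 290) * 8.00 + 40 * 8.00) / 2 AS average_inventory_value; -- 360.00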
processedCount = Math.floor(totalProducts * 0.60);
outputProgress({
status: 'running',
operation: 'Beginning inventory values calculated, computing financial metrics',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// Calculate financial metrics with optimized query and standard formulas
await connection.query(`
WITH product_financials AS (
SELECT
p.pid,
p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * o.price) as total_revenue,
SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COALESCE(bi.average_inventory_value, p.cost_price * p.stock_quantity) as avg_inventory_value,
p.cost_price * p.stock_quantity as current_inventory_value,
SUM(o.quantity * (o.price - COALESCE(o.discount, 0))) as total_revenue,
SUM(o.quantity * COALESCE(o.costeach, 0)) as cost_of_goods_sold,
SUM(o.quantity * (o.price - COALESCE(o.discount, 0) - COALESCE(o.costeach, 0))) as gross_profit,
MIN(o.date) as first_sale_date,
MAX(o.date) as last_sale_date,
EXTRACT(DAY FROM (MAX(o.date)::timestamp with time zone - MIN(o.date)::timestamp with time zone)) + 1 as calculation_period_days,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
LEFT JOIN temp_beginning_inventory bi ON p.pid = bi.pid
WHERE o.canceled = false
AND DATE(o.date) >= CURRENT_DATE - INTERVAL '12 months'
GROUP BY p.pid, p.cost_price, p.stock_quantity
AND DATE(o.date) >= CURRENT_DATE - INTERVAL '12 months'::interval
GROUP BY p.pid, p.cost_price, p.stock_quantity, bi.average_inventory_value
)
UPDATE product_metrics pm
SET
inventory_value = COALESCE(pf.inventory_value, 0),
total_revenue = COALESCE(pf.total_revenue, 0),
cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0),
gross_profit = COALESCE(pf.gross_profit, 0),
gmroi = CASE
WHEN COALESCE(pf.inventory_value, 0) > 0 AND pf.active_days > 0 THEN
(COALESCE(pf.gross_profit, 0) * (365.0 / pf.active_days)) / COALESCE(pf.inventory_value, 0)
inventory_value = COALESCE(pf.current_inventory_value, 0)::decimal(10,3),
total_revenue = COALESCE(pf.total_revenue, 0)::decimal(10,3),
cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0)::decimal(10,3),
gross_profit = COALESCE(pf.gross_profit, 0)::decimal(10,3),
turnover_rate = CASE
WHEN COALESCE(pf.avg_inventory_value, 0) > 0 THEN
COALESCE(pf.cost_of_goods_sold, 0) / NULLIF(pf.avg_inventory_value, 0)
ELSE 0
END,
END::decimal(12,3),
gmroi = CASE
WHEN COALESCE(pf.avg_inventory_value, 0) > 0 THEN
COALESCE(pf.gross_profit, 0) / NULLIF(pf.avg_inventory_value, 0)
ELSE 0
END::decimal(10,3),
last_calculated_at = CURRENT_TIMESTAMP
FROM product_financials pf
WHERE pm.pid = pf.pid
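Continuing those illustrative numbers, the two ratios written back here are plain divisions over the averaged inventory value (COGS 2400.00 and gross profit 900.00 assumed):
-- Illustrative only: turnover = COGS / avg inventory, GMROI = gross profit / avg inventory
SELECT
    (2400.00 / NULLIF(360.00, 0))::decimal(12,3) AS turnover_rate,  -- 6.667
    (900.00  / NULLIF(360.00, 0))::decimal(10,3) AS gmroi;          -- 2.500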
@@ -115,53 +173,8 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
success
};
// Update time-based aggregates with optimized query
await connection.query(`
WITH monthly_financials AS (
SELECT
p.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as active_days,
MIN(o.date) as period_start,
MAX(o.date) as period_end
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
)
UPDATE product_time_aggregates pta
SET
inventory_value = COALESCE(mf.inventory_value, 0),
gmroi = CASE
WHEN COALESCE(mf.inventory_value, 0) > 0 AND mf.active_days > 0 THEN
(COALESCE(mf.gross_profit, 0) * (365.0 / mf.active_days)) / COALESCE(mf.inventory_value, 0)
ELSE 0
END
FROM monthly_financials mf
WHERE pta.pid = mf.pid
AND pta.year = mf.year
AND pta.month = mf.month
`);
processedCount = Math.floor(totalProducts * 0.70);
outputProgress({
status: 'running',
operation: 'Time-based aggregates updated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// Clean up temporary tables
await connection.query('DROP TABLE IF EXISTS temp_beginning_inventory');
// If we get here, everything completed successfully
success = true;
@@ -187,6 +200,12 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCoun
throw error;
} finally {
if (connection) {
try {
// Make sure temporary tables are always cleaned up
await connection.query('DROP TABLE IF EXISTS temp_beginning_inventory');
} catch (err) {
console.error('Error cleaning up temp tables:', err);
}
connection.release();
}
}

View File

@@ -66,8 +66,36 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
WHERE category_id IS NULL AND vendor IS NULL
LIMIT 1
`);
// Check if threshold data was returned
if (!thresholds.rows || thresholds.rows.length === 0) {
console.warn('No default thresholds found in the database. Falling back to hard-coded default values.');
}
const defaultThresholds = thresholds.rows[0];
// Get financial calculation configuration parameters
const financialConfig = await connection.query(`
SELECT
order_cost,
holding_rate,
service_level_z_score,
min_reorder_qty,
default_reorder_qty,
default_safety_stock
FROM financial_calc_config
WHERE id = 1
LIMIT 1
`);
const finConfig = financialConfig.rows[0] || {
order_cost: 25.00,
holding_rate: 0.25,
service_level_z_score: 1.96,
min_reorder_qty: 1,
default_reorder_qty: 5,
default_safety_stock: 5
};
// Calculate base product metrics
if (!SKIP_PRODUCT_BASE_METRICS) {
outputProgress({
@@ -109,6 +137,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
avg_margin_percent DECIMAL(10,3),
first_sale_date DATE,
last_sale_date DATE,
stddev_daily_sales DECIMAL(10,3),
PRIMARY KEY (pid)
)
`);
@@ -117,10 +146,11 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
await connection.query(`
CREATE TEMPORARY TABLE temp_purchase_metrics (
pid BIGINT NOT NULL,
avg_lead_time_days DOUBLE PRECISION,
avg_lead_time_days DECIMAL(10,2),
last_purchase_date DATE,
first_received_date DATE,
last_received_date DATE,
stddev_lead_time_days DECIMAL(10,2),
PRIMARY KEY (pid)
)
`);
@@ -140,11 +170,22 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
ELSE 0
END as avg_margin_percent,
MIN(o.date) as first_sale_date,
MAX(o.date) as last_sale_date
MAX(o.date) as last_sale_date,
COALESCE(STDDEV_SAMP(daily_qty.quantity), 0) as stddev_daily_sales
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
AND o.canceled = false
AND o.date >= CURRENT_DATE - INTERVAL '90 days'
LEFT JOIN (
SELECT
pid,
DATE(date) as sale_date,
SUM(quantity) as quantity
FROM orders
WHERE canceled = false
AND date >= CURRENT_DATE - INTERVAL '90 days'
GROUP BY pid, DATE(date)
) daily_qty ON p.pid = daily_qty.pid
GROUP BY p.pid
`);
@@ -163,7 +204,14 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
) as avg_lead_time_days,
MAX(po.date) as last_purchase_date,
MIN(po.received_date) as first_received_date,
MAX(po.received_date) as last_received_date
MAX(po.received_date) as last_received_date,
STDDEV_SAMP(
CASE
WHEN po.received_date IS NOT NULL AND po.date IS NOT NULL
THEN EXTRACT(EPOCH FROM (po.received_date::timestamp with time zone - po.date::timestamp with time zone)) / 86400.0
ELSE NULL
END
) as stddev_lead_time_days
FROM products p
LEFT JOIN purchase_orders po ON p.pid = po.pid
AND po.received_date IS NOT NULL
@@ -184,7 +232,8 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
30.0 as avg_lead_time_days,
NULL as last_purchase_date,
NULL as first_received_date,
NULL as last_received_date
NULL as last_received_date,
0.0 as stddev_lead_time_days
FROM products p
LEFT JOIN temp_purchase_metrics tpm ON p.pid = tpm.pid
WHERE tpm.pid IS NULL
@@ -208,6 +257,17 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
if (batch.rows.length === 0) break;
// Process the entire batch in a single efficient query
const lowStockThreshold = parseInt(defaultThresholds?.low_stock_threshold) || 5;
const criticalDays = parseInt(defaultThresholds?.critical_days) || 7;
const reorderDays = parseInt(defaultThresholds?.reorder_days) || 14;
const overstockDays = parseInt(defaultThresholds?.overstock_days) || 90;
const serviceLevel = parseFloat(finConfig?.service_level_z_score) || 1.96;
const defaultSafetyStock = parseInt(finConfig?.default_safety_stock) || 5;
const defaultReorderQty = parseInt(finConfig?.default_reorder_qty) || 5;
const orderCost = parseFloat(finConfig?.order_cost) || 25.00;
const holdingRate = parseFloat(finConfig?.holding_rate) || 0.25;
const minReorderQty = parseInt(finConfig?.min_reorder_qty) || 1;
await connection.query(`
UPDATE product_metrics pm
SET
@@ -219,7 +279,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
avg_margin_percent = COALESCE(sm.avg_margin_percent, 0),
first_sale_date = sm.first_sale_date,
last_sale_date = sm.last_sale_date,
avg_lead_time_days = COALESCE(lm.avg_lead_time_days, 30),
avg_lead_time_days = COALESCE(lm.avg_lead_time_days, 30.0),
days_of_inventory = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0
THEN FLOOR(p.stock_quantity / NULLIF(sm.daily_sales_avg, 0))
@@ -232,57 +292,61 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
END,
stock_status = CASE
WHEN p.stock_quantity <= 0 THEN 'Out of Stock'
WHEN COALESCE(sm.daily_sales_avg, 0) = 0 AND p.stock_quantity <= $1 THEN 'Low Stock'
WHEN COALESCE(sm.daily_sales_avg, 0) = 0 AND p.stock_quantity <= ${lowStockThreshold} THEN 'Low Stock'
WHEN COALESCE(sm.daily_sales_avg, 0) = 0 THEN 'In Stock'
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= $2 THEN 'Critical'
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= $3 THEN 'Reorder'
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > $4 THEN 'Overstocked'
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= ${criticalDays} THEN 'Critical'
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= ${reorderDays} THEN 'Reorder'
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > ${overstockDays} THEN 'Overstocked'
ELSE 'Healthy'
END,
safety_stock = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 THEN
CEIL(sm.daily_sales_avg * SQRT(ABS(COALESCE(lm.avg_lead_time_days, 30))) * 1.96)
ELSE $5
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND COALESCE(lm.avg_lead_time_days, 0) > 0 THEN
CEIL(
${serviceLevel} * SQRT(
GREATEST(0, COALESCE(lm.avg_lead_time_days, 0)) * POWER(COALESCE(sm.stddev_daily_sales, 0), 2) +
POWER(COALESCE(sm.daily_sales_avg, 0), 2) * POWER(COALESCE(lm.stddev_lead_time_days, 0), 2)
)
)
ELSE ${defaultSafetyStock}
END,
reorder_point = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 THEN
CEIL(sm.daily_sales_avg * COALESCE(lm.avg_lead_time_days, 30)) +
CEIL(sm.daily_sales_avg * SQRT(ABS(COALESCE(lm.avg_lead_time_days, 30))) * 1.96)
ELSE $6
CEIL(sm.daily_sales_avg * GREATEST(0, COALESCE(lm.avg_lead_time_days, 30.0))) +
(CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND COALESCE(lm.avg_lead_time_days, 0) > 0 THEN
CEIL(
${serviceLevel} * SQRT(
GREATEST(0, COALESCE(lm.avg_lead_time_days, 0)) * POWER(COALESCE(sm.stddev_daily_sales, 0), 2) +
POWER(COALESCE(sm.daily_sales_avg, 0), 2) * POWER(COALESCE(lm.stddev_lead_time_days, 0), 2)
)
)
ELSE ${defaultSafetyStock}
END)
ELSE ${lowStockThreshold}
END,
reorder_qty = CASE
WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND NULLIF(p.cost_price, 0) IS NOT NULL AND NULLIF(p.cost_price, 0) > 0 THEN
GREATEST(
CEIL(SQRT(ABS((2 * (sm.daily_sales_avg * 365) * 25) / (NULLIF(p.cost_price, 0) * 0.25)))),
$7
CEIL(SQRT(
(2 * (sm.daily_sales_avg * 365) * ${orderCost}) /
NULLIF(p.cost_price * ${holdingRate}, 0)
)),
${minReorderQty}
)
ELSE $8
ELSE ${defaultReorderQty}
END,
overstocked_amt = CASE
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > $9
THEN GREATEST(0, p.stock_quantity - CEIL(sm.daily_sales_avg * $10))
WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > ${overstockDays}
THEN GREATEST(0, p.stock_quantity - CEIL(sm.daily_sales_avg * ${overstockDays}))
ELSE 0
END,
last_calculated_at = NOW()
FROM products p
LEFT JOIN temp_sales_metrics sm ON p.pid = sm.pid
LEFT JOIN temp_purchase_metrics lm ON p.pid = lm.pid
WHERE p.pid = ANY($11::bigint[])
WHERE p.pid = ANY($1::BIGINT[])
AND pm.pid = p.pid
`,
[
defaultThresholds.low_stock_threshold,
defaultThresholds.critical_days,
defaultThresholds.reorder_days,
defaultThresholds.overstock_days,
defaultThresholds.low_stock_threshold,
defaultThresholds.low_stock_threshold,
defaultThresholds.low_stock_threshold,
defaultThresholds.low_stock_threshold,
defaultThresholds.overstock_days,
defaultThresholds.overstock_days,
batch.rows.map(row => row.pid)
]);
`, [batch.rows.map(row => row.pid)]);
lastPid = batch.rows[batch.rows.length - 1].pid;
processedCount += batch.rows.length;
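For reference, the safety stock and reorder point interpolated above follow the combined-variance form SS = z * sqrt(L * stddev_daily^2 + avg_daily^2 * stddev_lead^2) and ROP = avg_daily * L + SS, with the EOQ expression supplying reorder_qty. A standalone sketch with illustrative inputs (z = 1.96, order cost 25.00, holding rate 0.25 from financial_calc_config; demand and lead-time figures are made up):
-- Illustrative only: avg daily sales 4, daily stddev 1.5, lead time 12 days,
-- lead-time stddev 3 days, annual demand 1460, unit cost 6.00
SELECT
    CEIL(1.96 * SQRT(12 * POWER(1.5, 2) + POWER(4, 2) * POWER(3, 2)))       AS safety_stock,  -- 26
    CEIL(4 * 12)
      + CEIL(1.96 * SQRT(12 * POWER(1.5, 2) + POWER(4, 2) * POWER(3, 2)))   AS reorder_point, -- 74
    CEIL(SQRT((2 * 1460 * 25.00) / NULLIF(6.00 * 0.25, 0)))                 AS reorder_qty;   -- 221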
@@ -311,25 +375,22 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
}
// Calculate forecast accuracy and bias in batches
lastPid = 0;
let forecastPid = 0;
while (true) {
if (isCancelled) break;
const batch = await connection.query(
const forecastBatch = await connection.query(
'SELECT pid FROM products WHERE pid > $1 ORDER BY pid LIMIT $2',
[lastPid, BATCH_SIZE]
[forecastPid, BATCH_SIZE]
);
if (batch.rows.length === 0) break;
if (forecastBatch.rows.length === 0) break;
const forecastPidArray = forecastBatch.rows.map(row => row.pid);
// Interpolate the pid batch directly into the query as a Postgres array literal
await connection.query(`
UPDATE product_metrics pm
SET
forecast_accuracy = GREATEST(0, 100 - LEAST(fa.avg_forecast_error, 100)),
forecast_bias = GREATEST(-100, LEAST(fa.avg_forecast_bias, 100)),
last_forecast_date = fa.last_forecast_date,
last_calculated_at = NOW()
FROM (
WITH forecast_metrics AS (
SELECT
sf.pid,
AVG(CASE
@@ -348,13 +409,20 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
AND DATE(o.date) = sf.forecast_date
WHERE o.canceled = false
AND sf.forecast_date >= CURRENT_DATE - INTERVAL '90 days'
AND sf.pid = ANY($1::bigint[])
AND sf.pid = ANY('{${forecastPidArray.join(',')}}'::BIGINT[])
GROUP BY sf.pid
) fa
WHERE pm.pid = fa.pid
`, [batch.rows.map(row => row.pid)]);
)
UPDATE product_metrics pm
SET
forecast_accuracy = GREATEST(0, 100 - LEAST(fm.avg_forecast_error, 100)),
forecast_bias = GREATEST(-100, LEAST(fm.avg_forecast_bias, 100)),
last_forecast_date = fm.last_forecast_date,
last_calculated_at = NOW()
FROM forecast_metrics fm
WHERE pm.pid = fm.pid
`);
lastPid = batch.rows[batch.rows.length - 1].pid;
forecastPid = forecastBatch.rows[forecastBatch.rows.length - 1].pid;
}
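// Rough shape of the accuracy/bias rollup above, assuming fm.avg_forecast_error is a
// mean absolute percentage error and fm.avg_forecast_bias a mean signed percentage
// error (their full expressions sit in the elided part of the hunk).
function forecastScores(avgForecastError: number, avgForecastBias: number) {
  const accuracy = Math.max(0, 100 - Math.min(avgForecastError, 100)); // 100 = perfect, floored at 0
  const bias = Math.max(-100, Math.min(avgForecastBias, 100));         // clamped to [-100, 100]
  return { accuracy, bias };
}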
// Calculate product time aggregates
@@ -375,61 +443,12 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
}
});
// Calculate time-based aggregates
await connection.query(`
INSERT INTO product_time_aggregates (
pid,
year,
month,
total_quantity_sold,
total_revenue,
total_cost,
order_count,
avg_price,
profit_margin,
inventory_value,
gmroi
)
SELECT
p.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
SUM(o.quantity) as total_quantity_sold,
SUM(o.price * o.quantity) as total_revenue,
SUM(p.cost_price * o.quantity) as total_cost,
COUNT(DISTINCT o.order_number) as order_count,
AVG(o.price) as avg_price,
CASE
WHEN SUM(o.quantity * o.price) > 0
THEN ((SUM(o.quantity * o.price) - SUM(o.quantity * p.cost_price)) / SUM(o.quantity * o.price)) * 100
ELSE 0
END as profit_margin,
p.cost_price * p.stock_quantity as inventory_value,
CASE
WHEN p.cost_price * p.stock_quantity > 0
THEN (SUM(o.quantity * (o.price - p.cost_price))) / (p.cost_price * p.stock_quantity)
ELSE 0
END as gmroi
FROM products p
LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
WHERE o.date >= CURRENT_DATE - INTERVAL '12 months'
GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone)
ON CONFLICT (pid, year, month) DO UPDATE
SET
total_quantity_sold = EXCLUDED.total_quantity_sold,
total_revenue = EXCLUDED.total_revenue,
total_cost = EXCLUDED.total_cost,
order_count = EXCLUDED.order_count,
avg_price = EXCLUDED.avg_price,
profit_margin = EXCLUDED.profit_margin,
inventory_value = EXCLUDED.inventory_value,
gmroi = EXCLUDED.gmroi
`);
// Note: The time-aggregates calculation has been moved to time-aggregates.js
// This module will not duplicate that functionality
processedCount = Math.floor(totalProducts * 0.6);
outputProgress({
status: 'running',
operation: 'Product time aggregates calculated',
operation: 'Product time aggregates calculation delegated to time-aggregates module',
current: processedCount || 0,
total: totalProducts || 0,
elapsed: formatElapsedTime(startTime),
@@ -487,6 +506,10 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
const abcConfig = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
const abcThresholds = abcConfig.rows[0] || { a_threshold: 20, b_threshold: 50 };
// Extract values and ensure they are valid numbers
const aThreshold = parseFloat(abcThresholds.a_threshold) || 20;
const bThreshold = parseFloat(abcThresholds.b_threshold) || 50;
// First, create and populate the rankings table with an index
await connection.query('DROP TABLE IF EXISTS temp_revenue_ranks');
@@ -557,13 +580,13 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
OR pm.abc_class !=
CASE
WHEN tr.pid IS NULL THEN 'C'
WHEN tr.percentile <= $2 THEN 'A'
WHEN tr.percentile <= $3 THEN 'B'
WHEN tr.percentile <= ${aThreshold} THEN 'A'
WHEN tr.percentile <= ${bThreshold} THEN 'B'
ELSE 'C'
END)
ORDER BY pm.pid
LIMIT $4
`, [abcProcessedCount, abcThresholds.a_threshold, abcThresholds.b_threshold, batchSize]);
LIMIT $2
`, [abcProcessedCount, batchSize]);
if (pids.rows.length === 0) break;
@@ -574,15 +597,15 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
SET abc_class =
CASE
WHEN tr.pid IS NULL THEN 'C'
WHEN tr.percentile <= $1 THEN 'A'
WHEN tr.percentile <= $2 THEN 'B'
WHEN tr.percentile <= ${aThreshold} THEN 'A'
WHEN tr.percentile <= ${bThreshold} THEN 'B'
ELSE 'C'
END,
last_calculated_at = NOW()
FROM (SELECT pid, percentile FROM temp_revenue_ranks) tr
WHERE pm.pid = tr.pid AND pm.pid = ANY($3::bigint[])
OR (pm.pid = ANY($3::bigint[]) AND tr.pid IS NULL)
`, [abcThresholds.a_threshold, abcThresholds.b_threshold, pidValues]);
WHERE pm.pid = tr.pid AND pm.pid = ANY($1::BIGINT[])
OR (pm.pid = ANY($1::BIGINT[]) AND tr.pid IS NULL)
`, [pidValues]);
// Now update turnover rate with proper handling of zero inventory periods
await connection.query(`
@@ -610,7 +633,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
JOIN products p ON o.pid = p.pid
WHERE o.canceled = false
AND o.date >= CURRENT_DATE - INTERVAL '90 days'
AND o.pid = ANY($1::bigint[])
AND o.pid = ANY($1::BIGINT[])
GROUP BY o.pid
) sales
WHERE pm.pid = sales.pid
@@ -707,40 +730,7 @@ function calculateStockStatus(stock, config, daily_sales_avg, weekly_sales_avg,
return 'Healthy';
}
function calculateReorderQuantities(stock, stock_status, daily_sales_avg, avg_lead_time, config) {
// Calculate safety stock based on service level and lead time
const z_score = 1.96; // 95% service level
const lead_time = avg_lead_time || config.target_days;
const safety_stock = Math.ceil(daily_sales_avg * Math.sqrt(lead_time) * z_score);
// Calculate reorder point
const lead_time_demand = daily_sales_avg * lead_time;
const reorder_point = Math.ceil(lead_time_demand + safety_stock);
// Calculate reorder quantity using EOQ formula if we have the necessary data
let reorder_qty = 0;
if (daily_sales_avg > 0) {
const annual_demand = daily_sales_avg * 365;
const order_cost = 25; // Fixed cost per order
const holding_cost = config.cost_price * 0.25; // 25% of unit cost as annual holding cost
reorder_qty = Math.ceil(Math.sqrt((2 * annual_demand * order_cost) / holding_cost));
} else {
// If no sales data, use a basic calculation
reorder_qty = Math.max(safety_stock, config.low_stock_threshold);
}
// Calculate overstocked amount
const overstocked_amt = stock_status === 'Overstocked' ?
stock - Math.ceil(daily_sales_avg * config.overstock_days) :
0;
return {
safety_stock,
reorder_point,
reorder_qty,
overstocked_amt
};
}
// Note: calculateReorderQuantities function has been removed as its logic has been incorporated
// in the main SQL query with configurable parameters
module.exports = calculateProductMetrics;

View File

@@ -216,13 +216,7 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
GREATEST(0,
ROUND(
ds.avg_daily_qty *
(1 + COALESCE(sf.seasonality_factor, 0)) *
CASE
WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.5 THEN 0.85
WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.0 THEN 0.9
WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 0.5 THEN 0.95
ELSE 1.0
END
(1 + COALESCE(sf.seasonality_factor, 0))
)
) as forecast_quantity,
CASE
@@ -336,8 +330,8 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
cs.cat_id::bigint as category_id,
fd.forecast_date,
GREATEST(0,
AVG(cs.daily_quantity) *
(1 + COALESCE(sf.seasonality_factor, 0))
ROUND(AVG(cs.daily_quantity) *
(1 + COALESCE(sf.seasonality_factor, 0)))
) as forecast_units,
GREATEST(0,
COALESCE(
@@ -345,8 +339,7 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
WHEN SUM(cs.day_count) >= 4 THEN AVG(cs.daily_revenue)
ELSE ct.overall_avg_revenue
END *
(1 + COALESCE(sf.seasonality_factor, 0)) *
(0.95 + (random() * 0.1)),
(1 + COALESCE(sf.seasonality_factor, 0)),
0
)
) as forecast_revenue,
@@ -427,6 +420,18 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
throw error;
} finally {
if (connection) {
try {
// Ensure temporary tables are cleaned up
await connection.query(`
DROP TABLE IF EXISTS temp_forecast_dates;
DROP TABLE IF EXISTS temp_daily_sales;
DROP TABLE IF EXISTS temp_product_stats;
DROP TABLE IF EXISTS temp_category_sales;
DROP TABLE IF EXISTS temp_category_stats;
`);
} catch (err) {
console.error('Error cleaning up temporary tables:', err);
}
connection.release();
}
}
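// A sketch of the simplified forecast used above: the seasonality-adjusted daily
// average, rounded and floored at zero, with the coefficient-of-variation damping
// and the random revenue jitter removed. Names mirror ds.avg_daily_qty and
// sf.seasonality_factor.
function forecastQuantity(avgDailyQty: number, seasonalityFactor: number | null): number {
  return Math.max(0, Math.round(avgDailyQty * (1 + (seasonalityFactor ?? 0))));
}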

View File

@@ -55,6 +55,93 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
}
});
// Create a temporary table for end-of-month inventory values
await connection.query(`
CREATE TEMPORARY TABLE IF NOT EXISTS temp_monthly_inventory AS
WITH months AS (
-- Generate year/month rows for the current month and the preceding 12 months
SELECT
EXTRACT(YEAR FROM month_date)::INTEGER as year,
EXTRACT(MONTH FROM month_date)::INTEGER as month,
month_date as start_date,
(month_date + INTERVAL '1 month'::interval - INTERVAL '1 day'::interval)::DATE as end_date
FROM (
SELECT generate_series(
DATE_TRUNC('month', CURRENT_DATE - INTERVAL '12 months'::interval)::DATE,
DATE_TRUNC('month', CURRENT_DATE)::DATE,
INTERVAL '1 month'::interval
) as month_date
) dates
),
monthly_inventory_calc AS (
SELECT
p.pid,
m.year,
m.month,
m.end_date,
p.stock_quantity as current_quantity,
-- Units sold since end_date (the joined orders are restricted to o.date > m.end_date)
COALESCE(SUM(
CASE
WHEN o.date > m.end_date THEN o.quantity
ELSE 0
END
), 0) as sold_after_end_date,
-- Units received since end_date (the joined purchase_orders are restricted to received_date > m.end_date)
COALESCE(SUM(
CASE
WHEN po.received_date > m.end_date THEN po.received
ELSE 0
END
), 0) as received_after_end_date,
p.cost_price
FROM
products p
CROSS JOIN
months m
LEFT JOIN
orders o ON p.pid = o.pid
AND o.canceled = false
AND o.date > m.end_date
AND o.date <= CURRENT_DATE
LEFT JOIN
purchase_orders po ON p.pid = po.pid
AND po.received_date IS NOT NULL
AND po.received_date > m.end_date
AND po.received_date <= CURRENT_DATE
GROUP BY
p.pid, m.year, m.month, m.end_date, p.stock_quantity, p.cost_price
)
SELECT
pid,
year,
month,
-- End-of-month quantity = current quantity + units sold since month end - units received since month end
GREATEST(0, current_quantity + sold_after_end_date - received_after_end_date) as end_of_month_quantity,
-- End-of-month inventory value
GREATEST(0, current_quantity + sold_after_end_date - received_after_end_date) * cost_price as end_of_month_value,
cost_price
FROM
monthly_inventory_calc
`);
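// Back-calculating month-end stock from the current on-hand figure, assuming
// soldAfter/receivedAfter are the units sold and received since that month ended
// (as accumulated in temp_monthly_inventory above).
function endOfMonthQuantity(currentQty: number, soldAfter: number, receivedAfter: number): number {
  // What was on the shelf then = what is on the shelf now, plus what has since
  // been sold, minus what has since been received.
  return Math.max(0, currentQty + soldAfter - receivedAfter);
}
// end_of_month_value is then endOfMonthQuantity(...) multiplied by cost_price.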
processedCount = Math.floor(totalProducts * 0.40);
outputProgress({
status: 'running',
operation: 'Monthly inventory values calculated, processing time aggregates',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// Initial insert of time-based aggregates
await connection.query(`
INSERT INTO product_time_aggregates (
@@ -75,76 +162,67 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
WITH monthly_sales AS (
SELECT
o.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
EXTRACT(YEAR FROM o.date::timestamp with time zone)::INTEGER as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone)::INTEGER as month,
SUM(o.quantity) as total_quantity_sold,
SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
SUM(COALESCE(o.costeach, 0) * o.quantity) as total_cost,
COUNT(DISTINCT o.order_number) as order_count,
AVG(o.price - COALESCE(o.discount, 0)) as avg_price,
CASE
WHEN SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) > 0
THEN ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) - SUM(COALESCE(p.cost_price, 0) * o.quantity))
THEN ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) - SUM(COALESCE(o.costeach, 0) * o.quantity))
/ SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100
ELSE 0
END as profit_margin,
p.cost_price * p.stock_quantity as inventory_value,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM orders o
JOIN products p ON o.pid = p.pid
WHERE o.canceled = false
GROUP BY o.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
GROUP BY o.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone)
),
monthly_stock AS (
SELECT
pid,
EXTRACT(YEAR FROM date::timestamp with time zone) as year,
EXTRACT(MONTH FROM date::timestamp with time zone) as month,
EXTRACT(YEAR FROM date::timestamp with time zone)::INTEGER as year,
EXTRACT(MONTH FROM date::timestamp with time zone)::INTEGER as month,
SUM(received) as stock_received,
SUM(ordered) as stock_ordered
FROM purchase_orders
GROUP BY pid, EXTRACT(YEAR FROM date::timestamp with time zone), EXTRACT(MONTH FROM date::timestamp with time zone)
),
base_products AS (
SELECT
p.pid,
p.cost_price * p.stock_quantity as inventory_value
FROM products p
)
SELECT
COALESCE(s.pid, ms.pid) as pid,
COALESCE(s.year, ms.year) as year,
COALESCE(s.month, ms.month) as month,
COALESCE(s.total_quantity_sold, 0) as total_quantity_sold,
COALESCE(s.total_revenue, 0) as total_revenue,
COALESCE(s.total_cost, 0) as total_cost,
COALESCE(s.order_count, 0) as order_count,
COALESCE(ms.stock_received, 0) as stock_received,
COALESCE(ms.stock_ordered, 0) as stock_ordered,
COALESCE(s.avg_price, 0) as avg_price,
COALESCE(s.profit_margin, 0) as profit_margin,
COALESCE(s.inventory_value, bp.inventory_value, 0) as inventory_value,
COALESCE(s.pid, ms.pid, mi.pid) as pid,
COALESCE(s.year, ms.year, mi.year) as year,
COALESCE(s.month, ms.month, mi.month) as month,
COALESCE(s.total_quantity_sold, 0)::INTEGER as total_quantity_sold,
COALESCE(s.total_revenue, 0)::DECIMAL(10,3) as total_revenue,
COALESCE(s.total_cost, 0)::DECIMAL(10,3) as total_cost,
COALESCE(s.order_count, 0)::INTEGER as order_count,
COALESCE(ms.stock_received, 0)::INTEGER as stock_received,
COALESCE(ms.stock_ordered, 0)::INTEGER as stock_ordered,
COALESCE(s.avg_price, 0)::DECIMAL(10,3) as avg_price,
COALESCE(s.profit_margin, 0)::DECIMAL(10,3) as profit_margin,
COALESCE(mi.end_of_month_value, 0)::DECIMAL(10,3) as inventory_value,
CASE
WHEN COALESCE(s.inventory_value, bp.inventory_value, 0) > 0
AND COALESCE(s.active_days, 0) > 0
THEN (COALESCE(s.total_revenue - s.total_cost, 0) * (365.0 / s.active_days))
/ COALESCE(s.inventory_value, bp.inventory_value)
WHEN COALESCE(mi.end_of_month_value, 0) > 0
THEN (COALESCE(s.total_revenue, 0) - COALESCE(s.total_cost, 0))
/ NULLIF(COALESCE(mi.end_of_month_value, 0), 0)
ELSE 0
END as gmroi
END::DECIMAL(10,3) as gmroi
FROM (
SELECT * FROM monthly_sales s
UNION ALL
SELECT
ms.pid,
ms.year,
ms.month,
pid,
year,
month,
0 as total_quantity_sold,
0 as total_revenue,
0 as total_cost,
0 as order_count,
NULL as avg_price,
0 as profit_margin,
NULL as inventory_value,
0 as active_days
FROM monthly_stock ms
WHERE NOT EXISTS (
@@ -153,50 +231,40 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
AND s2.year = ms.year
AND s2.month = ms.month
)
UNION ALL
SELECT
pid,
year,
month,
0 as total_quantity_sold,
0 as total_revenue,
0 as total_cost,
0 as order_count,
NULL as avg_price,
0 as profit_margin,
0 as active_days
FROM temp_monthly_inventory mi
WHERE NOT EXISTS (
SELECT 1 FROM monthly_sales s3
WHERE s3.pid = mi.pid
AND s3.year = mi.year
AND s3.month = mi.month
)
AND NOT EXISTS (
SELECT 1 FROM monthly_stock ms3
WHERE ms3.pid = mi.pid
AND ms3.year = mi.year
AND ms3.month = mi.month
)
) s
LEFT JOIN monthly_stock ms
ON s.pid = ms.pid
AND s.year = ms.year
AND s.month = ms.month
JOIN base_products bp ON COALESCE(s.pid, ms.pid) = bp.pid
UNION
SELECT
ms.pid,
ms.year,
ms.month,
0 as total_quantity_sold,
0 as total_revenue,
0 as total_cost,
0 as order_count,
ms.stock_received,
ms.stock_ordered,
0 as avg_price,
0 as profit_margin,
bp.inventory_value,
0 as gmroi
FROM monthly_stock ms
JOIN base_products bp ON ms.pid = bp.pid
WHERE NOT EXISTS (
SELECT 1 FROM (
SELECT * FROM monthly_sales
UNION ALL
SELECT
ms2.pid,
ms2.year,
ms2.month,
0, 0, 0, 0, NULL, 0, NULL, 0
FROM monthly_stock ms2
WHERE NOT EXISTS (
SELECT 1 FROM monthly_sales s2
WHERE s2.pid = ms2.pid
AND s2.year = ms2.year
AND s2.month = ms2.month
)
) s
WHERE s.pid = ms.pid
AND s.year = ms.year
AND s.month = ms.month
)
LEFT JOIN temp_monthly_inventory mi
ON s.pid = mi.pid
AND s.year = mi.year
AND s.month = mi.month
ON CONFLICT (pid, year, month) DO UPDATE
SET
total_quantity_sold = EXCLUDED.total_quantity_sold,
@@ -214,7 +282,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
processedCount = Math.floor(totalProducts * 0.60);
outputProgress({
status: 'running',
operation: 'Base time aggregates calculated, updating financial metrics',
operation: 'Base time aggregates calculated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
@@ -234,45 +302,9 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
processedPurchaseOrders: 0,
success
};
// Update with financial metrics
await connection.query(`
UPDATE product_time_aggregates pta
SET inventory_value = COALESCE(fin.inventory_value, 0)
FROM (
SELECT
p.pid,
EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
p.cost_price * p.stock_quantity as inventory_value,
SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
COUNT(DISTINCT DATE(o.date)) as active_days
FROM products p
LEFT JOIN orders o ON p.pid = o.pid
WHERE o.canceled = false
GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
) fin
WHERE pta.pid = fin.pid
AND pta.year = fin.year
AND pta.month = fin.month
`);
processedCount = Math.floor(totalProducts * 0.65);
outputProgress({
status: 'running',
operation: 'Financial metrics updated',
current: processedCount,
total: totalProducts,
elapsed: formatElapsedTime(startTime),
remaining: estimateRemaining(startTime, processedCount, totalProducts),
rate: calculateRate(startTime, processedCount),
percentage: ((processedCount / totalProducts) * 100).toFixed(1),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date().toISOString(),
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
}
});
// Clean up temporary tables
await connection.query('DROP TABLE IF EXISTS temp_monthly_inventory');
// If we get here, everything completed successfully
success = true;
@@ -298,6 +330,12 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
throw error;
} finally {
if (connection) {
try {
// Ensure temporary tables are cleaned up
await connection.query('DROP TABLE IF EXISTS temp_monthly_inventory');
} catch (err) {
console.error('Error cleaning up temporary tables:', err);
}
connection.release();
}
}
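// GMROI as computed in the aggregate above, assuming the inventory value is the
// month-end figure from temp_monthly_inventory rather than today's stock value.
function gmroi(totalRevenue: number, totalCost: number, endOfMonthValue: number): number {
  if (endOfMonthValue <= 0) return 0;
  return (totalRevenue - totalCost) / endOfMonthValue; // gross margin dollars per dollar of inventory
}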

View File

@@ -184,7 +184,7 @@ async function resetDatabase() {
SELECT string_agg(tablename, ', ') as tables
FROM pg_tables
WHERE schemaname = 'public'
AND tablename NOT IN ('users', 'permissions', 'user_permissions', 'calculate_history', 'import_history', 'ai_prompts', 'ai_validation_performance', 'templates');
AND tablename NOT IN ('users', 'permissions', 'user_permissions', 'calculate_history', 'import_history', 'ai_prompts', 'ai_validation_performance', 'templates', 'reusable_images');
`);
if (!tablesResult.rows[0].tables) {
@@ -204,7 +204,7 @@ async function resetDatabase() {
// Drop all tables except users
const tables = tablesResult.rows[0].tables.split(', ');
for (const table of tables) {
if (!['users'].includes(table)) {
if (!['users', 'reusable_images'].includes(table)) {
await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
}
}

View File

@@ -39,6 +39,19 @@ const METRICS_TABLES = [
'vendor_details'
];
// Tables to always protect from being dropped
const PROTECTED_TABLES = [
'users',
'permissions',
'user_permissions',
'calculate_history',
'import_history',
'ai_prompts',
'ai_validation_performance',
'templates',
'reusable_images'
];
// Split SQL into individual statements
function splitSQLStatements(sql) {
sql = sql.replace(/\r\n/g, '\n');
@@ -109,7 +122,8 @@ async function resetMetrics() {
FROM pg_tables
WHERE schemaname = 'public'
AND tablename = ANY($1)
`, [METRICS_TABLES]);
AND tablename NOT IN (SELECT unnest($2::text[]))
`, [METRICS_TABLES, PROTECTED_TABLES]);
outputProgress({
operation: 'Initial state',
@@ -126,6 +140,15 @@ async function resetMetrics() {
});
for (const table of [...METRICS_TABLES].reverse()) {
// Skip protected tables
if (PROTECTED_TABLES.includes(table)) {
outputProgress({
operation: 'Protected table',
message: `Skipping protected table: ${table}`
});
continue;
}
try {
// Use NOWAIT to avoid hanging if there's a lock
await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);

View File

@@ -779,10 +779,16 @@ router.get('/history/calculate', async (req, res) => {
id,
start_time,
end_time,
duration_minutes,
status,
error_message,
modules_processed::integer,
total_modules::integer
total_products,
total_orders,
total_purchase_orders,
processed_products,
processed_orders,
processed_purchase_orders,
additional_info
FROM calculate_history
ORDER BY start_time DESC
LIMIT 20
@@ -830,4 +836,58 @@ router.get('/status/tables', async (req, res) => {
}
});
// GET /status/table-counts - Get record counts for all tables
router.get('/status/table-counts', async (req, res) => {
try {
const pool = req.app.locals.pool;
const tables = [
// Core tables
'products', 'categories', 'product_categories', 'orders', 'purchase_orders',
// Metrics tables
'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics',
'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts',
// Config tables
'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
'abc_classification_config', 'safety_stock_config', 'turnover_config',
'sales_seasonality', 'financial_calc_config'
];
const counts = await Promise.all(
tables.map(table =>
pool.query(`SELECT COUNT(*) as count FROM ${table}`)
.then(result => ({
table_name: table,
count: parseInt(result.rows[0].count)
}))
.catch(err => ({
table_name: table,
count: null,
error: err.message
}))
)
);
// Group tables by type
const groupedCounts = {
core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)),
metrics: counts.filter(c => [
'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics',
'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts'
].includes(c.table_name)),
config: counts.filter(c => [
'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
'abc_classification_config', 'safety_stock_config', 'turnover_config',
'sales_seasonality', 'financial_calc_config'
].includes(c.table_name))
};
res.json(groupedCounts);
} catch (error) {
console.error('Error fetching table counts:', error);
res.status(500).json({ error: error.message });
}
});
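// Shape of the grouped payload returned by GET /status/table-counts; each entry is a
// per-table row count, and a table that fails to count comes back with count: null
// plus an error message.
interface TableCount { table_name: string; count: number | null; error?: string }
interface TableCountsResponse { core: TableCount[]; metrics: TableCount[]; config: TableCount[] }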
module.exports = router;

View File

@@ -102,35 +102,40 @@ router.get('/stock/metrics', async (req, res) => {
// Returns purchase order metrics by vendor
router.get('/purchase/metrics', async (req, res) => {
try {
// First check if there are any purchase orders in the database
const { rows: [poCount] } = await executeQuery(`
SELECT COUNT(*) as count FROM purchase_orders
`);
const { rows: [poMetrics] } = await executeQuery(`
SELECT
COALESCE(COUNT(DISTINCT CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.po_id
END), 0)::integer as active_pos,
COALESCE(COUNT(DISTINCT CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
AND po.expected_date < CURRENT_DATE
THEN po.po_id
END), 0)::integer as overdue_pos,
COALESCE(SUM(CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.ordered
ELSE 0
END), 0)::integer as total_units,
ROUND(COALESCE(SUM(CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.ordered * po.cost_price
ELSE 0
END), 0)::numeric, 3) as total_cost,
ROUND(COALESCE(SUM(CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.ordered * p.price
ELSE 0
END), 0)::numeric, 3) as total_retail
FROM purchase_orders po
JOIN products p ON po.pid = p.pid
`, [ReceivingStatus.PartialReceived]);
`);
const { rows: vendorOrders } = await executeQuery(`
SELECT
@@ -141,15 +146,15 @@ router.get('/purchase/metrics', async (req, res) => {
ROUND(COALESCE(SUM(po.ordered * p.price), 0)::numeric, 3) as retail
FROM purchase_orders po
JOIN products p ON po.pid = p.pid
WHERE po.receiving_status < $1
WHERE po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
GROUP BY po.vendor
HAVING ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) > 0
ORDER BY cost DESC
`, [ReceivingStatus.PartialReceived]);
`);
// If no data or missing metrics, provide dummy data
if (!poMetrics || vendorOrders.length === 0) {
console.log('No purchase metrics found, returning dummy data');
// If no purchase orders exist at all in the database, return dummy data
if (parseInt(poCount.count) === 0) {
console.log('No purchase orders found in database, returning dummy data');
return res.json({
activePurchaseOrders: 12,
@@ -164,6 +169,20 @@ router.get('/purchase/metrics', async (req, res) => {
]
});
}
// If no active purchase orders match the criteria, return zeros instead of dummy data
if (vendorOrders.length === 0) {
console.log('No active purchase orders matching criteria, returning zeros');
return res.json({
activePurchaseOrders: parseInt(poMetrics.active_pos) || 0,
overduePurchaseOrders: parseInt(poMetrics.overdue_pos) || 0,
onOrderUnits: parseInt(poMetrics.total_units) || 0,
onOrderCost: parseFloat(poMetrics.total_cost) || 0,
onOrderRetail: parseFloat(poMetrics.total_retail) || 0,
vendorOrders: []
});
}
// Format response to match PurchaseMetricsData interface
const response = {
@@ -184,19 +203,15 @@ router.get('/purchase/metrics', async (req, res) => {
res.json(response);
} catch (err) {
console.error('Error fetching purchase metrics:', err);
// Return dummy data on error
res.json({
activePurchaseOrders: 12,
overduePurchaseOrders: 3,
onOrderUnits: 1250,
onOrderCost: 12500,
onOrderRetail: 25000,
vendorOrders: [
{ vendor: "Test Vendor 1", orders: 5, units: 500, cost: 5000, retail: 10000 },
{ vendor: "Test Vendor 2", orders: 4, units: 400, cost: 4000, retail: 8000 },
{ vendor: "Test Vendor 3", orders: 3, units: 350, cost: 3500, retail: 7000 }
]
res.status(500).json({
error: 'Failed to fetch purchase metrics',
details: err.message,
activePurchaseOrders: 0,
overduePurchaseOrders: 0,
onOrderUnits: 0,
onOrderCost: 0,
onOrderRetail: 0,
vendorOrders: []
});
}
});
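// The route above now matches receiving_status and status against string sets instead
// of the old numeric ReceivingStatus comparison. A sketch of those sets as constants,
// using only the values that appear in the queries:
const RECEIVED_OR_PAID = ['partial_received', 'full_received', 'paid'] as const;
const OPEN_PO_STATUSES = [
  'created', 'electronically_ready_send', 'ordered', 'preordered',
  'electronically_sent', 'receiving_started',
] as const;
// So "receiving_status NOT IN (RECEIVED_OR_PAID values)" selects still-outstanding POs,
// and "status IN (OPEN_PO_STATUSES values)" selects active orders.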
@@ -1018,17 +1033,17 @@ router.get('/vendor/performance', async (req, res) => {
THEN EXTRACT(EPOCH FROM (po.received_date - po.date))/86400
ELSE NULL END)::numeric, 2), 0) as avg_lead_time,
COALESCE(ROUND(SUM(CASE
WHEN po.status = 'completed' AND po.received_date <= po.expected_date
WHEN po.status = 'done' AND po.received_date <= po.expected_date
THEN 1
ELSE 0
END)::numeric * 100.0 / NULLIF(COUNT(*)::numeric, 0), 2), 0) as on_time_delivery_rate,
COALESCE(ROUND(AVG(CASE
WHEN po.status = 'completed'
WHEN po.status = 'done'
THEN po.received::numeric / NULLIF(po.ordered::numeric, 0) * 100
ELSE NULL
END)::numeric, 2), 0) as avg_fill_rate,
COUNT(CASE WHEN po.status = 'open' THEN 1 END)::integer as active_orders,
COUNT(CASE WHEN po.status = 'open' AND po.expected_date < CURRENT_DATE THEN 1 END)::integer as overdue_orders
COUNT(CASE WHEN po.status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') THEN 1 END)::integer as active_orders,
COUNT(CASE WHEN po.status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') AND po.expected_date < CURRENT_DATE THEN 1 END)::integer as overdue_orders
FROM purchase_orders po
WHERE po.date >= CURRENT_DATE - INTERVAL '180 days'
GROUP BY po.vendor
@@ -1165,7 +1180,7 @@ router.get('/key-metrics', async (req, res) => {
SELECT
COUNT(DISTINCT po_id) as total_pos,
SUM(ordered * cost_price) as total_po_value,
COUNT(CASE WHEN status = 'open' THEN 1 END) as open_pos
COUNT(CASE WHEN status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') THEN 1 END) as open_pos
FROM purchase_orders
WHERE order_date >= CURRENT_DATE - INTERVAL '${days} days'
)

View File

@@ -8,7 +8,9 @@ const fs = require('fs');
// Create uploads directory if it doesn't exist
const uploadsDir = path.join('/var/www/html/inventory/uploads/products');
const reusableUploadsDir = path.join('/var/www/html/inventory/uploads/reusable');
fs.mkdirSync(uploadsDir, { recursive: true });
fs.mkdirSync(reusableUploadsDir, { recursive: true });
// Create a Map to track image upload times and their scheduled deletion
const imageUploadMap = new Map();
@@ -35,6 +37,12 @@ const connectionCache = {
// Function to schedule image deletion after 24 hours
const scheduleImageDeletion = (filename, filePath) => {
// Only schedule deletion for images in the products folder
if (!filePath.includes('/uploads/products/')) {
console.log(`Skipping deletion for non-product image: ${filename}`);
return;
}
// Delete any existing timeout for this file
if (imageUploadMap.has(filename)) {
clearTimeout(imageUploadMap.get(filename).timeoutId);
@@ -407,6 +415,14 @@ router.delete('/delete-image', (req, res) => {
return res.status(404).json({ error: 'File not found' });
}
// Only allow deletion of images in the products folder
if (!filePath.includes('/uploads/products/')) {
return res.status(403).json({
error: 'Cannot delete images outside the products folder',
message: 'This image is in a protected folder and cannot be deleted through this endpoint'
});
}
// Delete the file
fs.unlinkSync(filePath);
@@ -641,11 +657,19 @@ router.get('/check-file/:filename', (req, res) => {
return res.status(400).json({ error: 'Invalid filename' });
}
const filePath = path.join(uploadsDir, filename);
// First check in products directory
let filePath = path.join(uploadsDir, filename);
let exists = fs.existsSync(filePath);
// If not found in products, check in reusable directory
if (!exists) {
filePath = path.join(reusableUploadsDir, filename);
exists = fs.existsSync(filePath);
}
try {
// Check if file exists
if (!fs.existsSync(filePath)) {
if (!exists) {
return res.status(404).json({
error: 'File not found',
path: filePath,
@@ -685,13 +709,23 @@ router.get('/check-file/:filename', (req, res) => {
// List all files in uploads directory
router.get('/list-uploads', (req, res) => {
try {
if (!fs.existsSync(uploadsDir)) {
return res.status(404).json({ error: 'Uploads directory not found', path: uploadsDir });
const { directory = 'products' } = req.query;
// Determine which directory to list
let targetDir;
if (directory === 'reusable') {
targetDir = reusableUploadsDir;
} else {
targetDir = uploadsDir; // default to products
}
const files = fs.readdirSync(uploadsDir);
if (!fs.existsSync(targetDir)) {
return res.status(404).json({ error: 'Uploads directory not found', path: targetDir });
}
const files = fs.readdirSync(targetDir);
const fileDetails = files.map(file => {
const filePath = path.join(uploadsDir, file);
const filePath = path.join(targetDir, file);
try {
const stats = fs.statSync(filePath);
return {
@@ -709,12 +743,13 @@ router.get('/list-uploads', (req, res) => {
});
return res.json({
directory: uploadsDir,
directory: targetDir,
type: directory,
count: files.length,
files: fileDetails
});
} catch (error) {
return res.status(500).json({ error: error.message, path: uploadsDir });
return res.status(500).json({ error: error.message });
}
});

View File

@@ -65,6 +65,68 @@ router.get('/', async (req, res) => {
paramCounter++;
}
// Handle text filters for specific fields
if (req.query.barcode) {
conditions.push(`p.barcode ILIKE $${paramCounter}`);
params.push(`%${req.query.barcode}%`);
paramCounter++;
}
if (req.query.vendor_reference) {
conditions.push(`p.vendor_reference ILIKE $${paramCounter}`);
params.push(`%${req.query.vendor_reference}%`);
paramCounter++;
}
// Add new text filters for the additional fields
if (req.query.description) {
conditions.push(`p.description ILIKE $${paramCounter}`);
params.push(`%${req.query.description}%`);
paramCounter++;
}
if (req.query.harmonized_tariff_code) {
conditions.push(`p.harmonized_tariff_code ILIKE $${paramCounter}`);
params.push(`%${req.query.harmonized_tariff_code}%`);
paramCounter++;
}
if (req.query.notions_reference) {
conditions.push(`p.notions_reference ILIKE $${paramCounter}`);
params.push(`%${req.query.notions_reference}%`);
paramCounter++;
}
if (req.query.line) {
conditions.push(`p.line ILIKE $${paramCounter}`);
params.push(`%${req.query.line}%`);
paramCounter++;
}
if (req.query.subline) {
conditions.push(`p.subline ILIKE $${paramCounter}`);
params.push(`%${req.query.subline}%`);
paramCounter++;
}
if (req.query.artist) {
conditions.push(`p.artist ILIKE $${paramCounter}`);
params.push(`%${req.query.artist}%`);
paramCounter++;
}
if (req.query.country_of_origin) {
conditions.push(`p.country_of_origin ILIKE $${paramCounter}`);
params.push(`%${req.query.country_of_origin}%`);
paramCounter++;
}
if (req.query.location) {
conditions.push(`p.location ILIKE $${paramCounter}`);
params.push(`%${req.query.location}%`);
paramCounter++;
}
// Handle numeric filters with operators
const numericFields = {
stock: 'p.stock_quantity',
@@ -74,11 +136,31 @@ router.get('/', async (req, res) => {
dailySalesAvg: 'pm.daily_sales_avg',
weeklySalesAvg: 'pm.weekly_sales_avg',
monthlySalesAvg: 'pm.monthly_sales_avg',
avgQuantityPerOrder: 'pm.avg_quantity_per_order',
numberOfOrders: 'pm.number_of_orders',
margin: 'pm.avg_margin_percent',
gmroi: 'pm.gmroi',
inventoryValue: 'pm.inventory_value',
costOfGoodsSold: 'pm.cost_of_goods_sold',
grossProfit: 'pm.gross_profit',
turnoverRate: 'pm.turnover_rate',
leadTime: 'pm.current_lead_time',
currentLeadTime: 'pm.current_lead_time',
targetLeadTime: 'pm.target_lead_time',
stockCoverage: 'pm.days_of_inventory',
daysOfStock: 'pm.days_of_inventory'
daysOfStock: 'pm.days_of_inventory',
weeksOfStock: 'pm.weeks_of_inventory',
reorderPoint: 'pm.reorder_point',
safetyStock: 'pm.safety_stock',
// Add new numeric fields
preorderCount: 'p.preorder_count',
notionsInvCount: 'p.notions_inv_count',
rating: 'p.rating',
reviews: 'p.reviews',
weight: 'p.weight',
totalSold: 'p.total_sold',
baskets: 'p.baskets',
notifies: 'p.notifies'
};
Object.entries(req.query).forEach(([key, value]) => {
@@ -102,6 +184,24 @@ router.get('/', async (req, res) => {
}
});
// Handle date filters
const dateFields = {
firstSaleDate: 'pm.first_sale_date',
lastSaleDate: 'pm.last_sale_date',
lastPurchaseDate: 'pm.last_purchase_date',
firstReceivedDate: 'pm.first_received_date',
lastReceivedDate: 'pm.last_received_date'
};
Object.entries(req.query).forEach(([key, value]) => {
const field = dateFields[key];
if (field) {
conditions.push(`${field}::TEXT LIKE $${paramCounter}`);
params.push(`${value}%`); // Format like '2023-01%' to match by month or '2023-01-01' for exact date
paramCounter++;
}
});
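// How the date filters above behave, assuming the underlying columns are DATEs that
// render as 'YYYY-MM-DD': the incoming value is used as a prefix, so a month or an
// exact day both work.
//   ?lastSaleDate=2024-03     ->  pm.last_sale_date::TEXT LIKE '2024-03%'
//   ?lastSaleDate=2024-03-15  ->  pm.last_sale_date::TEXT LIKE '2024-03-15%'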
// Handle select filters
if (req.query.vendor) {
conditions.push(`p.vendor = $${paramCounter}`);
@@ -256,7 +356,8 @@ router.get('/', async (req, res) => {
pm.last_received_date,
pm.abc_class,
pm.stock_status,
pm.turnover_rate
pm.turnover_rate,
p.date_last_sold
FROM products p
LEFT JOIN product_metrics pm ON p.pid = pm.pid
LEFT JOIN product_categories pc ON p.pid = pc.pid
@@ -473,6 +574,29 @@ router.get('/:id', async (req, res) => {
uom: parseInt(productRows[0].uom),
managing_stock: Boolean(productRows[0].managing_stock),
replenishable: Boolean(productRows[0].replenishable),
// Format new fields
preorder_count: parseInt(productRows[0].preorder_count || 0),
notions_inv_count: parseInt(productRows[0].notions_inv_count || 0),
harmonized_tariff_code: productRows[0].harmonized_tariff_code || '',
notions_reference: productRows[0].notions_reference || '',
line: productRows[0].line || '',
subline: productRows[0].subline || '',
artist: productRows[0].artist || '',
rating: parseFloat(productRows[0].rating || 0),
reviews: parseInt(productRows[0].reviews || 0),
weight: parseFloat(productRows[0].weight || 0),
dimensions: {
length: parseFloat(productRows[0].length || 0),
width: parseFloat(productRows[0].width || 0),
height: parseFloat(productRows[0].height || 0),
},
country_of_origin: productRows[0].country_of_origin || '',
location: productRows[0].location || '',
total_sold: parseInt(productRows[0].total_sold || 0),
baskets: parseInt(productRows[0].baskets || 0),
notifies: parseInt(productRows[0].notifies || 0),
date_last_sold: productRows[0].date_last_sold || null,
// Format existing analytics fields
daily_sales_avg: parseFloat(productRows[0].daily_sales_avg) || 0,
weekly_sales_avg: parseFloat(productRows[0].weekly_sales_avg) || 0,
monthly_sales_avg: parseFloat(productRows[0].monthly_sales_avg) || 0,

View File

@@ -2,9 +2,12 @@
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/box.svg" />
<link rel="icon" type="image/x-icon" href="/cherrybottom.ico" />
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@100..900&display=swap" rel="stylesheet">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Inventory Manager</title>
<title>A Cherry On Bottom</title>
</head>
<body>
<div id="root"></div>

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><g fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"><path d="M21 8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16Z"/><path d="m3.3 7l8.7 5l8.7-5M12 22V12"/></g></svg>

Before: removed SVG icon (340 B).
After: two binary image assets added (30 KiB and 21 KiB); binary files not shown.

View File

@@ -1,42 +1,3 @@
#root {
max-width: 1800px;
margin: 0 auto;
padding: 2rem;
text-align: center;
}
.logo {
height: 6em;
padding: 1.5em;
will-change: filter;
transition: filter 300ms;
}
.logo:hover {
filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
filter: drop-shadow(0 0 2em #61dafbaa);
}
@keyframes logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
@media (prefers-reduced-motion: no-preference) {
a:nth-of-type(2) .logo {
animation: logo-spin infinite 20s linear;
}
}
.card {
padding: 2em;
}
.read-the-docs {
color: #888;
}
font-family: 'Inter', sans-serif;
}

View File

@@ -1,4 +1,3 @@
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { ResponsiveContainer, BarChart, Bar, XAxis, YAxis, Tooltip, ScatterChart, Scatter, ZAxis } from 'recharts';
import config from '../../config';

View File

@@ -0,0 +1,66 @@
import { useQuery } from '@tanstack/react-query';
import { Line, LineChart, ResponsiveContainer, Tooltip, XAxis, YAxis } from 'recharts';
import config from '../../config';
interface SalesData {
date: string;
total: number;
}
export function Overview() {
const { data, isLoading, error } = useQuery<SalesData[]>({
queryKey: ['sales-overview'],
queryFn: async () => {
const response = await fetch(`${config.apiUrl}/dashboard/sales-overview`);
if (!response.ok) {
throw new Error('Failed to fetch sales overview');
}
const rawData = await response.json();
return rawData.map((item: SalesData) => ({
...item,
total: parseFloat(item.total.toString()),
date: new Date(item.date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' })
}));
},
});
if (isLoading) {
return <div>Loading chart...</div>;
}
if (error) {
return <div className="text-red-500">Error loading sales overview</div>;
}
return (
<ResponsiveContainer width="100%" height={350}>
<LineChart data={data}>
<XAxis
dataKey="date"
stroke="#888888"
fontSize={12}
tickLine={false}
axisLine={false}
/>
<YAxis
stroke="#888888"
fontSize={12}
tickLine={false}
axisLine={false}
tickFormatter={(value) => `$${value.toLocaleString()}`}
/>
<Tooltip
formatter={(value: number) => [`$${value.toLocaleString()}`, 'Sales']}
labelFormatter={(label) => `Date: ${label}`}
/>
<Line
type="monotone"
dataKey="total"
stroke="hsl(var(--primary))"
strokeWidth={2}
dot={false}
/>
</LineChart>
</ResponsiveContainer>
);
}

View File

@@ -0,0 +1,79 @@
import { useQuery } from "@tanstack/react-query"
import { CardHeader, CardTitle, CardContent } from "@/components/ui/card"
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table"
import { Progress } from "@/components/ui/progress"
import config from "@/config"
interface VendorMetrics {
vendor: string
avg_lead_time: number
on_time_delivery_rate: number
avg_fill_rate: number
total_orders: number
active_orders: number
overdue_orders: number
}
export function VendorPerformance() {
const { data: vendors } = useQuery<VendorMetrics[]>({
queryKey: ["vendor-metrics"],
queryFn: async () => {
const response = await fetch(`${config.apiUrl}/dashboard/vendor/performance`)
if (!response.ok) {
throw new Error("Failed to fetch vendor metrics")
}
return response.json()
},
})
// Sort vendors by on-time delivery rate
const sortedVendors = vendors
?.sort((a, b) => b.on_time_delivery_rate - a.on_time_delivery_rate)
return (
<>
<CardHeader>
<CardTitle className="text-lg font-medium">Top Vendor Performance</CardTitle>
</CardHeader>
<CardContent className="max-h-[400px] overflow-auto">
<Table>
<TableHeader>
<TableRow>
<TableHead>Vendor</TableHead>
<TableHead>On-Time</TableHead>
<TableHead className="text-right">Fill Rate</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{sortedVendors?.map((vendor) => (
<TableRow key={vendor.vendor}>
<TableCell className="font-medium">{vendor.vendor}</TableCell>
<TableCell>
<div className="flex items-center gap-2">
<Progress
value={vendor.on_time_delivery_rate}
className="h-2"
/>
<span className="w-10 text-sm">
{vendor.on_time_delivery_rate.toFixed(0)}%
</span>
</div>
</TableCell>
<TableCell className="text-right">
{vendor.avg_fill_rate.toFixed(0)}%
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</CardContent>
</>
)
}

View File

@@ -3,7 +3,6 @@ import {
Package,
BarChart2,
Settings,
Box,
ClipboardList,
LogOut,
Users,
@@ -22,6 +21,7 @@ import {
SidebarMenuButton,
SidebarMenuItem,
SidebarSeparator,
useSidebar
} from "@/components/ui/sidebar";
import { useLocation, useNavigate, Link } from "react-router-dom";
import { Protected } from "@/components/auth/Protected";
@@ -80,6 +80,7 @@ const items = [
export function AppSidebar() {
const location = useLocation();
const navigate = useNavigate();
useSidebar();
const handleLogout = () => {
localStorage.removeItem('token');
@@ -90,11 +91,19 @@ export function AppSidebar() {
return (
<Sidebar collapsible="icon" variant="sidebar">
<SidebarHeader>
<div className="p-4 flex items-center gap-2 group-data-[collapsible=icon]:justify-center">
<Box className="h-6 w-6 shrink-0" />
<h2 className="text-lg font-semibold group-data-[collapsible=icon]:hidden">
Inventory Manager
</h2>
<div className="py-1 flex justify-center items-center">
<div className="flex items-center">
<div className="flex-shrink-0 w-8 h-8 relative flex items-center justify-center">
<img
src="/cherrybottom.png"
alt="Cherry Bottom"
className="w-6 h-6 object-contain -rotate-12 transform hover:rotate-0 transition-transform ease-in-out duration-300"
/>
</div>
<div className="ml-2 transition-all duration-200 whitespace-nowrap group-[.group[data-state=collapsed]]:hidden">
<span className="font-bold text-lg">A Cherry On Bottom</span>
</div>
</div>
</div>
</SidebarHeader>
<SidebarSeparator />

View File

@@ -95,12 +95,8 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
isChangeReverted,
getFieldDisplayValueWithHighlight,
fields,
debugData,
}) => {
const [costPerMillionTokens, setCostPerMillionTokens] = useState(2.5); // Default cost
const hasCompanyPrompts =
currentPrompt.debugData?.promptSources?.companyPrompts &&
currentPrompt.debugData.promptSources.companyPrompts.length > 0;
// Create our own state to track changes
const [localReversionState, setLocalReversionState] = useState<
@@ -157,17 +153,6 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
return !!localReversionState[key];
};
// Use "full" as the default tab
const defaultTab = "full";
const [activeTab, setActiveTab] = useState(defaultTab);
// Update activeTab when the dialog is opened with new data
React.useEffect(() => {
if (currentPrompt.isOpen) {
setActiveTab("full");
}
}, [currentPrompt.isOpen]);
// Format time helper
const formatTime = (seconds: number): string => {
if (seconds < 60) {

View File

@@ -125,6 +125,11 @@ interface Product {
}>;
category_paths?: Record<string, string>;
description?: string;
preorder_count: number;
notions_inv_count: number;
}
interface ProductDetailProps {
@@ -225,6 +230,7 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<TabsTrigger value="purchase">Purchase History</TabsTrigger>
<TabsTrigger value="financial">Financial</TabsTrigger>
<TabsTrigger value="vendor">Vendor</TabsTrigger>
<TabsTrigger value="details">Additional Info</TabsTrigger>
</TabsList>
</div>
@@ -255,6 +261,12 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dt className="text-sm text-muted-foreground">UPC</dt>
<dd>{product?.barcode || "N/A"}</dd>
</div>
{product?.description && (
<div>
<dt className="text-sm text-muted-foreground">Description</dt>
<dd>{product.description}</dd>
</div>
)}
<div>
<dt className="text-sm text-muted-foreground">Categories</dt>
<dd className="flex flex-col gap-2">
@@ -359,6 +371,51 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
</div>
</Card>
<Card className="p-4">
<h3 className="font-semibold mb-2">Customer Engagement</h3>
<dl className="grid grid-cols-3 gap-4">
{product?.total_sold > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Total Sold</dt>
<dd className="text-2xl font-semibold">{product.total_sold}</dd>
</div>
)}
{product?.rating > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Rating</dt>
<dd className="text-2xl font-semibold flex items-center">
{product.rating.toFixed(1)}
<span className="ml-1 text-yellow-500">★</span>
</dd>
</div>
)}
{product?.reviews > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Reviews</dt>
<dd className="text-2xl font-semibold">{product.reviews}</dd>
</div>
)}
{product?.baskets > 0 && (
<div>
<dt className="text-sm text-muted-foreground">In Baskets</dt>
<dd className="text-2xl font-semibold">{product.baskets}</dd>
</div>
)}
{product?.notifies > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Notify Requests</dt>
<dd className="text-2xl font-semibold">{product.notifies}</dd>
</div>
)}
{product?.date_last_sold && (
<div>
<dt className="text-sm text-muted-foreground">Last Sold</dt>
<dd className="text-xl font-semibold">{formatDate(product.date_last_sold)}</dd>
</div>
)}
</dl>
</Card>
<Card className="p-4">
<h3 className="font-semibold mb-2">Financial Metrics</h3>
<dl className="space-y-2">
@@ -426,6 +483,18 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<dt className="text-sm text-muted-foreground">Days of Inventory</dt>
<dd className="text-2xl font-semibold">{product?.metrics?.days_of_inventory || 0}</dd>
</div>
{product?.preorder_count > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Preorders</dt>
<dd className="text-2xl font-semibold">{product?.preorder_count}</dd>
</div>
)}
{product?.notions_inv_count > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Notions Inventory</dt>
<dd className="text-2xl font-semibold">{product?.notions_inv_count}</dd>
</div>
)}
</dl>
</Card>
@@ -506,6 +575,51 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
</ResponsiveContainer>
</div>
</Card>
<Card className="p-4">
<h3 className="font-semibold mb-2">Customer Engagement</h3>
<dl className="grid grid-cols-3 gap-4">
{product?.total_sold > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Total Sold</dt>
<dd className="text-2xl font-semibold">{product.total_sold}</dd>
</div>
)}
{product?.rating > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Rating</dt>
<dd className="text-2xl font-semibold flex items-center">
{product.rating.toFixed(1)}
<span className="ml-1 text-yellow-500">★</span>
</dd>
</div>
)}
{product?.reviews > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Reviews</dt>
<dd className="text-2xl font-semibold">{product.reviews}</dd>
</div>
)}
{product?.baskets > 0 && (
<div>
<dt className="text-sm text-muted-foreground">In Baskets</dt>
<dd className="text-2xl font-semibold">{product.baskets}</dd>
</div>
)}
{product?.notifies > 0 && (
<div>
<dt className="text-sm text-muted-foreground">Notify Requests</dt>
<dd className="text-2xl font-semibold">{product.notifies}</dd>
</div>
)}
{product?.date_last_sold && (
<div>
<dt className="text-sm text-muted-foreground">Last Sold</dt>
<dd className="text-xl font-semibold">{formatDate(product.date_last_sold)}</dd>
</div>
)}
</dl>
</Card>
</div>
)}
</TabsContent>
@@ -661,6 +775,123 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
<div className="text-center text-muted-foreground">No vendor performance data available</div>
)}
</TabsContent>
<TabsContent value="details" className="p-4">
{isLoading ? (
<Skeleton className="h-48 w-full" />
) : (
<div className="space-y-4">
<Card className="p-4">
<h3 className="font-semibold mb-2">Product Details</h3>
<dl className="grid grid-cols-2 gap-4">
{product?.description && (
<div className="col-span-2">
<dt className="text-sm text-muted-foreground">Description</dt>
<dd>{product.description}</dd>
</div>
)}
<div>
<dt className="text-sm text-muted-foreground">Created Date</dt>
<dd>{formatDate(product?.created_at)}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Last Updated</dt>
<dd>{formatDate(product?.updated_at)}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Product ID</dt>
<dd>{product?.pid}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Line</dt>
<dd>{product?.line || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Subline</dt>
<dd>{product?.subline || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Artist</dt>
<dd>{product?.artist || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Country of Origin</dt>
<dd>{product?.country_of_origin || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Location</dt>
<dd>{product?.location || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">HTS Code</dt>
<dd>{product?.harmonized_tariff_code || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Notions Reference</dt>
<dd>{product?.notions_reference || 'N/A'}</dd>
</div>
</dl>
</Card>
<Card className="p-4">
<h3 className="font-semibold mb-2">Physical Attributes</h3>
<dl className="grid grid-cols-2 gap-4">
<div>
<dt className="text-sm text-muted-foreground">Weight</dt>
<dd>{product?.weight ? `${product.weight} kg` : 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Dimensions</dt>
<dd>
{product?.dimensions
? `${product.dimensions.length} × ${product.dimensions.width} × ${product.dimensions.height} cm`
: 'N/A'
}
</dd>
</div>
</dl>
</Card>
<Card className="p-4">
<h3 className="font-semibold mb-2">Customer Metrics</h3>
<dl className="grid grid-cols-2 gap-4">
<div>
<dt className="text-sm text-muted-foreground">Rating</dt>
<dd className="flex items-center">
{product?.rating
? <>
{product.rating.toFixed(1)}
<span className="ml-1 text-yellow-500">★</span>
</>
: 'N/A'
}
</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Review Count</dt>
<dd>{product?.reviews || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Total Sold</dt>
<dd>{product?.total_sold || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Currently in Baskets</dt>
<dd>{product?.baskets || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Notify Requests</dt>
<dd>{product?.notifies || 'N/A'}</dd>
</div>
<div>
<dt className="text-sm text-muted-foreground">Date Last Sold</dt>
<dd>{formatDate(product?.date_last_sold) || 'N/A'}</dd>
</div>
</dl>
</Card>
</div>
)}
</TabsContent>
</Tabs>
</VaulDrawer.Content>
</VaulDrawer.Portal>

View File

@@ -51,9 +51,28 @@ const FILTER_OPTIONS: FilterOption[] = [
// Basic Info Group
{ id: "search", label: "Search", type: "text", group: "Basic Info" },
{ id: "sku", label: "SKU", type: "text", group: "Basic Info" },
{ id: "barcode", label: "UPC/Barcode", type: "text", group: "Basic Info" },
{ id: "vendor", label: "Vendor", type: "select", group: "Basic Info" },
{ id: "vendor_reference", label: "Supplier #", type: "text", group: "Basic Info" },
{ id: "brand", label: "Brand", type: "select", group: "Basic Info" },
{ id: "category", label: "Category", type: "select", group: "Basic Info" },
{ id: "description", label: "Description", type: "text", group: "Basic Info" },
{ id: "harmonized_tariff_code", label: "HTS Code", type: "text", group: "Basic Info" },
{ id: "notions_reference", label: "Notions Ref", type: "text", group: "Basic Info" },
{ id: "line", label: "Line", type: "text", group: "Basic Info" },
{ id: "subline", label: "Subline", type: "text", group: "Basic Info" },
{ id: "artist", label: "Artist", type: "text", group: "Basic Info" },
{ id: "country_of_origin", label: "Origin", type: "text", group: "Basic Info" },
{ id: "location", label: "Location", type: "text", group: "Basic Info" },
// Physical Properties
{
id: "weight",
label: "Weight",
type: "number",
group: "Physical Properties",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
// Inventory Group
{
@@ -77,6 +96,20 @@ const FILTER_OPTIONS: FilterOption[] = [
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "preorderCount",
label: "Preorder Count",
type: "number",
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "notionsInvCount",
label: "Notions Inventory",
type: "number",
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "daysOfStock",
label: "Days of Stock",
@@ -84,6 +117,27 @@ const FILTER_OPTIONS: FilterOption[] = [
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "weeksOfStock",
label: "Weeks of Stock",
type: "number",
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "reorderPoint",
label: "Reorder Point",
type: "number",
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "safetyStock",
label: "Safety Stock",
type: "number",
group: "Inventory",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "replenishable",
label: "Replenishable",
@@ -94,6 +148,17 @@ const FILTER_OPTIONS: FilterOption[] = [
],
group: "Inventory",
},
{
id: "abcClass",
label: "ABC Class",
type: "select",
options: [
{ label: "A", value: "A" },
{ label: "B", value: "B" },
{ label: "C", value: "C" },
],
group: "Inventory",
},
// Pricing Group
{
@@ -140,6 +205,73 @@ const FILTER_OPTIONS: FilterOption[] = [
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "avgQuantityPerOrder",
label: "Avg Qty/Order",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "numberOfOrders",
label: "Order Count",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "firstSaleDate",
label: "First Sale Date",
type: "text",
group: "Sales Metrics",
},
{
id: "lastSaleDate",
label: "Last Sale Date",
type: "text",
group: "Sales Metrics",
},
{
id: "date_last_sold",
label: "Date Last Sold",
type: "text",
group: "Sales Metrics",
},
{
id: "total_sold",
label: "Total Sold",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "baskets",
label: "In Baskets",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "notifies",
label: "Notifies",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "rating",
label: "Rating",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "reviews",
label: "Reviews",
type: "number",
group: "Sales Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
// Financial Metrics Group
{
@@ -156,6 +288,34 @@ const FILTER_OPTIONS: FilterOption[] = [
group: "Financial Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "inventoryValue",
label: "Inventory Value",
type: "number",
group: "Financial Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "costOfGoodsSold",
label: "COGS",
type: "number",
group: "Financial Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "grossProfit",
label: "Gross Profit",
type: "number",
group: "Financial Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "turnoverRate",
label: "Turnover Rate",
type: "number",
group: "Financial Metrics",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
// Lead Time & Stock Coverage Group
{
@@ -165,6 +325,20 @@ const FILTER_OPTIONS: FilterOption[] = [
group: "Lead Time & Coverage",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "currentLeadTime",
label: "Current Lead Time",
type: "number",
group: "Lead Time & Coverage",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "targetLeadTime",
label: "Target Lead Time",
type: "number",
group: "Lead Time & Coverage",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
{
id: "leadTimeStatus",
label: "Lead Time Status",
@@ -183,19 +357,26 @@ const FILTER_OPTIONS: FilterOption[] = [
group: "Lead Time & Coverage",
operators: ["=", ">", ">=", "<", "<=", "between"],
},
// Classification Group
{
id: "abcClass",
label: "ABC Class",
type: "select",
options: [
{ label: "A", value: "A" },
{ label: "B", value: "B" },
{ label: "C", value: "C" },
],
group: "Classification",
id: "lastPurchaseDate",
label: "Last Purchase Date",
type: "text",
group: "Lead Time & Coverage",
},
{
id: "firstReceivedDate",
label: "First Received Date",
type: "text",
group: "Lead Time & Coverage",
},
{
id: "lastReceivedDate",
label: "Last Received Date",
type: "text",
group: "Lead Time & Coverage",
},
// Classification Group
{
id: "managingStock",
label: "Managing Stock",

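The FILTER_OPTIONS entries above are purely declarative; the sketch below illustrates how a numeric operator such as "between" could be evaluated against a product field. The applyNumericFilter helper and its FilterValue shape are assumptions for illustration only, not part of this diff.

// Sketch only: hypothetical evaluation of a numeric FilterOption operator.
type FilterValue = { operator: string; value: number; value2?: number };

function applyNumericFilter(fieldValue: number | null | undefined, filter: FilterValue): boolean {
  if (fieldValue === null || fieldValue === undefined) return false;
  switch (filter.operator) {
    case "=": return fieldValue === filter.value;
    case ">": return fieldValue > filter.value;
    case ">=": return fieldValue >= filter.value;
    case "<": return fieldValue < filter.value;
    case "<=": return fieldValue <= filter.value;
    case "between":
      // Treated as inclusive of both bounds in this sketch.
      return filter.value2 !== undefined && fieldValue >= filter.value && fieldValue <= filter.value2;
    default:
      return true;
  }
}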
View File

@@ -234,6 +234,11 @@ export function ProductTable({
)) || '-'}
</div>
);
case 'dimensions':
if (value) {
return `${value.length}×${value.width}×${value.height}`;
}
return '-';
case 'stock_status':
return getStockStatus(product.stock_status);
case 'abc_class':
@@ -252,6 +257,14 @@ export function ProductTable({
) : (
<Badge variant="outline">Non-Replenishable</Badge>
);
case 'rating':
if (value === undefined || value === null) return '-';
return (
<div className="flex items-center">
{value.toFixed(1)}
<span className="ml-1 text-yellow-500"></span>
</div>
);
default:
if (columnDef?.format && value !== undefined && value !== null) {
// For numeric formats (those using toFixed), ensure the value is a number
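          // Sketch, not part of this diff: one way to honor the comment above is to
          // coerce numeric(15,3) strings such as "12.500" before calling a
          // toFixed-based formatter, leaving non-numeric values untouched.
          const numericValue = typeof value === 'string' ? Number(value) : value;
          return columnDef.format(typeof numericValue === 'number' && !Number.isNaN(numericValue) ? numericValue : value);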

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
import { useState, useMemo, useCallback, useRef, useEffect } from "react";
import { useState, useMemo, useCallback, useEffect } from "react";
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { Button } from "@/components/ui/button";
import {
@@ -90,7 +90,7 @@ const ImageForm = ({
}: {
editingImage: ReusableImage | null;
formData: ImageFormData;
setFormData: (data: ImageFormData) => void;
setFormData: (data: ImageFormData | ((prev: ImageFormData) => ImageFormData)) => void;
onSubmit: (e: React.FormEvent) => void;
onCancel: () => void;
fieldOptions: FieldOptions | undefined;
@@ -99,11 +99,11 @@ const ImageForm = ({
isDragActive: boolean;
}) => {
const handleNameChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
setFormData(prev => ({ ...prev, name: e.target.value }));
setFormData((prev: ImageFormData) => ({ ...prev, name: e.target.value }));
}, [setFormData]);
const handleGlobalChange = useCallback((checked: boolean) => {
setFormData(prev => ({
setFormData((prev: ImageFormData) => ({
...prev,
is_global: checked,
company: checked ? null : prev.company
@@ -111,7 +111,7 @@ const ImageForm = ({
}, [setFormData]);
const handleCompanyChange = useCallback((value: string) => {
setFormData(prev => ({ ...prev, company: value }));
setFormData((prev: ImageFormData) => ({ ...prev, company: value }));
}, [setFormData]);
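  // Sketch note, not part of this diff: widening the setFormData prop type to also
  // accept an updater function is what allows the callbacks above to use the
  // React-style functional form, e.g.:
  //   setFormData(prev => ({ ...prev, name: "new name" }));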
return (
@@ -738,12 +738,18 @@ export function ReusableImageManagement() {
</DialogContent>
</Dialog>
<style jsx global>{`
      {/* Add global styles for this component using a regular <style> tag */}
<style>{`
.reusable-image-table thead tr th,
.reusable-image-table tbody tr td {
padding-left: 1rem;
padding-right: 1rem;
}
.bg-checkerboard {
background-image: linear-gradient(45deg, #f0f0f0 25%, transparent 25%),
linear-gradient(-45deg, #f0f0f0 25%, transparent 25%),
linear-gradient(45deg, transparent 75%, #f0f0f0 75%),
linear-gradient(-45deg, transparent 75%, #f0f0f0 75%);
background-size: 20px 20px;
background-position: 0 0, 0 10px, 10px -10px, -10px 0px;
}

View File

@@ -96,17 +96,15 @@
@apply border-border;
}
body {
@apply bg-background text-foreground;
@apply bg-background text-foreground font-sans;
}
}
@layer base {
* {
@apply border-border outline-ring/50;
}
body {
@apply bg-background text-foreground;
@apply bg-background text-foreground font-sans;
}
}

View File

@@ -1,6 +1,7 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import './App.css'
import App from './App.tsx'
import { BrowserRouter as Router } from 'react-router-dom'

View File

@@ -1,28 +1,31 @@
import { useState, useContext } from "react";
import { useNavigate, useSearchParams } from "react-router-dom";
import { AuthContext } from "@/contexts/AuthContext";
import { toast } from "sonner";
import { cn } from "@/lib/utils";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { toast } from "sonner";
import { Loader2, Box } from "lucide-react";
import { motion } from "framer-motion";
import { AuthContext } from "@/contexts/AuthContext";
import { Label } from "@/components/ui/label";
import { motion } from "motion/react";
export function Login() {
const [username, setUsername] = useState("");
const [password, setPassword] = useState("");
const [isLoading, setIsLoading] = useState(false);
const navigate = useNavigate();
const [searchParams] = useSearchParams();
const { login } = useContext(AuthContext);
const handleLogin = async (e: React.FormEvent) => {
const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
setIsLoading(true);
const formData = new FormData(e.currentTarget);
const username = formData.get("username") as string;
const password = formData.get("password") as string;
try {
await login(username, password);
// Login successful, redirect to the requested page or home
const redirectTo = searchParams.get("redirect") || "/";
navigate(redirectTo);
@@ -36,70 +39,77 @@ export function Login() {
};
return (
<motion.div
layout
className="min-h-screen bg-gradient-to-b from-slate-100 to-slate-200 dark:from-slate-900 dark:to-slate-800 antialiased"
>
<div className="flex flex-col gap-2 p-2 bg-primary">
<div className="p-4 flex items-center gap-2 group-data-[collapsible=icon]:justify-center text-white">
<Box className="h-6 w-6 shrink-0" />
<h2 className="text-lg font-semibold group-data-[collapsible=icon]:hidden">
Inventory Manager
</h2>
<motion.div className="flex min-h-svh flex-row items-center justify-center bg-muted p-6 md:p-10">
<div className="fixed top-0 w-full backdrop-blur-sm bg-white/40 border-b shadow-sm z-10">
<div className="mx-auto p-4 sm:p-6">
<div className="flex items-center gap-2 font-medium text-3xl justify-center sm:justify-start">
<div className="relative">
<div className="absolute inset-0 "></div>
<img
src="/cherrybottom.png"
alt="Cherry Bottom"
className="h-12 w-12 object-contain -rotate-12 transform hover:rotate-0 transition-transform ease-in-out duration-300 relative z-10"
/>
</div>
<span className="font-bold font-text-primary">A Cherry On Bottom</span>
</div>
<p className="text-sm italic text-muted-foreground text-center sm:text-left ml-32 -mt-1">
supporting the cherry on top
</p>
</div>
</div>
<motion.div
initial={{ opacity: 0, scale: 0.95 }}
animate={{ opacity: 1, scale: 1 }}
transition={{ duration: 0.3, delay: 0.2 }}
className="container relative flex min-h-[calc(100vh-4rem)] flex-col items-center justify-center"
>
<div className="mx-auto flex w-full flex-col justify-center space-y-6 sm:w-[350px]">
<Card className="border-none shadow-xl">
<CardHeader className="space-y-1">
<div className="flex items-center justify-center mb-2">
<Box className="h-10 w-10 text-primary" />
</div>
<CardTitle className="text-2xl text-center">
Log in to continue
</CardTitle>
</CardHeader>
<CardContent>
<form onSubmit={handleLogin}>
<div className="grid gap-4">
<div className="grid gap-2">
<Input
id="username"
placeholder="Username"
value={username}
onChange={(e) => setUsername(e.target.value)}
disabled={isLoading}
className="w-full"
/>
</div>
<div className="grid gap-2">
<Input
id="password"
type="password"
placeholder="Password"
value={password}
onChange={(e) => setPassword(e.target.value)}
disabled={isLoading}
className="w-full"
/>
</div>
<Button className="w-full" type="submit" disabled={isLoading}>
{isLoading && (
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
)}
Sign In
</Button>
</div>
</form>
</CardContent>
</Card>
</div>
</motion.div>
<div className="w-full sm:w-[80%] max-w-sm mt-20">
<LoginForm onSubmit={handleSubmit} isLoading={isLoading} />
</div>
</motion.div>
);
}
interface LoginFormProps {
className?: string;
isLoading?: boolean;
onSubmit: (e: React.FormEvent<HTMLFormElement>) => void;
}
function LoginForm({ className, isLoading, onSubmit, ...props }: LoginFormProps) {
return (
<motion.div className={cn("flex flex-col gap-6", className)} {...props}>
<Card className="overflow-hidden rounded-lg shadow-lg">
<CardHeader className="pb-0">
<CardTitle className="text-2xl font-bold text-center">Log in to your account</CardTitle>
</CardHeader>
<CardContent className="grid p-0 h-full">
<form className="p-6 md:p-8 flex flex-col gap-6" onSubmit={onSubmit}>
<div className="grid gap-2">
<Label htmlFor="username">Username</Label>
<Input
id="username"
name="username"
type="text"
required
disabled={isLoading}
/>
</div>
<div className="grid gap-2">
<Label htmlFor="password">Password</Label>
<Input
id="password"
name="password"
type="password"
required
disabled={isLoading}
/>
</div>
<Button type="submit" className="w-full" disabled={isLoading}>
{isLoading ? "Logging in..." : "Log In"}
</Button>
</form>
</CardContent>
</Card>
</motion.div>
);
}

View File

@@ -52,30 +52,71 @@ const AVAILABLE_COLUMNS: ColumnDef[] = [
{ key: 'vendor', label: 'Supplier', group: 'Basic Info' },
{ key: 'vendor_reference', label: 'Supplier #', group: 'Basic Info' },
{ key: 'barcode', label: 'UPC', group: 'Basic Info' },
{ key: 'description', label: 'Description', group: 'Basic Info' },
{ key: 'created_at', label: 'Created', group: 'Basic Info' },
{ key: 'harmonized_tariff_code', label: 'HTS Code', group: 'Basic Info' },
{ key: 'notions_reference', label: 'Notions Ref', group: 'Basic Info' },
{ key: 'line', label: 'Line', group: 'Basic Info' },
{ key: 'subline', label: 'Subline', group: 'Basic Info' },
{ key: 'artist', label: 'Artist', group: 'Basic Info' },
{ key: 'country_of_origin', label: 'Origin', group: 'Basic Info' },
{ key: 'location', label: 'Location', group: 'Basic Info' },
// Physical properties
{ key: 'weight', label: 'Weight', group: 'Physical', format: (v) => v?.toString() ?? '-' },
{ key: 'dimensions', label: 'Dimensions', group: 'Physical', format: (v) => v ? `${v.length}x${v.width}x${v.height}` : '-' },
// Stock columns
{ key: 'stock_quantity', label: 'Shelf Count', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'stock_status', label: 'Stock Status', group: 'Stock' },
{ key: 'preorder_count', label: 'Preorders', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'notions_inv_count', label: 'Notions Inv', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'days_of_inventory', label: 'Days of Stock', group: 'Stock', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'weeks_of_inventory', label: 'Weeks of Stock', group: 'Stock', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'abc_class', label: 'ABC Class', group: 'Stock' },
{ key: 'replenishable', label: 'Replenishable', group: 'Stock' },
{ key: 'moq', label: 'MOQ', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'reorder_qty', label: 'Reorder Qty', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'reorder_point', label: 'Reorder Point', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'safety_stock', label: 'Safety Stock', group: 'Stock', format: (v) => v?.toString() ?? '-' },
{ key: 'overstocked_amt', label: 'Overstock Amt', group: 'Stock', format: (v) => v?.toString() ?? '-' },
// Pricing columns
{ key: 'price', label: 'Price', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'regular_price', label: 'Default Price', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'cost_price', label: 'Cost', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'landing_cost_price', label: 'Landing Cost', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
// Sales columns
{ key: 'daily_sales_avg', label: 'Daily Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'weekly_sales_avg', label: 'Weekly Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'monthly_sales_avg', label: 'Monthly Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'avg_quantity_per_order', label: 'Avg Qty/Order', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'number_of_orders', label: 'Order Count', group: 'Sales', format: (v) => v?.toString() ?? '-' },
{ key: 'first_sale_date', label: 'First Sale', group: 'Sales' },
{ key: 'last_sale_date', label: 'Last Sale', group: 'Sales' },
{ key: 'date_last_sold', label: 'Date Last Sold', group: 'Sales' },
{ key: 'total_sold', label: 'Total Sold', group: 'Sales', format: (v) => v?.toString() ?? '-' },
{ key: 'baskets', label: 'In Baskets', group: 'Sales', format: (v) => v?.toString() ?? '-' },
{ key: 'notifies', label: 'Notifies', group: 'Sales', format: (v) => v?.toString() ?? '-' },
{ key: 'rating', label: 'Rating', group: 'Sales', format: (v) => v ? v.toFixed(1) : '-' },
{ key: 'reviews', label: 'Reviews', group: 'Sales', format: (v) => v?.toString() ?? '-' },
// Financial columns
{ key: 'gmroi', label: 'GMROI', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'turnover_rate', label: 'Turnover Rate', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'avg_margin_percent', label: 'Margin %', group: 'Financial', format: (v) => v ? `${v.toFixed(1)}%` : '-' },
{ key: 'inventory_value', label: 'Inventory Value', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'cost_of_goods_sold', label: 'COGS', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
{ key: 'gross_profit', label: 'Gross Profit', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
// Lead Time columns
{ key: 'current_lead_time', label: 'Current Lead Time', group: 'Lead Time', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'target_lead_time', label: 'Target Lead Time', group: 'Lead Time', format: (v) => v?.toFixed(1) ?? '-' },
{ key: 'lead_time_status', label: 'Lead Time Status', group: 'Lead Time' },
{ key: 'last_purchase_date', label: 'Last Purchase', group: 'Lead Time' },
{ key: 'first_received_date', label: 'First Received', group: 'Lead Time' },
{ key: 'last_received_date', label: 'Last Received', group: 'Lead Time' },
];
// Define default columns for each view
@@ -93,14 +134,17 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
'daily_sales_avg',
'weekly_sales_avg',
'monthly_sales_avg',
'inventory_value',
],
critical: [
'image',
'title',
'stock_quantity',
'safety_stock',
'daily_sales_avg',
'weekly_sales_avg',
'reorder_qty',
'reorder_point',
'vendor',
'last_purchase_date',
'current_lead_time',
@@ -109,11 +153,13 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
'image',
'title',
'stock_quantity',
'reorder_point',
'daily_sales_avg',
'weekly_sales_avg',
'reorder_qty',
'vendor',
'last_purchase_date',
'avg_lead_time_days',
],
overstocked: [
'image',
@@ -123,15 +169,19 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
'weekly_sales_avg',
'overstocked_amt',
'days_of_inventory',
'inventory_value',
'turnover_rate',
],
'at-risk': [
'image',
'title',
'stock_quantity',
'safety_stock',
'daily_sales_avg',
'weekly_sales_avg',
'days_of_inventory',
'last_sale_date',
'current_lead_time',
],
new: [
'image',
@@ -141,6 +191,7 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
'brand',
'price',
'regular_price',
'first_received_date',
],
healthy: [
'image',
@@ -150,6 +201,8 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
'weekly_sales_avg',
'monthly_sales_avg',
'days_of_inventory',
'gross_profit',
'gmroi',
],
};
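VIEW_COLUMNS above only lists column keys; below is a minimal sketch of resolving them to full ColumnDef objects for the active view. The columnsForView helper name is an assumption for illustration, not part of this diff.

// Sketch only: look up ColumnDef entries for a view, skipping unknown keys.
function columnsForView(view: string): ColumnDef[] {
  const keys = VIEW_COLUMNS[view] ?? [];
  return keys
    .map((key) => AVAILABLE_COLUMNS.find((col) => col.key === key))
    .filter((col): col is ColumnDef => col !== undefined);
}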

View File

@@ -23,6 +23,30 @@ export interface Product {
created_at: string;
updated_at: string;
// New fields
description?: string;
preorder_count?: number;
notions_inv_count?: number;
harmonized_tariff_code?: string;
notions_reference?: string;
line?: string;
subline?: string;
artist?: string;
rating?: number;
reviews?: number;
weight?: number;
dimensions?: {
length: number;
width: number;
height: number;
};
country_of_origin?: string;
location?: string;
total_sold?: number;
baskets?: number;
notifies?: number;
date_last_sold?: string;
// Metrics
daily_sales_avg?: string; // numeric(15,3)
weekly_sales_avg?: string; // numeric(15,3)
@@ -43,6 +67,7 @@ export interface Product {
gross_profit?: string; // numeric(15,3)
gmroi?: string; // numeric(15,3)
avg_lead_time_days?: string; // numeric(15,3)
first_received_date?: string;
last_received_date?: string;
abc_class?: string;
stock_status?: string;

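Because the metric fields above are typed as strings (numeric(15,3) columns are serialized as text), a small parsing helper is a natural companion; the toNumber name below is an assumption for illustration, not part of this diff.

// Sketch only: parse an optional numeric(15,3) string such as "12.500" into a number,
// returning undefined when the field is missing or not numeric.
function toNumber(value?: string): number | undefined {
  if (value === undefined || value.trim() === '') return undefined;
  const parsed = Number(value);
  return Number.isNaN(parsed) ? undefined : parsed;
}

// Example usage: toNumber(product.daily_sales_avg) === 12.5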
View File

@@ -14,6 +14,9 @@ export default {
}
},
extend: {
fontFamily: {
sans: ['Inter', 'sans-serif'],
},
colors: {
border: 'hsl(var(--border))',
input: 'hsl(var(--input))',

File diff suppressed because one or more lines are too long