Update import table schemas with minor changes; add new metrics schema

2025-03-29 16:46:31 -04:00
parent 0796518e26
commit f4854423ab
8 changed files with 380 additions and 208 deletions

View File

@@ -281,7 +281,7 @@ CREATE TABLE IF NOT EXISTS calculate_status (
);
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);
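
The sync_status row is the watermark the incremental import path keys off. A minimal usage sketch (the table name and id value are illustrative, not taken from this commit):

-- Read the watermark before an incremental import
SELECT last_sync_timestamp, last_sync_id
FROM sync_status
WHERE table_name = 'orders';

-- Advance it after the import succeeds
INSERT INTO sync_status (table_name, last_sync_timestamp, last_sync_id)
VALUES ('orders', CURRENT_TIMESTAMP, 12345)
ON CONFLICT (table_name) DO UPDATE SET
    last_sync_timestamp = EXCLUDED.last_sync_timestamp,
    last_sync_id = EXCLUDED.last_sync_id;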

View File

@@ -0,0 +1,165 @@
-- Drop tables in reverse order of dependency
DROP TABLE IF EXISTS public.product_metrics CASCADE;
DROP TABLE IF EXISTS public.daily_product_snapshots CASCADE;
-- Table Definition: daily_product_snapshots
CREATE TABLE public.daily_product_snapshots (
snapshot_date DATE NOT NULL,
pid INT8 NOT NULL,
sku VARCHAR, -- Copied for convenience
-- Inventory Metrics (End of Day / Last Snapshot of Day)
eod_stock_quantity INT NOT NULL DEFAULT 0,
eod_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- Increased precision
eod_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
eod_stock_gross NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
stockout_flag BOOLEAN NOT NULL DEFAULT FALSE,
-- Sales Metrics (Aggregated for the snapshot_date)
units_sold INT NOT NULL DEFAULT 0,
units_returned INT NOT NULL DEFAULT 0,
gross_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
discounts NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
returns_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
net_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- gross_revenue - discounts
cogs NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
gross_regular_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
profit NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- net_revenue - cogs
-- Receiving Metrics (Aggregated for the snapshot_date)
units_received INT NOT NULL DEFAULT 0,
cost_received NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
calculation_timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (snapshot_date, pid) -- Composite primary key
-- CONSTRAINT fk_daily_snapshot_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE -- FK Optional on snapshot table
);
-- Add Indexes for daily_product_snapshots
CREATE INDEX idx_daily_snapshot_pid_date ON public.daily_product_snapshots(pid, snapshot_date); -- Useful for product-specific time series
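
The sales columns follow the relationships spelled out in the comments above (net_revenue = gross_revenue - discounts, profit = net_revenue - cogs). A hedged sketch of the daily roll-up, assuming it is aggregated from the orders table defined elsewhere in this commit (the real calculation job may differ):

-- Illustrative roll-up only
INSERT INTO public.daily_product_snapshots
    (snapshot_date, pid, units_sold, gross_revenue, discounts, net_revenue, cogs, profit)
SELECT o.date::date,
       o.pid,
       SUM(o.quantity),
       SUM(o.price * o.quantity),
       SUM(o.discount),
       SUM(o.price * o.quantity) - SUM(o.discount),
       SUM(o.costeach * o.quantity),
       (SUM(o.price * o.quantity) - SUM(o.discount)) - SUM(o.costeach * o.quantity)
FROM orders o
WHERE o.canceled IS NOT TRUE
GROUP BY o.date::date, o.pid
ON CONFLICT (snapshot_date, pid) DO UPDATE SET
    units_sold    = EXCLUDED.units_sold,
    gross_revenue = EXCLUDED.gross_revenue,
    discounts     = EXCLUDED.discounts,
    net_revenue   = EXCLUDED.net_revenue,
    cogs          = EXCLUDED.cogs,
    profit        = EXCLUDED.profit;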
-- Table Definition: product_metrics
CREATE TABLE public.product_metrics (
pid INT8 PRIMARY KEY,
last_calculated TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-- Product Info (Copied for convenience/performance)
sku VARCHAR,
title VARCHAR,
brand VARCHAR,
vendor VARCHAR,
image_url VARCHAR, -- (e.g., products.image_175)
is_visible BOOLEAN,
is_replenishable BOOLEAN,
-- Current Status (Refreshed Hourly)
current_price NUMERIC(10, 2),
current_regular_price NUMERIC(10, 2),
current_cost_price NUMERIC(10, 4), -- Increased precision for cost
current_landing_cost_price NUMERIC(10, 4), -- Increased precision for cost
current_stock INT NOT NULL DEFAULT 0,
current_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
current_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
current_stock_gross NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
on_order_qty INT NOT NULL DEFAULT 0,
on_order_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
on_order_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
earliest_expected_date DATE,
-- total_received_lifetime INT NOT NULL DEFAULT 0, -- Can calc if needed
-- Historical Dates (Calculated Once/Periodically)
date_created DATE,
date_first_received DATE,
date_last_received DATE,
date_first_sold DATE,
date_last_sold DATE,
age_days INT, -- Calculated based on LEAST(date_created, date_first_sold)
-- Rolling Period Metrics (Refreshed Hourly from daily_product_snapshots)
sales_7d INT, revenue_7d NUMERIC(14, 4),
sales_14d INT, revenue_14d NUMERIC(14, 4),
sales_30d INT, revenue_30d NUMERIC(14, 4),
cogs_30d NUMERIC(14, 4), profit_30d NUMERIC(14, 4),
returns_units_30d INT, returns_revenue_30d NUMERIC(14, 4),
discounts_30d NUMERIC(14, 4),
gross_revenue_30d NUMERIC(14, 4), gross_regular_revenue_30d NUMERIC(14, 4),
stockout_days_30d INT,
sales_365d INT, revenue_365d NUMERIC(14, 4),
avg_stock_units_30d NUMERIC(10, 2), avg_stock_cost_30d NUMERIC(14, 4),
avg_stock_retail_30d NUMERIC(14, 4), avg_stock_gross_30d NUMERIC(14, 4),
received_qty_30d INT, received_cost_30d NUMERIC(14, 4),
-- Lifetime Metrics (Recalculated Hourly/Daily from daily_product_snapshots)
lifetime_sales INT,
lifetime_revenue NUMERIC(16, 4),
-- First Period Metrics (Calculated Once/Periodically from daily_product_snapshots)
first_7_days_sales INT, first_7_days_revenue NUMERIC(14, 4),
first_30_days_sales INT, first_30_days_revenue NUMERIC(14, 4),
first_60_days_sales INT, first_60_days_revenue NUMERIC(14, 4),
first_90_days_sales INT, first_90_days_revenue NUMERIC(14, 4),
-- Calculated KPIs (Refreshed Hourly based on rolling metrics)
asp_30d NUMERIC(10, 2), -- revenue_30d / sales_30d
acp_30d NUMERIC(10, 4), -- cogs_30d / sales_30d
avg_ros_30d NUMERIC(10, 4), -- profit_30d / sales_30d
avg_sales_per_day_30d NUMERIC(10, 2), -- sales_30d / 30.0
avg_sales_per_month_30d NUMERIC(10, 2), -- sales_30d (assuming 30d = 1 month for this metric)
margin_30d NUMERIC(5, 2), -- (profit_30d / revenue_30d) * 100
markup_30d NUMERIC(5, 2), -- (profit_30d / cogs_30d) * 100
gmroi_30d NUMERIC(10, 2), -- profit_30d / avg_stock_cost_30d
stockturn_30d NUMERIC(10, 2), -- sales_30d / avg_stock_units_30d
return_rate_30d NUMERIC(5, 2), -- returns_units_30d / (sales_30d + returns_units_30d) * 100
discount_rate_30d NUMERIC(5, 2), -- discounts_30d / gross_revenue_30d * 100
stockout_rate_30d NUMERIC(5, 2), -- stockout_days_30d / 30.0 * 100
markdown_30d NUMERIC(14, 4), -- gross_regular_revenue_30d - gross_revenue_30d
markdown_rate_30d NUMERIC(5, 2), -- markdown_30d / gross_regular_revenue_30d * 100
sell_through_30d NUMERIC(5, 2), -- sales_30d / (current_stock + sales_30d) * 100
avg_lead_time_days INT, -- Calculated Periodically from purchase_orders
-- Forecasting & Replenishment (Refreshed Hourly)
abc_class CHAR(1), -- Updated Periodically (e.g., Weekly)
sales_velocity_daily NUMERIC(10, 4), -- sales_30d / (30.0 - stockout_days_30d)
config_lead_time INT, -- From settings tables
config_days_of_stock INT, -- From settings tables
config_safety_stock INT, -- From settings_product
planning_period_days INT, -- config_lead_time + config_days_of_stock
lead_time_forecast_units NUMERIC(10, 2), -- sales_velocity_daily * config_lead_time
days_of_stock_forecast_units NUMERIC(10, 2), -- sales_velocity_daily * config_days_of_stock
planning_period_forecast_units NUMERIC(10, 2), -- lead_time_forecast_units + days_of_stock_forecast_units
lead_time_closing_stock NUMERIC(10, 2), -- current_stock + on_order_qty - lead_time_forecast_units
days_of_stock_closing_stock NUMERIC(10, 2), -- lead_time_closing_stock - days_of_stock_forecast_units
replenishment_needed_raw NUMERIC(10, 2), -- planning_period_forecast_units + config_safety_stock - current_stock - on_order_qty
replenishment_units INT, -- CEILING(GREATEST(0, replenishment_needed_raw))
replenishment_cost NUMERIC(14, 4), -- replenishment_units * COALESCE(current_landing_cost_price, current_cost_price)
replenishment_retail NUMERIC(14, 4), -- replenishment_units * current_price
replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - COALESCE(current_landing_cost_price, current_cost_price))
to_order_units INT, -- Apply MOQ/UOM logic to replenishment_units
forecast_lost_sales_units NUMERIC(10, 2), -- GREATEST(0, -lead_time_closing_stock)
forecast_lost_revenue NUMERIC(14, 4), -- forecast_lost_sales_units * current_price
stock_cover_in_days NUMERIC(10, 1), -- current_stock / sales_velocity_daily
po_cover_in_days NUMERIC(10, 1), -- on_order_qty / sales_velocity_daily
sells_out_in_days NUMERIC(10, 1), -- (current_stock + on_order_qty) / sales_velocity_daily
replenish_date DATE, -- Calc based on when stock hits safety stock minus lead time
overstocked_units INT, -- GREATEST(0, current_stock - config_safety_stock - planning_period_forecast_units)
overstocked_cost NUMERIC(14, 4), -- overstocked_units * COALESCE(current_landing_cost_price, current_cost_price)
overstocked_retail NUMERIC(14, 4), -- overstocked_units * current_price
is_old_stock BOOLEAN, -- Based on age, last sold, last received, on_order status
-- Yesterday's Metrics (Refreshed Hourly from daily_product_snapshots)
yesterday_sales INT,
CONSTRAINT fk_product_metrics_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE
);
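
All of the 30-day KPIs are pure column math over the rolling metrics, exactly as the inline comments state. A hedged sketch of the hourly refresh (NULLIF guards the divide-by-zero cases; the real job may compute these in the same pass that fills the rolling columns):

-- Illustrative KPI refresh only
UPDATE public.product_metrics SET
    asp_30d           = ROUND(revenue_30d / NULLIF(sales_30d, 0), 2),
    acp_30d           = ROUND(cogs_30d / NULLIF(sales_30d, 0), 4),
    margin_30d        = ROUND(profit_30d / NULLIF(revenue_30d, 0) * 100, 2),
    markup_30d        = ROUND(profit_30d / NULLIF(cogs_30d, 0) * 100, 2),
    gmroi_30d         = ROUND(profit_30d / NULLIF(avg_stock_cost_30d, 0), 2),
    stockturn_30d     = ROUND(sales_30d / NULLIF(avg_stock_units_30d, 0), 2),
    return_rate_30d   = ROUND(returns_units_30d::numeric
                            / NULLIF(sales_30d + returns_units_30d, 0) * 100, 2),
    discount_rate_30d = ROUND(discounts_30d / NULLIF(gross_revenue_30d, 0) * 100, 2),
    stockout_rate_30d = ROUND(stockout_days_30d / 30.0 * 100, 2),
    sell_through_30d  = ROUND(sales_30d::numeric
                            / NULLIF(current_stock + sales_30d, 0) * 100, 2),
    last_calculated   = CURRENT_TIMESTAMP;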
-- Add Indexes for product_metrics (adjust based on common filtering/sorting in frontend)
CREATE INDEX idx_product_metrics_brand ON public.product_metrics(brand);
CREATE INDEX idx_product_metrics_vendor ON public.product_metrics(vendor);
CREATE INDEX idx_product_metrics_sku ON public.product_metrics(sku);
CREATE INDEX idx_product_metrics_abc_class ON public.product_metrics(abc_class);
CREATE INDEX idx_product_metrics_revenue_30d ON public.product_metrics(revenue_30d DESC NULLS LAST); -- Example sorting index
CREATE INDEX idx_product_metrics_sales_30d ON public.product_metrics(sales_30d DESC NULLS LAST); -- Example sorting index
CREATE INDEX idx_product_metrics_current_stock ON public.product_metrics(current_stock);
CREATE INDEX idx_product_metrics_sells_out_in_days ON public.product_metrics(sells_out_in_days ASC NULLS LAST); -- Example sorting index
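
The forecasting block composes the same way: velocity feeds the lead-time and days-of-stock forecasts, which feed the replenishment quantity. A sketch under the assumption that the config_* columns have already been filled from the settings tables (not the actual refresh job):

-- Illustrative forecast/replenishment math only
UPDATE public.product_metrics pm SET
    sales_velocity_daily           = v.velocity,
    planning_period_days           = pm.config_lead_time + pm.config_days_of_stock,
    lead_time_forecast_units       = v.velocity * pm.config_lead_time,
    days_of_stock_forecast_units   = v.velocity * pm.config_days_of_stock,
    planning_period_forecast_units = v.velocity * (pm.config_lead_time + pm.config_days_of_stock),
    replenishment_units            = CEIL(GREATEST(0,
        v.velocity * (pm.config_lead_time + pm.config_days_of_stock)
        + pm.config_safety_stock - pm.current_stock - pm.on_order_qty))
FROM (
    SELECT pid,
           sales_30d::numeric / NULLIF(30.0 - stockout_days_30d, 0) AS velocity
    FROM public.product_metrics
) v
WHERE v.pid = pm.pid;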

View File

@@ -17,48 +17,48 @@ $func$ language plpgsql;
-- Create tables
CREATE TABLE products (
pid BIGINT NOT NULL,
title VARCHAR(255) NOT NULL,
title TEXT NOT NULL,
description TEXT,
SKU VARCHAR(50) NOT NULL,
sku TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE,
first_received TIMESTAMP WITH TIME ZONE,
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
price DECIMAL(10, 3) NOT NULL,
regular_price DECIMAL(10, 3) NOT NULL,
cost_price DECIMAL(10, 3),
landing_cost_price DECIMAL(10, 3),
barcode VARCHAR(50),
harmonized_tariff_code VARCHAR(20),
price NUMERIC(14, 4) NOT NULL,
regular_price NUMERIC(14, 4) NOT NULL,
cost_price NUMERIC(14, 4),
landing_cost_price NUMERIC(14, 4),
barcode TEXT,
harmonized_tariff_code TEXT,
updated_at TIMESTAMP WITH TIME ZONE,
visible BOOLEAN DEFAULT true,
managing_stock BOOLEAN DEFAULT true,
replenishable BOOLEAN DEFAULT true,
vendor VARCHAR(100),
vendor_reference VARCHAR(100),
notions_reference VARCHAR(100),
permalink VARCHAR(255),
vendor TEXT,
vendor_reference TEXT,
notions_reference TEXT,
permalink TEXT,
categories TEXT,
image VARCHAR(255),
image_175 VARCHAR(255),
image_full VARCHAR(255),
brand VARCHAR(100),
line VARCHAR(100),
subline VARCHAR(100),
artist VARCHAR(100),
image TEXT,
image_175 TEXT,
image_full TEXT,
brand TEXT,
line TEXT,
subline TEXT,
artist TEXT,
options TEXT,
tags TEXT,
moq INTEGER DEFAULT 1,
uom INTEGER DEFAULT 1,
rating DECIMAL(10,2) DEFAULT 0.00,
rating NUMERIC(14, 4) DEFAULT 0.00,
reviews INTEGER DEFAULT 0,
weight DECIMAL(10,3),
length DECIMAL(10,3),
width DECIMAL(10,3),
height DECIMAL(10,3),
country_of_origin VARCHAR(5),
location VARCHAR(50),
weight NUMERIC(14, 4),
length NUMERIC(14, 4),
width NUMERIC(14, 4),
height NUMERIC(14, 4),
country_of_origin TEXT,
location TEXT,
total_sold INTEGER DEFAULT 0,
baskets INTEGER DEFAULT 0,
notifies INTEGER DEFAULT 0,
@@ -74,26 +74,25 @@ CREATE TRIGGER update_products_updated
EXECUTE FUNCTION update_updated_column();
-- Create indexes for products table
CREATE INDEX idx_products_sku ON products(SKU);
CREATE INDEX idx_products_sku ON products(sku);
CREATE INDEX idx_products_vendor ON products(vendor);
CREATE INDEX idx_products_brand ON products(brand);
CREATE INDEX idx_products_location ON products(location);
CREATE INDEX idx_products_total_sold ON products(total_sold);
CREATE INDEX idx_products_date_last_sold ON products(date_last_sold);
CREATE INDEX idx_products_visible ON products(visible);
CREATE INDEX idx_products_replenishable ON products(replenishable);
CREATE INDEX idx_products_updated ON products(updated);
-- Create categories table with hierarchy support
CREATE TABLE categories (
cat_id BIGINT PRIMARY KEY,
name VARCHAR(100) NOT NULL,
name TEXT NOT NULL,
type SMALLINT NOT NULL,
parent_id BIGINT,
description TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
status VARCHAR(20) DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
status TEXT DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id) ON DELETE SET NULL
);
-- Create trigger for categories
@@ -107,6 +106,7 @@ COMMENT ON COLUMN categories.type IS '10=section, 11=category, 12=subcategory, 1
CREATE INDEX idx_categories_parent ON categories(parent_id);
CREATE INDEX idx_categories_type ON categories(type);
CREATE INDEX idx_categories_status ON categories(status);
CREATE INDEX idx_categories_name ON categories(name);
CREATE INDEX idx_categories_name_type ON categories(name, type);
-- Create product_categories junction table
@@ -119,28 +119,28 @@ CREATE TABLE product_categories (
);
CREATE INDEX idx_product_categories_category ON product_categories(cat_id);
CREATE INDEX idx_product_categories_product ON product_categories(pid);
-- Create orders table with its indexes
CREATE TABLE orders (
id BIGSERIAL PRIMARY KEY,
order_number VARCHAR(50) NOT NULL,
order_number TEXT NOT NULL,
pid BIGINT NOT NULL,
SKU VARCHAR(50) NOT NULL,
date DATE NOT NULL,
price DECIMAL(10,3) NOT NULL,
sku TEXT NOT NULL,
date TIMESTAMP WITH TIME ZONE NOT NULL,
price NUMERIC(14, 4) NOT NULL,
quantity INTEGER NOT NULL,
discount DECIMAL(10,3) DEFAULT 0.000,
tax DECIMAL(10,3) DEFAULT 0.000,
discount NUMERIC(14, 4) DEFAULT 0.0000,
tax NUMERIC(14, 4) DEFAULT 0.0000,
tax_included BOOLEAN DEFAULT false,
shipping DECIMAL(10,3) DEFAULT 0.000,
costeach DECIMAL(10,3) DEFAULT 0.000,
customer VARCHAR(50) NOT NULL,
customer_name VARCHAR(100),
status VARCHAR(20) DEFAULT 'pending',
shipping NUMERIC(14, 4) DEFAULT 0.0000,
costeach NUMERIC(14, 4) DEFAULT 0.0000,
customer TEXT NOT NULL,
customer_name TEXT,
status TEXT DEFAULT 'pending',
canceled BOOLEAN DEFAULT false,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE (order_number, pid)
UNIQUE (order_number, pid),
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE RESTRICT
);
-- Create trigger for orders
@@ -151,36 +151,37 @@ CREATE TRIGGER update_orders_updated
CREATE INDEX idx_orders_number ON orders(order_number);
CREATE INDEX idx_orders_pid ON orders(pid);
CREATE INDEX idx_orders_sku ON orders(sku);
CREATE INDEX idx_orders_customer ON orders(customer);
CREATE INDEX idx_orders_date ON orders(date);
CREATE INDEX idx_orders_status ON orders(status);
CREATE INDEX idx_orders_metrics ON orders(pid, date, canceled);
CREATE INDEX idx_orders_pid_date ON orders(pid, date);
CREATE INDEX idx_orders_updated ON orders(updated);
-- Create purchase_orders table with its indexes
CREATE TABLE purchase_orders (
id BIGSERIAL PRIMARY KEY,
po_id VARCHAR(50) NOT NULL,
vendor VARCHAR(100) NOT NULL,
po_id TEXT NOT NULL,
vendor TEXT NOT NULL,
date DATE NOT NULL,
expected_date DATE,
pid BIGINT NOT NULL,
sku VARCHAR(50) NOT NULL,
name VARCHAR(255) NOT NULL,
cost_price DECIMAL(10, 3) NOT NULL,
po_cost_price DECIMAL(10, 3) NOT NULL,
status SMALLINT DEFAULT 1,
receiving_status SMALLINT DEFAULT 1,
sku TEXT NOT NULL,
name TEXT NOT NULL,
cost_price NUMERIC(14, 4) NOT NULL,
po_cost_price NUMERIC(14, 4) NOT NULL,
status TEXT DEFAULT 'created',
receiving_status TEXT DEFAULT 'created',
notes TEXT,
long_note TEXT,
ordered INTEGER NOT NULL,
received INTEGER DEFAULT 0,
received_date DATE,
last_received_date DATE,
received_by VARCHAR,
received_by TEXT,
receiving_history JSONB,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (pid) REFERENCES products(pid),
FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
UNIQUE (po_id, pid)
);
@@ -192,18 +193,19 @@ CREATE TRIGGER update_purchase_orders_updated
COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
COMMENT ON COLUMN purchase_orders.status IS '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done';
COMMENT ON COLUMN purchase_orders.receiving_status IS '0=canceled,1=created,30=partial_received,40=full_received,50=paid';
COMMENT ON COLUMN purchase_orders.status IS 'canceled, created, electronically_ready_send, ordered, preordered, electronically_sent, receiving_started, done';
COMMENT ON COLUMN purchase_orders.receiving_status IS 'canceled, created, partial_received, full_received, paid';
COMMENT ON COLUMN purchase_orders.receiving_history IS 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag';
CREATE INDEX idx_po_id ON purchase_orders(po_id);
CREATE INDEX idx_po_sku ON purchase_orders(sku);
CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
CREATE INDEX idx_po_status ON purchase_orders(status);
CREATE INDEX idx_po_receiving_status ON purchase_orders(receiving_status);
CREATE INDEX idx_po_metrics ON purchase_orders(pid, date, status, ordered, received);
CREATE INDEX idx_po_metrics_receiving ON purchase_orders(pid, date, receiving_status, received_date);
CREATE INDEX idx_po_product_date ON purchase_orders(pid, date);
CREATE INDEX idx_po_product_status ON purchase_orders(pid, status);
CREATE INDEX idx_po_expected_date ON purchase_orders(expected_date);
CREATE INDEX idx_po_last_received_date ON purchase_orders(last_received_date);
CREATE INDEX idx_po_pid_status ON purchase_orders(pid, status);
CREATE INDEX idx_po_pid_date ON purchase_orders(pid, date);
CREATE INDEX idx_po_updated ON purchase_orders(updated);
SET session_replication_role = 'origin'; -- Re-enable foreign key checks
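
Re-enabling here implies the script opened with the opposite setting; a bulk-load script of this shape typically starts with the line below (assumed, since that part of the file is outside this hunk) so trigger-based and FK enforcement is skipped while the tables are filled:

SET session_replication_role = 'replica'; -- Assumed opener: disable triggers/FK checks during bulk load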

View File

@@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders');
dotenv.config({ path: path.join(__dirname, "../.env") });
// Constants to control which imports run
const IMPORT_CATEGORIES = true;
const IMPORT_PRODUCTS = true;
const IMPORT_ORDERS = true;
const IMPORT_CATEGORIES = false;
const IMPORT_PRODUCTS = false;
const IMPORT_ORDERS = false;
const IMPORT_PURCHASE_ORDERS = true;
// Add flag for incremental updates
@@ -111,8 +111,8 @@ async function main() {
// Initialize sync_status table if it doesn't exist
await localConnection.query(`
CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
table_name TEXT PRIMARY KEY,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);

View File

@@ -117,43 +117,43 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
CREATE TEMP TABLE temp_order_items (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
SKU VARCHAR(50) NOT NULL,
price DECIMAL(10,2) NOT NULL,
sku TEXT NOT NULL,
price NUMERIC(14, 4) NOT NULL,
quantity INTEGER NOT NULL,
base_discount DECIMAL(10,2) DEFAULT 0,
base_discount NUMERIC(14, 4) DEFAULT 0,
PRIMARY KEY (order_id, pid)
);
CREATE TEMP TABLE temp_order_meta (
order_id INTEGER NOT NULL,
date DATE NOT NULL,
customer VARCHAR(100) NOT NULL,
customer_name VARCHAR(150) NOT NULL,
status INTEGER,
date TIMESTAMP WITH TIME ZONE NOT NULL,
customer TEXT NOT NULL,
customer_name TEXT NOT NULL,
status TEXT,
canceled BOOLEAN,
summary_discount DECIMAL(10,2) DEFAULT 0.00,
summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
summary_discount NUMERIC(14, 4) DEFAULT 0.0000,
summary_subtotal NUMERIC(14, 4) DEFAULT 0.0000,
PRIMARY KEY (order_id)
);
CREATE TEMP TABLE temp_order_discounts (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
discount DECIMAL(10,2) NOT NULL,
discount NUMERIC(14, 4) NOT NULL,
PRIMARY KEY (order_id, pid)
);
CREATE TEMP TABLE temp_order_taxes (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
tax DECIMAL(10,2) NOT NULL,
tax NUMERIC(14, 4) NOT NULL,
PRIMARY KEY (order_id, pid)
);
CREATE TEMP TABLE temp_order_costs (
order_id INTEGER NOT NULL,
pid INTEGER NOT NULL,
costeach DECIMAL(10,3) DEFAULT 0.000,
costeach NUMERIC(14, 4) DEFAULT 0.0000,
PRIMARY KEY (order_id, pid)
);
@@ -172,10 +172,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
]);
await localConnection.query(`
INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
INSERT INTO temp_order_items (order_id, pid, sku, price, quantity, base_discount)
VALUES ${placeholders}
ON CONFLICT (order_id, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
sku = EXCLUDED.sku,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
base_discount = EXCLUDED.base_discount
@@ -241,10 +241,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
const values = subBatch.flatMap(order => [
order.order_id,
order.date,
new Date(order.date), // Convert to TIMESTAMP WITH TIME ZONE
order.customer,
toTitleCase(order.customer_name) || '',
order.status,
order.status.toString(), // Convert status to TEXT
order.canceled,
order.summary_discount || 0,
order.summary_subtotal || 0
@@ -447,7 +447,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
SELECT
oi.order_id as order_number,
oi.pid::bigint as pid,
oi.SKU as sku,
oi.sku,
om.date,
oi.price,
oi.quantity,
@@ -457,18 +457,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
WHEN om.summary_discount > 0 AND om.summary_subtotal > 0 THEN
ROUND((om.summary_discount * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 2)
ELSE 0
END)::DECIMAL(10,2) as discount,
COALESCE(ot.total_tax, 0)::DECIMAL(10,2) as tax,
END)::NUMERIC(14, 4) as discount,
COALESCE(ot.total_tax, 0)::NUMERIC(14, 4) as tax,
false as tax_included,
0 as shipping,
om.customer,
om.customer_name,
om.status,
om.canceled,
COALESCE(ot.costeach, oi.price * 0.5)::DECIMAL(10,3) as costeach
COALESCE(ot.costeach, oi.price * 0.5)::NUMERIC(14, 4) as costeach
FROM (
SELECT DISTINCT ON (order_id, pid)
order_id, pid, SKU, price, quantity, base_discount
order_id, pid, sku, price, quantity, base_discount
FROM temp_order_items
WHERE order_id = ANY($1)
ORDER BY order_id, pid
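(The discount expression above prorates an order-level discount across line items by revenue share: a 10.00 summary discount against a 40.00 line in a 100.00 subtotal yields 10.00 * 40.00 / 100.00 = 4.00 for that line, with NULLIF guarding the zero-subtotal case.)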
@@ -508,7 +508,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
o.order_number,
o.pid,
o.sku || 'NO-SKU',
o.date,
o.date, // This is now a TIMESTAMP WITH TIME ZONE
o.price,
o.quantity,
o.discount,
@@ -517,7 +517,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
o.shipping,
o.customer,
o.customer_name,
o.status,
o.status.toString(), // Convert status to TEXT
o.canceled,
o.costeach
]);

View File

@@ -57,50 +57,50 @@ async function setupTemporaryTables(connection) {
await connection.query(`
CREATE TEMP TABLE temp_products (
pid BIGINT NOT NULL,
title VARCHAR(255),
title TEXT,
description TEXT,
sku VARCHAR(50),
sku TEXT,
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
price DECIMAL(10,3) NOT NULL DEFAULT 0,
regular_price DECIMAL(10,3) NOT NULL DEFAULT 0,
cost_price DECIMAL(10,3),
vendor VARCHAR(100),
vendor_reference VARCHAR(100),
notions_reference VARCHAR(100),
brand VARCHAR(100),
line VARCHAR(100),
subline VARCHAR(100),
artist VARCHAR(100),
price NUMERIC(14, 4) NOT NULL DEFAULT 0,
regular_price NUMERIC(14, 4) NOT NULL DEFAULT 0,
cost_price NUMERIC(14, 4),
vendor TEXT,
vendor_reference TEXT,
notions_reference TEXT,
brand TEXT,
line TEXT,
subline TEXT,
artist TEXT,
categories TEXT,
created_at TIMESTAMP,
first_received TIMESTAMP,
landing_cost_price DECIMAL(10,3),
barcode VARCHAR(50),
harmonized_tariff_code VARCHAR(50),
updated_at TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE,
first_received TIMESTAMP WITH TIME ZONE,
landing_cost_price NUMERIC(14, 4),
barcode TEXT,
harmonized_tariff_code TEXT,
updated_at TIMESTAMP WITH TIME ZONE,
visible BOOLEAN,
managing_stock BOOLEAN DEFAULT true,
replenishable BOOLEAN,
permalink VARCHAR(255),
permalink TEXT,
moq INTEGER DEFAULT 1,
uom INTEGER DEFAULT 1,
rating DECIMAL(10,2),
rating NUMERIC(14, 4),
reviews INTEGER,
weight DECIMAL(10,3),
length DECIMAL(10,3),
width DECIMAL(10,3),
height DECIMAL(10,3),
country_of_origin VARCHAR(100),
location VARCHAR(100),
weight NUMERIC(14, 4),
length NUMERIC(14, 4),
width NUMERIC(14, 4),
height NUMERIC(14, 4),
country_of_origin TEXT,
location TEXT,
total_sold INTEGER,
baskets INTEGER,
notifies INTEGER,
date_last_sold TIMESTAMP,
image VARCHAR(255),
image_175 VARCHAR(255),
image_full VARCHAR(255),
date_last_sold TIMESTAMP WITH TIME ZONE,
image TEXT,
image_175 TEXT,
image_full TEXT,
options TEXT,
tags TEXT,
needs_update BOOLEAN DEFAULT TRUE,

View File

@@ -73,19 +73,18 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
-- Temporary table for purchase orders
CREATE TEMP TABLE temp_purchase_orders (
po_id VARCHAR(50) NOT NULL,
po_id TEXT NOT NULL,
pid BIGINT NOT NULL,
sku VARCHAR(50),
name VARCHAR(255),
vendor VARCHAR(255),
sku TEXT,
name TEXT,
vendor TEXT,
date TIMESTAMP WITH TIME ZONE,
expected_date DATE,
status INTEGER,
status_text VARCHAR(50),
status TEXT,
notes TEXT,
long_note TEXT,
ordered INTEGER,
po_cost_price DECIMAL(10,3),
po_cost_price NUMERIC(14, 4),
supplier_id INTEGER,
date_created TIMESTAMP WITH TIME ZONE,
date_ordered TIMESTAMP WITH TIME ZONE,
@@ -94,27 +93,26 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
-- Temporary table for receivings
CREATE TEMP TABLE temp_receivings (
receiving_id VARCHAR(50) NOT NULL,
po_id VARCHAR(50),
receiving_id TEXT NOT NULL,
po_id TEXT,
pid BIGINT NOT NULL,
qty_each INTEGER,
cost_each DECIMAL(10,5),
cost_each NUMERIC(14, 4),
received_by INTEGER,
received_date TIMESTAMP WITH TIME ZONE,
receiving_created_date TIMESTAMP WITH TIME ZONE,
supplier_id INTEGER,
status INTEGER,
status_text VARCHAR(50),
status TEXT,
PRIMARY KEY (receiving_id, pid)
);
-- Temporary table for tracking FIFO allocations
CREATE TEMP TABLE temp_receiving_allocations (
po_id VARCHAR(50) NOT NULL,
po_id TEXT NOT NULL,
pid BIGINT NOT NULL,
receiving_id VARCHAR(50) NOT NULL,
receiving_id TEXT NOT NULL,
allocated_qty INTEGER NOT NULL,
cost_each DECIMAL(10,5) NOT NULL,
cost_each NUMERIC(14, 4) NOT NULL,
received_date TIMESTAMP WITH TIME ZONE NOT NULL,
received_by INTEGER,
PRIMARY KEY (po_id, pid, receiving_id)
@@ -123,8 +121,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
-- Temporary table for employee names
CREATE TEMP TABLE employee_names (
employeeid INTEGER PRIMARY KEY,
firstname VARCHAR(100),
lastname VARCHAR(100)
firstname TEXT,
lastname TEXT
);
-- Create indexes for efficient joins
@@ -135,22 +133,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
// Map status codes to text values
const poStatusMap = {
0: 'Canceled',
1: 'Created',
10: 'Ready ESend',
11: 'Ordered',
12: 'Preordered',
13: 'Electronically Sent',
15: 'Receiving Started',
50: 'Done'
0: 'canceled',
1: 'created',
10: 'electronically_ready_send',
11: 'ordered',
12: 'preordered',
13: 'electronically_sent',
15: 'receiving_started',
50: 'done'
};
const receivingStatusMap = {
0: 'Canceled',
1: 'Created',
30: 'Partial Received',
40: 'Full Received',
50: 'Paid'
0: 'canceled',
1: 'created',
30: 'partial_received',
40: 'full_received',
50: 'paid'
};
// Get time window for data retrieval
@@ -281,8 +279,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
vendor: po.vendor || 'Unknown Vendor',
date: validateDate(po.date_ordered) || validateDate(po.date_created),
expected_date: validateDate(po.date_estin),
status: po.status,
status_text: poStatusMap[po.status] || '',
status: poStatusMap[po.status] || 'created',
notes: po.notes || '',
long_note: po.long_note || '',
ordered: product.qty_each,
@@ -298,8 +295,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const batch = completePOs.slice(i, i + INSERT_BATCH_SIZE);
const placeholders = batch.map((_, idx) => {
const base = idx * 16;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15}, $${base + 16})`;
const base = idx * 15;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
}).join(',');
const values = batch.flatMap(po => [
@@ -311,7 +308,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po.date,
po.expected_date,
po.status,
po.status_text,
po.notes,
po.long_note,
po.ordered,
@@ -323,8 +319,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
await localConnection.query(`
INSERT INTO temp_purchase_orders (
po_id, pid, sku, name, vendor, date, expected_date, status, status_text,
notes, long_note, ordered, po_cost_price, supplier_id, date_created, date_ordered
po_id, pid, sku, name, vendor, date, expected_date, status, notes, long_note,
ordered, po_cost_price, supplier_id, date_created, date_ordered
)
VALUES ${placeholders}
ON CONFLICT (po_id, pid) DO UPDATE SET
@@ -334,7 +330,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
date = EXCLUDED.date,
expected_date = EXCLUDED.expected_date,
status = EXCLUDED.status,
status_text = EXCLUDED.status_text,
notes = EXCLUDED.notes,
long_note = EXCLUDED.long_note,
ordered = EXCLUDED.ordered,
@@ -448,9 +443,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
received_date: validateDate(product.received_date) || validateDate(product.receiving_created_date),
receiving_created_date: validateDate(product.receiving_created_date),
supplier_id: receiving.supplier_id,
status: receiving.status,
status_text: receivingStatusMap[receiving.status] || '',
receiving_created_date: validateDate(product.receiving_created_date)
status: receivingStatusMap[receiving.status] || 'created'
});
}
@@ -459,8 +452,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
const batch = completeReceivings.slice(i, i + INSERT_BATCH_SIZE);
const placeholders = batch.map((_, idx) => {
const base = idx * 11;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11})`;
const base = idx * 10;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10})`;
}).join(',');
const values = batch.flatMap(r => [
@@ -473,14 +466,13 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
r.received_date,
r.receiving_created_date,
r.supplier_id,
r.status,
r.status_text
r.status
]);
await localConnection.query(`
INSERT INTO temp_receivings (
receiving_id, po_id, pid, qty_each, cost_each, received_by,
received_date, receiving_created_date, supplier_id, status, status_text
received_date, receiving_created_date, supplier_id, status
)
VALUES ${placeholders}
ON CONFLICT (receiving_id, pid) DO UPDATE SET
@@ -491,8 +483,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
received_date = EXCLUDED.received_date,
receiving_created_date = EXCLUDED.receiving_created_date,
supplier_id = EXCLUDED.supplier_id,
status = EXCLUDED.status,
status_text = EXCLUDED.status_text
status = EXCLUDED.status
`, values);
}
@@ -586,7 +577,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
name: "Handling standalone receivings",
query: `
INSERT INTO temp_purchase_orders (
po_id, pid, sku, name, vendor, date, status, status_text,
po_id, pid, sku, name, vendor, date, status,
ordered, po_cost_price, supplier_id, date_created, date_ordered
)
SELECT
@@ -600,8 +591,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
'Unknown Vendor'
) as vendor,
COALESCE(r.received_date, r.receiving_created_date) as date,
NULL as status,
NULL as status_text,
'created' as status,
NULL as ordered,
r.cost_each as po_cost_price,
r.supplier_id,
@@ -872,13 +862,13 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po.name,
COALESCE(ca.avg_cost, po.po_cost_price) as cost_price,
po.po_cost_price,
CASE WHEN po.status IS NULL THEN 1 ELSE po.status END as status,
COALESCE(po.status, 'created'),
CASE
WHEN rs.total_received IS NULL THEN 1
WHEN rs.total_received = 0 THEN 1
WHEN rs.total_received < po.ordered THEN 30
WHEN rs.total_received >= po.ordered THEN 40
ELSE 1
WHEN rs.total_received IS NULL THEN 'created'
WHEN rs.total_received = 0 THEN 'created'
WHEN rs.total_received < po.ordered THEN 'partial_received'
WHEN rs.total_received >= po.ordered THEN 'full_received'
ELSE 'created'
END as receiving_status,
po.notes,
po.long_note,

View File

@@ -102,35 +102,40 @@ router.get('/stock/metrics', async (req, res) => {
// Returns purchase order metrics by vendor
router.get('/purchase/metrics', async (req, res) => {
try {
// First check if there are any purchase orders in the database
const { rows: [poCount] } = await executeQuery(`
SELECT COUNT(*) as count FROM purchase_orders
`);
const { rows: [poMetrics] } = await executeQuery(`
SELECT
COALESCE(COUNT(DISTINCT CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.po_id
END), 0)::integer as active_pos,
COALESCE(COUNT(DISTINCT CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
AND po.expected_date < CURRENT_DATE
THEN po.po_id
END), 0)::integer as overdue_pos,
COALESCE(SUM(CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.ordered
ELSE 0
END), 0)::integer as total_units,
ROUND(COALESCE(SUM(CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.ordered * po.cost_price
ELSE 0
END), 0)::numeric, 3) as total_cost,
ROUND(COALESCE(SUM(CASE
WHEN po.receiving_status < $1
WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
THEN po.ordered * p.price
ELSE 0
END), 0)::numeric, 3) as total_retail
FROM purchase_orders po
JOIN products p ON po.pid = p.pid
`, [ReceivingStatus.PartialReceived]);
`);
const { rows: vendorOrders } = await executeQuery(`
SELECT
@@ -141,15 +146,15 @@ router.get('/purchase/metrics', async (req, res) => {
ROUND(COALESCE(SUM(po.ordered * p.price), 0)::numeric, 3) as retail
FROM purchase_orders po
JOIN products p ON po.pid = p.pid
WHERE po.receiving_status < $1
WHERE po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
GROUP BY po.vendor
HAVING ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) > 0
ORDER BY cost DESC
`, [ReceivingStatus.PartialReceived]);
`);
// If no data or missing metrics, provide dummy data
if (!poMetrics || vendorOrders.length === 0) {
console.log('No purchase metrics found, returning dummy data');
// If no purchase orders exist at all in the database, return dummy data
if (parseInt(poCount.count) === 0) {
console.log('No purchase orders found in database, returning dummy data');
return res.json({
activePurchaseOrders: 12,
@@ -165,6 +170,20 @@ router.get('/purchase/metrics', async (req, res) => {
});
}
// If no active purchase orders match the criteria, return zeros instead of dummy data
if (vendorOrders.length === 0) {
console.log('No active purchase orders matching criteria, returning zeros');
return res.json({
activePurchaseOrders: parseInt(poMetrics.active_pos) || 0,
overduePurchaseOrders: parseInt(poMetrics.overdue_pos) || 0,
onOrderUnits: parseInt(poMetrics.total_units) || 0,
onOrderCost: parseFloat(poMetrics.total_cost) || 0,
onOrderRetail: parseFloat(poMetrics.total_retail) || 0,
vendorOrders: []
});
}
// Format response to match PurchaseMetricsData interface
const response = {
activePurchaseOrders: parseInt(poMetrics.active_pos) || 0,
@@ -184,19 +203,15 @@ router.get('/purchase/metrics', async (req, res) => {
res.json(response);
} catch (err) {
console.error('Error fetching purchase metrics:', err);
// Return dummy data on error
res.json({
activePurchaseOrders: 12,
overduePurchaseOrders: 3,
onOrderUnits: 1250,
onOrderCost: 12500,
onOrderRetail: 25000,
vendorOrders: [
{ vendor: "Test Vendor 1", orders: 5, units: 500, cost: 5000, retail: 10000 },
{ vendor: "Test Vendor 2", orders: 4, units: 400, cost: 4000, retail: 8000 },
{ vendor: "Test Vendor 3", orders: 3, units: 350, cost: 3500, retail: 7000 }
]
res.status(500).json({
error: 'Failed to fetch purchase metrics',
details: err.message,
activePurchaseOrders: 0,
overduePurchaseOrders: 0,
onOrderUnits: 0,
onOrderCost: 0,
onOrderRetail: 0,
vendorOrders: []
});
}
});
@@ -1018,17 +1033,17 @@ router.get('/vendor/performance', async (req, res) => {
THEN EXTRACT(EPOCH FROM (po.received_date - po.date))/86400
ELSE NULL END)::numeric, 2), 0) as avg_lead_time,
COALESCE(ROUND(SUM(CASE
WHEN po.status = 'completed' AND po.received_date <= po.expected_date
WHEN po.status = 'done' AND po.received_date <= po.expected_date
THEN 1
ELSE 0
END)::numeric * 100.0 / NULLIF(COUNT(*)::numeric, 0), 2), 0) as on_time_delivery_rate,
COALESCE(ROUND(AVG(CASE
WHEN po.status = 'completed'
WHEN po.status = 'done'
THEN po.received::numeric / NULLIF(po.ordered::numeric, 0) * 100
ELSE NULL
END)::numeric, 2), 0) as avg_fill_rate,
COUNT(CASE WHEN po.status = 'open' THEN 1 END)::integer as active_orders,
COUNT(CASE WHEN po.status = 'open' AND po.expected_date < CURRENT_DATE THEN 1 END)::integer as overdue_orders
COUNT(CASE WHEN po.status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') THEN 1 END)::integer as active_orders,
COUNT(CASE WHEN po.status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') AND po.expected_date < CURRENT_DATE THEN 1 END)::integer as overdue_orders
FROM purchase_orders po
WHERE po.date >= CURRENT_DATE - INTERVAL '180 days'
GROUP BY po.vendor
@@ -1165,7 +1180,7 @@ router.get('/key-metrics', async (req, res) => {
SELECT
COUNT(DISTINCT po_id) as total_pos,
SUM(ordered * cost_price) as total_po_value,
COUNT(CASE WHEN status = 'open' THEN 1 END) as open_pos
COUNT(CASE WHEN status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') THEN 1 END) as open_pos
FROM purchase_orders
WHERE order_date >= CURRENT_DATE - INTERVAL '${days} days'
)