Compare commits: move-to-po...796a2e5d1f (12 commits)

Commits: 796a2e5d1f, 047122a620, 4c4359908c, 54cc4be1e3, f4854423ab, 0796518e26, 7aa494aaad, 1e0be3f86e, a068a253cd, 087ec710f6, 957c7b5eb1, 8b8845b423
docs/inventory-calculation-reference.md (1380 lines, new file; diff suppressed because it is too large)
inventory-server/db/config-schema-new.sql (113 lines, new file)

@@ -0,0 +1,113 @@
```sql
-- Drop tables in reverse order of dependency
DROP TABLE IF EXISTS public.settings_product CASCADE;
DROP TABLE IF EXISTS public.settings_vendor CASCADE;
DROP TABLE IF EXISTS public.settings_global CASCADE;

-- Table Definition: settings_global
CREATE TABLE public.settings_global (
    setting_key VARCHAR PRIMARY KEY,
    setting_value VARCHAR NOT NULL,
    description TEXT,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Table Definition: settings_vendor
CREATE TABLE public.settings_vendor (
    vendor VARCHAR PRIMARY KEY, -- Matches products.vendor
    default_lead_time_days INT,
    default_days_of_stock INT,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Index for faster lookups if needed (PK usually sufficient)
-- CREATE INDEX idx_settings_vendor_vendor ON public.settings_vendor(vendor);

-- Table Definition: settings_product
CREATE TABLE public.settings_product (
    pid INT8 PRIMARY KEY,
    lead_time_days INT, -- Overrides vendor/global
    days_of_stock INT, -- Overrides vendor/global
    safety_stock INT DEFAULT 0, -- Minimum desired stock level
    forecast_method VARCHAR DEFAULT 'standard', -- e.g., 'standard', 'seasonal'
    exclude_from_forecast BOOLEAN DEFAULT FALSE,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT fk_settings_product_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE
);


-- Description: Inserts or updates standard default global settings.
-- Safe to rerun; will update existing keys with these default values.
-- Dependencies: `settings_global` table must exist.
-- Frequency: Run once initially, or rerun if you want to reset global defaults.

INSERT INTO public.settings_global (setting_key, setting_value, description) VALUES
    ('abc_revenue_threshold_a', '0.80', 'Revenue percentage for Class A (cumulative)'),
    ('abc_revenue_threshold_b', '0.95', 'Revenue percentage for Class B (cumulative)'),
    ('abc_calculation_basis', 'revenue_30d', 'Metric for ABC calc (revenue_30d, sales_30d, lifetime_revenue)'),
    ('abc_calculation_period', '30', 'Days period for ABC calculation if not lifetime'),
    ('default_forecast_method', 'standard', 'Default forecast method (standard, seasonal)'),
    ('default_lead_time_days', '14', 'Global default lead time in days'),
    ('default_days_of_stock', '30', 'Global default days of stock coverage target'),
    -- Set default safety stock to 0 units. Can be overridden per product.
    -- If you wanted safety stock in days, you'd store 'days' here and calculate units later.
    ('default_safety_stock_units', '0', 'Global default safety stock in units')
ON CONFLICT (setting_key) DO UPDATE SET
    setting_value = EXCLUDED.setting_value,
    description = EXCLUDED.description,
    updated_at = CURRENT_TIMESTAMP; -- Update timestamp if default value changes


-- Description: Creates placeholder rows in `settings_vendor` for each unique vendor
-- found in the `products` table. Does NOT set specific overrides.
-- Safe to rerun; will NOT overwrite existing vendor settings.
-- Dependencies: `settings_vendor` table must exist, `products` table populated.
-- Frequency: Run once after initial product load, or periodically if new vendors are added.

INSERT INTO public.settings_vendor (
    vendor,
    default_lead_time_days,
    default_days_of_stock
    -- updated_at will use its default CURRENT_TIMESTAMP on insert
)
SELECT DISTINCT
    p.vendor,
    -- Explicitly cast NULL to INTEGER to resolve type mismatch
    CAST(NULL AS INTEGER),
    CAST(NULL AS INTEGER)
FROM
    public.products p
WHERE
    p.vendor IS NOT NULL
    AND p.vendor <> '' -- Exclude blank vendors if necessary
ON CONFLICT (vendor) DO NOTHING; -- IMPORTANT: Do not overwrite existing vendor settings

SELECT COUNT(*) FROM public.settings_vendor; -- Verify rows were inserted


-- Description: Creates placeholder rows in `settings_product` for each unique product
-- found in the `products` table. Sets basic defaults but no specific overrides.
-- Safe to rerun; will NOT overwrite existing product settings.
-- Dependencies: `settings_product` table must exist, `products` table populated.
-- Frequency: Run once after initial product load, or periodically if new products are added.

INSERT INTO public.settings_product (
    pid,
    lead_time_days,        -- NULL = Inherit from Vendor/Global
    days_of_stock,         -- NULL = Inherit from Vendor/Global
    safety_stock,          -- Default to 0 units initially
    forecast_method,       -- NULL = Inherit from Global ('standard')
    exclude_from_forecast  -- Default to FALSE
    -- updated_at will use its default CURRENT_TIMESTAMP on insert
)
SELECT
    p.pid,
    CAST(NULL AS INTEGER), -- Explicitly cast NULL to INTEGER
    CAST(NULL AS INTEGER), -- Explicitly cast NULL to INTEGER
    COALESCE((SELECT setting_value::int FROM settings_global WHERE setting_key = 'default_safety_stock_units'), 0), -- Use global default safety stock units
    CAST(NULL AS VARCHAR), -- Cast NULL to VARCHAR for forecast_method (already varchar, but explicit)
    FALSE -- Default: Include in forecast
FROM
    public.products p
ON CONFLICT (pid) DO NOTHING; -- IMPORTANT: Do not overwrite existing product-specific settings
```
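The three tables form a product → vendor → global override chain (NULL at a lower level means inherit from the level above). The file itself does not include a resolution query, so the following is only a sketch of how the effective values could be read back, using nothing beyond the tables and keys defined above:

```sql
-- Sketch only (not part of this commit): resolve effective lead time and
-- days of stock per product by walking the product -> vendor -> global chain.
SELECT
    p.pid,
    COALESCE(
        sp.lead_time_days,          -- product-level override
        sv.default_lead_time_days,  -- vendor-level default
        (SELECT setting_value::int
           FROM public.settings_global
          WHERE setting_key = 'default_lead_time_days')  -- global default
    ) AS effective_lead_time_days,
    COALESCE(
        sp.days_of_stock,
        sv.default_days_of_stock,
        (SELECT setting_value::int
           FROM public.settings_global
          WHERE setting_key = 'default_days_of_stock')
    ) AS effective_days_of_stock
FROM public.products p
LEFT JOIN public.settings_product sp ON sp.pid = p.pid
LEFT JOIN public.settings_vendor  sv ON sv.vendor = p.vendor;
```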
```diff
@@ -1,232 +1,261 @@
--- Configuration tables schema
+-- -- Configuration tables schema

--- Create function for updating timestamps if it doesn't exist
-CREATE OR REPLACE FUNCTION update_updated_column()
-RETURNS TRIGGER AS $$
-BEGIN
-    NEW.updated = CURRENT_TIMESTAMP;
-    RETURN NEW;
-END;
-$$ language 'plpgsql';
+-- -- Create function for updating timestamps if it doesn't exist
+-- CREATE OR REPLACE FUNCTION update_updated_column()
+-- RETURNS TRIGGER AS $$
+-- BEGIN
+-- NEW.updated = CURRENT_TIMESTAMP;
+-- RETURN NEW;
+-- END;
+-- $$ language 'plpgsql';

--- Create function for updating updated_at timestamps
-CREATE OR REPLACE FUNCTION update_updated_at_column()
-RETURNS TRIGGER AS $$
-BEGIN
-    NEW.updated_at = CURRENT_TIMESTAMP;
-    RETURN NEW;
-END;
-$$ language 'plpgsql';
+-- -- Create function for updating updated_at timestamps
+-- CREATE OR REPLACE FUNCTION update_updated_at_column()
+-- RETURNS TRIGGER AS $$
+-- BEGIN
+-- NEW.updated_at = CURRENT_TIMESTAMP;
+-- RETURN NEW;
+-- END;
+-- $$ language 'plpgsql';

--- Stock threshold configurations
-CREATE TABLE stock_thresholds (
-    id INTEGER NOT NULL,
-    category_id BIGINT, -- NULL means default/global threshold
-    vendor VARCHAR(100), -- NULL means applies to all vendors
-    critical_days INTEGER NOT NULL DEFAULT 7,
-    reorder_days INTEGER NOT NULL DEFAULT 14,
-    overstock_days INTEGER NOT NULL DEFAULT 90,
-    low_stock_threshold INTEGER NOT NULL DEFAULT 5,
-    min_reorder_quantity INTEGER NOT NULL DEFAULT 1,
-    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (id),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE (category_id, vendor)
-);
+-- -- Stock threshold configurations
+-- CREATE TABLE stock_thresholds (
+-- id INTEGER NOT NULL,
+-- category_id BIGINT, -- NULL means default/global threshold
+-- vendor VARCHAR(100), -- NULL means applies to all vendors
+-- critical_days INTEGER NOT NULL DEFAULT 7,
+-- reorder_days INTEGER NOT NULL DEFAULT 14,
+-- overstock_days INTEGER NOT NULL DEFAULT 90,
+-- low_stock_threshold INTEGER NOT NULL DEFAULT 5,
+-- min_reorder_quantity INTEGER NOT NULL DEFAULT 1,
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- PRIMARY KEY (id),
+-- FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
+-- UNIQUE (category_id, vendor)
+-- );

-CREATE TRIGGER update_stock_thresholds_updated
-    BEFORE UPDATE ON stock_thresholds
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_stock_thresholds_updated
+-- BEFORE UPDATE ON stock_thresholds
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

-CREATE INDEX idx_st_metrics ON stock_thresholds(category_id, vendor);
+-- CREATE INDEX idx_st_metrics ON stock_thresholds(category_id, vendor);

--- Lead time threshold configurations
-CREATE TABLE lead_time_thresholds (
-    id INTEGER NOT NULL,
-    category_id BIGINT, -- NULL means default/global threshold
-    vendor VARCHAR(100), -- NULL means applies to all vendors
-    target_days INTEGER NOT NULL DEFAULT 14,
-    warning_days INTEGER NOT NULL DEFAULT 21,
-    critical_days INTEGER NOT NULL DEFAULT 30,
-    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (id),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE (category_id, vendor)
-);
+-- -- Lead time threshold configurations
+-- CREATE TABLE lead_time_thresholds (
+-- id INTEGER NOT NULL,
+-- category_id BIGINT, -- NULL means default/global threshold
+-- vendor VARCHAR(100), -- NULL means applies to all vendors
+-- target_days INTEGER NOT NULL DEFAULT 14,
+-- warning_days INTEGER NOT NULL DEFAULT 21,
+-- critical_days INTEGER NOT NULL DEFAULT 30,
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- PRIMARY KEY (id),
+-- FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
+-- UNIQUE (category_id, vendor)
+-- );

-CREATE TRIGGER update_lead_time_thresholds_updated
-    BEFORE UPDATE ON lead_time_thresholds
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_lead_time_thresholds_updated
+-- BEFORE UPDATE ON lead_time_thresholds
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

--- Sales velocity window configurations
-CREATE TABLE sales_velocity_config (
-    id INTEGER NOT NULL,
-    category_id BIGINT, -- NULL means default/global threshold
-    vendor VARCHAR(100), -- NULL means applies to all vendors
-    daily_window_days INTEGER NOT NULL DEFAULT 30,
-    weekly_window_days INTEGER NOT NULL DEFAULT 7,
-    monthly_window_days INTEGER NOT NULL DEFAULT 90,
-    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (id),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE (category_id, vendor)
-);
+-- -- Sales velocity window configurations
+-- CREATE TABLE sales_velocity_config (
+-- id INTEGER NOT NULL,
+-- category_id BIGINT, -- NULL means default/global threshold
+-- vendor VARCHAR(100), -- NULL means applies to all vendors
+-- daily_window_days INTEGER NOT NULL DEFAULT 30,
+-- weekly_window_days INTEGER NOT NULL DEFAULT 7,
+-- monthly_window_days INTEGER NOT NULL DEFAULT 90,
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- PRIMARY KEY (id),
+-- FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
+-- UNIQUE (category_id, vendor)
+-- );

-CREATE TRIGGER update_sales_velocity_config_updated
-    BEFORE UPDATE ON sales_velocity_config
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_sales_velocity_config_updated
+-- BEFORE UPDATE ON sales_velocity_config
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

-CREATE INDEX idx_sv_metrics ON sales_velocity_config(category_id, vendor);
+-- CREATE INDEX idx_sv_metrics ON sales_velocity_config(category_id, vendor);

--- ABC Classification configurations
-CREATE TABLE abc_classification_config (
-    id INTEGER NOT NULL PRIMARY KEY,
-    a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0,
-    b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0,
-    classification_period_days INTEGER NOT NULL DEFAULT 90,
-    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
-);
+-- -- ABC Classification configurations
+-- CREATE TABLE abc_classification_config (
+-- id INTEGER NOT NULL PRIMARY KEY,
+-- a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0,
+-- b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0,
+-- classification_period_days INTEGER NOT NULL DEFAULT 90,
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+-- );

-CREATE TRIGGER update_abc_classification_config_updated
-    BEFORE UPDATE ON abc_classification_config
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_abc_classification_config_updated
+-- BEFORE UPDATE ON abc_classification_config
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

--- Safety stock configurations
-CREATE TABLE safety_stock_config (
-    id INTEGER NOT NULL,
-    category_id BIGINT, -- NULL means default/global threshold
-    vendor VARCHAR(100), -- NULL means applies to all vendors
-    coverage_days INTEGER NOT NULL DEFAULT 14,
-    service_level DECIMAL(5,2) NOT NULL DEFAULT 95.0,
-    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (id),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE (category_id, vendor)
-);
+-- -- Safety stock configurations
+-- CREATE TABLE safety_stock_config (
+-- id INTEGER NOT NULL,
+-- category_id BIGINT, -- NULL means default/global threshold
+-- vendor VARCHAR(100), -- NULL means applies to all vendors
+-- coverage_days INTEGER NOT NULL DEFAULT 14,
+-- service_level DECIMAL(5,2) NOT NULL DEFAULT 95.0,
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- PRIMARY KEY (id),
+-- FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
+-- UNIQUE (category_id, vendor)
+-- );

-CREATE TRIGGER update_safety_stock_config_updated
-    BEFORE UPDATE ON safety_stock_config
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_safety_stock_config_updated
+-- BEFORE UPDATE ON safety_stock_config
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

-CREATE INDEX idx_ss_metrics ON safety_stock_config(category_id, vendor);
+-- CREATE INDEX idx_ss_metrics ON safety_stock_config(category_id, vendor);

--- Turnover rate configurations
-CREATE TABLE turnover_config (
-    id INTEGER NOT NULL,
-    category_id BIGINT, -- NULL means default/global threshold
-    vendor VARCHAR(100), -- NULL means applies to all vendors
-    calculation_period_days INTEGER NOT NULL DEFAULT 30,
-    target_rate DECIMAL(10,2) NOT NULL DEFAULT 1.0,
-    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (id),
-    FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
-    UNIQUE (category_id, vendor)
-);
+-- -- Turnover rate configurations
+-- CREATE TABLE turnover_config (
+-- id INTEGER NOT NULL,
+-- category_id BIGINT, -- NULL means default/global threshold
+-- vendor VARCHAR(100), -- NULL means applies to all vendors
+-- calculation_period_days INTEGER NOT NULL DEFAULT 30,
+-- target_rate DECIMAL(10,2) NOT NULL DEFAULT 1.0,
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- PRIMARY KEY (id),
+-- FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
+-- UNIQUE (category_id, vendor)
+-- );

-CREATE TRIGGER update_turnover_config_updated
-    BEFORE UPDATE ON turnover_config
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_turnover_config_updated
+-- BEFORE UPDATE ON turnover_config
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

--- Create table for sales seasonality factors
-CREATE TABLE sales_seasonality (
-    month INTEGER NOT NULL,
-    seasonality_factor DECIMAL(5,3) DEFAULT 0,
-    last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    PRIMARY KEY (month),
-    CONSTRAINT month_range CHECK (month BETWEEN 1 AND 12),
-    CONSTRAINT seasonality_range CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
-);
+-- -- Create table for sales seasonality factors
+-- CREATE TABLE sales_seasonality (
+-- month INTEGER NOT NULL,
+-- seasonality_factor DECIMAL(5,3) DEFAULT 0,
+-- last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+-- PRIMARY KEY (month),
+-- CONSTRAINT month_range CHECK (month BETWEEN 1 AND 12),
+-- CONSTRAINT seasonality_range CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
+-- );

-CREATE TRIGGER update_sales_seasonality_updated
-    BEFORE UPDATE ON sales_seasonality
-    FOR EACH ROW
-    EXECUTE FUNCTION update_updated_at_column();
+-- CREATE TRIGGER update_sales_seasonality_updated
+-- BEFORE UPDATE ON sales_seasonality
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

--- Insert default global thresholds
-INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
-VALUES (1, NULL, NULL, 7, 14, 90)
-ON CONFLICT (id) DO UPDATE SET
-    critical_days = EXCLUDED.critical_days,
-    reorder_days = EXCLUDED.reorder_days,
-    overstock_days = EXCLUDED.overstock_days;
+-- -- Create table for financial calculation parameters
+-- CREATE TABLE financial_calc_config (
+-- id INTEGER NOT NULL PRIMARY KEY,
+-- order_cost DECIMAL(10,2) NOT NULL DEFAULT 25.00, -- The fixed cost per purchase order (used in EOQ)
+-- holding_rate DECIMAL(10,4) NOT NULL DEFAULT 0.25, -- The annual inventory holding cost as a percentage of unit cost (used in EOQ)
+-- service_level_z_score DECIMAL(10,4) NOT NULL DEFAULT 1.96, -- Z-score for ~95% service level (used in Safety Stock)
+-- min_reorder_qty INTEGER NOT NULL DEFAULT 1, -- Minimum reorder quantity
+-- default_reorder_qty INTEGER NOT NULL DEFAULT 5, -- Default reorder quantity when sales data is insufficient
+-- default_safety_stock INTEGER NOT NULL DEFAULT 5, -- Default safety stock when sales data is insufficient
+-- created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+-- updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+-- );

-INSERT INTO lead_time_thresholds (id, category_id, vendor, target_days, warning_days, critical_days)
-VALUES (1, NULL, NULL, 14, 21, 30)
-ON CONFLICT (id) DO UPDATE SET
-    target_days = EXCLUDED.target_days,
-    warning_days = EXCLUDED.warning_days,
-    critical_days = EXCLUDED.critical_days;
+-- CREATE TRIGGER update_financial_calc_config_updated
+-- BEFORE UPDATE ON financial_calc_config
+-- FOR EACH ROW
+-- EXECUTE FUNCTION update_updated_at_column();

-INSERT INTO sales_velocity_config (id, category_id, vendor, daily_window_days, weekly_window_days, monthly_window_days)
-VALUES (1, NULL, NULL, 30, 7, 90)
-ON CONFLICT (id) DO UPDATE SET
-    daily_window_days = EXCLUDED.daily_window_days,
-    weekly_window_days = EXCLUDED.weekly_window_days,
-    monthly_window_days = EXCLUDED.monthly_window_days;
+-- -- Insert default global thresholds
+-- INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
+-- VALUES (1, NULL, NULL, 7, 14, 90)
+-- ON CONFLICT (id) DO UPDATE SET
+-- critical_days = EXCLUDED.critical_days,
+-- reorder_days = EXCLUDED.reorder_days,
+-- overstock_days = EXCLUDED.overstock_days;

-INSERT INTO abc_classification_config (id, a_threshold, b_threshold, classification_period_days)
-VALUES (1, 20.0, 50.0, 90)
-ON CONFLICT (id) DO UPDATE SET
-    a_threshold = EXCLUDED.a_threshold,
-    b_threshold = EXCLUDED.b_threshold,
-    classification_period_days = EXCLUDED.classification_period_days;
+-- INSERT INTO lead_time_thresholds (id, category_id, vendor, target_days, warning_days, critical_days)
+-- VALUES (1, NULL, NULL, 14, 21, 30)
+-- ON CONFLICT (id) DO UPDATE SET
+-- target_days = EXCLUDED.target_days,
+-- warning_days = EXCLUDED.warning_days,
+-- critical_days = EXCLUDED.critical_days;

-INSERT INTO safety_stock_config (id, category_id, vendor, coverage_days, service_level)
-VALUES (1, NULL, NULL, 14, 95.0)
-ON CONFLICT (id) DO UPDATE SET
-    coverage_days = EXCLUDED.coverage_days,
-    service_level = EXCLUDED.service_level;
+-- INSERT INTO sales_velocity_config (id, category_id, vendor, daily_window_days, weekly_window_days, monthly_window_days)
+-- VALUES (1, NULL, NULL, 30, 7, 90)
+-- ON CONFLICT (id) DO UPDATE SET
+-- daily_window_days = EXCLUDED.daily_window_days,
+-- weekly_window_days = EXCLUDED.weekly_window_days,
+-- monthly_window_days = EXCLUDED.monthly_window_days;

-INSERT INTO turnover_config (id, category_id, vendor, calculation_period_days, target_rate)
-VALUES (1, NULL, NULL, 30, 1.0)
-ON CONFLICT (id) DO UPDATE SET
-    calculation_period_days = EXCLUDED.calculation_period_days,
-    target_rate = EXCLUDED.target_rate;
+-- INSERT INTO abc_classification_config (id, a_threshold, b_threshold, classification_period_days)
+-- VALUES (1, 20.0, 50.0, 90)
+-- ON CONFLICT (id) DO UPDATE SET
+-- a_threshold = EXCLUDED.a_threshold,
+-- b_threshold = EXCLUDED.b_threshold,
+-- classification_period_days = EXCLUDED.classification_period_days;

--- Insert default seasonality factors (neutral)
-INSERT INTO sales_seasonality (month, seasonality_factor)
-VALUES
-    (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
-    (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
-ON CONFLICT (month) DO UPDATE SET
-    last_updated = CURRENT_TIMESTAMP;
+-- INSERT INTO safety_stock_config (id, category_id, vendor, coverage_days, service_level)
+-- VALUES (1, NULL, NULL, 14, 95.0)
+-- ON CONFLICT (id) DO UPDATE SET
+-- coverage_days = EXCLUDED.coverage_days,
+-- service_level = EXCLUDED.service_level;

--- View to show thresholds with category names
-CREATE OR REPLACE VIEW stock_thresholds_view AS
-SELECT
-    st.*,
-    c.name as category_name,
-    CASE
-        WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 'Global Default'
-        WHEN st.category_id IS NULL THEN 'Vendor: ' || st.vendor
-        WHEN st.vendor IS NULL THEN 'Category: ' || c.name
-        ELSE 'Category: ' || c.name || ' / Vendor: ' || st.vendor
-    END as threshold_scope
-FROM
-    stock_thresholds st
-LEFT JOIN
-    categories c ON st.category_id = c.cat_id
-ORDER BY
-    CASE
-        WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 1
-        WHEN st.category_id IS NULL THEN 2
-        WHEN st.vendor IS NULL THEN 3
-        ELSE 4
-    END,
-    c.name,
-    st.vendor;
+-- INSERT INTO turnover_config (id, category_id, vendor, calculation_period_days, target_rate)
+-- VALUES (1, NULL, NULL, 30, 1.0)
+-- ON CONFLICT (id) DO UPDATE SET
+-- calculation_period_days = EXCLUDED.calculation_period_days,
+-- target_rate = EXCLUDED.target_rate;
+
+-- -- Insert default seasonality factors (neutral)
+-- INSERT INTO sales_seasonality (month, seasonality_factor)
+-- VALUES
+-- (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0),
+-- (7, 0), (8, 0), (9, 0), (10, 0), (11, 0), (12, 0)
+-- ON CONFLICT (month) DO UPDATE SET
+-- last_updated = CURRENT_TIMESTAMP;
+
+-- -- Insert default values
+-- INSERT INTO financial_calc_config (id, order_cost, holding_rate, service_level_z_score, min_reorder_qty, default_reorder_qty, default_safety_stock)
+-- VALUES (1, 25.00, 0.25, 1.96, 1, 5, 5)
+-- ON CONFLICT (id) DO UPDATE SET
+-- order_cost = EXCLUDED.order_cost,
+-- holding_rate = EXCLUDED.holding_rate,
+-- service_level_z_score = EXCLUDED.service_level_z_score,
+-- min_reorder_qty = EXCLUDED.min_reorder_qty,
+-- default_reorder_qty = EXCLUDED.default_reorder_qty,
+-- default_safety_stock = EXCLUDED.default_safety_stock;
+
+-- -- View to show thresholds with category names
+-- CREATE OR REPLACE VIEW stock_thresholds_view AS
+-- SELECT
+-- st.*,
+-- c.name as category_name,
+-- CASE
+-- WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 'Global Default'
+-- WHEN st.category_id IS NULL THEN 'Vendor: ' || st.vendor
+-- WHEN st.vendor IS NULL THEN 'Category: ' || c.name
+-- ELSE 'Category: ' || c.name || ' / Vendor: ' || st.vendor
+-- END as threshold_scope
+-- FROM
+-- stock_thresholds st
+-- LEFT JOIN
+-- categories c ON st.category_id = c.cat_id
+-- ORDER BY
+-- CASE
+-- WHEN st.category_id IS NULL AND st.vendor IS NULL THEN 1
+-- WHEN st.category_id IS NULL THEN 2
+-- WHEN st.vendor IS NULL THEN 3
+-- ELSE 4
+-- END,
+-- c.name,
+-- st.vendor;

 -- History and status tables
 CREATE TABLE IF NOT EXISTS calculate_history (
@@ -252,7 +281,7 @@ CREATE TABLE IF NOT EXISTS calculate_status (
 );

 CREATE TABLE IF NOT EXISTS sync_status (
-    table_name VARCHAR(50) PRIMARY KEY,
+    table_name TEXT PRIMARY KEY,
     last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
     last_sync_id BIGINT
 );
```
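The threshold tables retired above encode scope through NULLs (category_id NULL applies to all categories, vendor NULL to all vendors), and stock_thresholds_view ranks rows from global default to most specific. For reference, a hypothetical lookup that picks the single most specific stock_thresholds row for one product would invert that ranking ($1 = category id, $2 = vendor; not code from this commit):

```sql
-- Sketch only: most-specific-wins threshold lookup.
SELECT *
FROM stock_thresholds st
WHERE (st.category_id = $1 OR st.category_id IS NULL)
  AND (st.vendor = $2 OR st.vendor IS NULL)
ORDER BY
    CASE
        WHEN st.category_id IS NOT NULL AND st.vendor IS NOT NULL THEN 1 -- exact category+vendor match
        WHEN st.category_id IS NOT NULL THEN 2                           -- category-level row
        WHEN st.vendor IS NOT NULL THEN 3                                -- vendor-level row
        ELSE 4                                                           -- global default
    END
LIMIT 1;
```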
inventory-server/db/metrics-schema-new.sql (165 lines, new file)

@@ -0,0 +1,165 @@
```sql
-- Drop tables in reverse order of dependency
DROP TABLE IF EXISTS public.product_metrics CASCADE;
DROP TABLE IF EXISTS public.daily_product_snapshots CASCADE;

-- Table Definition: daily_product_snapshots
CREATE TABLE public.daily_product_snapshots (
    snapshot_date DATE NOT NULL,
    pid INT8 NOT NULL,
    sku VARCHAR, -- Copied for convenience

    -- Inventory Metrics (End of Day / Last Snapshot of Day)
    eod_stock_quantity INT NOT NULL DEFAULT 0,
    eod_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- Increased precision
    eod_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    eod_stock_gross NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    stockout_flag BOOLEAN NOT NULL DEFAULT FALSE,

    -- Sales Metrics (Aggregated for the snapshot_date)
    units_sold INT NOT NULL DEFAULT 0,
    units_returned INT NOT NULL DEFAULT 0,
    gross_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    discounts NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    returns_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    net_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- gross_revenue - discounts
    cogs NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    gross_regular_revenue NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    profit NUMERIC(14, 4) NOT NULL DEFAULT 0.00, -- net_revenue - cogs

    -- Receiving Metrics (Aggregated for the snapshot_date)
    units_received INT NOT NULL DEFAULT 0,
    cost_received NUMERIC(14, 4) NOT NULL DEFAULT 0.00,

    calculation_timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,

    PRIMARY KEY (snapshot_date, pid) -- Composite primary key
    -- CONSTRAINT fk_daily_snapshot_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE -- FK Optional on snapshot table
);

-- Add Indexes for daily_product_snapshots
CREATE INDEX idx_daily_snapshot_pid_date ON public.daily_product_snapshots(pid, snapshot_date); -- Useful for product-specific time series
```
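product_metrics (defined next) describes its rolling *_30d columns as refreshed hourly from these snapshots. That refresh query is not part of this commit, so the following is only a minimal sketch of the 30-day rollup the column comments imply, using just the columns defined above. The file then continues with the product_metrics definition.

```sql
-- Sketch (assumed, not from this commit): 30-day rollup per product from
-- daily_product_snapshots, feeding the *_30d columns of product_metrics.
SELECT
    s.pid,
    SUM(s.units_sold)                       AS sales_30d,
    SUM(s.net_revenue)                      AS revenue_30d,
    SUM(s.cogs)                             AS cogs_30d,
    SUM(s.profit)                           AS profit_30d,
    COUNT(*) FILTER (WHERE s.stockout_flag) AS stockout_days_30d,
    AVG(s.eod_stock_quantity)               AS avg_stock_units_30d,
    AVG(s.eod_stock_cost)                   AS avg_stock_cost_30d
FROM public.daily_product_snapshots s
WHERE s.snapshot_date >= CURRENT_DATE - INTERVAL '30 days'
GROUP BY s.pid;
```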
```sql
-- Table Definition: product_metrics
CREATE TABLE public.product_metrics (
    pid INT8 PRIMARY KEY,
    last_calculated TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,

    -- Product Info (Copied for convenience/performance)
    sku VARCHAR,
    title VARCHAR,
    brand VARCHAR,
    vendor VARCHAR,
    image_url VARCHAR, -- (e.g., products.image_175)
    is_visible BOOLEAN,
    is_replenishable BOOLEAN,

    -- Current Status (Refreshed Hourly)
    current_price NUMERIC(10, 2),
    current_regular_price NUMERIC(10, 2),
    current_cost_price NUMERIC(10, 4), -- Increased precision for cost
    current_landing_cost_price NUMERIC(10, 4), -- Increased precision for cost
    current_stock INT NOT NULL DEFAULT 0,
    current_stock_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    current_stock_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    current_stock_gross NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    on_order_qty INT NOT NULL DEFAULT 0,
    on_order_cost NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    on_order_retail NUMERIC(14, 4) NOT NULL DEFAULT 0.00,
    earliest_expected_date DATE,
    -- total_received_lifetime INT NOT NULL DEFAULT 0, -- Can calc if needed

    -- Historical Dates (Calculated Once/Periodically)
    date_created DATE,
    date_first_received DATE,
    date_last_received DATE,
    date_first_sold DATE,
    date_last_sold DATE,
    age_days INT, -- Calculated based on LEAST(date_created, date_first_sold)

    -- Rolling Period Metrics (Refreshed Hourly from daily_product_snapshots)
    sales_7d INT, revenue_7d NUMERIC(14, 4),
    sales_14d INT, revenue_14d NUMERIC(14, 4),
    sales_30d INT, revenue_30d NUMERIC(14, 4),
    cogs_30d NUMERIC(14, 4), profit_30d NUMERIC(14, 4),
    returns_units_30d INT, returns_revenue_30d NUMERIC(14, 4),
    discounts_30d NUMERIC(14, 4),
    gross_revenue_30d NUMERIC(14, 4), gross_regular_revenue_30d NUMERIC(14, 4),
    stockout_days_30d INT,
    sales_365d INT, revenue_365d NUMERIC(14, 4),
    avg_stock_units_30d NUMERIC(10, 2), avg_stock_cost_30d NUMERIC(14, 4),
    avg_stock_retail_30d NUMERIC(14, 4), avg_stock_gross_30d NUMERIC(14, 4),
    received_qty_30d INT, received_cost_30d NUMERIC(14, 4),

    -- Lifetime Metrics (Recalculated Hourly/Daily from daily_product_snapshots)
    lifetime_sales INT,
    lifetime_revenue NUMERIC(16, 4),

    -- First Period Metrics (Calculated Once/Periodically from daily_product_snapshots)
    first_7_days_sales INT, first_7_days_revenue NUMERIC(14, 4),
    first_30_days_sales INT, first_30_days_revenue NUMERIC(14, 4),
    first_60_days_sales INT, first_60_days_revenue NUMERIC(14, 4),
    first_90_days_sales INT, first_90_days_revenue NUMERIC(14, 4),

    -- Calculated KPIs (Refreshed Hourly based on rolling metrics)
    asp_30d NUMERIC(10, 2), -- revenue_30d / sales_30d
    acp_30d NUMERIC(10, 4), -- cogs_30d / sales_30d
    avg_ros_30d NUMERIC(10, 4), -- profit_30d / sales_30d
    avg_sales_per_day_30d NUMERIC(10, 2), -- sales_30d / 30.0
    avg_sales_per_month_30d NUMERIC(10, 2), -- sales_30d (assuming 30d = 1 month for this metric)
    margin_30d NUMERIC(5, 2), -- (profit_30d / revenue_30d) * 100
    markup_30d NUMERIC(5, 2), -- (profit_30d / cogs_30d) * 100
    gmroi_30d NUMERIC(10, 2), -- profit_30d / avg_stock_cost_30d
    stockturn_30d NUMERIC(10, 2), -- sales_30d / avg_stock_units_30d
    return_rate_30d NUMERIC(5, 2), -- returns_units_30d / (sales_30d + returns_units_30d) * 100
    discount_rate_30d NUMERIC(5, 2), -- discounts_30d / gross_revenue_30d * 100
    stockout_rate_30d NUMERIC(5, 2), -- stockout_days_30d / 30.0 * 100
    markdown_30d NUMERIC(14, 4), -- gross_regular_revenue_30d - gross_revenue_30d
    markdown_rate_30d NUMERIC(5, 2), -- markdown_30d / gross_regular_revenue_30d * 100
    sell_through_30d NUMERIC(5, 2), -- sales_30d / (current_stock + sales_30d) * 100
    avg_lead_time_days INT, -- Calculated Periodically from purchase_orders

    -- Forecasting & Replenishment (Refreshed Hourly)
    abc_class CHAR(1), -- Updated Periodically (e.g., Weekly)
    sales_velocity_daily NUMERIC(10, 4), -- sales_30d / (30.0 - stockout_days_30d)
    config_lead_time INT, -- From settings tables
    config_days_of_stock INT, -- From settings tables
    config_safety_stock INT, -- From settings_product
    planning_period_days INT, -- config_lead_time + config_days_of_stock
    lead_time_forecast_units NUMERIC(10, 2), -- sales_velocity_daily * config_lead_time
    days_of_stock_forecast_units NUMERIC(10, 2), -- sales_velocity_daily * config_days_of_stock
    planning_period_forecast_units NUMERIC(10, 2), -- lead_time_forecast_units + days_of_stock_forecast_units
    lead_time_closing_stock NUMERIC(10, 2), -- current_stock + on_order_qty - lead_time_forecast_units
    days_of_stock_closing_stock NUMERIC(10, 2), -- lead_time_closing_stock - days_of_stock_forecast_units
    replenishment_needed_raw NUMERIC(10, 2), -- planning_period_forecast_units + config_safety_stock - current_stock - on_order_qty
    replenishment_units INT, -- CEILING(GREATEST(0, replenishment_needed_raw))
    replenishment_cost NUMERIC(14, 4), -- replenishment_units * COALESCE(current_landing_cost_price, current_cost_price)
    replenishment_retail NUMERIC(14, 4), -- replenishment_units * current_price
    replenishment_profit NUMERIC(14, 4), -- replenishment_units * (current_price - COALESCE(current_landing_cost_price, current_cost_price))
    to_order_units INT, -- Apply MOQ/UOM logic to replenishment_units
    forecast_lost_sales_units NUMERIC(10, 2), -- GREATEST(0, -lead_time_closing_stock)
    forecast_lost_revenue NUMERIC(14, 4), -- forecast_lost_sales_units * current_price
    stock_cover_in_days NUMERIC(10, 1), -- current_stock / sales_velocity_daily
    po_cover_in_days NUMERIC(10, 1), -- on_order_qty / sales_velocity_daily
    sells_out_in_days NUMERIC(10, 1), -- (current_stock + on_order_qty) / sales_velocity_daily
    replenish_date DATE, -- Calc based on when stock hits safety stock minus lead time
    overstocked_units INT, -- GREATEST(0, current_stock - config_safety_stock - planning_period_forecast_units)
    overstocked_cost NUMERIC(14, 4), -- overstocked_units * COALESCE(current_landing_cost_price, current_cost_price)
    overstocked_retail NUMERIC(14, 4), -- overstocked_units * current_price
    is_old_stock BOOLEAN, -- Based on age, last sold, last received, on_order status

    -- Yesterday's Metrics (Refreshed Hourly from daily_product_snapshots)
    yesterday_sales INT,

    CONSTRAINT fk_product_metrics_pid FOREIGN KEY (pid) REFERENCES public.products(pid) ON DELETE CASCADE ON UPDATE CASCADE
);

-- Add Indexes for product_metrics (adjust based on common filtering/sorting in frontend)
CREATE INDEX idx_product_metrics_brand ON public.product_metrics(brand);
CREATE INDEX idx_product_metrics_vendor ON public.product_metrics(vendor);
CREATE INDEX idx_product_metrics_sku ON public.product_metrics(sku);
CREATE INDEX idx_product_metrics_abc_class ON public.product_metrics(abc_class);
CREATE INDEX idx_product_metrics_revenue_30d ON public.product_metrics(revenue_30d DESC NULLS LAST); -- Example sorting index
CREATE INDEX idx_product_metrics_sales_30d ON public.product_metrics(sales_30d DESC NULLS LAST); -- Example sorting index
CREATE INDEX idx_product_metrics_current_stock ON public.product_metrics(current_stock);
CREATE INDEX idx_product_metrics_sells_out_in_days ON public.product_metrics(sells_out_in_days ASC NULLS LAST); -- Example sorting index
```
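The forecasting columns are defined entirely by the formulas in their comments. Chained together for one product (a worked sketch with invented numbers): sales_30d = 60 with 0 stockout days gives sales_velocity_daily = 2.0; with config_lead_time = 14 and config_days_of_stock = 30 the planning-period forecast is 2.0 × 44 = 88 units; with current_stock = 40, on_order_qty = 0, and config_safety_stock = 5, replenishment_needed_raw = 88 + 5 − 40 − 0 = 53, so replenishment_units = 53. Expressed as SQL over the columns above (assumed refresh logic, derived only from the column comments, not from this commit):

```sql
-- Sketch: refresh the replenishment chain per the column comments.
-- Expressions are inlined because an UPDATE's SET list cannot reference
-- columns it is itself assigning in the same statement.
UPDATE public.product_metrics pm
SET
    sales_velocity_daily = pm.sales_30d / NULLIF(30.0 - pm.stockout_days_30d, 0),
    replenishment_units  = CEILING(GREATEST(0,
        (pm.sales_30d / NULLIF(30.0 - pm.stockout_days_30d, 0))
        * (pm.config_lead_time + pm.config_days_of_stock)
        + pm.config_safety_stock
        - pm.current_stock
        - pm.on_order_qty));
```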
```diff
@@ -11,15 +11,17 @@ CREATE TABLE temp_sales_metrics (
     avg_margin_percent DECIMAL(10,3),
     first_sale_date DATE,
     last_sale_date DATE,
+    stddev_daily_sales DECIMAL(10,3),
     PRIMARY KEY (pid)
 );

 CREATE TABLE temp_purchase_metrics (
     pid BIGINT NOT NULL,
-    avg_lead_time_days INTEGER,
+    avg_lead_time_days DECIMAL(10,2),
     last_purchase_date DATE,
     first_received_date DATE,
     last_received_date DATE,
+    stddev_lead_time_days DECIMAL(10,2),
     PRIMARY KEY (pid)
 );
```
```diff
@@ -50,7 +52,7 @@ CREATE TABLE product_metrics (
     gross_profit DECIMAL(10,3),
     gmroi DECIMAL(10,3),
     -- Purchase metrics
-    avg_lead_time_days INTEGER,
+    avg_lead_time_days DECIMAL(10,2),
     last_purchase_date DATE,
     first_received_date DATE,
     last_received_date DATE,
```
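The two stddev columns added here (stddev_daily_sales, stddev_lead_time_days) are exactly the inputs the textbook safety-stock formula needs alongside the z-score kept in the (commented-out) financial_calc_config. The diff does not show the calculation itself, so the following is only the standard formula expressed over these columns, with avg_daily_sales assumed to exist in temp_sales_metrics (it is not visible in the hunk):

```sql
-- Reference only; not code from this commit:
--   safety_stock = z * sqrt(avg_lead_time * stddev_demand^2 + avg_demand^2 * stddev_lead_time^2)
SELECT ts.pid,
       1.96 * sqrt(tp.avg_lead_time_days * power(ts.stddev_daily_sales, 2)
                 + power(ts.avg_daily_sales, 2) * power(tp.stddev_lead_time_days, 2)) -- avg_daily_sales is assumed
           AS safety_stock_units
FROM temp_sales_metrics ts
JOIN temp_purchase_metrics tp USING (pid);
```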
```diff
@@ -7,7 +7,7 @@ BEGIN
     -- Check which table is being updated and use the appropriate column
     IF TG_TABLE_NAME = 'categories' THEN
         NEW.updated_at = CURRENT_TIMESTAMP;
-    ELSE
+    ELSIF TG_TABLE_NAME IN ('products', 'orders', 'purchase_orders') THEN
        NEW.updated = CURRENT_TIMESTAMP;
     END IF;
     RETURN NEW;
```
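Only the function body appears in this hunk. Stitched together with the unchanged context, the updated function presumably reads as below (the name is inferred from the EXECUTE FUNCTION update_updated_column() triggers later in this file, and the $func$ delimiter from the next hunk header):

```sql
-- Reconstructed, not verbatim from the diff:
CREATE OR REPLACE FUNCTION update_updated_column()
RETURNS TRIGGER AS $func$
BEGIN
    -- Check which table is being updated and use the appropriate column
    IF TG_TABLE_NAME = 'categories' THEN
        NEW.updated_at = CURRENT_TIMESTAMP;
    ELSIF TG_TABLE_NAME IN ('products', 'orders', 'purchase_orders') THEN
        NEW.updated = CURRENT_TIMESTAMP;
    END IF;
    RETURN NEW;
END;
$func$ language plpgsql;
```

The switch from a bare ELSE to an explicit ELSIF list means tables outside the named set no longer get a timestamp column assigned implicitly.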
```diff
@@ -17,48 +17,48 @@ $func$ language plpgsql;
 -- Create tables
 CREATE TABLE products (
     pid BIGINT NOT NULL,
-    title VARCHAR(255) NOT NULL,
+    title TEXT NOT NULL,
     description TEXT,
-    SKU VARCHAR(50) NOT NULL,
+    sku TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE,
     first_received TIMESTAMP WITH TIME ZONE,
     stock_quantity INTEGER DEFAULT 0,
     preorder_count INTEGER DEFAULT 0,
     notions_inv_count INTEGER DEFAULT 0,
-    price DECIMAL(10, 3) NOT NULL,
-    regular_price DECIMAL(10, 3) NOT NULL,
-    cost_price DECIMAL(10, 3),
-    landing_cost_price DECIMAL(10, 3),
-    barcode VARCHAR(50),
-    harmonized_tariff_code VARCHAR(20),
+    price NUMERIC(14, 4) NOT NULL,
+    regular_price NUMERIC(14, 4) NOT NULL,
+    cost_price NUMERIC(14, 4),
+    landing_cost_price NUMERIC(14, 4),
+    barcode TEXT,
+    harmonized_tariff_code TEXT,
     updated_at TIMESTAMP WITH TIME ZONE,
     visible BOOLEAN DEFAULT true,
     managing_stock BOOLEAN DEFAULT true,
     replenishable BOOLEAN DEFAULT true,
-    vendor VARCHAR(100),
-    vendor_reference VARCHAR(100),
-    notions_reference VARCHAR(100),
-    permalink VARCHAR(255),
+    vendor TEXT,
+    vendor_reference TEXT,
+    notions_reference TEXT,
+    permalink TEXT,
     categories TEXT,
-    image VARCHAR(255),
-    image_175 VARCHAR(255),
-    image_full VARCHAR(255),
-    brand VARCHAR(100),
-    line VARCHAR(100),
-    subline VARCHAR(100),
-    artist VARCHAR(100),
+    image TEXT,
+    image_175 TEXT,
+    image_full TEXT,
+    brand TEXT,
+    line TEXT,
+    subline TEXT,
+    artist TEXT,
     options TEXT,
     tags TEXT,
     moq INTEGER DEFAULT 1,
     uom INTEGER DEFAULT 1,
-    rating DECIMAL(10,2) DEFAULT 0.00,
+    rating NUMERIC(14, 4) DEFAULT 0.00,
     reviews INTEGER DEFAULT 0,
-    weight DECIMAL(10,3),
-    length DECIMAL(10,3),
-    width DECIMAL(10,3),
-    height DECIMAL(10,3),
-    country_of_origin VARCHAR(5),
-    location VARCHAR(50),
+    weight NUMERIC(14, 4),
+    length NUMERIC(14, 4),
+    width NUMERIC(14, 4),
+    height NUMERIC(14, 4),
+    country_of_origin TEXT,
+    location TEXT,
     total_sold INTEGER DEFAULT 0,
     baskets INTEGER DEFAULT 0,
     notifies INTEGER DEFAULT 0,
```
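These type changes (VARCHAR(n) → TEXT, DECIMAL(10,3) → NUMERIC(14,4)) appear here as edits to the CREATE TABLE, i.e., they take effect on freshly built databases. Applying the same change to an already-populated database would need ALTER statements along these lines (a sketch only, not part of this commit; shown for two of the columns above):

```sql
-- Sketch: in-place equivalents of the column-type changes.
ALTER TABLE products
    ALTER COLUMN title TYPE TEXT,
    ALTER COLUMN price TYPE NUMERIC(14, 4);
```

Both are widening conversions, so no data is lost; PostgreSQL may rewrite the table for the NUMERIC typmod change.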
```diff
@@ -74,25 +74,25 @@ CREATE TRIGGER update_products_updated
     EXECUTE FUNCTION update_updated_column();

 -- Create indexes for products table
-CREATE INDEX idx_products_sku ON products(SKU);
+CREATE INDEX idx_products_sku ON products(sku);
 CREATE INDEX idx_products_vendor ON products(vendor);
 CREATE INDEX idx_products_brand ON products(brand);
 CREATE INDEX idx_products_location ON products(location);
 CREATE INDEX idx_products_total_sold ON products(total_sold);
 CREATE INDEX idx_products_date_last_sold ON products(date_last_sold);
 CREATE INDEX idx_products_visible ON products(visible);
 CREATE INDEX idx_products_replenishable ON products(replenishable);
 CREATE INDEX idx_products_updated ON products(updated);

 -- Create categories table with hierarchy support
 CREATE TABLE categories (
     cat_id BIGINT PRIMARY KEY,
-    name VARCHAR(100) NOT NULL,
+    name TEXT NOT NULL,
     type SMALLINT NOT NULL,
     parent_id BIGINT,
     description TEXT,
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    status VARCHAR(20) DEFAULT 'active',
-    FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
+    updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    status TEXT DEFAULT 'active',
+    FOREIGN KEY (parent_id) REFERENCES categories(cat_id) ON DELETE SET NULL
 );

 -- Create trigger for categories
@@ -106,6 +106,7 @@ COMMENT ON COLUMN categories.type IS '10=section, 11=category, 12=subcategory, 1
 CREATE INDEX idx_categories_parent ON categories(parent_id);
 CREATE INDEX idx_categories_type ON categories(type);
 CREATE INDEX idx_categories_status ON categories(status);
 CREATE INDEX idx_categories_name ON categories(name);
+CREATE INDEX idx_categories_name_type ON categories(name, type);

 -- Create product_categories junction table
@@ -118,28 +119,28 @@ CREATE TABLE product_categories (
 );

 CREATE INDEX idx_product_categories_category ON product_categories(cat_id);
 CREATE INDEX idx_product_categories_product ON product_categories(pid);

 -- Create orders table with its indexes
 CREATE TABLE orders (
     id BIGSERIAL PRIMARY KEY,
-    order_number VARCHAR(50) NOT NULL,
+    order_number TEXT NOT NULL,
     pid BIGINT NOT NULL,
-    SKU VARCHAR(50) NOT NULL,
-    date DATE NOT NULL,
-    price DECIMAL(10,3) NOT NULL,
+    sku TEXT NOT NULL,
+    date TIMESTAMP WITH TIME ZONE NOT NULL,
+    price NUMERIC(14, 4) NOT NULL,
     quantity INTEGER NOT NULL,
-    discount DECIMAL(10,3) DEFAULT 0.000,
-    tax DECIMAL(10,3) DEFAULT 0.000,
+    discount NUMERIC(14, 4) DEFAULT 0.0000,
+    tax NUMERIC(14, 4) DEFAULT 0.0000,
     tax_included BOOLEAN DEFAULT false,
-    shipping DECIMAL(10,3) DEFAULT 0.000,
-    costeach DECIMAL(10,3) DEFAULT 0.000,
-    customer VARCHAR(50) NOT NULL,
-    customer_name VARCHAR(100),
-    status VARCHAR(20) DEFAULT 'pending',
+    shipping NUMERIC(14, 4) DEFAULT 0.0000,
+    costeach NUMERIC(14, 4) DEFAULT 0.0000,
+    customer TEXT NOT NULL,
+    customer_name TEXT,
+    status TEXT DEFAULT 'pending',
     canceled BOOLEAN DEFAULT false,
     updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    UNIQUE (order_number, pid)
+    UNIQUE (order_number, pid),
+    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE RESTRICT
 );

 -- Create trigger for orders
@@ -150,36 +151,37 @@ CREATE TRIGGER update_orders_updated

 CREATE INDEX idx_orders_number ON orders(order_number);
 CREATE INDEX idx_orders_pid ON orders(pid);
 CREATE INDEX idx_orders_sku ON orders(sku);
 CREATE INDEX idx_orders_customer ON orders(customer);
 CREATE INDEX idx_orders_date ON orders(date);
 CREATE INDEX idx_orders_status ON orders(status);
 CREATE INDEX idx_orders_metrics ON orders(pid, date, canceled);
+CREATE INDEX idx_orders_pid_date ON orders(pid, date);
 CREATE INDEX idx_orders_updated ON orders(updated);

 -- Create purchase_orders table with its indexes
 CREATE TABLE purchase_orders (
     id BIGSERIAL PRIMARY KEY,
-    po_id VARCHAR(50) NOT NULL,
-    vendor VARCHAR(100) NOT NULL,
+    po_id TEXT NOT NULL,
+    vendor TEXT NOT NULL,
     date DATE NOT NULL,
     expected_date DATE,
     pid BIGINT NOT NULL,
-    sku VARCHAR(50) NOT NULL,
-    name VARCHAR(255) NOT NULL,
-    cost_price DECIMAL(10, 3) NOT NULL,
-    po_cost_price DECIMAL(10, 3) NOT NULL,
-    status SMALLINT DEFAULT 1,
-    receiving_status SMALLINT DEFAULT 1,
+    sku TEXT NOT NULL,
+    name TEXT NOT NULL,
+    cost_price NUMERIC(14, 4) NOT NULL,
+    po_cost_price NUMERIC(14, 4) NOT NULL,
+    status TEXT DEFAULT 'created',
+    receiving_status TEXT DEFAULT 'created',
     notes TEXT,
     long_note TEXT,
     ordered INTEGER NOT NULL,
     received INTEGER DEFAULT 0,
     received_date DATE,
     last_received_date DATE,
-    received_by VARCHAR,
+    received_by TEXT,
     receiving_history JSONB,
     updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    FOREIGN KEY (pid) REFERENCES products(pid),
+    FOREIGN KEY (pid) REFERENCES products(pid) ON DELETE CASCADE,
     UNIQUE (po_id, pid)
 );

@@ -191,21 +193,75 @@ CREATE TRIGGER update_purchase_orders_updated

 COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
 COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
-COMMENT ON COLUMN purchase_orders.status IS '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done';
-COMMENT ON COLUMN purchase_orders.receiving_status IS '0=canceled,1=created,30=partial_received,40=full_received,50=paid';
+COMMENT ON COLUMN purchase_orders.status IS 'canceled, created, electronically_ready_send, ordered, preordered, electronically_sent, receiving_started, done';
+COMMENT ON COLUMN purchase_orders.receiving_status IS 'canceled, created, partial_received, full_received, paid';
 COMMENT ON COLUMN purchase_orders.receiving_history IS 'Array of receiving records with qty, date, cost, receiving_id, and alt_po flag';

 CREATE INDEX idx_po_id ON purchase_orders(po_id);
 CREATE INDEX idx_po_sku ON purchase_orders(sku);
 CREATE INDEX idx_po_vendor ON purchase_orders(vendor);
 CREATE INDEX idx_po_status ON purchase_orders(status);
 CREATE INDEX idx_po_receiving_status ON purchase_orders(receiving_status);
 CREATE INDEX idx_po_metrics ON purchase_orders(pid, date, status, ordered, received);
 CREATE INDEX idx_po_metrics_receiving ON purchase_orders(pid, date, receiving_status, received_date);
 CREATE INDEX idx_po_product_date ON purchase_orders(pid, date);
 CREATE INDEX idx_po_product_status ON purchase_orders(pid, status);
 CREATE INDEX idx_po_expected_date ON purchase_orders(expected_date);
 CREATE INDEX idx_po_last_received_date ON purchase_orders(last_received_date);
+CREATE INDEX idx_po_pid_status ON purchase_orders(pid, status);
+CREATE INDEX idx_po_pid_date ON purchase_orders(pid, date);
+CREATE INDEX idx_po_updated ON purchase_orders(updated);

 SET session_replication_role = 'origin'; -- Re-enable foreign key checks

 -- Create views for common calculations
 -- product_sales_trends view moved to metrics-schema.sql

+-- Historical data tables imported from production
+CREATE TABLE imported_product_current_prices (
+    price_id BIGSERIAL PRIMARY KEY,
+    pid BIGINT NOT NULL,
+    qty_buy SMALLINT NOT NULL,
+    is_min_qty_buy BOOLEAN NOT NULL,
+    price_each NUMERIC(10,3) NOT NULL,
+    qty_limit SMALLINT NOT NULL,
+    no_promo BOOLEAN NOT NULL,
+    checkout_offer BOOLEAN NOT NULL,
+    active BOOLEAN NOT NULL,
+    date_active TIMESTAMP WITH TIME ZONE,
+    date_deactive TIMESTAMP WITH TIME ZONE,
+    updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX idx_imported_product_current_prices_pid ON imported_product_current_prices(pid, active, qty_buy);
+CREATE INDEX idx_imported_product_current_prices_checkout ON imported_product_current_prices(checkout_offer, active);
+CREATE INDEX idx_imported_product_current_prices_deactive ON imported_product_current_prices(date_deactive, active);
+CREATE INDEX idx_imported_product_current_prices_active ON imported_product_current_prices(date_active, active);
+
+CREATE TABLE imported_daily_inventory (
+    date DATE NOT NULL,
+    pid BIGINT NOT NULL,
+    amountsold SMALLINT NOT NULL DEFAULT 0,
+    times_sold SMALLINT NOT NULL DEFAULT 0,
+    qtyreceived SMALLINT NOT NULL DEFAULT 0,
+    price NUMERIC(7,2) NOT NULL DEFAULT 0,
+    costeach NUMERIC(7,2) NOT NULL DEFAULT 0,
+    stamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (date, pid)
+);
+
+CREATE INDEX idx_imported_daily_inventory_pid ON imported_daily_inventory(pid);
+
+CREATE TABLE imported_product_stat_history (
+    pid BIGINT NOT NULL,
+    date DATE NOT NULL,
+    score NUMERIC(10,2) NOT NULL,
+    score2 NUMERIC(10,2) NOT NULL,
+    qty_in_baskets SMALLINT NOT NULL,
+    qty_sold SMALLINT NOT NULL,
+    notifies_set SMALLINT NOT NULL,
+    visibility_score NUMERIC(10,2) NOT NULL,
+    health_score VARCHAR(5) NOT NULL,
+    sold_view_score NUMERIC(6,3) NOT NULL,
+    updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (pid, date)
+);
+
+CREATE INDEX idx_imported_product_stat_history_date ON imported_product_stat_history(date);
```
@@ -57,25 +57,16 @@ const TEMP_TABLES = [
|
||||
'temp_daily_sales',
|
||||
'temp_product_stats',
|
||||
'temp_category_sales',
|
||||
'temp_category_stats'
|
||||
'temp_category_stats',
|
||||
'temp_beginning_inventory',
|
||||
'temp_monthly_inventory'
|
||||
];
|
||||
|
||||
// Add cleanup function for temporary tables
|
||||
async function cleanupTemporaryTables(connection) {
|
||||
// List of possible temporary tables that might exist
|
||||
const tempTables = [
|
||||
'temp_sales_metrics',
|
||||
'temp_purchase_metrics',
|
||||
'temp_forecast_dates',
|
||||
'temp_daily_sales',
|
||||
'temp_product_stats',
|
||||
'temp_category_sales',
|
||||
'temp_category_stats'
|
||||
];
|
||||
|
||||
try {
|
||||
// Drop each temporary table if it exists
|
||||
for (const table of tempTables) {
|
||||
for (const table of TEMP_TABLES) {
|
||||
await connection.query(`DROP TABLE IF EXISTS ${table}`);
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -534,7 +525,7 @@ async function calculateMetrics() {
|
||||
await connection.query(`
|
||||
UPDATE calculate_history
|
||||
SET
|
||||
status = 'error',
|
||||
status = 'failed',
|
||||
end_time = NOW(),
|
||||
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
|
||||
error_message = $1
|
||||
|
||||
@@ -6,14 +6,16 @@ const importCategories = require('./import/categories');
|
||||
const { importProducts } = require('./import/products');
|
||||
const importOrders = require('./import/orders');
|
||||
const importPurchaseOrders = require('./import/purchase-orders');
|
||||
const importHistoricalData = require('./import/historical-data');
|
||||
|
||||
dotenv.config({ path: path.join(__dirname, "../.env") });
|
||||
|
||||
// Constants to control which imports run
|
||||
const IMPORT_CATEGORIES = false;
|
||||
const IMPORT_PRODUCTS = false;
|
||||
const IMPORT_ORDERS = false;
|
||||
const IMPORT_CATEGORIES = true;
|
||||
const IMPORT_PRODUCTS = true;
|
||||
const IMPORT_ORDERS = true;
|
||||
const IMPORT_PURCHASE_ORDERS = true;
|
||||
const IMPORT_HISTORICAL_DATA = false;
|
||||
|
||||
// Add flag for incremental updates
|
||||
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
|
||||
@@ -78,7 +80,8 @@ async function main() {
|
||||
IMPORT_CATEGORIES,
|
||||
IMPORT_PRODUCTS,
|
||||
IMPORT_ORDERS,
|
||||
IMPORT_PURCHASE_ORDERS
|
||||
IMPORT_PURCHASE_ORDERS,
|
||||
IMPORT_HISTORICAL_DATA
|
||||
].filter(Boolean).length;
|
||||
|
||||
try {
|
||||
@@ -108,17 +111,6 @@ async function main() {
|
||||
WHERE status = 'running'
|
||||
`);
|
||||
|
||||
// Initialize sync_status table if it doesn't exist
|
||||
await localConnection.query(`
|
||||
CREATE TABLE IF NOT EXISTS sync_status (
|
||||
table_name VARCHAR(50) PRIMARY KEY,
|
||||
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
last_sync_id BIGINT
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_last_sync ON sync_status (last_sync_timestamp);
|
||||
`);
|
||||
|
||||
// Create import history record for the overall session
|
||||
try {
|
||||
const [historyResult] = await localConnection.query(`
|
||||
@@ -137,10 +129,11 @@ async function main() {
|
||||
'categories_enabled', $2::boolean,
|
||||
'products_enabled', $3::boolean,
|
||||
'orders_enabled', $4::boolean,
|
||||
'purchase_orders_enabled', $5::boolean
|
||||
'purchase_orders_enabled', $5::boolean,
|
||||
'historical_data_enabled', $6::boolean
|
||||
)
|
||||
) RETURNING id
|
||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS]);
|
||||
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_HISTORICAL_DATA]);
|
||||
importHistoryId = historyResult.rows[0].id;
|
||||
} catch (error) {
|
||||
console.error("Error creating import history record:", error);
|
||||
@@ -157,7 +150,8 @@ async function main() {
|
||||
categories: null,
|
||||
products: null,
|
||||
orders: null,
|
||||
purchaseOrders: null
|
||||
purchaseOrders: null,
|
||||
historicalData: null
|
||||
};
|
||||
|
||||
let totalRecordsAdded = 0;
|
||||
@@ -169,8 +163,8 @@ async function main() {
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
       console.log('Categories import result:', results.categories);
-      totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0) || 0;
-      totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0) || 0;
+      totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0);
+      totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0);
     }
 
     if (IMPORT_PRODUCTS) {
@@ -178,8 +172,8 @@ async function main() {
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
       console.log('Products import result:', results.products);
-      totalRecordsAdded += parseInt(results.products?.recordsAdded || 0) || 0;
-      totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0) || 0;
+      totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
+      totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
     }
 
     if (IMPORT_ORDERS) {
@@ -187,8 +181,8 @@ async function main() {
       if (isImportCancelled) throw new Error("Import cancelled");
       completedSteps++;
       console.log('Orders import result:', results.orders);
-      totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0) || 0;
-      totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0) || 0;
+      totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
+      totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
     }
 
     if (IMPORT_PURCHASE_ORDERS) {
@@ -202,8 +196,8 @@ async function main() {
         if (results.purchaseOrders?.status === 'error') {
           console.error('Purchase orders import had an error:', results.purchaseOrders.error);
         } else {
-          totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0) || 0;
-          totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0) || 0;
+          totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
+          totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
         }
       } catch (error) {
         console.error('Error during purchase orders import:', error);
@@ -217,6 +211,32 @@ async function main() {
       }
     }
 
+    if (IMPORT_HISTORICAL_DATA) {
+      try {
+        results.historicalData = await importHistoricalData(prodConnection, localConnection, INCREMENTAL_UPDATE);
+        if (isImportCancelled) throw new Error("Import cancelled");
+        completedSteps++;
+        console.log('Historical data import result:', results.historicalData);
+
+        // Handle potential error status
+        if (results.historicalData?.status === 'error') {
+          console.error('Historical data import had an error:', results.historicalData.error);
+        } else {
+          totalRecordsAdded += parseInt(results.historicalData?.recordsAdded || 0);
+          totalRecordsUpdated += parseInt(results.historicalData?.recordsUpdated || 0);
+        }
+      } catch (error) {
+        console.error('Error during historical data import:', error);
+        // Continue with other imports, don't fail the whole process
+        results.historicalData = {
+          status: 'error',
+          error: error.message,
+          recordsAdded: 0,
+          recordsUpdated: 0
+        };
+      }
+    }
+
     const endTime = Date.now();
     const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
 
@@ -234,24 +254,28 @@ async function main() {
           'products_enabled', $5::boolean,
           'orders_enabled', $6::boolean,
           'purchase_orders_enabled', $7::boolean,
-          'categories_result', COALESCE($8::jsonb, 'null'::jsonb),
-          'products_result', COALESCE($9::jsonb, 'null'::jsonb),
-          'orders_result', COALESCE($10::jsonb, 'null'::jsonb),
-          'purchase_orders_result', COALESCE($11::jsonb, 'null'::jsonb)
+          'historical_data_enabled', $8::boolean,
+          'categories_result', COALESCE($9::jsonb, 'null'::jsonb),
+          'products_result', COALESCE($10::jsonb, 'null'::jsonb),
+          'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
+          'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
+          'historical_data_result', COALESCE($13::jsonb, 'null'::jsonb)
         )
-      WHERE id = $12
+      WHERE id = $14
     `, [
       totalElapsedSeconds,
-      parseInt(totalRecordsAdded) || 0,
-      parseInt(totalRecordsUpdated) || 0,
+      parseInt(totalRecordsAdded),
+      parseInt(totalRecordsUpdated),
       IMPORT_CATEGORIES,
       IMPORT_PRODUCTS,
       IMPORT_ORDERS,
       IMPORT_PURCHASE_ORDERS,
+      IMPORT_HISTORICAL_DATA,
       JSON.stringify(results.categories),
       JSON.stringify(results.products),
       JSON.stringify(results.orders),
       JSON.stringify(results.purchaseOrders),
+      JSON.stringify(results.historicalData),
       importHistoryId
     ]);
 
@@ -16,6 +16,9 @@ async function importCategories(prodConnection, localConnection) {
     // Start a single transaction for the entire import
     await localConnection.query('BEGIN');
 
+    // Temporarily disable the trigger that's causing problems
+    await localConnection.query('ALTER TABLE categories DISABLE TRIGGER update_categories_updated_at');
+
     // Process each type in order with its own savepoint
     for (const type of typeOrder) {
       try {
@@ -150,6 +153,9 @@ async function importCategories(prodConnection, localConnection) {
         last_sync_timestamp = NOW()
     `);
 
+    // Re-enable the trigger
+    await localConnection.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
+
     outputProgress({
       status: "complete",
       operation: "Categories import completed",
@@ -178,6 +184,9 @@ async function importCategories(prodConnection, localConnection) {
     // Only rollback if we haven't committed yet
     try {
       await localConnection.query('ROLLBACK');
+
+      // Make sure we re-enable the trigger even if there was an error
+      await localConnection.query('ALTER TABLE categories ENABLE TRIGGER update_categories_updated_at');
     } catch (rollbackError) {
       console.error("Error during rollback:", rollbackError);
     }
 
961
inventory-server/scripts/import/historical-data.js
Normal file
@@ -0,0 +1,961 @@
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('../metrics/utils/progress');
const fs = require('fs');
const path = require('path');
const { pipeline } = require('stream');
const { promisify } = require('util');

// Configuration constants to control which tables get imported
const IMPORT_PRODUCT_CURRENT_PRICES = false;
const IMPORT_DAILY_INVENTORY = false;
const IMPORT_PRODUCT_STAT_HISTORY = true;

// For product stat history, limit to more recent data for faster initial import
const USE_RECENT_MONTHS = 12; // Just use the most recent months for product_stat_history

/**
 * Validates a date from MySQL before inserting it into PostgreSQL
 * @param {string|Date|null} mysqlDate - Date string or object from MySQL
 * @returns {string|null} Valid date string or null if invalid
 */
function validateDate(mysqlDate) {
  // Handle null, undefined, or empty values
  if (!mysqlDate) {
    return null;
  }

  // Convert to string if it's not already
  const dateStr = String(mysqlDate);

  // Handle MySQL zero dates and empty values
  if (dateStr === '0000-00-00' ||
      dateStr === '0000-00-00 00:00:00' ||
      dateStr.indexOf('0000-00-00') !== -1 ||
      dateStr === '') {
    return null;
  }

  // Check if the date is valid
  const date = new Date(mysqlDate);

  // If the date is invalid or suspiciously old (pre-1970), return null
  if (isNaN(date.getTime()) || date.getFullYear() < 1970) {
    return null;
  }

  return mysqlDate;
}
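
// A quick behavioral sketch of validateDate on the common MySQL edge cases
// (illustrative values, derived directly from the checks above):
//   validateDate(null)                   -> null
//   validateDate('0000-00-00 00:00:00')  -> null  (MySQL zero date)
//   validateDate('1969-12-31')           -> null  (suspiciously old, pre-1970)
//   validateDate('2024-06-01 12:30:00')  -> '2024-06-01 12:30:00'  (passed through unchanged)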

/**
 * Imports historical data from MySQL to PostgreSQL
 */
async function importHistoricalData(
  prodConnection,
  localConnection,
  options = {}
) {
  const {
    incrementalUpdate = true,
    oneYearAgo = new Date(new Date().setFullYear(new Date().getFullYear() - 1))
  } = options;

  const oneYearAgoStr = oneYearAgo.toISOString().split('T')[0];
  const startTime = Date.now();

  // Use larger batch sizes to improve performance
  const BATCH_SIZE = 5000; // For fetching from small tables
  const INSERT_BATCH_SIZE = 500; // For inserting to small tables
  const LARGE_BATCH_SIZE = 10000; // For fetching from large tables
  const LARGE_INSERT_BATCH_SIZE = 1000; // For inserting to large tables

  // Calculate date for recent data
  const recentDateStr = new Date(
    new Date().setMonth(new Date().getMonth() - USE_RECENT_MONTHS)
  ).toISOString().split('T')[0];

  console.log(`Starting import with:
  - One year ago date: ${oneYearAgoStr}
  - Recent months date: ${recentDateStr} (for product_stat_history)
  - Incremental update: ${incrementalUpdate}
  - Standard batch size: ${BATCH_SIZE}
  - Standard insert batch size: ${INSERT_BATCH_SIZE}
  - Large table batch size: ${LARGE_BATCH_SIZE}
  - Large table insert batch size: ${LARGE_INSERT_BATCH_SIZE}
  - Import product_current_prices: ${IMPORT_PRODUCT_CURRENT_PRICES}
  - Import daily_inventory: ${IMPORT_DAILY_INVENTORY}
  - Import product_stat_history: ${IMPORT_PRODUCT_STAT_HISTORY}`);

  try {
    // Get last sync time for incremental updates
    const lastSyncTimes = {};

    if (incrementalUpdate) {
      try {
        const syncResult = await localConnection.query(`
          SELECT table_name, last_sync_timestamp
          FROM sync_status
          WHERE table_name IN (
            'imported_product_current_prices',
            'imported_daily_inventory',
            'imported_product_stat_history'
          )
        `);

        // Add check for rows existence and type
        if (syncResult && Array.isArray(syncResult.rows)) {
          for (const row of syncResult.rows) {
            lastSyncTimes[row.table_name] = row.last_sync_timestamp;
            console.log(`Last sync time for ${row.table_name}: ${row.last_sync_timestamp}`);
          }
        } else {
          console.warn('Sync status query did not return expected rows. Proceeding without last sync times.');
        }
      } catch (error) {
        console.error('Error fetching sync status:', error);
      }
    }

    // Determine how many tables will be imported
    const tablesCount = [
      IMPORT_PRODUCT_CURRENT_PRICES,
      IMPORT_DAILY_INVENTORY,
      IMPORT_PRODUCT_STAT_HISTORY
    ].filter(Boolean).length;

    // Run all imports sequentially for better reliability
    console.log(`Starting sequential imports for ${tablesCount} tables...`);
    outputProgress({
      status: "running",
      operation: "Historical data import",
      message: `Starting sequential imports for ${tablesCount} tables...`,
      current: 0,
      total: tablesCount,
      elapsed: formatElapsedTime(startTime)
    });

    let progressCount = 0;
    let productCurrentPricesResult = { recordsAdded: 0, recordsUpdated: 0, totalProcessed: 0, errors: [] };
    let dailyInventoryResult = { recordsAdded: 0, recordsUpdated: 0, totalProcessed: 0, errors: [] };
    let productStatHistoryResult = { recordsAdded: 0, recordsUpdated: 0, totalProcessed: 0, errors: [] };

    // Import product current prices
    if (IMPORT_PRODUCT_CURRENT_PRICES) {
      console.log('Importing product current prices...');
      productCurrentPricesResult = await importProductCurrentPrices(
        prodConnection,
        localConnection,
        oneYearAgoStr,
        lastSyncTimes['imported_product_current_prices'],
        BATCH_SIZE,
        INSERT_BATCH_SIZE,
        incrementalUpdate,
        startTime
      );
      progressCount++;

      outputProgress({
        status: "running",
        operation: "Historical data import",
        message: `Completed import ${progressCount} of ${tablesCount}`,
        current: progressCount,
        total: tablesCount,
        elapsed: formatElapsedTime(startTime)
      });
    }

    // Import daily inventory
    if (IMPORT_DAILY_INVENTORY) {
      console.log('Importing daily inventory...');
      dailyInventoryResult = await importDailyInventory(
        prodConnection,
        localConnection,
        oneYearAgoStr,
        lastSyncTimes['imported_daily_inventory'],
        BATCH_SIZE,
        INSERT_BATCH_SIZE,
        incrementalUpdate,
        startTime
      );
      progressCount++;

      outputProgress({
        status: "running",
        operation: "Historical data import",
        message: `Completed import ${progressCount} of ${tablesCount}`,
        current: progressCount,
        total: tablesCount,
        elapsed: formatElapsedTime(startTime)
      });
    }

    // Import product stat history - using optimized approach
    if (IMPORT_PRODUCT_STAT_HISTORY) {
      console.log('Importing product stat history...');
      productStatHistoryResult = await importProductStatHistory(
        prodConnection,
        localConnection,
        recentDateStr, // Use more recent date for this massive table
        lastSyncTimes['imported_product_stat_history'],
        LARGE_BATCH_SIZE,
        LARGE_INSERT_BATCH_SIZE,
        incrementalUpdate,
        startTime,
        USE_RECENT_MONTHS // Pass the recent months constant
      );
      progressCount++;

      outputProgress({
        status: "running",
        operation: "Historical data import",
        message: `Completed import ${progressCount} of ${tablesCount}`,
        current: progressCount,
        total: tablesCount,
        elapsed: formatElapsedTime(startTime)
      });
    }

    // Aggregate results
    const totalRecordsAdded =
      productCurrentPricesResult.recordsAdded +
      dailyInventoryResult.recordsAdded +
      productStatHistoryResult.recordsAdded;

    const totalRecordsUpdated =
      productCurrentPricesResult.recordsUpdated +
      dailyInventoryResult.recordsUpdated +
      productStatHistoryResult.recordsUpdated;

    const totalProcessed =
      productCurrentPricesResult.totalProcessed +
      dailyInventoryResult.totalProcessed +
      productStatHistoryResult.totalProcessed;

    const allErrors = [
      ...productCurrentPricesResult.errors,
      ...dailyInventoryResult.errors,
      ...productStatHistoryResult.errors
    ];

    // Log import summary
    console.log(`
Historical data import complete:
-------------------------------
Records added: ${totalRecordsAdded}
Records updated: ${totalRecordsUpdated}
Total processed: ${totalProcessed}
Errors: ${allErrors.length}
Time taken: ${formatElapsedTime(startTime)}
`);

    // Final progress update
    outputProgress({
      status: "complete",
      operation: "Historical data import",
      message: `Import complete. Added: ${totalRecordsAdded}, Updated: ${totalRecordsUpdated}, Errors: ${allErrors.length}`,
      current: tablesCount,
      total: tablesCount,
      elapsed: formatElapsedTime(startTime)
    });

    // Log any errors
    if (allErrors.length > 0) {
      console.log('Errors encountered during import:');
      console.log(JSON.stringify(allErrors, null, 2));
    }

    // Calculate duration
    const endTime = Date.now();
    const durationSeconds = Math.round((endTime - startTime) / 1000);
    const finalStatus = allErrors.length === 0 ? 'complete' : 'failed';
    const errorMessage = allErrors.length > 0 ? JSON.stringify(allErrors) : null;

    // Update import history
    await localConnection.query(`
      INSERT INTO import_history (
        table_name,
        end_time,
        duration_seconds,
        records_added,
        records_updated,
        is_incremental,
        status,
        error_message,
        additional_info
      )
      VALUES ($1, NOW(), $2, $3, $4, $5, $6, $7, $8)
    `, [
      'historical_data_combined',
      durationSeconds,
      totalRecordsAdded,
      totalRecordsUpdated,
      incrementalUpdate,
      finalStatus,
      errorMessage,
      JSON.stringify({
        totalProcessed,
        tablesImported: {
          imported_product_current_prices: IMPORT_PRODUCT_CURRENT_PRICES,
          imported_daily_inventory: IMPORT_DAILY_INVENTORY,
          imported_product_stat_history: IMPORT_PRODUCT_STAT_HISTORY
        }
      })
    ]);

    // Return summary
    return {
      recordsAdded: totalRecordsAdded,
      recordsUpdated: totalRecordsUpdated,
      totalProcessed,
      errors: allErrors,
      timeTaken: formatElapsedTime(startTime)
    };
  } catch (error) {
    console.error('Error importing historical data:', error);

    // Final progress update on error
    outputProgress({
      status: "failed",
      operation: "Historical data import",
      message: `Import failed: ${error.message}`,
      elapsed: formatElapsedTime(startTime)
    });

    throw error;
  }
}
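
// Note: importHistoricalData destructures an options object, so a caller that
// wants a full-window refresh passes a flag rather than a positional boolean
// (a positional boolean leaves incrementalUpdate at its default of true).
// A minimal call sketch under this signature, assuming both connections are open:
//   const summary = await importHistoricalData(prodConnection, localConnection, {
//     incrementalUpdate: false // re-import the whole one-year window
//   });
//   console.log(summary.recordsAdded, summary.recordsUpdated, summary.timeTaken);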

/**
 * Imports product_current_prices data from MySQL to PostgreSQL
 */
async function importProductCurrentPrices(
  prodConnection,
  localConnection,
  oneYearAgoStr,
  lastSyncTime,
  batchSize,
  insertBatchSize,
  incrementalUpdate,
  startTime
) {
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let totalProcessed = 0;
  let errors = [];
  let offset = 0;
  let allProcessed = false;

  try {
    // Get total count for progress reporting
    const [countResult] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM product_current_prices
      WHERE (date_active >= ? OR date_deactive >= ?)
      ${incrementalUpdate && lastSyncTime ? `AND date_deactive > ?` : ''}
    `, [oneYearAgoStr, oneYearAgoStr, ...(incrementalUpdate && lastSyncTime ? [lastSyncTime] : [])]);

    const totalCount = countResult[0].total;

    outputProgress({
      status: "running",
      operation: "Historical data import - Product Current Prices",
      message: `Found ${totalCount} records to process`,
      current: 0,
      total: totalCount,
      elapsed: formatElapsedTime(startTime)
    });

    // Process in batches for better performance
    while (!allProcessed) {
      try {
        // Fetch batch from production
        const [rows] = await prodConnection.query(`
          SELECT
            price_id,
            pid,
            qty_buy,
            is_min_qty_buy,
            price_each,
            qty_limit,
            no_promo,
            checkout_offer,
            active,
            date_active,
            date_deactive
          FROM product_current_prices
          WHERE (date_active >= ? OR date_deactive >= ?)
          ${incrementalUpdate && lastSyncTime ? `AND date_deactive > ?` : ''}
          ORDER BY price_id
          LIMIT ? OFFSET ?
        `, [
          oneYearAgoStr,
          oneYearAgoStr,
          ...(incrementalUpdate && lastSyncTime ? [lastSyncTime] : []),
          batchSize,
          offset
        ]);

        if (rows.length === 0) {
          allProcessed = true;
          break;
        }

        // Process rows in smaller batches for better performance
        for (let i = 0; i < rows.length; i += insertBatchSize) {
          const batch = rows.slice(i, i + insertBatchSize);

          if (batch.length === 0) continue;

          try {
            // Build parameterized query to handle NULL values properly
            const values = [];
            const placeholders = [];
            let placeholderIndex = 1;

            for (const row of batch) {
              const rowPlaceholders = [
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`
              ];

              placeholders.push(`(${rowPlaceholders.join(', ')})`);

              values.push(
                row.price_id,
                row.pid,
                row.qty_buy,
                row.is_min_qty_buy ? true : false,
                row.price_each,
                row.qty_limit, // PostgreSQL will handle null values properly
                row.no_promo ? true : false,
                row.checkout_offer ? true : false,
                row.active ? true : false,
                validateDate(row.date_active),
                validateDate(row.date_deactive)
              );
            }

            // Execute batch insert
            const result = await localConnection.query(`
              WITH ins AS (
                INSERT INTO imported_product_current_prices (
                  price_id, pid, qty_buy, is_min_qty_buy, price_each, qty_limit,
                  no_promo, checkout_offer, active, date_active, date_deactive
                )
                VALUES ${placeholders.join(',\n')}
                ON CONFLICT (price_id) DO UPDATE SET
                  pid = EXCLUDED.pid,
                  qty_buy = EXCLUDED.qty_buy,
                  is_min_qty_buy = EXCLUDED.is_min_qty_buy,
                  price_each = EXCLUDED.price_each,
                  qty_limit = EXCLUDED.qty_limit,
                  no_promo = EXCLUDED.no_promo,
                  checkout_offer = EXCLUDED.checkout_offer,
                  active = EXCLUDED.active,
                  date_active = EXCLUDED.date_active,
                  date_deactive = EXCLUDED.date_deactive,
                  updated = CURRENT_TIMESTAMP
                RETURNING (xmax = 0) AS inserted
              )
              SELECT
                COUNT(*) FILTER (WHERE inserted) AS inserted_count,
                COUNT(*) FILTER (WHERE NOT inserted) AS updated_count
              FROM ins
            `, values);

            // Safely update counts based on the result
            if (result && result.rows && result.rows.length > 0) {
              const insertedCount = parseInt(result.rows[0].inserted_count || 0);
              const updatedCount = parseInt(result.rows[0].updated_count || 0);

              recordsAdded += insertedCount;
              recordsUpdated += updatedCount;
            }
          } catch (error) {
            console.error(`Error in batch import of product_current_prices at offset ${i}:`, error);
            errors.push({
              table: 'imported_product_current_prices',
              batchOffset: i,
              batchSize: batch.length,
              error: error.message
            });
          }
        }

        totalProcessed += rows.length;
        offset += rows.length;

        // Update progress
        outputProgress({
          status: "running",
          operation: "Historical data import - Product Current Prices",
          message: `Processed ${totalProcessed} of ${totalCount} records`,
          current: totalProcessed,
          total: totalCount,
          elapsed: formatElapsedTime(startTime),
          remaining: estimateRemaining(startTime, totalProcessed, totalCount),
          rate: calculateRate(startTime, totalProcessed)
        });
      } catch (error) {
        console.error('Error in batch import of product_current_prices:', error);
        errors.push({
          table: 'imported_product_current_prices',
          error: error.message,
          offset: offset,
          batchSize: batchSize
        });

        // Try to continue with next batch
        offset += batchSize;
      }
    }

    // Update sync status
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('imported_product_current_prices', NOW())
      ON CONFLICT (table_name) DO UPDATE SET
        last_sync_timestamp = NOW()
    `);

    return { recordsAdded, recordsUpdated, totalProcessed, errors };
  } catch (error) {
    console.error('Error in product current prices import:', error);
    return {
      recordsAdded,
      recordsUpdated,
      totalProcessed,
      errors: [...errors, {
        table: 'imported_product_current_prices',
        error: error.message
      }]
    };
  }
}
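
// The RETURNING (xmax = 0) AS inserted clause above is a known PostgreSQL upsert
// idiom: a freshly inserted row version has xmax = 0, while a row rewritten by
// ON CONFLICT ... DO UPDATE carries a nonzero xmax, so inserts and updates can be
// counted in a single round trip. A stripped-down sketch of the same pattern
// (demo_upsert is a hypothetical table, used only for illustration):
//   WITH ins AS (
//     INSERT INTO demo_upsert (id, val) VALUES (1, 'a'), (2, 'b')
//     ON CONFLICT (id) DO UPDATE SET val = EXCLUDED.val
//     RETURNING (xmax = 0) AS inserted
//   )
//   SELECT COUNT(*) FILTER (WHERE inserted)     AS inserted_count,
//          COUNT(*) FILTER (WHERE NOT inserted) AS updated_count
//   FROM ins;
// The counts come back as strings (COUNT(*) is bigint), hence the parseInt calls above.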

/**
 * Imports daily_inventory data from MySQL to PostgreSQL
 */
async function importDailyInventory(
  prodConnection,
  localConnection,
  oneYearAgoStr,
  lastSyncTime,
  batchSize,
  insertBatchSize,
  incrementalUpdate,
  startTime
) {
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let totalProcessed = 0;
  let errors = [];
  let offset = 0;
  let allProcessed = false;

  try {
    // Get total count for progress reporting
    const [countResult] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM daily_inventory
      WHERE date >= ?
      ${incrementalUpdate && lastSyncTime ? `AND stamp > ?` : ''}
    `, [oneYearAgoStr, ...(incrementalUpdate && lastSyncTime ? [lastSyncTime] : [])]);

    const totalCount = countResult[0].total;

    outputProgress({
      status: "running",
      operation: "Historical data import - Daily Inventory",
      message: `Found ${totalCount} records to process`,
      current: 0,
      total: totalCount,
      elapsed: formatElapsedTime(startTime)
    });

    // Process in batches for better performance
    while (!allProcessed) {
      try {
        // Fetch batch from production
        const [rows] = await prodConnection.query(`
          SELECT
            date,
            pid,
            amountsold,
            times_sold,
            qtyreceived,
            price,
            costeach,
            stamp
          FROM daily_inventory
          WHERE date >= ?
          ${incrementalUpdate && lastSyncTime ? `AND stamp > ?` : ''}
          ORDER BY date, pid
          LIMIT ? OFFSET ?
        `, [
          oneYearAgoStr,
          ...(incrementalUpdate && lastSyncTime ? [lastSyncTime] : []),
          batchSize,
          offset
        ]);

        if (rows.length === 0) {
          allProcessed = true;
          break;
        }

        // Process rows in smaller batches for better performance
        for (let i = 0; i < rows.length; i += insertBatchSize) {
          const batch = rows.slice(i, i + insertBatchSize);

          if (batch.length === 0) continue;

          try {
            // Build parameterized query to handle NULL values properly
            const values = [];
            const placeholders = [];
            let placeholderIndex = 1;

            for (const row of batch) {
              const rowPlaceholders = [
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`
              ];

              placeholders.push(`(${rowPlaceholders.join(', ')})`);

              values.push(
                validateDate(row.date),
                row.pid,
                row.amountsold || 0,
                row.times_sold || 0,
                row.qtyreceived || 0,
                row.price || 0,
                row.costeach || 0,
                validateDate(row.stamp)
              );
            }

            // Execute batch insert
            const result = await localConnection.query(`
              WITH ins AS (
                INSERT INTO imported_daily_inventory (
                  date, pid, amountsold, times_sold, qtyreceived, price, costeach, stamp
                )
                VALUES ${placeholders.join(',\n')}
                ON CONFLICT (date, pid) DO UPDATE SET
                  amountsold = EXCLUDED.amountsold,
                  times_sold = EXCLUDED.times_sold,
                  qtyreceived = EXCLUDED.qtyreceived,
                  price = EXCLUDED.price,
                  costeach = EXCLUDED.costeach,
                  stamp = EXCLUDED.stamp,
                  updated = CURRENT_TIMESTAMP
                RETURNING (xmax = 0) AS inserted
              )
              SELECT
                COUNT(*) FILTER (WHERE inserted) AS inserted_count,
                COUNT(*) FILTER (WHERE NOT inserted) AS updated_count
              FROM ins
            `, values);

            // Safely update counts based on the result
            if (result && result.rows && result.rows.length > 0) {
              const insertedCount = parseInt(result.rows[0].inserted_count || 0);
              const updatedCount = parseInt(result.rows[0].updated_count || 0);

              recordsAdded += insertedCount;
              recordsUpdated += updatedCount;
            }
          } catch (error) {
            console.error(`Error in batch import of daily_inventory at offset ${i}:`, error);
            errors.push({
              table: 'imported_daily_inventory',
              batchOffset: i,
              batchSize: batch.length,
              error: error.message
            });
          }
        }

        totalProcessed += rows.length;
        offset += rows.length;

        // Update progress
        outputProgress({
          status: "running",
          operation: "Historical data import - Daily Inventory",
          message: `Processed ${totalProcessed} of ${totalCount} records`,
          current: totalProcessed,
          total: totalCount,
          elapsed: formatElapsedTime(startTime),
          remaining: estimateRemaining(startTime, totalProcessed, totalCount),
          rate: calculateRate(startTime, totalProcessed)
        });
      } catch (error) {
        console.error('Error in batch import of daily_inventory:', error);
        errors.push({
          table: 'imported_daily_inventory',
          error: error.message,
          offset: offset,
          batchSize: batchSize
        });

        // Try to continue with next batch
        offset += batchSize;
      }
    }

    // Update sync status
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('imported_daily_inventory', NOW())
      ON CONFLICT (table_name) DO UPDATE SET
        last_sync_timestamp = NOW()
    `);

    return { recordsAdded, recordsUpdated, totalProcessed, errors };
  } catch (error) {
    console.error('Error in daily inventory import:', error);
    return {
      recordsAdded,
      recordsUpdated,
      totalProcessed,
      errors: [...errors, {
        table: 'imported_daily_inventory',
        error: error.message
      }]
    };
  }
}

/**
 * Imports product_stat_history data from MySQL to PostgreSQL
 * Using fast direct inserts without conflict checking
 */
async function importProductStatHistory(
  prodConnection,
  localConnection,
  recentDateStr, // Use more recent date instead of one year ago
  lastSyncTime,
  batchSize,
  insertBatchSize,
  incrementalUpdate,
  startTime,
  recentMonths // Add parameter for recent months
) {
  let recordsAdded = 0;
  let recordsUpdated = 0;
  let totalProcessed = 0;
  let errors = [];
  let offset = 0;
  let allProcessed = false;
  let lastRateCheck = Date.now();
  let lastProcessed = 0;

  try {
    // Get total count for progress reporting
    const [countResult] = await prodConnection.query(`
      SELECT COUNT(*) as total
      FROM product_stat_history
      WHERE date >= ?
      ${incrementalUpdate && lastSyncTime ? `AND date > ?` : ''}
    `, [recentDateStr, ...(incrementalUpdate && lastSyncTime ? [lastSyncTime] : [])]);

    const totalCount = countResult[0].total;
    console.log(`Found ${totalCount} records to process in product_stat_history (using recent date: ${recentDateStr})`);

    // Progress indicator
    outputProgress({
      status: "running",
      operation: "Historical data import - Product Stat History",
      message: `Found ${totalCount} records to process (last ${recentMonths} months only)`,
      current: 0,
      total: totalCount,
      elapsed: formatElapsedTime(startTime)
    });

    // If not incremental, truncate the table first for better performance
    if (!incrementalUpdate) {
      console.log('Truncating imported_product_stat_history for full import...');
      await localConnection.query('TRUNCATE TABLE imported_product_stat_history');
    } else if (lastSyncTime) {
      // For incremental updates, delete records that will be reimported
      console.log(`Deleting records from imported_product_stat_history since ${lastSyncTime}...`);
      await localConnection.query('DELETE FROM imported_product_stat_history WHERE date > $1', [lastSyncTime]);
    }

    // Process in batches for better performance
    while (!allProcessed) {
      try {
        // Fetch batch from production with minimal filtering and no sorting
        const [rows] = await prodConnection.query(`
          SELECT
            pid,
            date,
            COALESCE(score, 0) as score,
            COALESCE(score2, 0) as score2,
            COALESCE(qty_in_baskets, 0) as qty_in_baskets,
            COALESCE(qty_sold, 0) as qty_sold,
            COALESCE(notifies_set, 0) as notifies_set,
            COALESCE(visibility_score, 0) as visibility_score,
            COALESCE(health_score, 0) as health_score,
            COALESCE(sold_view_score, 0) as sold_view_score
          FROM product_stat_history
          WHERE date >= ?
          ${incrementalUpdate && lastSyncTime ? `AND date > ?` : ''}
          LIMIT ? OFFSET ?
        `, [
          recentDateStr,
          ...(incrementalUpdate && lastSyncTime ? [lastSyncTime] : []),
          batchSize,
          offset
        ]);

        if (rows.length === 0) {
          allProcessed = true;
          break;
        }

        // Process rows in smaller batches for better performance
        for (let i = 0; i < rows.length; i += insertBatchSize) {
          const batch = rows.slice(i, i + insertBatchSize);

          if (batch.length === 0) continue;

          try {
            // Build parameterized query to handle NULL values properly
            const values = [];
            const placeholders = [];
            let placeholderIndex = 1;

            for (const row of batch) {
              const rowPlaceholders = [
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`,
                `$${placeholderIndex++}`
              ];

              placeholders.push(`(${rowPlaceholders.join(', ')})`);

              values.push(
                row.pid,
                validateDate(row.date),
                row.score,
                row.score2,
                row.qty_in_baskets,
                row.qty_sold,
                row.notifies_set,
                row.visibility_score,
                row.health_score,
                row.sold_view_score
              );
            }

            // Execute direct batch insert without conflict checking
            await localConnection.query(`
              INSERT INTO imported_product_stat_history (
                pid, date, score, score2, qty_in_baskets, qty_sold, notifies_set,
                visibility_score, health_score, sold_view_score
              )
              VALUES ${placeholders.join(',\n')}
            `, values);

            // All inserts are new records when using this approach
            recordsAdded += batch.length;
          } catch (error) {
            console.error(`Error in batch insert of product_stat_history at offset ${i}:`, error);
            errors.push({
              table: 'imported_product_stat_history',
              batchOffset: i,
              batchSize: batch.length,
              error: error.message
            });
          }
        }

        totalProcessed += rows.length;
        offset += rows.length;

        // Calculate current rate every 10 seconds or 100,000 records
        const now = Date.now();
        if (now - lastRateCheck > 10000 || totalProcessed - lastProcessed > 100000) {
          const timeElapsed = (now - lastRateCheck) / 1000; // seconds
          const recordsProcessed = totalProcessed - lastProcessed;
          const currentRate = Math.round(recordsProcessed / timeElapsed);

          console.log(`Current import rate: ${currentRate} records/second`);

          lastRateCheck = now;
          lastProcessed = totalProcessed;
        }

        // Update progress
        outputProgress({
          status: "running",
          operation: "Historical data import - Product Stat History",
          message: `Processed ${totalProcessed} of ${totalCount} records`,
          current: totalProcessed,
          total: totalCount,
          elapsed: formatElapsedTime(startTime),
          remaining: estimateRemaining(startTime, totalProcessed, totalCount),
          rate: calculateRate(startTime, totalProcessed)
        });
      } catch (error) {
        console.error('Error in batch import of product_stat_history:', error);
        errors.push({
          table: 'imported_product_stat_history',
          error: error.message,
          offset: offset,
          batchSize: batchSize
        });

        // Try to continue with next batch
        offset += batchSize;
      }
    }

    // Update sync status
    await localConnection.query(`
      INSERT INTO sync_status (table_name, last_sync_timestamp)
      VALUES ('imported_product_stat_history', NOW())
      ON CONFLICT (table_name) DO UPDATE SET
        last_sync_timestamp = NOW()
    `);

    return { recordsAdded, recordsUpdated, totalProcessed, errors };
  } catch (error) {
    console.error('Error in product stat history import:', error);
    return {
      recordsAdded,
      recordsUpdated,
      totalProcessed,
      errors: [...errors, {
        table: 'imported_product_stat_history',
        error: error.message
      }]
    };
  }
}

module.exports = importHistoricalData;
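
Unlike the two upsert-based importers above, importProductStatHistory clears its target window first and then issues plain INSERTs, trading per-row conflict handling for raw insert speed on the largest table. A minimal sketch generalizing that delete-then-append strategy (the helper name and parameters are illustrative, not part of these scripts):

async function resetHistoryWindow(conn, table, dateColumn, lastSyncTime) {
  if (!lastSyncTime) {
    // Full rebuild: guarantees no duplicates without per-row ON CONFLICT checks.
    await conn.query(`TRUNCATE TABLE ${table}`);
  } else {
    // Incremental: clear exactly the window about to be re-inserted, so the
    // plain INSERTs stay duplicate-free and the job is safe to re-run.
    await conn.query(`DELETE FROM ${table} WHERE ${dateColumn} > $1`, [lastSyncTime]);
  }
}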
@@ -117,43 +117,43 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       CREATE TEMP TABLE temp_order_items (
         order_id INTEGER NOT NULL,
         pid INTEGER NOT NULL,
-        SKU VARCHAR(50) NOT NULL,
-        price DECIMAL(10,2) NOT NULL,
+        sku TEXT NOT NULL,
+        price NUMERIC(14, 4) NOT NULL,
         quantity INTEGER NOT NULL,
-        base_discount DECIMAL(10,2) DEFAULT 0,
+        base_discount NUMERIC(14, 4) DEFAULT 0,
         PRIMARY KEY (order_id, pid)
       );
 
       CREATE TEMP TABLE temp_order_meta (
         order_id INTEGER NOT NULL,
-        date DATE NOT NULL,
-        customer VARCHAR(100) NOT NULL,
-        customer_name VARCHAR(150) NOT NULL,
-        status INTEGER,
+        date TIMESTAMP WITH TIME ZONE NOT NULL,
+        customer TEXT NOT NULL,
+        customer_name TEXT NOT NULL,
+        status TEXT,
         canceled BOOLEAN,
-        summary_discount DECIMAL(10,2) DEFAULT 0.00,
-        summary_subtotal DECIMAL(10,2) DEFAULT 0.00,
+        summary_discount NUMERIC(14, 4) DEFAULT 0.0000,
+        summary_subtotal NUMERIC(14, 4) DEFAULT 0.0000,
         PRIMARY KEY (order_id)
       );
 
       CREATE TEMP TABLE temp_order_discounts (
         order_id INTEGER NOT NULL,
         pid INTEGER NOT NULL,
-        discount DECIMAL(10,2) NOT NULL,
+        discount NUMERIC(14, 4) NOT NULL,
         PRIMARY KEY (order_id, pid)
       );
 
       CREATE TEMP TABLE temp_order_taxes (
         order_id INTEGER NOT NULL,
         pid INTEGER NOT NULL,
-        tax DECIMAL(10,2) NOT NULL,
+        tax NUMERIC(14, 4) NOT NULL,
         PRIMARY KEY (order_id, pid)
       );
 
       CREATE TEMP TABLE temp_order_costs (
         order_id INTEGER NOT NULL,
         pid INTEGER NOT NULL,
-        costeach DECIMAL(10,3) DEFAULT 0.000,
+        costeach NUMERIC(14, 4) DEFAULT 0.0000,
         PRIMARY KEY (order_id, pid)
       );
 
@@ -172,10 +172,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       ]);
 
       await localConnection.query(`
-        INSERT INTO temp_order_items (order_id, pid, SKU, price, quantity, base_discount)
+        INSERT INTO temp_order_items (order_id, pid, sku, price, quantity, base_discount)
         VALUES ${placeholders}
         ON CONFLICT (order_id, pid) DO UPDATE SET
-          SKU = EXCLUDED.SKU,
+          sku = EXCLUDED.sku,
           price = EXCLUDED.price,
           quantity = EXCLUDED.quantity,
           base_discount = EXCLUDED.base_discount
@@ -241,10 +241,10 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
 
       const values = subBatch.flatMap(order => [
         order.order_id,
-        order.date,
+        new Date(order.date), // Convert to TIMESTAMP WITH TIME ZONE
         order.customer,
         toTitleCase(order.customer_name) || '',
-        order.status,
+        order.status.toString(), // Convert status to TEXT
         order.canceled,
         order.summary_discount || 0,
         order.summary_subtotal || 0
@@ -447,7 +447,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
       SELECT
         oi.order_id as order_number,
         oi.pid::bigint as pid,
-        oi.SKU as sku,
+        oi.sku,
         om.date,
         oi.price,
         oi.quantity,
@@ -457,18 +457,18 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
           WHEN om.summary_discount > 0 AND om.summary_subtotal > 0 THEN
             ROUND((om.summary_discount * (oi.price * oi.quantity)) / NULLIF(om.summary_subtotal, 0), 2)
           ELSE 0
-        END)::DECIMAL(10,2) as discount,
-        COALESCE(ot.total_tax, 0)::DECIMAL(10,2) as tax,
+        END)::NUMERIC(14, 4) as discount,
+        COALESCE(ot.total_tax, 0)::NUMERIC(14, 4) as tax,
         false as tax_included,
         0 as shipping,
         om.customer,
         om.customer_name,
         om.status,
         om.canceled,
-        COALESCE(ot.costeach, oi.price * 0.5)::DECIMAL(10,3) as costeach
+        COALESCE(ot.costeach, oi.price * 0.5)::NUMERIC(14, 4) as costeach
       FROM (
         SELECT DISTINCT ON (order_id, pid)
-          order_id, pid, SKU, price, quantity, base_discount
+          order_id, pid, sku, price, quantity, base_discount
         FROM temp_order_items
         WHERE order_id = ANY($1)
         ORDER BY order_id, pid
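
Worked example of the discount proration in this hunk: a 5.00 order-level discount on a 50.00 subtotal assigns each line 10% of its extended price, so a line with price 12.50 and quantity 2 (extended 25.00) gets ROUND((5.00 * 25.00) / 50.00, 2) = 2.50. The NULLIF(om.summary_subtotal, 0) guard turns a zero subtotal into NULL, so the division yields NULL instead of raising a divide-by-zero error.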
@@ -508,7 +508,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         o.order_number,
         o.pid,
         o.sku || 'NO-SKU',
-        o.date,
+        o.date, // This is now a TIMESTAMP WITH TIME ZONE
         o.price,
         o.quantity,
         o.discount,
@@ -517,7 +517,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
         o.shipping,
         o.customer,
         o.customer_name,
-        o.status,
+        o.status.toString(), // Convert status to TEXT
         o.canceled,
         o.costeach
       ]);
 
@@ -57,50 +57,50 @@ async function setupTemporaryTables(connection) {
   await connection.query(`
     CREATE TEMP TABLE temp_products (
       pid BIGINT NOT NULL,
-      title VARCHAR(255),
+      title TEXT,
       description TEXT,
-      sku VARCHAR(50),
+      sku TEXT,
       stock_quantity INTEGER DEFAULT 0,
       preorder_count INTEGER DEFAULT 0,
       notions_inv_count INTEGER DEFAULT 0,
-      price DECIMAL(10,3) NOT NULL DEFAULT 0,
-      regular_price DECIMAL(10,3) NOT NULL DEFAULT 0,
-      cost_price DECIMAL(10,3),
-      vendor VARCHAR(100),
-      vendor_reference VARCHAR(100),
-      notions_reference VARCHAR(100),
-      brand VARCHAR(100),
-      line VARCHAR(100),
-      subline VARCHAR(100),
-      artist VARCHAR(100),
+      price NUMERIC(14, 4) NOT NULL DEFAULT 0,
+      regular_price NUMERIC(14, 4) NOT NULL DEFAULT 0,
+      cost_price NUMERIC(14, 4),
+      vendor TEXT,
+      vendor_reference TEXT,
+      notions_reference TEXT,
+      brand TEXT,
+      line TEXT,
+      subline TEXT,
+      artist TEXT,
       categories TEXT,
-      created_at TIMESTAMP,
-      first_received TIMESTAMP,
-      landing_cost_price DECIMAL(10,3),
-      barcode VARCHAR(50),
-      harmonized_tariff_code VARCHAR(50),
-      updated_at TIMESTAMP,
+      created_at TIMESTAMP WITH TIME ZONE,
+      first_received TIMESTAMP WITH TIME ZONE,
+      landing_cost_price NUMERIC(14, 4),
+      barcode TEXT,
+      harmonized_tariff_code TEXT,
+      updated_at TIMESTAMP WITH TIME ZONE,
       visible BOOLEAN,
       managing_stock BOOLEAN DEFAULT true,
       replenishable BOOLEAN,
-      permalink VARCHAR(255),
+      permalink TEXT,
       moq INTEGER DEFAULT 1,
       uom INTEGER DEFAULT 1,
-      rating DECIMAL(10,2),
+      rating NUMERIC(14, 4),
       reviews INTEGER,
-      weight DECIMAL(10,3),
-      length DECIMAL(10,3),
-      width DECIMAL(10,3),
-      height DECIMAL(10,3),
-      country_of_origin VARCHAR(100),
-      location VARCHAR(100),
+      weight NUMERIC(14, 4),
+      length NUMERIC(14, 4),
+      width NUMERIC(14, 4),
+      height NUMERIC(14, 4),
+      country_of_origin TEXT,
+      location TEXT,
       total_sold INTEGER,
       baskets INTEGER,
       notifies INTEGER,
-      date_last_sold TIMESTAMP,
-      image VARCHAR(255),
-      image_175 VARCHAR(255),
-      image_full VARCHAR(255),
+      date_last_sold TIMESTAMP WITH TIME ZONE,
+      image TEXT,
+      image_175 TEXT,
+      image_full TEXT,
       options TEXT,
       tags TEXT,
       needs_update BOOLEAN DEFAULT TRUE,
 
@@ -73,19 +73,18 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
 
       -- Temporary table for purchase orders
       CREATE TEMP TABLE temp_purchase_orders (
-        po_id VARCHAR(50) NOT NULL,
+        po_id TEXT NOT NULL,
         pid BIGINT NOT NULL,
-        sku VARCHAR(50),
-        name VARCHAR(255),
-        vendor VARCHAR(255),
+        sku TEXT,
+        name TEXT,
+        vendor TEXT,
         date TIMESTAMP WITH TIME ZONE,
         expected_date DATE,
-        status INTEGER,
-        status_text VARCHAR(50),
+        status TEXT,
         notes TEXT,
         long_note TEXT,
         ordered INTEGER,
-        po_cost_price DECIMAL(10,3),
+        po_cost_price NUMERIC(14, 4),
         supplier_id INTEGER,
         date_created TIMESTAMP WITH TIME ZONE,
         date_ordered TIMESTAMP WITH TIME ZONE,
@@ -94,27 +93,26 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
 
       -- Temporary table for receivings
       CREATE TEMP TABLE temp_receivings (
-        receiving_id VARCHAR(50) NOT NULL,
-        po_id VARCHAR(50),
+        receiving_id TEXT NOT NULL,
+        po_id TEXT,
         pid BIGINT NOT NULL,
         qty_each INTEGER,
-        cost_each DECIMAL(10,5),
+        cost_each NUMERIC(14, 4),
         received_by INTEGER,
         received_date TIMESTAMP WITH TIME ZONE,
         receiving_created_date TIMESTAMP WITH TIME ZONE,
         supplier_id INTEGER,
-        status INTEGER,
-        status_text VARCHAR(50),
+        status TEXT,
         PRIMARY KEY (receiving_id, pid)
       );
 
       -- Temporary table for tracking FIFO allocations
       CREATE TEMP TABLE temp_receiving_allocations (
-        po_id VARCHAR(50) NOT NULL,
+        po_id TEXT NOT NULL,
         pid BIGINT NOT NULL,
-        receiving_id VARCHAR(50) NOT NULL,
+        receiving_id TEXT NOT NULL,
         allocated_qty INTEGER NOT NULL,
-        cost_each DECIMAL(10,5) NOT NULL,
+        cost_each NUMERIC(14, 4) NOT NULL,
         received_date TIMESTAMP WITH TIME ZONE NOT NULL,
         received_by INTEGER,
         PRIMARY KEY (po_id, pid, receiving_id)
@@ -123,8 +121,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
       -- Temporary table for employee names
       CREATE TEMP TABLE employee_names (
         employeeid INTEGER PRIMARY KEY,
-        firstname VARCHAR(100),
-        lastname VARCHAR(100)
+        firstname TEXT,
+        lastname TEXT
       );
 
       -- Create indexes for efficient joins
@@ -135,22 +133,22 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
 
     // Map status codes to text values
     const poStatusMap = {
-      0: 'Canceled',
-      1: 'Created',
-      10: 'Ready ESend',
-      11: 'Ordered',
-      12: 'Preordered',
-      13: 'Electronically Sent',
-      15: 'Receiving Started',
-      50: 'Done'
+      0: 'canceled',
+      1: 'created',
+      10: 'electronically_ready_send',
+      11: 'ordered',
+      12: 'preordered',
+      13: 'electronically_sent',
+      15: 'receiving_started',
+      50: 'done'
     };
 
     const receivingStatusMap = {
-      0: 'Canceled',
-      1: 'Created',
-      30: 'Partial Received',
-      40: 'Full Received',
-      50: 'Paid'
+      0: 'canceled',
+      1: 'created',
+      30: 'partial_received',
+      40: 'full_received',
+      50: 'paid'
     };
 
     // Get time window for data retrieval
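
With both maps normalized to snake_case strings, a lookup either hits (poStatusMap[11] yields 'ordered') or returns undefined for an unmapped code, and the || fallbacks at the call sites below substitute 'created', so the TEXT status columns never receive a bare numeric code.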
@@ -281,8 +279,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           vendor: po.vendor || 'Unknown Vendor',
           date: validateDate(po.date_ordered) || validateDate(po.date_created),
           expected_date: validateDate(po.date_estin),
-          status: po.status,
-          status_text: poStatusMap[po.status] || '',
+          status: poStatusMap[po.status] || 'created',
           notes: po.notes || '',
           long_note: po.long_note || '',
           ordered: product.qty_each,
@@ -298,8 +295,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
       const batch = completePOs.slice(i, i + INSERT_BATCH_SIZE);
 
       const placeholders = batch.map((_, idx) => {
-        const base = idx * 16;
-        return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15}, $${base + 16})`;
+        const base = idx * 15;
+        return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
       }).join(',');
 
       const values = batch.flatMap(po => [
@@ -311,7 +308,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         po.date,
         po.expected_date,
         po.status,
-        po.status_text,
         po.notes,
         po.long_note,
         po.ordered,
@@ -323,8 +319,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
 
       await localConnection.query(`
         INSERT INTO temp_purchase_orders (
-          po_id, pid, sku, name, vendor, date, expected_date, status, status_text,
-          notes, long_note, ordered, po_cost_price, supplier_id, date_created, date_ordered
+          po_id, pid, sku, name, vendor, date, expected_date, status, notes, long_note,
+          ordered, po_cost_price, supplier_id, date_created, date_ordered
         )
         VALUES ${placeholders}
         ON CONFLICT (po_id, pid) DO UPDATE SET
@@ -334,7 +330,6 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           date = EXCLUDED.date,
           expected_date = EXCLUDED.expected_date,
           status = EXCLUDED.status,
-          status_text = EXCLUDED.status_text,
           notes = EXCLUDED.notes,
           long_note = EXCLUDED.long_note,
           ordered = EXCLUDED.ordered,
@@ -448,9 +443,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           received_date: validateDate(product.received_date) || validateDate(product.receiving_created_date),
-          receiving_created_date: validateDate(product.receiving_created_date),
           supplier_id: receiving.supplier_id,
-          status: receiving.status,
-          status_text: receivingStatusMap[receiving.status] || '',
+          receiving_created_date: validateDate(product.receiving_created_date),
+          status: receivingStatusMap[receiving.status] || 'created'
         });
       }
 
@@ -459,8 +452,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
       const batch = completeReceivings.slice(i, i + INSERT_BATCH_SIZE);
 
       const placeholders = batch.map((_, idx) => {
-        const base = idx * 11;
-        return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11})`;
+        const base = idx * 10;
+        return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10})`;
       }).join(',');
 
       const values = batch.flatMap(r => [
@@ -473,14 +466,13 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         r.received_date,
         r.receiving_created_date,
         r.supplier_id,
-        r.status,
-        r.status_text
+        r.status
       ]);
 
       await localConnection.query(`
         INSERT INTO temp_receivings (
           receiving_id, po_id, pid, qty_each, cost_each, received_by,
-          received_date, receiving_created_date, supplier_id, status, status_text
+          received_date, receiving_created_date, supplier_id, status
         )
         VALUES ${placeholders}
         ON CONFLICT (receiving_id, pid) DO UPDATE SET
@@ -491,8 +483,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
           received_date = EXCLUDED.received_date,
           receiving_created_date = EXCLUDED.receiving_created_date,
           supplier_id = EXCLUDED.supplier_id,
-          status = EXCLUDED.status,
-          status_text = EXCLUDED.status_text
+          status = EXCLUDED.status
       `, values);
     }
 
@@ -586,11 +577,11 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         name: "Handling standalone receivings",
         query: `
           INSERT INTO temp_purchase_orders (
-            po_id, pid, sku, name, vendor, date, status, status_text,
+            po_id, pid, sku, name, vendor, date, status,
             ordered, po_cost_price, supplier_id, date_created, date_ordered
           )
           SELECT
-            'R' || r.receiving_id as po_id,
+            r.receiving_id::text as po_id,
             r.pid,
             COALESCE(p.sku, 'NO-SKU') as sku,
             COALESCE(p.name, 'Unknown Product') as name,
@@ -600,8 +591,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
               'Unknown Vendor'
             ) as vendor,
             COALESCE(r.received_date, r.receiving_created_date) as date,
-            NULL as status,
-            NULL as status_text,
+            'created' as status,
             NULL as ordered,
             r.cost_each as po_cost_price,
             r.supplier_id,
@@ -626,7 +616,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
             po_id, pid, receiving_id, allocated_qty, cost_each, received_date, received_by
           )
           SELECT
-            'R' || r.receiving_id as po_id,
+            r.receiving_id::text as po_id,
             r.pid,
             r.receiving_id,
             r.qty_each as allocated_qty,
@@ -872,13 +862,13 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
         po.name,
         COALESCE(ca.avg_cost, po.po_cost_price) as cost_price,
         po.po_cost_price,
-        CASE WHEN po.status IS NULL THEN 1 ELSE po.status END as status,
+        COALESCE(po.status, 'created'),
         CASE
-          WHEN rs.total_received IS NULL THEN 1
-          WHEN rs.total_received = 0 THEN 1
-          WHEN rs.total_received < po.ordered THEN 30
-          WHEN rs.total_received >= po.ordered THEN 40
-          ELSE 1
+          WHEN rs.total_received IS NULL THEN 'created'
+          WHEN rs.total_received = 0 THEN 'created'
+          WHEN rs.total_received < po.ordered THEN 'partial_received'
+          WHEN rs.total_received >= po.ordered THEN 'full_received'
+          ELSE 'created'
         END as receiving_status,
         po.notes,
         po.long_note,
 
306
inventory-server/scripts/metrics-new/update-daily-snapshots.js
Normal file
@@ -0,0 +1,306 @@
const path = require('path');
const fs = require('fs');

// Change working directory to script directory
process.chdir(path.dirname(__filename));

// Load environment variables
require('dotenv').config({ path: path.resolve(__dirname, '../..', '.env') });

// Add error handler for uncaught exceptions
process.on('uncaughtException', (error) => {
  console.error('Uncaught Exception:', error);
  process.exit(1);
});

// Add error handler for unhandled promise rejections
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
  process.exit(1);
});

// Load progress module
const progress = require('./utils/progress');

// Store progress functions in global scope to ensure availability
global.formatElapsedTime = progress.formatElapsedTime;
global.estimateRemaining = progress.estimateRemaining;
global.calculateRate = progress.calculateRate;
global.outputProgress = progress.outputProgress;
global.clearProgress = progress.clearProgress;
global.getProgress = progress.getProgress;
global.logError = progress.logError;

// Load database module
const { getConnection, closePool } = require('./utils/db');

// Add cancel handler
let isCancelled = false;

function cancelCalculation() {
  isCancelled = true;
  console.log('Calculation has been cancelled by user');

  // Force-terminate any query that's been running for more than 5 seconds
  try {
    const connection = getConnection();
    connection.then(async (conn) => {
      try {
        // Identify and terminate long-running queries from our application
        await conn.query(`
          SELECT pg_cancel_backend(pid)
          FROM pg_stat_activity
          WHERE query_start < now() - interval '5 seconds'
            AND application_name LIKE '%node%'
            AND query NOT LIKE '%pg_cancel_backend%'
        `);

        // Release connection
        conn.release();
      } catch (err) {
        console.error('Error during force cancellation:', err);
        conn.release();
      }
    }).catch(err => {
      console.error('Could not get connection for cancellation:', err);
    });
  } catch (err) {
    console.error('Failed to terminate running queries:', err);
  }

  return {
    success: true,
    message: 'Calculation has been cancelled'
  };
}
|
||||
// Handle SIGTERM signal for cancellation
|
||||
process.on('SIGTERM', cancelCalculation);
|
||||
|
||||
async function updateDailySnapshots() {
|
||||
let connection;
|
||||
const startTime = Date.now();
|
||||
let calculateHistoryId;
|
||||
|
||||
// Set a maximum execution time (30 minutes)
|
||||
const MAX_EXECUTION_TIME = 30 * 60 * 1000;
|
||||
const timeout = setTimeout(() => {
|
||||
console.error(`Calculation timed out after ${MAX_EXECUTION_TIME/1000} seconds, forcing termination`);
|
||||
// Call cancel and force exit
|
||||
cancelCalculation();
|
||||
process.exit(1);
|
||||
}, MAX_EXECUTION_TIME);
|
||||
|
||||
try {
|
||||
// Read the SQL file
|
||||
const sqlFilePath = path.resolve(__dirname, 'update_daily_snapshots.sql');
|
||||
const sqlQuery = fs.readFileSync(sqlFilePath, 'utf8');
|
||||
|
||||
// Clean up any previously running calculations
|
||||
connection = await getConnection();
|
||||
|
||||
// Ensure the calculate_status table exists and has the correct structure
|
||||
await connection.query(`
|
||||
CREATE TABLE IF NOT EXISTS calculate_status (
|
||||
module_name TEXT PRIMARY KEY,
|
||||
last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`);
|
||||
|
||||
await connection.query(`
|
||||
UPDATE calculate_history
|
||||
SET
|
||||
status = 'cancelled',
|
||||
end_time = NOW(),
|
||||
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
|
||||
error_message = 'Previous calculation was not completed properly'
|
||||
WHERE status = 'running' AND additional_info->>'type' = 'daily_snapshots'
|
||||
`);
|
||||
|
||||
// Create history record for this calculation
|
||||
const historyResult = await connection.query(`
|
||||
INSERT INTO calculate_history (
|
||||
start_time,
|
||||
status,
|
||||
additional_info
|
||||
) VALUES (
|
||||
NOW(),
|
||||
'running',
|
||||
jsonb_build_object(
|
||||
'type', 'daily_snapshots',
|
||||
'sql_file', 'update_daily_snapshots.sql'
|
||||
)
|
||||
) RETURNING id
|
||||
`);
|
||||
calculateHistoryId = historyResult.rows[0].id;
|
||||
|
||||
// Initialize progress
|
||||
global.outputProgress({
|
||||
status: 'running',
|
||||
operation: 'Starting daily snapshots calculation',
|
||||
current: 0,
|
||||
total: 100,
|
||||
elapsed: '0s',
|
||||
remaining: 'Calculating...',
|
||||
rate: 0,
|
||||
percentage: '0',
|
||||
timing: {
|
||||
start_time: new Date(startTime).toISOString(),
|
||||
end_time: new Date().toISOString(),
|
||||
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
});
|
||||
|
||||
// Execute the SQL query
|
||||
global.outputProgress({
|
||||
status: 'running',
|
||||
operation: 'Executing daily snapshots SQL query',
|
||||
current: 25,
|
||||
total: 100,
|
||||
elapsed: global.formatElapsedTime(startTime),
|
||||
remaining: 'Calculating...',
|
||||
rate: 0,
|
||||
percentage: '25',
|
||||
timing: {
|
||||
start_time: new Date(startTime).toISOString(),
|
||||
end_time: new Date().toISOString(),
|
||||
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
});
|
||||
|
||||
await connection.query(sqlQuery);
|
||||
|
||||
// Update calculate_status table
|
||||
await connection.query(`
|
||||
INSERT INTO calculate_status (module_name, last_calculation_timestamp)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT (module_name) DO UPDATE
|
||||
SET last_calculation_timestamp = EXCLUDED.last_calculation_timestamp
|
||||
`, ['daily_snapshots', new Date()]);
|
||||
|
||||
// Update progress to 100%
|
||||
global.outputProgress({
|
||||
status: 'complete',
|
||||
operation: 'Daily snapshots calculation complete',
|
||||
current: 100,
|
||||
total: 100,
|
||||
elapsed: global.formatElapsedTime(startTime),
|
||||
remaining: '0s',
|
||||
rate: 0,
|
||||
percentage: '100',
|
||||
timing: {
|
||||
start_time: new Date(startTime).toISOString(),
|
||||
end_time: new Date().toISOString(),
|
||||
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
});
|
||||
|
||||
// Update history with completion
|
||||
await connection.query(`
|
||||
UPDATE calculate_history
|
||||
SET
|
||||
end_time = NOW(),
|
||||
duration_seconds = $1,
|
||||
status = 'completed'
|
||||
WHERE id = $2
|
||||
`, [Math.round((Date.now() - startTime) / 1000), calculateHistoryId]);
|
||||
|
||||
// Clear progress file on successful completion
|
||||
global.clearProgress();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: 'Daily snapshots calculation completed successfully',
|
||||
duration: Math.round((Date.now() - startTime) / 1000)
|
||||
};
|
||||
} catch (error) {
|
||||
const endTime = Date.now();
|
||||
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
|
||||
|
||||
// Update history with error
|
||||
if (connection && calculateHistoryId) {
|
||||
await connection.query(`
|
||||
UPDATE calculate_history
|
||||
SET
|
||||
end_time = NOW(),
|
||||
duration_seconds = $1,
|
||||
status = $2,
|
||||
error_message = $3
|
||||
WHERE id = $4
|
||||
`, [
|
||||
totalElapsedSeconds,
|
||||
isCancelled ? 'cancelled' : 'failed',
|
||||
error.message,
|
||||
calculateHistoryId
|
||||
]);
|
||||
}
|
||||
|
||||
if (isCancelled) {
|
||||
global.outputProgress({
|
||||
status: 'cancelled',
|
||||
operation: 'Calculation cancelled',
|
||||
current: 50,
|
||||
total: 100,
|
||||
elapsed: global.formatElapsedTime(startTime),
|
||||
remaining: null,
|
||||
rate: 0,
|
||||
percentage: '50',
|
||||
timing: {
|
||||
start_time: new Date(startTime).toISOString(),
|
||||
end_time: new Date().toISOString(),
|
||||
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
});
|
||||
} else {
|
||||
global.outputProgress({
|
||||
status: 'error',
|
||||
operation: 'Error: ' + error.message,
|
||||
current: 50,
|
||||
total: 100,
|
||||
elapsed: global.formatElapsedTime(startTime),
|
||||
remaining: null,
|
||||
rate: 0,
|
||||
percentage: '50',
|
||||
timing: {
|
||||
start_time: new Date(startTime).toISOString(),
|
||||
end_time: new Date().toISOString(),
|
||||
elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
});
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
// Clear the timeout to prevent forced termination
|
||||
clearTimeout(timeout);
|
||||
|
||||
// Always release connection
|
||||
if (connection) {
|
||||
try {
|
||||
connection.release();
|
||||
} catch (err) {
|
||||
console.error('Error in final cleanup:', err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Export as a module with all necessary functions
|
||||
module.exports = {
|
||||
updateDailySnapshots,
|
||||
cancelCalculation,
|
||||
getProgress: global.getProgress
|
||||
};
|
||||
|
||||
// Run directly if called from command line
|
||||
if (require.main === module) {
|
||||
updateDailySnapshots().then(() => {
|
||||
closePool().then(() => {
|
||||
process.exit(0);
|
||||
});
|
||||
}).catch(error => {
|
||||
console.error('Error:', error);
|
||||
closePool().then(() => {
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
}
|
||||
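All three metrics runners follow this same pattern: each records a row in calculate_history before executing its SQL file, then closes the row out as completed, failed, or cancelled. To inspect recent runs of a given script, a query along these lines works against the columns used above (a sketch; swap the type filter to 'periodic_metrics' or 'product_metrics' for the other runners):

SELECT id, start_time, end_time, duration_seconds, status, error_message
FROM calculate_history
WHERE additional_info->>'type' = 'daily_snapshots'
ORDER BY start_time DESC
LIMIT 10;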
306
inventory-server/scripts/metrics-new/update-periodic-metrics.js
Normal file
@@ -0,0 +1,306 @@
const path = require('path');
const fs = require('fs');

// Change working directory to script directory
process.chdir(path.dirname(__filename));

// Load environment variables
require('dotenv').config({ path: path.resolve(__dirname, '../..', '.env') });

// Add error handler for uncaught exceptions
process.on('uncaughtException', (error) => {
  console.error('Uncaught Exception:', error);
  process.exit(1);
});

// Add error handler for unhandled promise rejections
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
  process.exit(1);
});

// Load progress module
const progress = require('./utils/progress');

// Store progress functions in global scope to ensure availability
global.formatElapsedTime = progress.formatElapsedTime;
global.estimateRemaining = progress.estimateRemaining;
global.calculateRate = progress.calculateRate;
global.outputProgress = progress.outputProgress;
global.clearProgress = progress.clearProgress;
global.getProgress = progress.getProgress;
global.logError = progress.logError;

// Load database module
const { getConnection, closePool } = require('./utils/db');

// Add cancel handler
let isCancelled = false;

function cancelCalculation() {
  isCancelled = true;
  console.log('Calculation has been cancelled by user');

  // Force-terminate any query that's been running for more than 5 seconds
  try {
    const connection = getConnection();
    connection.then(async (conn) => {
      try {
        // Identify and terminate long-running queries from our application
        await conn.query(`
          SELECT pg_cancel_backend(pid)
          FROM pg_stat_activity
          WHERE query_start < now() - interval '5 seconds'
            AND application_name LIKE '%node%'
            AND query NOT LIKE '%pg_cancel_backend%'
        `);

        // Release connection
        conn.release();
      } catch (err) {
        console.error('Error during force cancellation:', err);
        conn.release();
      }
    }).catch(err => {
      console.error('Could not get connection for cancellation:', err);
    });
  } catch (err) {
    console.error('Failed to terminate running queries:', err);
  }

  return {
    success: true,
    message: 'Calculation has been cancelled'
  };
}

// Handle SIGTERM signal for cancellation
process.on('SIGTERM', cancelCalculation);

async function updatePeriodicMetrics() {
  let connection;
  const startTime = Date.now();
  let calculateHistoryId;

  // Set a maximum execution time (30 minutes)
  const MAX_EXECUTION_TIME = 30 * 60 * 1000;
  const timeout = setTimeout(() => {
    console.error(`Calculation timed out after ${MAX_EXECUTION_TIME/1000} seconds, forcing termination`);
    // Call cancel and force exit
    cancelCalculation();
    process.exit(1);
  }, MAX_EXECUTION_TIME);

  try {
    // Read the SQL file
    const sqlFilePath = path.resolve(__dirname, 'update_periodic_metrics.sql');
    const sqlQuery = fs.readFileSync(sqlFilePath, 'utf8');

    // Clean up any previously running calculations
    connection = await getConnection();

    // Ensure the calculate_status table exists and has the correct structure
    await connection.query(`
      CREATE TABLE IF NOT EXISTS calculate_status (
        module_name TEXT PRIMARY KEY,
        last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
      )
    `);

    await connection.query(`
      UPDATE calculate_history
      SET
        status = 'cancelled',
        end_time = NOW(),
        duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
        error_message = 'Previous calculation was not completed properly'
      WHERE status = 'running' AND additional_info->>'type' = 'periodic_metrics'
    `);

    // Create history record for this calculation
    const historyResult = await connection.query(`
      INSERT INTO calculate_history (
        start_time,
        status,
        additional_info
      ) VALUES (
        NOW(),
        'running',
        jsonb_build_object(
          'type', 'periodic_metrics',
          'sql_file', 'update_periodic_metrics.sql'
        )
      ) RETURNING id
    `);
    calculateHistoryId = historyResult.rows[0].id;

    // Initialize progress
    global.outputProgress({
      status: 'running',
      operation: 'Starting periodic metrics calculation',
      current: 0,
      total: 100,
      elapsed: '0s',
      remaining: 'Calculating...',
      rate: 0,
      percentage: '0',
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });

    // Execute the SQL query
    global.outputProgress({
      status: 'running',
      operation: 'Executing periodic metrics SQL query',
      current: 25,
      total: 100,
      elapsed: global.formatElapsedTime(startTime),
      remaining: 'Calculating...',
      rate: 0,
      percentage: '25',
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });

    await connection.query(sqlQuery);

    // Update calculate_status table
    await connection.query(`
      INSERT INTO calculate_status (module_name, last_calculation_timestamp)
      VALUES ($1, $2)
      ON CONFLICT (module_name) DO UPDATE
      SET last_calculation_timestamp = EXCLUDED.last_calculation_timestamp
    `, ['periodic_metrics', new Date()]);

    // Update progress to 100%
    global.outputProgress({
      status: 'complete',
      operation: 'Periodic metrics calculation complete',
      current: 100,
      total: 100,
      elapsed: global.formatElapsedTime(startTime),
      remaining: '0s',
      rate: 0,
      percentage: '100',
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });

    // Update history with completion
    await connection.query(`
      UPDATE calculate_history
      SET
        end_time = NOW(),
        duration_seconds = $1,
        status = 'completed'
      WHERE id = $2
    `, [Math.round((Date.now() - startTime) / 1000), calculateHistoryId]);

    // Clear progress file on successful completion
    global.clearProgress();

    return {
      success: true,
      message: 'Periodic metrics calculation completed successfully',
      duration: Math.round((Date.now() - startTime) / 1000)
    };
  } catch (error) {
    const endTime = Date.now();
    const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);

    // Update history with error
    if (connection && calculateHistoryId) {
      await connection.query(`
        UPDATE calculate_history
        SET
          end_time = NOW(),
          duration_seconds = $1,
          status = $2,
          error_message = $3
        WHERE id = $4
      `, [
        totalElapsedSeconds,
        isCancelled ? 'cancelled' : 'failed',
        error.message,
        calculateHistoryId
      ]);
    }

    if (isCancelled) {
      global.outputProgress({
        status: 'cancelled',
        operation: 'Calculation cancelled',
        current: 50,
        total: 100,
        elapsed: global.formatElapsedTime(startTime),
        remaining: null,
        rate: 0,
        percentage: '50',
        timing: {
          start_time: new Date(startTime).toISOString(),
          end_time: new Date().toISOString(),
          elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
        }
      });
    } else {
      global.outputProgress({
        status: 'error',
        operation: 'Error: ' + error.message,
        current: 50,
        total: 100,
        elapsed: global.formatElapsedTime(startTime),
        remaining: null,
        rate: 0,
        percentage: '50',
        timing: {
          start_time: new Date(startTime).toISOString(),
          end_time: new Date().toISOString(),
          elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
        }
      });
    }
    throw error;
  } finally {
    // Clear the timeout to prevent forced termination
    clearTimeout(timeout);

    // Always release connection
    if (connection) {
      try {
        connection.release();
      } catch (err) {
        console.error('Error in final cleanup:', err);
      }
    }
  }
}

// Export as a module with all necessary functions
module.exports = {
  updatePeriodicMetrics,
  cancelCalculation,
  getProgress: global.getProgress
};

// Run directly if called from command line
if (require.main === module) {
  updatePeriodicMetrics().then(() => {
    closePool().then(() => {
      process.exit(0);
    });
  }).catch(error => {
    console.error('Error:', error);
    closePool().then(() => {
      process.exit(1);
    });
  });
}
306
inventory-server/scripts/metrics-new/update-product-metrics.js
Normal file
@@ -0,0 +1,306 @@
const path = require('path');
const fs = require('fs');

// Change working directory to script directory
process.chdir(path.dirname(__filename));

// Load environment variables
require('dotenv').config({ path: path.resolve(__dirname, '../..', '.env') });

// Add error handler for uncaught exceptions
process.on('uncaughtException', (error) => {
  console.error('Uncaught Exception:', error);
  process.exit(1);
});

// Add error handler for unhandled promise rejections
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
  process.exit(1);
});

// Load progress module
const progress = require('./utils/progress');

// Store progress functions in global scope to ensure availability
global.formatElapsedTime = progress.formatElapsedTime;
global.estimateRemaining = progress.estimateRemaining;
global.calculateRate = progress.calculateRate;
global.outputProgress = progress.outputProgress;
global.clearProgress = progress.clearProgress;
global.getProgress = progress.getProgress;
global.logError = progress.logError;

// Load database module
const { getConnection, closePool } = require('./utils/db');

// Add cancel handler
let isCancelled = false;

function cancelCalculation() {
  isCancelled = true;
  console.log('Calculation has been cancelled by user');

  // Force-terminate any query that's been running for more than 5 seconds
  try {
    const connection = getConnection();
    connection.then(async (conn) => {
      try {
        // Identify and terminate long-running queries from our application
        await conn.query(`
          SELECT pg_cancel_backend(pid)
          FROM pg_stat_activity
          WHERE query_start < now() - interval '5 seconds'
            AND application_name LIKE '%node%'
            AND query NOT LIKE '%pg_cancel_backend%'
        `);

        // Release connection
        conn.release();
      } catch (err) {
        console.error('Error during force cancellation:', err);
        conn.release();
      }
    }).catch(err => {
      console.error('Could not get connection for cancellation:', err);
    });
  } catch (err) {
    console.error('Failed to terminate running queries:', err);
  }

  return {
    success: true,
    message: 'Calculation has been cancelled'
  };
}

// Handle SIGTERM signal for cancellation
process.on('SIGTERM', cancelCalculation);

async function updateProductMetrics() {
  let connection;
  const startTime = Date.now();
  let calculateHistoryId;

  // Set a maximum execution time (30 minutes)
  const MAX_EXECUTION_TIME = 30 * 60 * 1000;
  const timeout = setTimeout(() => {
    console.error(`Calculation timed out after ${MAX_EXECUTION_TIME/1000} seconds, forcing termination`);
    // Call cancel and force exit
    cancelCalculation();
    process.exit(1);
  }, MAX_EXECUTION_TIME);

  try {
    // Read the SQL file
    const sqlFilePath = path.resolve(__dirname, 'update_product_metrics.sql');
    const sqlQuery = fs.readFileSync(sqlFilePath, 'utf8');

    // Clean up any previously running calculations
    connection = await getConnection();

    // Ensure the calculate_status table exists and has the correct structure
    await connection.query(`
      CREATE TABLE IF NOT EXISTS calculate_status (
        module_name TEXT PRIMARY KEY,
        last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
      )
    `);

    await connection.query(`
      UPDATE calculate_history
      SET
        status = 'cancelled',
        end_time = NOW(),
        duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
        error_message = 'Previous calculation was not completed properly'
      WHERE status = 'running' AND additional_info->>'type' = 'product_metrics'
    `);

    // Create history record for this calculation
    const historyResult = await connection.query(`
      INSERT INTO calculate_history (
        start_time,
        status,
        additional_info
      ) VALUES (
        NOW(),
        'running',
        jsonb_build_object(
          'type', 'product_metrics',
          'sql_file', 'update_product_metrics.sql'
        )
      ) RETURNING id
    `);
    calculateHistoryId = historyResult.rows[0].id;

    // Initialize progress
    global.outputProgress({
      status: 'running',
      operation: 'Starting product metrics calculation',
      current: 0,
      total: 100,
      elapsed: '0s',
      remaining: 'Calculating...',
      rate: 0,
      percentage: '0',
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });

    // Execute the SQL query
    global.outputProgress({
      status: 'running',
      operation: 'Executing product metrics SQL query',
      current: 25,
      total: 100,
      elapsed: global.formatElapsedTime(startTime),
      remaining: 'Calculating...',
      rate: 0,
      percentage: '25',
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });

    await connection.query(sqlQuery);

    // Update calculate_status table
    await connection.query(`
      INSERT INTO calculate_status (module_name, last_calculation_timestamp)
      VALUES ($1, $2)
      ON CONFLICT (module_name) DO UPDATE
      SET last_calculation_timestamp = EXCLUDED.last_calculation_timestamp
    `, ['product_metrics', new Date()]);

    // Update progress to 100%
    global.outputProgress({
      status: 'complete',
      operation: 'Product metrics calculation complete',
      current: 100,
      total: 100,
      elapsed: global.formatElapsedTime(startTime),
      remaining: '0s',
      rate: 0,
      percentage: '100',
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });

    // Update history with completion
    await connection.query(`
      UPDATE calculate_history
      SET
        end_time = NOW(),
        duration_seconds = $1,
        status = 'completed'
      WHERE id = $2
    `, [Math.round((Date.now() - startTime) / 1000), calculateHistoryId]);

    // Clear progress file on successful completion
    global.clearProgress();

    return {
      success: true,
      message: 'Product metrics calculation completed successfully',
      duration: Math.round((Date.now() - startTime) / 1000)
    };
  } catch (error) {
    const endTime = Date.now();
    const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);

    // Update history with error
    if (connection && calculateHistoryId) {
      await connection.query(`
        UPDATE calculate_history
        SET
          end_time = NOW(),
          duration_seconds = $1,
          status = $2,
          error_message = $3
        WHERE id = $4
      `, [
        totalElapsedSeconds,
        isCancelled ? 'cancelled' : 'failed',
        error.message,
        calculateHistoryId
      ]);
    }

    if (isCancelled) {
      global.outputProgress({
        status: 'cancelled',
        operation: 'Calculation cancelled',
        current: 50,
        total: 100,
        elapsed: global.formatElapsedTime(startTime),
        remaining: null,
        rate: 0,
        percentage: '50',
        timing: {
          start_time: new Date(startTime).toISOString(),
          end_time: new Date().toISOString(),
          elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
        }
      });
    } else {
      global.outputProgress({
        status: 'error',
        operation: 'Error: ' + error.message,
        current: 50,
        total: 100,
        elapsed: global.formatElapsedTime(startTime),
        remaining: null,
        rate: 0,
        percentage: '50',
        timing: {
          start_time: new Date(startTime).toISOString(),
          end_time: new Date().toISOString(),
          elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
        }
      });
    }
    throw error;
  } finally {
    // Clear the timeout to prevent forced termination
    clearTimeout(timeout);

    // Always release connection
    if (connection) {
      try {
        connection.release();
      } catch (err) {
        console.error('Error in final cleanup:', err);
      }
    }
  }
}

// Export as a module with all necessary functions
module.exports = {
  updateProductMetrics,
  cancelCalculation,
  getProgress: global.getProgress
};

// Run directly if called from command line
if (require.main === module) {
  updateProductMetrics().then(() => {
    closePool().then(() => {
      process.exit(0);
    });
  }).catch(error => {
    console.error('Error:', error);
    closePool().then(() => {
      process.exit(1);
    });
  });
}
142
inventory-server/scripts/metrics-new/update_daily_snapshots.sql
Normal file
@@ -0,0 +1,142 @@
-- Description: Calculates and updates daily aggregated product data for the current day.
--              Uses UPSERT (INSERT ON CONFLICT UPDATE) for idempotency.
-- Dependencies: Core import tables (products, orders, purchase_orders), calculate_status table.
-- Frequency: Hourly (Run ~5-10 minutes after hourly data import completes).

DO $$
DECLARE
    _module_name TEXT := 'daily_snapshots';
    _start_time TIMESTAMPTZ := clock_timestamp(); -- Time execution started
    _last_calc_time TIMESTAMPTZ;
    _target_date DATE := CURRENT_DATE; -- Always recalculate today for simplicity with hourly runs
BEGIN
    -- Get the timestamp before the last successful run of this module
    SELECT last_calculation_timestamp INTO _last_calc_time
    FROM public.calculate_status
    WHERE module_name = _module_name;

    RAISE NOTICE 'Running % for date %. Start Time: %', _module_name, _target_date, _start_time;

    -- Use CTEs to aggregate data for the target date
    WITH SalesData AS (
        SELECT
            p.pid,
            p.sku,
            -- Aggregate Sales (Quantity > 0, Status not Canceled/Returned)
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.quantity ELSE 0 END), 0) AS units_sold,
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.price * o.quantity ELSE 0 END), 0.00) AS gross_revenue_unadjusted, -- Before discount
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN o.discount ELSE 0 END), 0.00) AS discounts,
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN COALESCE(o.costeach, p.landing_cost_price, p.cost_price) * o.quantity ELSE 0 END), 0.00) AS cogs,
            COALESCE(SUM(CASE WHEN o.quantity > 0 AND COALESCE(o.status, 'pending') NOT IN ('canceled', 'returned') THEN p.regular_price * o.quantity ELSE 0 END), 0.00) AS gross_regular_revenue, -- Use current regular price for simplicity here

            -- Aggregate Returns (Quantity < 0 or Status = Returned)
            COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN ABS(o.quantity) ELSE 0 END), 0) AS units_returned,
            COALESCE(SUM(CASE WHEN o.quantity < 0 OR COALESCE(o.status, 'pending') = 'returned' THEN o.price * ABS(o.quantity) ELSE 0 END), 0.00) AS returns_revenue
        FROM public.products p -- Start from products to include those with no orders today
        LEFT JOIN public.orders o
               ON p.pid = o.pid
              AND o.date >= _target_date -- Filter orders for the target date
              AND o.date < _target_date + INTERVAL '1 day'
        GROUP BY p.pid, p.sku
    ),
    ReceivingData AS (
        SELECT
            po.pid,
            COALESCE(SUM((rh.item->>'qty')::numeric), 0) AS units_received,
            COALESCE(SUM((rh.item->>'qty')::numeric * COALESCE((rh.item->>'cost')::numeric, po.cost_price)), 0.00) AS cost_received
        FROM public.purchase_orders po
        CROSS JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item)
        WHERE (rh.item->>'received_at')::date = _target_date -- Filter receipts for the target date
        GROUP BY po.pid
    ),
    CurrentStock AS (
        -- Select current stock values directly from products table
        SELECT
            pid,
            stock_quantity,
            COALESCE(landing_cost_price, cost_price, 0.00) as effective_cost_price,
            COALESCE(price, 0.00) as current_price,
            COALESCE(regular_price, 0.00) as current_regular_price
        FROM public.products
    )
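    -- Illustrative note (not part of the original script): ReceivingData above
    -- unpacks the receiving_history JSONB array on purchase_orders. Given an
    -- element such as {"qty": 5, "cost": 2.10, "received_at": "2024-01-02"}
    -- (sample values only), jsonb_array_elements yields one row per receipt,
    -- so units_received sums qty and cost_received sums qty * cost (falling
    -- back to the PO's cost_price when an element carries no cost) for
    -- receipts dated _target_date.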
    -- Upsert into the daily snapshots table
    INSERT INTO public.daily_product_snapshots (
        snapshot_date,
        pid,
        sku,
        eod_stock_quantity,
        eod_stock_cost,
        eod_stock_retail,
        eod_stock_gross,
        stockout_flag,
        units_sold,
        units_returned,
        gross_revenue,
        discounts,
        returns_revenue,
        net_revenue,
        cogs,
        gross_regular_revenue,
        profit,
        units_received,
        cost_received,
        calculation_timestamp
    )
    SELECT
        _target_date AS snapshot_date,
        p.pid,
        p.sku,
        -- Inventory Metrics (Using CurrentStock)
        cs.stock_quantity AS eod_stock_quantity,
        cs.stock_quantity * cs.effective_cost_price AS eod_stock_cost,
        cs.stock_quantity * cs.current_price AS eod_stock_retail,
        cs.stock_quantity * cs.current_regular_price AS eod_stock_gross,
        (cs.stock_quantity <= 0) AS stockout_flag,
        -- Sales Metrics (From SalesData)
        COALESCE(sd.units_sold, 0),
        COALESCE(sd.units_returned, 0),
        COALESCE(sd.gross_revenue_unadjusted, 0.00),
        COALESCE(sd.discounts, 0.00),
        COALESCE(sd.returns_revenue, 0.00),
        COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00) AS net_revenue,
        COALESCE(sd.cogs, 0.00),
        COALESCE(sd.gross_regular_revenue, 0.00),
        (COALESCE(sd.gross_revenue_unadjusted, 0.00) - COALESCE(sd.discounts, 0.00)) - COALESCE(sd.cogs, 0.00) AS profit, -- Basic profit: Net Revenue - COGS
        -- Receiving Metrics (From ReceivingData)
        COALESCE(rd.units_received, 0),
        COALESCE(rd.cost_received, 0.00),
        _start_time -- Timestamp of this calculation run
    FROM public.products p
    LEFT JOIN CurrentStock cs ON p.pid = cs.pid
    LEFT JOIN SalesData sd ON p.pid = sd.pid
    LEFT JOIN ReceivingData rd ON p.pid = rd.pid
    WHERE p.pid IS NOT NULL -- Ensure we only insert for existing products

    ON CONFLICT (snapshot_date, pid) DO UPDATE SET
        sku = EXCLUDED.sku,
        eod_stock_quantity = EXCLUDED.eod_stock_quantity,
        eod_stock_cost = EXCLUDED.eod_stock_cost,
        eod_stock_retail = EXCLUDED.eod_stock_retail,
        eod_stock_gross = EXCLUDED.eod_stock_gross,
        stockout_flag = EXCLUDED.stockout_flag,
        units_sold = EXCLUDED.units_sold,
        units_returned = EXCLUDED.units_returned,
        gross_revenue = EXCLUDED.gross_revenue,
        discounts = EXCLUDED.discounts,
        returns_revenue = EXCLUDED.returns_revenue,
        net_revenue = EXCLUDED.net_revenue,
        cogs = EXCLUDED.cogs,
        gross_regular_revenue = EXCLUDED.gross_regular_revenue,
        profit = EXCLUDED.profit,
        units_received = EXCLUDED.units_received,
        cost_received = EXCLUDED.cost_received,
        calculation_timestamp = EXCLUDED.calculation_timestamp; -- Use the timestamp from this run

    -- Update the status table with the timestamp from the START of this run
    UPDATE public.calculate_status
    SET last_calculation_timestamp = _start_time
    WHERE module_name = _module_name;

    RAISE NOTICE 'Finished % for date %. Duration: %', _module_name, _target_date, clock_timestamp() - _start_time;

END $$;
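Because the statement above upserts on (snapshot_date, pid), hourly reruns refresh today's rows rather than duplicating them. A quick sanity check after a run might aggregate the day's snapshot (a sketch over the columns defined above):

SELECT snapshot_date,
       COUNT(*)         AS products,
       SUM(units_sold)  AS units_sold,
       SUM(net_revenue) AS net_revenue,
       SUM(profit)      AS profit
FROM public.daily_product_snapshots
WHERE snapshot_date = CURRENT_DATE
GROUP BY snapshot_date;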
114
inventory-server/scripts/metrics-new/update_periodic_metrics.sql
Normal file
@@ -0,0 +1,114 @@
-- Description: Calculates metrics that don't need hourly updates, like ABC class
--              and average lead time.
-- Dependencies: product_metrics, purchase_orders, settings_global, calculate_status.
-- Frequency: Daily or Weekly (e.g., run via cron job overnight).

DO $$
DECLARE
    _module_name TEXT := 'periodic_metrics';
    _start_time TIMESTAMPTZ := clock_timestamp();
    _last_calc_time TIMESTAMPTZ;
    _abc_basis VARCHAR;
    _abc_period INT;
    _threshold_a NUMERIC;
    _threshold_b NUMERIC;
BEGIN
    -- Get the timestamp before the last successful run of this module
    SELECT last_calculation_timestamp INTO _last_calc_time
    FROM public.calculate_status
    WHERE module_name = _module_name;

    RAISE NOTICE 'Running % module. Start Time: %', _module_name, _start_time;

    -- 1. Calculate Average Lead Time
    RAISE NOTICE 'Calculating Average Lead Time...';
    WITH LeadTimes AS (
        SELECT
            pid,
            AVG(GREATEST(1, (last_received_date::date - date::date))) AS avg_days -- Use GREATEST(1,...) to avoid 0 or negative days
        FROM public.purchase_orders
        WHERE status = 'received' -- Or potentially 'full_received' if using that status
          AND last_received_date IS NOT NULL
          AND date IS NOT NULL
          AND last_received_date >= date -- Ensure received date is not before order date
        GROUP BY pid
    )
    UPDATE public.product_metrics pm
    SET avg_lead_time_days = lt.avg_days::int
    FROM LeadTimes lt
    WHERE pm.pid = lt.pid
      AND pm.avg_lead_time_days IS DISTINCT FROM lt.avg_days::int; -- Only update if changed
    RAISE NOTICE 'Finished Average Lead Time calculation.';


    -- 2. Calculate ABC Classification
    RAISE NOTICE 'Calculating ABC Classification...';
    -- Get ABC settings
    SELECT setting_value INTO _abc_basis FROM public.settings_global WHERE setting_key = 'abc_calculation_basis' LIMIT 1;
    SELECT setting_value::numeric INTO _threshold_a FROM public.settings_global WHERE setting_key = 'abc_revenue_threshold_a' LIMIT 1;
    SELECT setting_value::numeric INTO _threshold_b FROM public.settings_global WHERE setting_key = 'abc_revenue_threshold_b' LIMIT 1;
    _abc_basis := COALESCE(_abc_basis, 'revenue_30d'); -- Default basis
    _threshold_a := COALESCE(_threshold_a, 0.80);
    _threshold_b := COALESCE(_threshold_b, 0.95);

    RAISE NOTICE 'Using ABC Basis: %, Threshold A: %, Threshold B: %', _abc_basis, _threshold_a, _threshold_b;

    WITH RankedProducts AS (
        SELECT
            pid,
            -- Dynamically select the metric based on setting
            CASE _abc_basis
                WHEN 'sales_30d' THEN COALESCE(sales_30d, 0)
                WHEN 'lifetime_revenue' THEN COALESCE(lifetime_revenue, 0)::numeric -- Cast needed if different type
                ELSE COALESCE(revenue_30d, 0) -- Default to revenue_30d
            END AS metric_value
        FROM public.product_metrics
        WHERE is_replenishable = TRUE -- Typically only classify replenishable items
    ),
    Cumulative AS (
        SELECT
            pid,
            metric_value,
            SUM(metric_value) OVER (ORDER BY metric_value DESC NULLS LAST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as cumulative_metric,
            SUM(metric_value) OVER () as total_metric
        FROM RankedProducts
        WHERE metric_value > 0 -- Exclude items with no contribution
    )
    UPDATE public.product_metrics pm
    SET abc_class =
        CASE
            WHEN c.cumulative_metric / NULLIF(c.total_metric, 0) <= _threshold_a THEN 'A'
            WHEN c.cumulative_metric / NULLIF(c.total_metric, 0) <= _threshold_b THEN 'B'
            ELSE 'C'
        END
    FROM Cumulative c
    WHERE pm.pid = c.pid
      AND pm.abc_class IS DISTINCT FROM ( -- Only update if changed
        CASE
            WHEN c.cumulative_metric / NULLIF(c.total_metric, 0) <= _threshold_a THEN 'A'
            WHEN c.cumulative_metric / NULLIF(c.total_metric, 0) <= _threshold_b THEN 'B'
            ELSE 'C'
        END);

    -- Set non-contributing or non-replenishable to 'C' or NULL if preferred
    UPDATE public.product_metrics
    SET abc_class = 'C' -- Or NULL
    WHERE abc_class IS NULL AND is_replenishable = TRUE; -- Catch those with 0 metric value

    UPDATE public.product_metrics
    SET abc_class = NULL -- Or 'N/A'?
    WHERE is_replenishable = FALSE AND abc_class IS NOT NULL; -- Unclassify non-replenishable items


    RAISE NOTICE 'Finished ABC Classification calculation.';

    -- Add other periodic calculations here if needed (e.g., recalculating first/last dates)

    -- Update the status table with the timestamp from the START of this run
    UPDATE public.calculate_status
    SET last_calculation_timestamp = _start_time
    WHERE module_name = _module_name;

    RAISE NOTICE 'Finished % module. Duration: %', _module_name, clock_timestamp() - _start_time;

END $$;
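The ABC pass ranks products by the configured metric and classifies by cumulative share: with the default thresholds, a product is 'A' while the running share is at or below 0.80, 'B' up to 0.95, and 'C' beyond that. To preview the ranking without updating anything, a query like this works against product_metrics (a sketch using the default revenue_30d basis):

SELECT pid,
       revenue_30d,
       SUM(COALESCE(revenue_30d, 0)) OVER (ORDER BY revenue_30d DESC)
         / NULLIF(SUM(COALESCE(revenue_30d, 0)) OVER (), 0) AS cumulative_share
FROM public.product_metrics
WHERE is_replenishable = TRUE AND COALESCE(revenue_30d, 0) > 0
ORDER BY revenue_30d DESC;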
310
inventory-server/scripts/metrics-new/update_product_metrics.sql
Normal file
@@ -0,0 +1,310 @@
-- Description: Calculates and updates the main product_metrics table based on current data
--              and aggregated daily snapshots. Uses UPSERT for idempotency.
-- Dependencies: Core import tables, daily_product_snapshots, configuration tables, calculate_status.
-- Frequency: Hourly (Run AFTER update_daily_snapshots.sql completes).

DO $$
DECLARE
    _module_name TEXT := 'product_metrics';
    _start_time TIMESTAMPTZ := clock_timestamp();
    _last_calc_time TIMESTAMPTZ;
    _current_date DATE := CURRENT_DATE;
BEGIN
    -- Get the timestamp before the last successful run of this module
    SELECT last_calculation_timestamp INTO _last_calc_time
    FROM public.calculate_status
    WHERE module_name = _module_name;

    RAISE NOTICE 'Running % module. Start Time: %', _module_name, _start_time;

    -- Use CTEs to gather all necessary information
    WITH CurrentInfo AS (
        SELECT
            p.pid,
            p.sku,
            p.title,
            p.brand,
            p.vendor,
            COALESCE(p.image_175, p.image) as image_url,
            p.visible as is_visible,
            p.replenishable as is_replenishable,
            COALESCE(p.price, 0.00) as current_price,
            COALESCE(p.regular_price, 0.00) as current_regular_price,
            COALESCE(p.cost_price, 0.00) as current_cost_price,
            COALESCE(p.landing_cost_price, p.cost_price, 0.00) as current_effective_cost, -- Use landing if available, else cost
            p.stock_quantity as current_stock,
            p.created_at,
            p.first_received,
            p.date_last_sold,
            p.moq,
            p.uom -- Assuming UOM logic is handled elsewhere or simple (e.g., 1=each)
        FROM public.products p
    ),
    OnOrderInfo AS (
        SELECT
            pid,
            COALESCE(SUM(ordered - received), 0) AS on_order_qty,
            COALESCE(SUM((ordered - received) * cost_price), 0.00) AS on_order_cost,
            MIN(expected_date) AS earliest_expected_date
        FROM public.purchase_orders
        WHERE status IN ('open', 'partially_received', 'ordered', 'preordered', 'receiving_started', 'electronically_sent', 'electronically_ready_send') -- Adjust based on your status workflow representing active POs not fully received
          AND (ordered - received) > 0
        GROUP BY pid
    ),
    HistoricalDates AS (
        -- Note: Calculating these MIN/MAX values hourly can be slow on large tables.
        -- Consider calculating periodically or storing on products if import can populate them.
        SELECT
            p.pid,
            MIN(o.date)::date AS date_first_sold,
            MAX(o.date)::date AS max_order_date, -- Use MAX for potential recalc of date_last_sold
            MIN(rh.first_receipt_date) AS date_first_received_calc,
            MAX(rh.last_receipt_date) AS date_last_received_calc
        FROM public.products p
        LEFT JOIN public.orders o ON p.pid = o.pid AND o.quantity > 0 AND o.status NOT IN ('canceled', 'returned')
        LEFT JOIN (
            SELECT
                po.pid,
                MIN((rh.item->>'received_at')::date) as first_receipt_date,
                MAX((rh.item->>'received_at')::date) as last_receipt_date
            FROM public.purchase_orders po
            CROSS JOIN LATERAL jsonb_array_elements(po.receiving_history) AS rh(item)
            WHERE jsonb_typeof(po.receiving_history) = 'array' AND jsonb_array_length(po.receiving_history) > 0
            GROUP BY po.pid
        ) rh ON p.pid = rh.pid
        GROUP BY p.pid
    ),
    SnapshotAggregates AS (
        SELECT
            pid,
            -- Rolling periods (ensure dates are inclusive/exclusive as needed)
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN units_sold ELSE 0 END) AS sales_7d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS revenue_7d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN units_sold ELSE 0 END) AS sales_14d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '13 days' THEN net_revenue ELSE 0 END) AS revenue_14d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_sold ELSE 0 END) AS sales_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS revenue_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cogs ELSE 0 END) AS cogs_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN profit ELSE 0 END) AS profit_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_returned ELSE 0 END) AS returns_units_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN returns_revenue ELSE 0 END) AS returns_revenue_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN discounts ELSE 0 END) AS discounts_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_revenue ELSE 0 END) AS gross_revenue_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN gross_regular_revenue ELSE 0 END) AS gross_regular_revenue_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' AND stockout_flag THEN 1 ELSE 0 END) AS stockout_days_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN units_sold ELSE 0 END) AS sales_365d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '364 days' THEN net_revenue ELSE 0 END) AS revenue_365d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN units_received ELSE 0 END) AS received_qty_30d,
            SUM(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN cost_received ELSE 0 END) AS received_cost_30d,

            -- Averages (check for NULLIF 0 days in period if filtering dates)
            AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_quantity END) AS avg_stock_units_30d,
            AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_cost END) AS avg_stock_cost_30d,
            AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_retail END) AS avg_stock_retail_30d,
            AVG(CASE WHEN snapshot_date >= _current_date - INTERVAL '29 days' THEN eod_stock_gross END) AS avg_stock_gross_30d,

            -- Lifetime
            SUM(units_sold) AS lifetime_sales,
            SUM(net_revenue) AS lifetime_revenue,

            -- Yesterday
            SUM(CASE WHEN snapshot_date = _current_date - INTERVAL '1 day' THEN units_sold ELSE 0 END) as yesterday_sales

        FROM public.daily_product_snapshots
        WHERE snapshot_date <= _current_date -- Include today's snapshot
          AND snapshot_date >= _current_date - INTERVAL '365 days' -- Limit history scan slightly
        GROUP BY pid
    ),
    FirstPeriodMetrics AS (
        SELECT
            pid,
            date_first_sold,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN units_sold ELSE 0 END) AS first_7_days_sales,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '6 days' THEN net_revenue ELSE 0 END) AS first_7_days_revenue,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN units_sold ELSE 0 END) AS first_30_days_sales,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '29 days' THEN net_revenue ELSE 0 END) AS first_30_days_revenue,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN units_sold ELSE 0 END) AS first_60_days_sales,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '59 days' THEN net_revenue ELSE 0 END) AS first_60_days_revenue,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN units_sold ELSE 0 END) AS first_90_days_sales,
            SUM(CASE WHEN snapshot_date BETWEEN date_first_sold AND date_first_sold + INTERVAL '89 days' THEN net_revenue ELSE 0 END) AS first_90_days_revenue
        FROM public.daily_product_snapshots ds
        JOIN HistoricalDates hd USING(pid)
        WHERE date_first_sold IS NOT NULL
          AND snapshot_date >= date_first_sold
          AND snapshot_date <= date_first_sold + INTERVAL '90 days' -- Limit scan range
        GROUP BY pid, date_first_sold
    ),
    Settings AS (
        SELECT
            p.pid,
            COALESCE(sp.lead_time_days, sv.default_lead_time_days, (SELECT setting_value FROM settings_global WHERE setting_key = 'default_lead_time_days')::int, 14) AS effective_lead_time,
            COALESCE(sp.days_of_stock, sv.default_days_of_stock, (SELECT setting_value FROM settings_global WHERE setting_key = 'default_days_of_stock')::int, 30) AS effective_days_of_stock,
            COALESCE(sp.safety_stock, 0) AS effective_safety_stock, -- Assuming safety stock is units, not days from global for now
            COALESCE(sp.exclude_from_forecast, FALSE) AS exclude_forecast
        FROM public.products p
        LEFT JOIN public.settings_product sp ON p.pid = sp.pid
        LEFT JOIN public.settings_vendor sv ON p.vendor = sv.vendor
    )
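    -- Illustrative note (not part of the original script): the COALESCE chains in
    -- Settings resolve each value product -> vendor -> global -> hard-coded
    -- fallback. E.g., a product with no settings_product row whose vendor sets
    -- default_lead_time_days = 21 gets effective_lead_time = 21; with no vendor
    -- row either, the global 'default_lead_time_days' setting ('14' by default)
    -- is cast to int and used.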
    -- Final UPSERT into product_metrics
    INSERT INTO public.product_metrics (
        pid, last_calculated, sku, title, brand, vendor, image_url, is_visible, is_replenishable,
        current_price, current_regular_price, current_cost_price, current_landing_cost_price,
        current_stock, current_stock_cost, current_stock_retail, current_stock_gross,
        on_order_qty, on_order_cost, on_order_retail, earliest_expected_date,
        date_created, date_first_received, date_last_received, date_first_sold, date_last_sold, age_days,
        sales_7d, revenue_7d, sales_14d, revenue_14d, sales_30d, revenue_30d, cogs_30d, profit_30d,
        returns_units_30d, returns_revenue_30d, discounts_30d, gross_revenue_30d, gross_regular_revenue_30d,
        stockout_days_30d, sales_365d, revenue_365d,
        avg_stock_units_30d, avg_stock_cost_30d, avg_stock_retail_30d, avg_stock_gross_30d,
        received_qty_30d, received_cost_30d,
        lifetime_sales, lifetime_revenue,
        first_7_days_sales, first_7_days_revenue, first_30_days_sales, first_30_days_revenue,
        first_60_days_sales, first_60_days_revenue, first_90_days_sales, first_90_days_revenue,
        asp_30d, acp_30d, avg_ros_30d, avg_sales_per_day_30d, avg_sales_per_month_30d,
        margin_30d, markup_30d, gmroi_30d, stockturn_30d, return_rate_30d, discount_rate_30d,
        stockout_rate_30d, markdown_30d, markdown_rate_30d, sell_through_30d,
        -- avg_lead_time_days, -- Calculated periodically
        -- abc_class, -- Calculated periodically
        sales_velocity_daily, config_lead_time, config_days_of_stock, config_safety_stock,
        planning_period_days, lead_time_forecast_units, days_of_stock_forecast_units,
        planning_period_forecast_units, lead_time_closing_stock, days_of_stock_closing_stock,
        replenishment_needed_raw, replenishment_units, replenishment_cost, replenishment_retail, replenishment_profit,
        to_order_units, forecast_lost_sales_units, forecast_lost_revenue,
        stock_cover_in_days, po_cover_in_days, sells_out_in_days, replenish_date,
        overstocked_units, overstocked_cost, overstocked_retail, is_old_stock,
        yesterday_sales
    )
    SELECT
        ci.pid, _start_time, ci.sku, ci.title, ci.brand, ci.vendor, ci.image_url, ci.is_visible, ci.is_replenishable,
        ci.current_price, ci.current_regular_price, ci.current_cost_price, ci.current_effective_cost,
        ci.current_stock, ci.current_stock * ci.current_effective_cost, ci.current_stock * ci.current_price, ci.current_stock * ci.current_regular_price,
        COALESCE(ooi.on_order_qty, 0), COALESCE(ooi.on_order_cost, 0.00), COALESCE(ooi.on_order_qty, 0) * ci.current_price, ooi.earliest_expected_date,
        ci.created_at::date, COALESCE(ci.first_received::date, hd.date_first_received_calc), hd.date_last_received_calc, hd.date_first_sold, COALESCE(ci.date_last_sold, hd.max_order_date),
        CASE
            WHEN ci.created_at IS NULL AND hd.date_first_sold IS NULL THEN 0
            WHEN ci.created_at IS NULL THEN (_current_date - hd.date_first_sold)::integer
            WHEN hd.date_first_sold IS NULL THEN (_current_date - ci.created_at::date)::integer
            ELSE (_current_date - LEAST(ci.created_at::date, hd.date_first_sold))::integer
        END AS age_days,
        sa.sales_7d, sa.revenue_7d, sa.sales_14d, sa.revenue_14d, sa.sales_30d, sa.revenue_30d, sa.cogs_30d, sa.profit_30d,
        sa.returns_units_30d, sa.returns_revenue_30d, sa.discounts_30d, sa.gross_revenue_30d, sa.gross_regular_revenue_30d,
        sa.stockout_days_30d, sa.sales_365d, sa.revenue_365d,
        sa.avg_stock_units_30d, sa.avg_stock_cost_30d, sa.avg_stock_retail_30d, sa.avg_stock_gross_30d,
        sa.received_qty_30d, sa.received_cost_30d,
        sa.lifetime_sales, sa.lifetime_revenue,
        fpm.first_7_days_sales, fpm.first_7_days_revenue, fpm.first_30_days_sales, fpm.first_30_days_revenue,
        fpm.first_60_days_sales, fpm.first_60_days_revenue, fpm.first_90_days_sales, fpm.first_90_days_revenue,

        -- Calculated KPIs
        sa.revenue_30d / NULLIF(sa.sales_30d, 0) AS asp_30d,
        sa.cogs_30d / NULLIF(sa.sales_30d, 0) AS acp_30d,
        sa.profit_30d / NULLIF(sa.sales_30d, 0) AS avg_ros_30d,
        sa.sales_30d / 30.0 AS avg_sales_per_day_30d,
        sa.sales_30d AS avg_sales_per_month_30d, -- Using 30d sales as proxy for month
        (sa.profit_30d / NULLIF(sa.revenue_30d, 0)) * 100 AS margin_30d,
        (sa.profit_30d / NULLIF(sa.cogs_30d, 0)) * 100 AS markup_30d,
        sa.profit_30d / NULLIF(sa.avg_stock_cost_30d, 0) AS gmroi_30d,
        sa.sales_30d / NULLIF(sa.avg_stock_units_30d, 0) AS stockturn_30d,
        (sa.returns_units_30d / NULLIF(sa.sales_30d + sa.returns_units_30d, 0)) * 100 AS return_rate_30d,
        (sa.discounts_30d / NULLIF(sa.gross_revenue_30d, 0)) * 100 AS discount_rate_30d,
        (sa.stockout_days_30d / 30.0) * 100 AS stockout_rate_30d,
        sa.gross_regular_revenue_30d - sa.gross_revenue_30d AS markdown_30d,
        ((sa.gross_regular_revenue_30d - sa.gross_revenue_30d) / NULLIF(sa.gross_regular_revenue_30d, 0)) * 100 AS markdown_rate_30d,
        (sa.sales_30d / NULLIF(ci.current_stock + sa.sales_30d, 0)) * 100 AS sell_through_30d,
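        -- Illustrative note (not part of the original script): with sales_30d = 60,
        -- revenue_30d = 1200, cogs_30d = 720 and profit_30d = 480, the KPIs above
        -- come out as asp_30d = 20, acp_30d = 12, margin_30d = 40 and
        -- markup_30d ~= 66.7; every divisor is wrapped in NULLIF so a product
        -- with no sales yields NULL instead of a division-by-zero error.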
        -- Forecasting intermediate values
        (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) AS sales_velocity_daily,
        s.effective_lead_time AS config_lead_time,
        s.effective_days_of_stock AS config_days_of_stock,
        s.effective_safety_stock AS config_safety_stock,
        (s.effective_lead_time + s.effective_days_of_stock) AS planning_period_days,
        (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time AS lead_time_forecast_units,
        (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock AS days_of_stock_forecast_units,
        ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS planning_period_forecast_units,
        (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time)) AS lead_time_closing_stock,
        ((ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock) AS days_of_stock_closing_stock,
        (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0) AS replenishment_needed_raw,
-- Final Forecasting / Replenishment Metrics (apply CEILING/GREATEST/etc.)
|
||||
-- Note: These calculations are nested for clarity, can be simplified in prod
|
||||
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS replenishment_units,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_effective_cost AS replenishment_cost,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * ci.current_price AS replenishment_retail,
|
||||
(CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int) * (ci.current_price - ci.current_effective_cost) AS replenishment_profit,
|
||||
|
||||
-- Placeholder for To Order (Apply MOQ/UOM logic here if needed, otherwise equals replenishment)
|
||||
CEILING(GREATEST(0, ((((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)) + s.effective_safety_stock - ci.current_stock - COALESCE(ooi.on_order_qty, 0))))::int AS to_order_units,
|
||||
|
||||
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) AS forecast_lost_sales_units,
|
||||
GREATEST(0, - (ci.current_stock + COALESCE(ooi.on_order_qty, 0) - ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time))) * ci.current_price AS forecast_lost_revenue,
|
||||
|
||||
ci.current_stock / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS stock_cover_in_days,
|
||||
COALESCE(ooi.on_order_qty, 0) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS po_cover_in_days,
|
||||
(ci.current_stock + COALESCE(ooi.on_order_qty, 0)) / NULLIF((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)), 0) AS sells_out_in_days,
|
||||
|
||||
-- Replenish Date: Date when stock is projected to hit safety stock, minus lead time
|
||||
CASE
|
||||
WHEN (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) > 0
|
||||
THEN _current_date + FLOOR(GREATEST(0, ci.current_stock - s.effective_safety_stock) / (sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)))::int - s.effective_lead_time
|
||||
ELSE NULL
|
||||
END AS replenish_date,
|
||||
|
||||
GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))::int AS overstocked_units,
|
||||
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_effective_cost AS overstocked_cost,
|
||||
(GREATEST(0, ci.current_stock - s.effective_safety_stock - (((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_lead_time) + ((sa.sales_30d / NULLIF(30.0 - sa.stockout_days_30d, 0)) * s.effective_days_of_stock)))) * ci.current_price AS overstocked_retail,
|
||||
|
||||
-- Old Stock Flag
|
||||
(ci.created_at::date < _current_date - INTERVAL '60 day') AND
|
||||
(COALESCE(ci.date_last_sold, hd.max_order_date) IS NULL OR COALESCE(ci.date_last_sold, hd.max_order_date) < _current_date - INTERVAL '60 day') AND
|
||||
(hd.date_last_received_calc IS NULL OR hd.date_last_received_calc < _current_date - INTERVAL '60 day') AND
|
||||
COALESCE(ooi.on_order_qty, 0) = 0
|
||||
AS is_old_stock,
|
||||
|
||||
sa.yesterday_sales
|
||||
|
||||
FROM CurrentInfo ci
|
||||
LEFT JOIN OnOrderInfo ooi ON ci.pid = ooi.pid
|
||||
LEFT JOIN HistoricalDates hd ON ci.pid = hd.pid
|
||||
LEFT JOIN SnapshotAggregates sa ON ci.pid = sa.pid
|
||||
LEFT JOIN FirstPeriodMetrics fpm ON ci.pid = fpm.pid
|
||||
LEFT JOIN Settings s ON ci.pid = s.pid
|
||||
WHERE s.exclude_forecast IS FALSE OR s.exclude_forecast IS NULL -- Exclude products explicitly marked
|
||||
|
||||
ON CONFLICT (pid) DO UPDATE SET
|
||||
last_calculated = EXCLUDED.last_calculated,
|
||||
sku = EXCLUDED.sku, title = EXCLUDED.title, brand = EXCLUDED.brand, vendor = EXCLUDED.vendor, image_url = EXCLUDED.image_url, is_visible = EXCLUDED.is_visible, is_replenishable = EXCLUDED.is_replenishable,
|
||||
current_price = EXCLUDED.current_price, current_regular_price = EXCLUDED.current_regular_price, current_cost_price = EXCLUDED.current_cost_price, current_landing_cost_price = EXCLUDED.current_landing_cost_price,
|
||||
current_stock = EXCLUDED.current_stock, current_stock_cost = EXCLUDED.current_stock_cost, current_stock_retail = EXCLUDED.current_stock_retail, current_stock_gross = EXCLUDED.current_stock_gross,
|
||||
on_order_qty = EXCLUDED.on_order_qty, on_order_cost = EXCLUDED.on_order_cost, on_order_retail = EXCLUDED.on_order_retail, earliest_expected_date = EXCLUDED.earliest_expected_date,
|
||||
date_created = EXCLUDED.date_created, date_first_received = EXCLUDED.date_first_received, date_last_received = EXCLUDED.date_last_received, date_first_sold = EXCLUDED.date_first_sold, date_last_sold = EXCLUDED.date_last_sold, age_days = EXCLUDED.age_days,
|
||||
sales_7d = EXCLUDED.sales_7d, revenue_7d = EXCLUDED.revenue_7d, sales_14d = EXCLUDED.sales_14d, revenue_14d = EXCLUDED.revenue_14d, sales_30d = EXCLUDED.sales_30d, revenue_30d = EXCLUDED.revenue_30d, cogs_30d = EXCLUDED.cogs_30d, profit_30d = EXCLUDED.profit_30d,
|
||||
returns_units_30d = EXCLUDED.returns_units_30d, returns_revenue_30d = EXCLUDED.returns_revenue_30d, discounts_30d = EXCLUDED.discounts_30d, gross_revenue_30d = EXCLUDED.gross_revenue_30d, gross_regular_revenue_30d = EXCLUDED.gross_regular_revenue_30d,
|
||||
stockout_days_30d = EXCLUDED.stockout_days_30d, sales_365d = EXCLUDED.sales_365d, revenue_365d = EXCLUDED.revenue_365d,
|
||||
avg_stock_units_30d = EXCLUDED.avg_stock_units_30d, avg_stock_cost_30d = EXCLUDED.avg_stock_cost_30d, avg_stock_retail_30d = EXCLUDED.avg_stock_retail_30d, avg_stock_gross_30d = EXCLUDED.avg_stock_gross_30d,
|
||||
received_qty_30d = EXCLUDED.received_qty_30d, received_cost_30d = EXCLUDED.received_cost_30d,
|
||||
lifetime_sales = EXCLUDED.lifetime_sales, lifetime_revenue = EXCLUDED.lifetime_revenue,
|
||||
first_7_days_sales = EXCLUDED.first_7_days_sales, first_7_days_revenue = EXCLUDED.first_7_days_revenue, first_30_days_sales = EXCLUDED.first_30_days_sales, first_30_days_revenue = EXCLUDED.first_30_days_revenue,
|
||||
first_60_days_sales = EXCLUDED.first_60_days_sales, first_60_days_revenue = EXCLUDED.first_60_days_revenue, first_90_days_sales = EXCLUDED.first_90_days_sales, first_90_days_revenue = EXCLUDED.first_90_days_revenue,
|
||||
asp_30d = EXCLUDED.asp_30d, acp_30d = EXCLUDED.acp_30d, avg_ros_30d = EXCLUDED.avg_ros_30d, avg_sales_per_day_30d = EXCLUDED.avg_sales_per_day_30d, avg_sales_per_month_30d = EXCLUDED.avg_sales_per_month_30d,
|
||||
margin_30d = EXCLUDED.margin_30d, markup_30d = EXCLUDED.markup_30d, gmroi_30d = EXCLUDED.gmroi_30d, stockturn_30d = EXCLUDED.stockturn_30d, return_rate_30d = EXCLUDED.return_rate_30d, discount_rate_30d = EXCLUDED.discount_rate_30d,
|
||||
stockout_rate_30d = EXCLUDED.stockout_rate_30d, markdown_30d = EXCLUDED.markdown_30d, markdown_rate_30d = EXCLUDED.markdown_rate_30d, sell_through_30d = EXCLUDED.sell_through_30d,
|
||||
-- avg_lead_time_days = EXCLUDED.avg_lead_time_days, -- Updated Periodically
|
||||
-- abc_class = EXCLUDED.abc_class, -- Updated Periodically
|
||||
sales_velocity_daily = EXCLUDED.sales_velocity_daily, config_lead_time = EXCLUDED.config_lead_time, config_days_of_stock = EXCLUDED.config_days_of_stock, config_safety_stock = EXCLUDED.config_safety_stock,
|
||||
planning_period_days = EXCLUDED.planning_period_days, lead_time_forecast_units = EXCLUDED.lead_time_forecast_units, days_of_stock_forecast_units = EXCLUDED.days_of_stock_forecast_units,
|
||||
planning_period_forecast_units = EXCLUDED.planning_period_forecast_units, lead_time_closing_stock = EXCLUDED.lead_time_closing_stock, days_of_stock_closing_stock = EXCLUDED.days_of_stock_closing_stock,
|
||||
replenishment_needed_raw = EXCLUDED.replenishment_needed_raw, replenishment_units = EXCLUDED.replenishment_units, replenishment_cost = EXCLUDED.replenishment_cost, replenishment_retail = EXCLUDED.replenishment_retail, replenishment_profit = EXCLUDED.replenishment_profit,
|
||||
to_order_units = EXCLUDED.to_order_units, forecast_lost_sales_units = EXCLUDED.forecast_lost_sales_units, forecast_lost_revenue = EXCLUDED.forecast_lost_revenue,
|
||||
stock_cover_in_days = EXCLUDED.stock_cover_in_days, po_cover_in_days = EXCLUDED.po_cover_in_days, sells_out_in_days = EXCLUDED.sells_out_in_days, replenish_date = EXCLUDED.replenish_date,
|
||||
overstocked_units = EXCLUDED.overstocked_units, overstocked_cost = EXCLUDED.overstocked_cost, overstocked_retail = EXCLUDED.overstocked_retail, is_old_stock = EXCLUDED.is_old_stock,
|
||||
yesterday_sales = EXCLUDED.yesterday_sales
|
||||
;
|
||||
|
||||
-- Update the status table with the timestamp from the START of this run
|
||||
UPDATE public.calculate_status
|
||||
SET last_calculation_timestamp = _start_time
|
||||
WHERE module_name = _module_name;
|
||||
|
||||
RAISE NOTICE 'Finished % module. Duration: %', _module_name, clock_timestamp() - _start_time;
|
||||
|
||||
END $$;
|
||||
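The replenishment tail above chains one velocity formula through several aliases, which is easier to sanity-check outside SQL. A minimal JavaScript sketch of the same arithmetic follows; all inputs are invented for illustration and none of these names come from the codebase:

// Sketch: the replenishment math from the query above, with made-up inputs.
// Velocity = units sold over the days the product was actually in stock.
const sales30d = 60, stockoutDays30d = 10;           // sold 60 units, out of stock 10 of 30 days
const velocity = sales30d / (30 - stockoutDays30d);  // 3 units/day
const leadTime = 14, daysOfStock = 30, safetyStock = 5;
const currentStock = 40, onOrderQty = 20;

const planningForecast = velocity * (leadTime + daysOfStock);                // 132 units
const rawNeed = planningForecast + safetyStock - currentStock - onOrderQty;  // 77
const replenishmentUnits = Math.ceil(Math.max(0, rawNeed));                  // order 77 units

Multiplying replenishmentUnits by cost, price, or (price - cost) then yields the cost/retail/profit variants computed in the query.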
39
inventory-server/scripts/metrics-new/utils/db.js
Normal file
@@ -0,0 +1,39 @@
const { Pool } = require('pg');
const path = require('path');
require('dotenv').config({ path: path.resolve(__dirname, '../../..', '.env') });

// Database configuration
const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: process.env.DB_PORT || 5432,
  ssl: process.env.DB_SSL === 'true',
  // Add performance optimizations
  max: 10, // connection pool max size
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 60000
};

// Create a single pool instance to be reused
const pool = new Pool(dbConfig);

// Add event handlers for pool
pool.on('error', (err, client) => {
  console.error('Unexpected error on idle client', err);
});

async function getConnection() {
  return await pool.connect();
}

async function closePool() {
  await pool.end();
}

module.exports = {
  dbConfig,
  getConnection,
  closePool
};
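A typical consumer of this module checks a client out of the pool, queries, and releases it in a finally block. A minimal sketch (the query and table name here are placeholders, not part of this commit):

// Sketch: how a metrics script would consume db.js.
const { getConnection, closePool } = require('./db');

async function main() {
  const connection = await getConnection();
  try {
    const { rows } = await connection.query('SELECT COUNT(*) AS n FROM products');
    console.log(`products: ${rows[0].n}`);
  } finally {
    connection.release(); // always return the client to the pool
  }
  await closePool(); // only at process shutdown
}

main().catch(err => { console.error(err); process.exit(1); });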
158
inventory-server/scripts/metrics-new/utils/progress.js
Normal file
@@ -0,0 +1,158 @@
const fs = require('fs');
const path = require('path');

// Helper function to format elapsed time
function formatElapsedTime(elapsed) {
  // If elapsed is a timestamp, convert to elapsed milliseconds
  if (elapsed instanceof Date || elapsed > 1000000000000) {
    elapsed = Date.now() - elapsed;
  } else {
    // If elapsed is in seconds, convert to milliseconds
    elapsed = elapsed * 1000;
  }

  const seconds = Math.floor(elapsed / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);

  if (hours > 0) {
    return `${hours}h ${minutes % 60}m`;
  } else if (minutes > 0) {
    return `${minutes}m ${seconds % 60}s`;
  } else {
    return `${seconds}s`;
  }
}

// Helper function to estimate remaining time
function estimateRemaining(startTime, current, total) {
  if (current === 0) return null;
  const elapsed = Date.now() - startTime;
  const rate = current / elapsed;
  const remaining = (total - current) / rate;

  const minutes = Math.floor(remaining / 60000);
  const seconds = Math.floor((remaining % 60000) / 1000);

  if (minutes > 0) {
    return `${minutes}m ${seconds}s`;
  } else {
    return `${seconds}s`;
  }
}

// Helper function to calculate rate
function calculateRate(startTime, current) {
  const elapsed = (Date.now() - startTime) / 1000; // Convert to seconds
  return elapsed > 0 ? Math.round(current / elapsed) : 0;
}

// Set up logging
const LOG_DIR = path.join(__dirname, '../../../logs');
const ERROR_LOG = path.join(LOG_DIR, 'import-errors.log');
const IMPORT_LOG = path.join(LOG_DIR, 'import.log');
const STATUS_FILE = path.join(LOG_DIR, 'metrics-status.json');

// Ensure log directory exists
if (!fs.existsSync(LOG_DIR)) {
  fs.mkdirSync(LOG_DIR, { recursive: true });
}

// Helper function to log errors
function logError(error, context = '') {
  const timestamp = new Date().toISOString();
  const errorMessage = `[${timestamp}] ${context}\nError: ${error.message}\nStack: ${error.stack}\n\n`;

  // Log to error file
  fs.appendFileSync(ERROR_LOG, errorMessage);

  // Also log to console
  console.error(`\n${context}\nError: ${error.message}`);
}

// Helper function to log import progress
function logImport(message) {
  const timestamp = new Date().toISOString();
  const logMessage = `[${timestamp}] ${message}\n`;
  fs.appendFileSync(IMPORT_LOG, logMessage);
}

// Helper function to output progress
function outputProgress(data) {
  // Save progress to file for resumption
  saveProgress(data);
  // Format as SSE event
  const event = {
    progress: data
  };
  // Always send to stdout for frontend
  process.stdout.write(JSON.stringify(event) + '\n');

  // Log significant events to disk
  const isSignificant =
    // Operation starts
    (data.operation && !data.current) ||
    // Operation completions and errors
    data.status === 'complete' ||
    data.status === 'error' ||
    // Major phase changes
    data.operation?.includes('Starting ABC classification') ||
    data.operation?.includes('Starting time-based aggregates') ||
    data.operation?.includes('Starting vendor metrics');

  if (isSignificant) {
    logImport(`${data.operation || 'Operation'}${data.message ? ': ' + data.message : ''}${data.error ? ' Error: ' + data.error : ''}${data.status ? ' Status: ' + data.status : ''}`);
  }
}

function saveProgress(progress) {
  try {
    fs.writeFileSync(STATUS_FILE, JSON.stringify({
      ...progress,
      timestamp: Date.now()
    }));
  } catch (err) {
    console.error('Failed to save progress:', err);
  }
}

function clearProgress() {
  try {
    if (fs.existsSync(STATUS_FILE)) {
      fs.unlinkSync(STATUS_FILE);
    }
  } catch (err) {
    console.error('Failed to clear progress:', err);
  }
}

function getProgress() {
  try {
    if (fs.existsSync(STATUS_FILE)) {
      const progress = JSON.parse(fs.readFileSync(STATUS_FILE, 'utf8'));
      // Check if the progress is still valid (less than 1 hour old)
      if (progress.timestamp && Date.now() - progress.timestamp < 3600000) {
        return progress;
      } else {
        // Clear old progress
        clearProgress();
      }
    }
  } catch (err) {
    console.error('Failed to read progress:', err);
    clearProgress();
  }
  return null;
}

module.exports = {
  formatElapsedTime,
  estimateRemaining,
  calculateRate,
  logError,
  logImport,
  outputProgress,
  saveProgress,
  clearProgress,
  getProgress
};
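A sketch of how a calculation module would drive these helpers. The loop and counts are invented, but the payload shape mirrors the outputProgress calls used throughout these scripts:

// Sketch: emitting progress from a batch loop (values are illustrative).
const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./progress');

const startTime = Date.now();
const total = 5000;
for (let current = 500; current <= total; current += 500) {
  outputProgress({
    status: current < total ? 'running' : 'complete',
    operation: 'Example batch',
    current,
    total,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, current, total),
    rate: calculateRate(startTime, current),
    percentage: ((current / total) * 100).toFixed(1)
  });
}

Each call both streams an SSE-style JSON line to stdout for the frontend and snapshots the payload to metrics-status.json, which getProgress() can read back for resumption within an hour.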
@@ -56,36 +56,94 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCount
    }
  });

  // Calculate financial metrics with optimized query
  // First, calculate beginning inventory values (12 months ago)
  await connection.query(`
    CREATE TEMPORARY TABLE IF NOT EXISTS temp_beginning_inventory AS
    WITH beginning_inventory_calc AS (
      SELECT
        p.pid,
        p.stock_quantity as current_quantity,
        COALESCE(SUM(o.quantity), 0) as sold_quantity,
        COALESCE(SUM(po.received), 0) as received_quantity,
        GREATEST(0, (p.stock_quantity + COALESCE(SUM(o.quantity), 0) - COALESCE(SUM(po.received), 0))) as beginning_quantity,
        p.cost_price
      FROM
        products p
      LEFT JOIN
        orders o ON p.pid = o.pid
        AND o.canceled = false
        AND o.date >= CURRENT_DATE - INTERVAL '12 months'::interval
      LEFT JOIN
        purchase_orders po ON p.pid = po.pid
        AND po.received_date IS NOT NULL
        AND po.received_date >= CURRENT_DATE - INTERVAL '12 months'::interval
      GROUP BY
        p.pid, p.stock_quantity, p.cost_price
    )
    SELECT
      pid,
      beginning_quantity,
      beginning_quantity * cost_price as beginning_value,
      current_quantity * cost_price as current_value,
      ((beginning_quantity * cost_price) + (current_quantity * cost_price)) / 2 as average_inventory_value
    FROM
      beginning_inventory_calc
  `);

  processedCount = Math.floor(totalProducts * 0.60);
  outputProgress({
    status: 'running',
    operation: 'Beginning inventory values calculated, computing financial metrics',
    current: processedCount,
    total: totalProducts,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, processedCount, totalProducts),
    rate: calculateRate(startTime, processedCount),
    percentage: ((processedCount / totalProducts) * 100).toFixed(1),
    timing: {
      start_time: new Date(startTime).toISOString(),
      end_time: new Date().toISOString(),
      elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
    }
  });

  // Calculate financial metrics with optimized query and standard formulas
  await connection.query(`
    WITH product_financials AS (
      SELECT
        p.pid,
        p.cost_price * p.stock_quantity as inventory_value,
        SUM(o.quantity * o.price) as total_revenue,
        SUM(o.quantity * p.cost_price) as cost_of_goods_sold,
        SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
        COALESCE(bi.average_inventory_value, p.cost_price * p.stock_quantity) as avg_inventory_value,
        p.cost_price * p.stock_quantity as current_inventory_value,
        SUM(o.quantity * (o.price - COALESCE(o.discount, 0))) as total_revenue,
        SUM(o.quantity * COALESCE(o.costeach, 0)) as cost_of_goods_sold,
        SUM(o.quantity * (o.price - COALESCE(o.discount, 0) - COALESCE(o.costeach, 0))) as gross_profit,
        MIN(o.date) as first_sale_date,
        MAX(o.date) as last_sale_date,
        EXTRACT(DAY FROM (MAX(o.date)::timestamp with time zone - MIN(o.date)::timestamp with time zone)) + 1 as calculation_period_days,
        COUNT(DISTINCT DATE(o.date)) as active_days
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
      LEFT JOIN temp_beginning_inventory bi ON p.pid = bi.pid
      WHERE o.canceled = false
        AND DATE(o.date) >= CURRENT_DATE - INTERVAL '12 months'
      GROUP BY p.pid, p.cost_price, p.stock_quantity
        AND DATE(o.date) >= CURRENT_DATE - INTERVAL '12 months'::interval
      GROUP BY p.pid, p.cost_price, p.stock_quantity, bi.average_inventory_value
    )
    UPDATE product_metrics pm
    SET
      inventory_value = COALESCE(pf.inventory_value, 0),
      total_revenue = COALESCE(pf.total_revenue, 0),
      cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0),
      gross_profit = COALESCE(pf.gross_profit, 0),
      gmroi = CASE
        WHEN COALESCE(pf.inventory_value, 0) > 0 AND pf.active_days > 0 THEN
          (COALESCE(pf.gross_profit, 0) * (365.0 / pf.active_days)) / COALESCE(pf.inventory_value, 0)
      inventory_value = COALESCE(pf.current_inventory_value, 0)::decimal(10,3),
      total_revenue = COALESCE(pf.total_revenue, 0)::decimal(10,3),
      cost_of_goods_sold = COALESCE(pf.cost_of_goods_sold, 0)::decimal(10,3),
      gross_profit = COALESCE(pf.gross_profit, 0)::decimal(10,3),
      turnover_rate = CASE
        WHEN COALESCE(pf.avg_inventory_value, 0) > 0 THEN
          COALESCE(pf.cost_of_goods_sold, 0) / NULLIF(pf.avg_inventory_value, 0)
        ELSE 0
      END,
      END::decimal(12,3),
      gmroi = CASE
        WHEN COALESCE(pf.avg_inventory_value, 0) > 0 THEN
          COALESCE(pf.gross_profit, 0) / NULLIF(pf.avg_inventory_value, 0)
        ELSE 0
      END::decimal(10,3),
      last_calculated_at = CURRENT_TIMESTAMP
    FROM product_financials pf
    WHERE pm.pid = pf.pid
@@ -115,53 +173,8 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCount
      success
    };

    // Update time-based aggregates with optimized query
    await connection.query(`
      WITH monthly_financials AS (
        SELECT
          p.pid,
          EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
          EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
          p.cost_price * p.stock_quantity as inventory_value,
          SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
          COUNT(DISTINCT DATE(o.date)) as active_days,
          MIN(o.date) as period_start,
          MAX(o.date) as period_end
        FROM products p
        LEFT JOIN orders o ON p.pid = o.pid
        WHERE o.canceled = false
        GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
      )
      UPDATE product_time_aggregates pta
      SET
        inventory_value = COALESCE(mf.inventory_value, 0),
        gmroi = CASE
          WHEN COALESCE(mf.inventory_value, 0) > 0 AND mf.active_days > 0 THEN
            (COALESCE(mf.gross_profit, 0) * (365.0 / mf.active_days)) / COALESCE(mf.inventory_value, 0)
          ELSE 0
        END
      FROM monthly_financials mf
      WHERE pta.pid = mf.pid
        AND pta.year = mf.year
        AND pta.month = mf.month
    `);

    processedCount = Math.floor(totalProducts * 0.70);
    outputProgress({
      status: 'running',
      operation: 'Time-based aggregates updated',
      current: processedCount,
      total: totalProducts,
      elapsed: formatElapsedTime(startTime),
      remaining: estimateRemaining(startTime, processedCount, totalProducts),
      rate: calculateRate(startTime, processedCount),
      percentage: ((processedCount / totalProducts) * 100).toFixed(1),
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });
    // Clean up temporary tables
    await connection.query('DROP TABLE IF EXISTS temp_beginning_inventory');

    // If we get here, everything completed successfully
    success = true;
@@ -187,6 +200,12 @@ async function calculateFinancialMetrics(startTime, totalProducts, processedCount
    throw error;
  } finally {
    if (connection) {
      try {
        // Make sure temporary tables are always cleaned up
        await connection.query('DROP TABLE IF EXISTS temp_beginning_inventory');
      } catch (err) {
        console.error('Error cleaning up temp tables:', err);
      }
      connection.release();
    }
  }

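The beginning-inventory roll-back and the two ratios derived from it are easiest to see with toy numbers. A hedged JavaScript sketch of the same arithmetic (all inputs invented for illustration):

// Sketch: the inventory roll-back and the two ratios used above.
const currentQty = 100, soldLast12m = 400, receivedLast12m = 350, costPrice = 10;

// Work backwards: what was on hand 12 months ago?
const beginningQty = Math.max(0, currentQty + soldLast12m - receivedLast12m); // 150
const avgInventoryValue = ((beginningQty + currentQty) / 2) * costPrice;      // 1250

const cogs = 4000, grossProfit = 1800;
const turnoverRate = cogs / avgInventoryValue;   // 3.2 turns over the period
const gmroi = grossProfit / avgInventoryValue;   // 1.44 gross profit per dollar of inventory

This is why the change swaps the old annualized-by-active-days GMROI for a plain profit-over-average-inventory ratio: both ratios now share the same denominator.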
@@ -66,8 +66,36 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
      WHERE category_id IS NULL AND vendor IS NULL
      LIMIT 1
    `);

    // Check if threshold data was returned
    if (!thresholds.rows || thresholds.rows.length === 0) {
      console.warn('No default thresholds found in the database. Using explicit type casting in the query.');
    }

    const defaultThresholds = thresholds.rows[0];

    // Get financial calculation configuration parameters
    const financialConfig = await connection.query(`
      SELECT
        order_cost,
        holding_rate,
        service_level_z_score,
        min_reorder_qty,
        default_reorder_qty,
        default_safety_stock
      FROM financial_calc_config
      WHERE id = 1
      LIMIT 1
    `);
    const finConfig = financialConfig.rows[0] || {
      order_cost: 25.00,
      holding_rate: 0.25,
      service_level_z_score: 1.96,
      min_reorder_qty: 1,
      default_reorder_qty: 5,
      default_safety_stock: 5
    };

    // Calculate base product metrics
    if (!SKIP_PRODUCT_BASE_METRICS) {
      outputProgress({
@@ -109,6 +137,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
        avg_margin_percent DECIMAL(10,3),
        first_sale_date DATE,
        last_sale_date DATE,
        stddev_daily_sales DECIMAL(10,3),
        PRIMARY KEY (pid)
      )
    `);
@@ -117,10 +146,11 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
    await connection.query(`
      CREATE TEMPORARY TABLE temp_purchase_metrics (
        pid BIGINT NOT NULL,
        avg_lead_time_days DOUBLE PRECISION,
        avg_lead_time_days DECIMAL(10,2),
        last_purchase_date DATE,
        first_received_date DATE,
        last_received_date DATE,
        stddev_lead_time_days DECIMAL(10,2),
        PRIMARY KEY (pid)
      )
    `);
@@ -140,11 +170,22 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
          ELSE 0
        END as avg_margin_percent,
        MIN(o.date) as first_sale_date,
        MAX(o.date) as last_sale_date
        MAX(o.date) as last_sale_date,
        COALESCE(STDDEV_SAMP(daily_qty.quantity), 0) as stddev_daily_sales
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid
        AND o.canceled = false
        AND o.date >= CURRENT_DATE - INTERVAL '90 days'
      LEFT JOIN (
        SELECT
          pid,
          DATE(date) as sale_date,
          SUM(quantity) as quantity
        FROM orders
        WHERE canceled = false
          AND date >= CURRENT_DATE - INTERVAL '90 days'
        GROUP BY pid, DATE(date)
      ) daily_qty ON p.pid = daily_qty.pid
      GROUP BY p.pid
    `);

@@ -163,7 +204,14 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
        ) as avg_lead_time_days,
        MAX(po.date) as last_purchase_date,
        MIN(po.received_date) as first_received_date,
        MAX(po.received_date) as last_received_date
        MAX(po.received_date) as last_received_date,
        STDDEV_SAMP(
          CASE
            WHEN po.received_date IS NOT NULL AND po.date IS NOT NULL
            THEN EXTRACT(EPOCH FROM (po.received_date::timestamp with time zone - po.date::timestamp with time zone)) / 86400.0
            ELSE NULL
          END
        ) as stddev_lead_time_days
      FROM products p
      LEFT JOIN purchase_orders po ON p.pid = po.pid
        AND po.received_date IS NOT NULL
@@ -184,7 +232,8 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
        30.0 as avg_lead_time_days,
        NULL as last_purchase_date,
        NULL as first_received_date,
        NULL as last_received_date
        NULL as last_received_date,
        0.0 as stddev_lead_time_days
      FROM products p
      LEFT JOIN temp_purchase_metrics tpm ON p.pid = tpm.pid
      WHERE tpm.pid IS NULL
@@ -208,6 +257,17 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
      if (batch.rows.length === 0) break;

      // Process the entire batch in a single efficient query
      const lowStockThreshold = parseInt(defaultThresholds?.low_stock_threshold) || 5;
      const criticalDays = parseInt(defaultThresholds?.critical_days) || 7;
      const reorderDays = parseInt(defaultThresholds?.reorder_days) || 14;
      const overstockDays = parseInt(defaultThresholds?.overstock_days) || 90;
      const serviceLevel = parseFloat(finConfig?.service_level_z_score) || 1.96;
      const defaultSafetyStock = parseInt(finConfig?.default_safety_stock) || 5;
      const defaultReorderQty = parseInt(finConfig?.default_reorder_qty) || 5;
      const orderCost = parseFloat(finConfig?.order_cost) || 25.00;
      const holdingRate = parseFloat(finConfig?.holding_rate) || 0.25;
      const minReorderQty = parseInt(finConfig?.min_reorder_qty) || 1;

      await connection.query(`
        UPDATE product_metrics pm
        SET
@@ -219,7 +279,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
          avg_margin_percent = COALESCE(sm.avg_margin_percent, 0),
          first_sale_date = sm.first_sale_date,
          last_sale_date = sm.last_sale_date,
          avg_lead_time_days = COALESCE(lm.avg_lead_time_days, 30),
          avg_lead_time_days = COALESCE(lm.avg_lead_time_days, 30.0),
          days_of_inventory = CASE
            WHEN COALESCE(sm.daily_sales_avg, 0) > 0
            THEN FLOOR(p.stock_quantity / NULLIF(sm.daily_sales_avg, 0))
@@ -232,57 +292,61 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
          END,
          stock_status = CASE
            WHEN p.stock_quantity <= 0 THEN 'Out of Stock'
            WHEN COALESCE(sm.daily_sales_avg, 0) = 0 AND p.stock_quantity <= $1 THEN 'Low Stock'
            WHEN COALESCE(sm.daily_sales_avg, 0) = 0 AND p.stock_quantity <= ${lowStockThreshold} THEN 'Low Stock'
            WHEN COALESCE(sm.daily_sales_avg, 0) = 0 THEN 'In Stock'
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= $2 THEN 'Critical'
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= $3 THEN 'Reorder'
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > $4 THEN 'Overstocked'
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= ${criticalDays} THEN 'Critical'
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) <= ${reorderDays} THEN 'Reorder'
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > ${overstockDays} THEN 'Overstocked'
            ELSE 'Healthy'
          END,
          safety_stock = CASE
            WHEN COALESCE(sm.daily_sales_avg, 0) > 0 THEN
              CEIL(sm.daily_sales_avg * SQRT(ABS(COALESCE(lm.avg_lead_time_days, 30))) * 1.96)
            ELSE $5
            WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND COALESCE(lm.avg_lead_time_days, 0) > 0 THEN
              CEIL(
                ${serviceLevel} * SQRT(
                  GREATEST(0, COALESCE(lm.avg_lead_time_days, 0)) * POWER(COALESCE(sm.stddev_daily_sales, 0), 2) +
                  POWER(COALESCE(sm.daily_sales_avg, 0), 2) * POWER(COALESCE(lm.stddev_lead_time_days, 0), 2)
                )
              )
            ELSE ${defaultSafetyStock}
          END,
          reorder_point = CASE
            WHEN COALESCE(sm.daily_sales_avg, 0) > 0 THEN
              CEIL(sm.daily_sales_avg * COALESCE(lm.avg_lead_time_days, 30)) +
              CEIL(sm.daily_sales_avg * SQRT(ABS(COALESCE(lm.avg_lead_time_days, 30))) * 1.96)
            ELSE $6
              CEIL(sm.daily_sales_avg * GREATEST(0, COALESCE(lm.avg_lead_time_days, 30.0))) +
              (CASE
                WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND COALESCE(lm.avg_lead_time_days, 0) > 0 THEN
                  CEIL(
                    ${serviceLevel} * SQRT(
                      GREATEST(0, COALESCE(lm.avg_lead_time_days, 0)) * POWER(COALESCE(sm.stddev_daily_sales, 0), 2) +
                      POWER(COALESCE(sm.daily_sales_avg, 0), 2) * POWER(COALESCE(lm.stddev_lead_time_days, 0), 2)
                    )
                  )
                ELSE ${defaultSafetyStock}
              END)
            ELSE ${lowStockThreshold}
          END,
          reorder_qty = CASE
            WHEN COALESCE(sm.daily_sales_avg, 0) > 0 AND NULLIF(p.cost_price, 0) IS NOT NULL AND NULLIF(p.cost_price, 0) > 0 THEN
              GREATEST(
                CEIL(SQRT(ABS((2 * (sm.daily_sales_avg * 365) * 25) / (NULLIF(p.cost_price, 0) * 0.25)))),
                $7
                CEIL(SQRT(
                  (2 * (sm.daily_sales_avg * 365) * ${orderCost}) /
                  NULLIF(p.cost_price * ${holdingRate}, 0)
                )),
                ${minReorderQty}
              )
            ELSE $8
            ELSE ${defaultReorderQty}
          END,
          overstocked_amt = CASE
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > $9
            THEN GREATEST(0, p.stock_quantity - CEIL(sm.daily_sales_avg * $10))
            WHEN p.stock_quantity / NULLIF(sm.daily_sales_avg, 0) > ${overstockDays}
            THEN GREATEST(0, p.stock_quantity - CEIL(sm.daily_sales_avg * ${overstockDays}))
            ELSE 0
          END,
          last_calculated_at = NOW()
        FROM products p
        LEFT JOIN temp_sales_metrics sm ON p.pid = sm.pid
        LEFT JOIN temp_purchase_metrics lm ON p.pid = lm.pid
        WHERE p.pid = ANY($11::bigint[])
        WHERE p.pid = ANY($1::BIGINT[])
          AND pm.pid = p.pid
      `,
      [
        defaultThresholds.low_stock_threshold,
        defaultThresholds.critical_days,
        defaultThresholds.reorder_days,
        defaultThresholds.overstock_days,
        defaultThresholds.low_stock_threshold,
        defaultThresholds.low_stock_threshold,
        defaultThresholds.low_stock_threshold,
        defaultThresholds.low_stock_threshold,
        defaultThresholds.overstock_days,
        defaultThresholds.overstock_days,
        batch.rows.map(row => row.pid)
      ]);
      `, [batch.rows.map(row => row.pid)]);

      lastPid = batch.rows[batch.rows.length - 1].pid;
      processedCount += batch.rows.length;
@@ -311,25 +375,22 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
    }

    // Calculate forecast accuracy and bias in batches
    lastPid = 0;
    let forecastPid = 0;
    while (true) {
      if (isCancelled) break;

      const batch = await connection.query(
      const forecastBatch = await connection.query(
        'SELECT pid FROM products WHERE pid > $1 ORDER BY pid LIMIT $2',
        [lastPid, BATCH_SIZE]
        [forecastPid, BATCH_SIZE]
      );

      if (batch.rows.length === 0) break;
      if (forecastBatch.rows.length === 0) break;

      const forecastPidArray = forecastBatch.rows.map(row => row.pid);

      // Use array_to_string to convert the array to a string of comma-separated values
      await connection.query(`
        UPDATE product_metrics pm
        SET
          forecast_accuracy = GREATEST(0, 100 - LEAST(fa.avg_forecast_error, 100)),
          forecast_bias = GREATEST(-100, LEAST(fa.avg_forecast_bias, 100)),
          last_forecast_date = fa.last_forecast_date,
          last_calculated_at = NOW()
        FROM (
        WITH forecast_metrics AS (
          SELECT
            sf.pid,
            AVG(CASE
@@ -348,13 +409,20 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
            AND DATE(o.date) = sf.forecast_date
          WHERE o.canceled = false
            AND sf.forecast_date >= CURRENT_DATE - INTERVAL '90 days'
            AND sf.pid = ANY($1::bigint[])
            AND sf.pid = ANY('{${forecastPidArray.join(',')}}'::BIGINT[])
          GROUP BY sf.pid
        ) fa
        WHERE pm.pid = fa.pid
      `, [batch.rows.map(row => row.pid)]);
        )
        UPDATE product_metrics pm
        SET
          forecast_accuracy = GREATEST(0, 100 - LEAST(fm.avg_forecast_error, 100)),
          forecast_bias = GREATEST(-100, LEAST(fm.avg_forecast_bias, 100)),
          last_forecast_date = fm.last_forecast_date,
          last_calculated_at = NOW()
        FROM forecast_metrics fm
        WHERE pm.pid = fm.pid
      `);

      lastPid = batch.rows[batch.rows.length - 1].pid;
      forecastPid = forecastBatch.rows[forecastBatch.rows.length - 1].pid;
    }

    // Calculate product time aggregates
@@ -375,61 +443,12 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
      }
    });

    // Calculate time-based aggregates
    await connection.query(`
      INSERT INTO product_time_aggregates (
        pid,
        year,
        month,
        total_quantity_sold,
        total_revenue,
        total_cost,
        order_count,
        avg_price,
        profit_margin,
        inventory_value,
        gmroi
      )
      SELECT
        p.pid,
        EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
        EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
        SUM(o.quantity) as total_quantity_sold,
        SUM(o.price * o.quantity) as total_revenue,
        SUM(p.cost_price * o.quantity) as total_cost,
        COUNT(DISTINCT o.order_number) as order_count,
        AVG(o.price) as avg_price,
        CASE
          WHEN SUM(o.quantity * o.price) > 0
          THEN ((SUM(o.quantity * o.price) - SUM(o.quantity * p.cost_price)) / SUM(o.quantity * o.price)) * 100
          ELSE 0
        END as profit_margin,
        p.cost_price * p.stock_quantity as inventory_value,
        CASE
          WHEN p.cost_price * p.stock_quantity > 0
          THEN (SUM(o.quantity * (o.price - p.cost_price))) / (p.cost_price * p.stock_quantity)
          ELSE 0
        END as gmroi
      FROM products p
      LEFT JOIN orders o ON p.pid = o.pid AND o.canceled = false
      WHERE o.date >= CURRENT_DATE - INTERVAL '12 months'
      GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone)
      ON CONFLICT (pid, year, month) DO UPDATE
      SET
        total_quantity_sold = EXCLUDED.total_quantity_sold,
        total_revenue = EXCLUDED.total_revenue,
        total_cost = EXCLUDED.total_cost,
        order_count = EXCLUDED.order_count,
        avg_price = EXCLUDED.avg_price,
        profit_margin = EXCLUDED.profit_margin,
        inventory_value = EXCLUDED.inventory_value,
        gmroi = EXCLUDED.gmroi
    `);

    // Note: The time-aggregates calculation has been moved to time-aggregates.js
    // This module will not duplicate that functionality
    processedCount = Math.floor(totalProducts * 0.6);
    outputProgress({
      status: 'running',
      operation: 'Product time aggregates calculated',
      operation: 'Product time aggregates calculation delegated to time-aggregates module',
      current: processedCount || 0,
      total: totalProducts || 0,
      elapsed: formatElapsedTime(startTime),
@@ -488,6 +507,10 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
    const abcConfig = await connection.query('SELECT a_threshold, b_threshold FROM abc_classification_config WHERE id = 1');
    const abcThresholds = abcConfig.rows[0] || { a_threshold: 20, b_threshold: 50 };

    // Extract values and ensure they are valid numbers
    const aThreshold = parseFloat(abcThresholds.a_threshold) || 20;
    const bThreshold = parseFloat(abcThresholds.b_threshold) || 50;

    // First, create and populate the rankings table with an index
    await connection.query('DROP TABLE IF EXISTS temp_revenue_ranks');
    await connection.query(`
@@ -557,13 +580,13 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
        OR pm.abc_class !=
          CASE
            WHEN tr.pid IS NULL THEN 'C'
            WHEN tr.percentile <= $2 THEN 'A'
            WHEN tr.percentile <= $3 THEN 'B'
            WHEN tr.percentile <= ${aThreshold} THEN 'A'
            WHEN tr.percentile <= ${bThreshold} THEN 'B'
            ELSE 'C'
          END)
        ORDER BY pm.pid
        LIMIT $4
      `, [abcProcessedCount, abcThresholds.a_threshold, abcThresholds.b_threshold, batchSize]);
        LIMIT $2
      `, [abcProcessedCount, batchSize]);

      if (pids.rows.length === 0) break;

@@ -574,15 +597,15 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
        SET abc_class =
          CASE
            WHEN tr.pid IS NULL THEN 'C'
            WHEN tr.percentile <= $1 THEN 'A'
            WHEN tr.percentile <= $2 THEN 'B'
            WHEN tr.percentile <= ${aThreshold} THEN 'A'
            WHEN tr.percentile <= ${bThreshold} THEN 'B'
            ELSE 'C'
          END,
          last_calculated_at = NOW()
        FROM (SELECT pid, percentile FROM temp_revenue_ranks) tr
        WHERE pm.pid = tr.pid AND pm.pid = ANY($3::bigint[])
          OR (pm.pid = ANY($3::bigint[]) AND tr.pid IS NULL)
      `, [abcThresholds.a_threshold, abcThresholds.b_threshold, pidValues]);
        WHERE pm.pid = tr.pid AND pm.pid = ANY($1::BIGINT[])
          OR (pm.pid = ANY($1::BIGINT[]) AND tr.pid IS NULL)
      `, [pidValues]);

      // Now update turnover rate with proper handling of zero inventory periods
      await connection.query(`
@@ -610,7 +633,7 @@ async function calculateProductMetrics(startTime, totalProducts, processedCount
          JOIN products p ON o.pid = p.pid
          WHERE o.canceled = false
            AND o.date >= CURRENT_DATE - INTERVAL '90 days'
            AND o.pid = ANY($1::bigint[])
            AND o.pid = ANY($1::BIGINT[])
          GROUP BY o.pid
        ) sales
        WHERE pm.pid = sales.pid
@@ -707,40 +730,7 @@ function calculateStockStatus(stock, config, daily_sales_avg, weekly_sales_avg,
  return 'Healthy';
}

function calculateReorderQuantities(stock, stock_status, daily_sales_avg, avg_lead_time, config) {
  // Calculate safety stock based on service level and lead time
  const z_score = 1.96; // 95% service level
  const lead_time = avg_lead_time || config.target_days;
  const safety_stock = Math.ceil(daily_sales_avg * Math.sqrt(lead_time) * z_score);

  // Calculate reorder point
  const lead_time_demand = daily_sales_avg * lead_time;
  const reorder_point = Math.ceil(lead_time_demand + safety_stock);

  // Calculate reorder quantity using EOQ formula if we have the necessary data
  let reorder_qty = 0;
  if (daily_sales_avg > 0) {
    const annual_demand = daily_sales_avg * 365;
    const order_cost = 25; // Fixed cost per order
    const holding_cost = config.cost_price * 0.25; // 25% of unit cost as annual holding cost

    reorder_qty = Math.ceil(Math.sqrt((2 * annual_demand * order_cost) / holding_cost));
  } else {
    // If no sales data, use a basic calculation
    reorder_qty = Math.max(safety_stock, config.low_stock_threshold);
  }

  // Calculate overstocked amount
  const overstocked_amt = stock_status === 'Overstocked' ?
    stock - Math.ceil(daily_sales_avg * config.overstock_days) :
    0;

  return {
    safety_stock,
    reorder_point,
    reorder_qty,
    overstocked_amt
  };
}
// Note: calculateReorderQuantities function has been removed as its logic has been incorporated
// in the main SQL query with configurable parameters

module.exports = calculateProductMetrics;
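The safety-stock branch in the UPDATE above is the standard service-level formula z * sqrt(LT * sigma_d^2 + d^2 * sigma_LT^2), and reorder_qty is the classic EOQ. A toy JavaScript rendering of the same three numbers, with all inputs invented for illustration:

// Sketch: safety stock, reorder point and EOQ as computed in the UPDATE above.
const dailyAvg = 4, stddevDaily = 2;      // demand: mean and std dev per day
const leadTime = 9, stddevLeadTime = 3;   // lead time: mean and std dev in days
const z = 1.96;                           // 95% service level

const safetyStock = Math.ceil(
  z * Math.sqrt(leadTime * stddevDaily ** 2 + dailyAvg ** 2 * stddevLeadTime ** 2)
); // ceil(1.96 * sqrt(36 + 144)) = 27

const reorderPoint = Math.ceil(dailyAvg * leadTime) + safetyStock; // 36 + 27 = 63

const annualDemand = dailyAvg * 365, orderCost = 25, costPrice = 10, holdingRate = 0.25;
const eoq = Math.ceil(
  Math.sqrt((2 * annualDemand * orderCost) / (costPrice * holdingRate))
); // ceil(sqrt(73000 / 2.5)) = 171

Unlike the old inline version, the new SQL pulls z, order cost, and holding rate from financial_calc_config, so these constants are tunable rather than hard-coded.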
@@ -216,13 +216,7 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
            GREATEST(0,
              ROUND(
                ds.avg_daily_qty *
                (1 + COALESCE(sf.seasonality_factor, 0)) *
                CASE
                  WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.5 THEN 0.85
                  WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 1.0 THEN 0.9
                  WHEN ds.std_daily_qty / NULLIF(ds.avg_daily_qty, 0) > 0.5 THEN 0.95
                  ELSE 1.0
                END
                (1 + COALESCE(sf.seasonality_factor, 0))
              )
            ) as forecast_quantity,
            CASE
@@ -336,8 +330,8 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
            cs.cat_id::bigint as category_id,
            fd.forecast_date,
            GREATEST(0,
              AVG(cs.daily_quantity) *
              (1 + COALESCE(sf.seasonality_factor, 0))
              ROUND(AVG(cs.daily_quantity) *
              (1 + COALESCE(sf.seasonality_factor, 0)))
            ) as forecast_units,
            GREATEST(0,
              COALESCE(
@@ -345,8 +339,7 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
                  WHEN SUM(cs.day_count) >= 4 THEN AVG(cs.daily_revenue)
                  ELSE ct.overall_avg_revenue
                END *
                (1 + COALESCE(sf.seasonality_factor, 0)) *
                (0.95 + (random() * 0.1)),
                (1 + COALESCE(sf.seasonality_factor, 0)),
                0
              )
            ) as forecast_revenue,
@@ -427,6 +420,18 @@ async function calculateSalesForecasts(startTime, totalProducts, processedCount
    throw error;
  } finally {
    if (connection) {
      try {
        // Ensure temporary tables are cleaned up
        await connection.query(`
          DROP TABLE IF EXISTS temp_forecast_dates;
          DROP TABLE IF EXISTS temp_daily_sales;
          DROP TABLE IF EXISTS temp_product_stats;
          DROP TABLE IF EXISTS temp_category_sales;
          DROP TABLE IF EXISTS temp_category_stats;
        `);
      } catch (err) {
        console.error('Error cleaning up temporary tables:', err);
      }
      connection.release();
    }
  }

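After this change the per-day product forecast is simply the average daily quantity scaled by the month's seasonality factor, rounded and clamped at zero; the volatility damping multiplier and the random revenue jitter were removed. A one-line sketch with invented inputs:

// Sketch: the per-day forecast as now computed (damping and jitter removed).
const avgDailyQty = 3.4;
const seasonalityFactor = 0.25; // +25% for this calendar month

const forecastQuantity = Math.max(0, Math.round(avgDailyQty * (1 + seasonalityFactor))); // 4

Dropping the random() term also makes the forecast deterministic, so repeated runs over the same data now produce identical rows.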
@@ -55,6 +55,93 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
    }
  });

  // Create a temporary table for end-of-month inventory values
  await connection.query(`
    CREATE TEMPORARY TABLE IF NOT EXISTS temp_monthly_inventory AS
    WITH months AS (
      -- Generate all year/month combinations for the last 12 months
      SELECT
        EXTRACT(YEAR FROM month_date)::INTEGER as year,
        EXTRACT(MONTH FROM month_date)::INTEGER as month,
        month_date as start_date,
        (month_date + INTERVAL '1 month'::interval - INTERVAL '1 day'::interval)::DATE as end_date
      FROM (
        SELECT generate_series(
          DATE_TRUNC('month', CURRENT_DATE - INTERVAL '12 months'::interval)::DATE,
          DATE_TRUNC('month', CURRENT_DATE)::DATE,
          INTERVAL '1 month'::interval
        ) as month_date
      ) dates
    ),
    monthly_inventory_calc AS (
      SELECT
        p.pid,
        m.year,
        m.month,
        m.end_date,
        p.stock_quantity as current_quantity,
        -- Calculate sold during period (before end_date)
        COALESCE(SUM(
          CASE
            WHEN o.date <= m.end_date THEN o.quantity
            ELSE 0
          END
        ), 0) as sold_after_end_date,
        -- Calculate received during period (before end_date)
        COALESCE(SUM(
          CASE
            WHEN po.received_date <= m.end_date THEN po.received
            ELSE 0
          END
        ), 0) as received_after_end_date,
        p.cost_price
      FROM
        products p
      CROSS JOIN
        months m
      LEFT JOIN
        orders o ON p.pid = o.pid
        AND o.canceled = false
        AND o.date > m.end_date
        AND o.date <= CURRENT_DATE
      LEFT JOIN
        purchase_orders po ON p.pid = po.pid
        AND po.received_date IS NOT NULL
        AND po.received_date > m.end_date
        AND po.received_date <= CURRENT_DATE
      GROUP BY
        p.pid, m.year, m.month, m.end_date, p.stock_quantity, p.cost_price
    )
    SELECT
      pid,
      year,
      month,
      -- End of month quantity = current quantity - sold after + received after
      GREATEST(0, current_quantity - sold_after_end_date + received_after_end_date) as end_of_month_quantity,
      -- End of month inventory value
      GREATEST(0, current_quantity - sold_after_end_date + received_after_end_date) * cost_price as end_of_month_value,
      cost_price
    FROM
      monthly_inventory_calc
  `);

  processedCount = Math.floor(totalProducts * 0.40);
  outputProgress({
    status: 'running',
    operation: 'Monthly inventory values calculated, processing time aggregates',
    current: processedCount,
    total: totalProducts,
    elapsed: formatElapsedTime(startTime),
    remaining: estimateRemaining(startTime, processedCount, totalProducts),
    rate: calculateRate(startTime, processedCount),
    percentage: ((processedCount / totalProducts) * 100).toFixed(1),
    timing: {
      start_time: new Date(startTime).toISOString(),
      end_time: new Date().toISOString(),
      elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
    }
  });

  // Initial insert of time-based aggregates
  await connection.query(`
    INSERT INTO product_time_aggregates (
@@ -75,76 +162,67 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
    WITH monthly_sales AS (
      SELECT
        o.pid,
        EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
        EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
        EXTRACT(YEAR FROM o.date::timestamp with time zone)::INTEGER as year,
        EXTRACT(MONTH FROM o.date::timestamp with time zone)::INTEGER as month,
        SUM(o.quantity) as total_quantity_sold,
        SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) as total_revenue,
        SUM(COALESCE(p.cost_price, 0) * o.quantity) as total_cost,
        SUM(COALESCE(o.costeach, 0) * o.quantity) as total_cost,
        COUNT(DISTINCT o.order_number) as order_count,
        AVG(o.price - COALESCE(o.discount, 0)) as avg_price,
        CASE
          WHEN SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) > 0
          THEN ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) - SUM(COALESCE(p.cost_price, 0) * o.quantity))
          THEN ((SUM((o.price - COALESCE(o.discount, 0)) * o.quantity) - SUM(COALESCE(o.costeach, 0) * o.quantity))
            / SUM((o.price - COALESCE(o.discount, 0)) * o.quantity)) * 100
          ELSE 0
        END as profit_margin,
        p.cost_price * p.stock_quantity as inventory_value,
        COUNT(DISTINCT DATE(o.date)) as active_days
      FROM orders o
      JOIN products p ON o.pid = p.pid
      WHERE o.canceled = false
      GROUP BY o.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
      GROUP BY o.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone)
    ),
    monthly_stock AS (
      SELECT
        pid,
        EXTRACT(YEAR FROM date::timestamp with time zone) as year,
        EXTRACT(MONTH FROM date::timestamp with time zone) as month,
        EXTRACT(YEAR FROM date::timestamp with time zone)::INTEGER as year,
        EXTRACT(MONTH FROM date::timestamp with time zone)::INTEGER as month,
        SUM(received) as stock_received,
        SUM(ordered) as stock_ordered
      FROM purchase_orders
      GROUP BY pid, EXTRACT(YEAR FROM date::timestamp with time zone), EXTRACT(MONTH FROM date::timestamp with time zone)
    ),
    base_products AS (
      SELECT
        p.pid,
        p.cost_price * p.stock_quantity as inventory_value
      FROM products p
    )
    SELECT
      COALESCE(s.pid, ms.pid) as pid,
      COALESCE(s.year, ms.year) as year,
      COALESCE(s.month, ms.month) as month,
      COALESCE(s.total_quantity_sold, 0) as total_quantity_sold,
      COALESCE(s.total_revenue, 0) as total_revenue,
      COALESCE(s.total_cost, 0) as total_cost,
      COALESCE(s.order_count, 0) as order_count,
      COALESCE(ms.stock_received, 0) as stock_received,
      COALESCE(ms.stock_ordered, 0) as stock_ordered,
      COALESCE(s.avg_price, 0) as avg_price,
      COALESCE(s.profit_margin, 0) as profit_margin,
      COALESCE(s.inventory_value, bp.inventory_value, 0) as inventory_value,
      COALESCE(s.pid, ms.pid, mi.pid) as pid,
      COALESCE(s.year, ms.year, mi.year) as year,
      COALESCE(s.month, ms.month, mi.month) as month,
      COALESCE(s.total_quantity_sold, 0)::INTEGER as total_quantity_sold,
      COALESCE(s.total_revenue, 0)::DECIMAL(10,3) as total_revenue,
      COALESCE(s.total_cost, 0)::DECIMAL(10,3) as total_cost,
      COALESCE(s.order_count, 0)::INTEGER as order_count,
      COALESCE(ms.stock_received, 0)::INTEGER as stock_received,
      COALESCE(ms.stock_ordered, 0)::INTEGER as stock_ordered,
      COALESCE(s.avg_price, 0)::DECIMAL(10,3) as avg_price,
      COALESCE(s.profit_margin, 0)::DECIMAL(10,3) as profit_margin,
      COALESCE(mi.end_of_month_value, 0)::DECIMAL(10,3) as inventory_value,
      CASE
        WHEN COALESCE(s.inventory_value, bp.inventory_value, 0) > 0
          AND COALESCE(s.active_days, 0) > 0
        THEN (COALESCE(s.total_revenue - s.total_cost, 0) * (365.0 / s.active_days))
          / COALESCE(s.inventory_value, bp.inventory_value)
        WHEN COALESCE(mi.end_of_month_value, 0) > 0
        THEN (COALESCE(s.total_revenue, 0) - COALESCE(s.total_cost, 0))
          / NULLIF(COALESCE(mi.end_of_month_value, 0), 0)
        ELSE 0
      END as gmroi
      END::DECIMAL(10,3) as gmroi
    FROM (
      SELECT * FROM monthly_sales s
      UNION ALL
      SELECT
        ms.pid,
        ms.year,
        ms.month,
        pid,
        year,
        month,
        0 as total_quantity_sold,
        0 as total_revenue,
        0 as total_cost,
        0 as order_count,
        NULL as avg_price,
        0 as profit_margin,
        NULL as inventory_value,
        0 as active_days
      FROM monthly_stock ms
      WHERE NOT EXISTS (
@@ -153,50 +231,40 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
          AND s2.year = ms.year
          AND s2.month = ms.month
      )
      UNION ALL
      SELECT
        pid,
        year,
        month,
        0 as total_quantity_sold,
        0 as total_revenue,
        0 as total_cost,
        0 as order_count,
        NULL as avg_price,
        0 as profit_margin,
        0 as active_days
      FROM temp_monthly_inventory mi
      WHERE NOT EXISTS (
        SELECT 1 FROM monthly_sales s3
        WHERE s3.pid = mi.pid
          AND s3.year = mi.year
          AND s3.month = mi.month
      )
      AND NOT EXISTS (
        SELECT 1 FROM monthly_stock ms3
        WHERE ms3.pid = mi.pid
          AND ms3.year = mi.year
          AND ms3.month = mi.month
      )
    ) s
    LEFT JOIN monthly_stock ms
      ON s.pid = ms.pid
      AND s.year = ms.year
      AND s.month = ms.month
    JOIN base_products bp ON COALESCE(s.pid, ms.pid) = bp.pid
    UNION
    SELECT
      ms.pid,
      ms.year,
      ms.month,
      0 as total_quantity_sold,
      0 as total_revenue,
      0 as total_cost,
      0 as order_count,
      ms.stock_received,
      ms.stock_ordered,
      0 as avg_price,
      0 as profit_margin,
      bp.inventory_value,
      0 as gmroi
    FROM monthly_stock ms
    JOIN base_products bp ON ms.pid = bp.pid
    WHERE NOT EXISTS (
      SELECT 1 FROM (
        SELECT * FROM monthly_sales
        UNION ALL
        SELECT
          ms2.pid,
          ms2.year,
          ms2.month,
          0, 0, 0, 0, NULL, 0, NULL, 0
        FROM monthly_stock ms2
        WHERE NOT EXISTS (
          SELECT 1 FROM monthly_sales s2
          WHERE s2.pid = ms2.pid
            AND s2.year = ms2.year
            AND s2.month = ms2.month
        )
      ) s
      WHERE s.pid = ms.pid
        AND s.year = ms.year
        AND s.month = ms.month
    )
    LEFT JOIN temp_monthly_inventory mi
      ON s.pid = mi.pid
      AND s.year = mi.year
      AND s.month = mi.month
    ON CONFLICT (pid, year, month) DO UPDATE
    SET
      total_quantity_sold = EXCLUDED.total_quantity_sold,
@@ -214,7 +282,7 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
    processedCount = Math.floor(totalProducts * 0.60);
    outputProgress({
      status: 'running',
      operation: 'Base time aggregates calculated, updating financial metrics',
      operation: 'Base time aggregates calculated',
      current: processedCount,
      total: totalProducts,
      elapsed: formatElapsedTime(startTime),
@@ -235,44 +303,8 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
      success
    };

    // Update with financial metrics
    await connection.query(`
      UPDATE product_time_aggregates pta
      SET inventory_value = COALESCE(fin.inventory_value, 0)
      FROM (
        SELECT
          p.pid,
          EXTRACT(YEAR FROM o.date::timestamp with time zone) as year,
          EXTRACT(MONTH FROM o.date::timestamp with time zone) as month,
          p.cost_price * p.stock_quantity as inventory_value,
          SUM(o.quantity * (o.price - p.cost_price)) as gross_profit,
          COUNT(DISTINCT DATE(o.date)) as active_days
        FROM products p
        LEFT JOIN orders o ON p.pid = o.pid
        WHERE o.canceled = false
        GROUP BY p.pid, EXTRACT(YEAR FROM o.date::timestamp with time zone), EXTRACT(MONTH FROM o.date::timestamp with time zone), p.cost_price, p.stock_quantity
      ) fin
      WHERE pta.pid = fin.pid
        AND pta.year = fin.year
        AND pta.month = fin.month
    `);

    processedCount = Math.floor(totalProducts * 0.65);
    outputProgress({
      status: 'running',
      operation: 'Financial metrics updated',
      current: processedCount,
      total: totalProducts,
      elapsed: formatElapsedTime(startTime),
      remaining: estimateRemaining(startTime, processedCount, totalProducts),
      rate: calculateRate(startTime, processedCount),
      percentage: ((processedCount / totalProducts) * 100).toFixed(1),
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date().toISOString(),
        elapsed_seconds: Math.round((Date.now() - startTime) / 1000)
      }
    });
    // Clean up temporary tables
    await connection.query('DROP TABLE IF EXISTS temp_monthly_inventory');

    // If we get here, everything completed successfully
    success = true;
@@ -298,6 +330,12 @@ async function calculateTimeAggregates(startTime, totalProducts, processedCount
    throw error;
  } finally {
    if (connection) {
      try {
        // Ensure temporary tables are cleaned up
        await connection.query('DROP TABLE IF EXISTS temp_monthly_inventory');
      } catch (err) {
        console.error('Error cleaning up temporary tables:', err);
      }
      connection.release();
    }
  }

@@ -184,7 +184,7 @@ async function resetDatabase() {
|
||||
SELECT string_agg(tablename, ', ') as tables
|
||||
FROM pg_tables
|
||||
WHERE schemaname = 'public'
|
||||
AND tablename NOT IN ('users', 'permissions', 'user_permissions', 'calculate_history', 'import_history', 'ai_prompts', 'ai_validation_performance', 'templates');
|
||||
AND tablename NOT IN ('users', 'permissions', 'user_permissions', 'calculate_history', 'import_history', 'ai_prompts', 'ai_validation_performance', 'templates', 'reusable_images');
|
||||
`);
|
||||
|
||||
if (!tablesResult.rows[0].tables) {
|
||||
@@ -204,7 +204,7 @@ async function resetDatabase() {
|
||||
// Drop all tables except users
|
||||
const tables = tablesResult.rows[0].tables.split(', ');
|
||||
for (const table of tables) {
|
||||
if (!['users'].includes(table)) {
|
||||
if (!['users', 'reusable_images'].includes(table)) {
|
||||
await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);
|
||||
}
|
||||
}
|
||||
|
||||
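Note on the GMROI branch above: gross profit for the month is scaled to a full year (365.0 / active_days) before being divided by inventory value, so months with few active selling days are not understated. A minimal sketch of the same arithmetic in plain JavaScript, with hypothetical figures (variable names mirror the SQL columns; the numbers are invented):

// Hypothetical product-month, mirroring the first CASE branch of the SQL above.
const totalRevenue = 1200;    // s.total_revenue
const totalCost = 800;        // s.total_cost
const activeDays = 30;        // s.active_days (days with at least one order)
const inventoryValue = 2000;  // s.inventory_value, falling back to bp.inventory_value

// Annualize gross profit, then divide by the capital tied up in stock.
const annualizedGrossProfit = (totalRevenue - totalCost) * (365.0 / activeDays);
const gmroi = inventoryValue > 0 ? annualizedGrossProfit / inventoryValue : 0;

console.log(gmroi.toFixed(3)); // "2.433" — each inventory dollar returns about $2.43 of gross profit per year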
381
inventory-server/scripts/reset-metrics-new.js
Normal file
@@ -0,0 +1,381 @@
const { Client } = require('pg');
const path = require('path');
const fs = require('fs');
require('dotenv').config({ path: path.resolve(__dirname, '../.env') });

const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: process.env.DB_PORT || 5432
};

function outputProgress(data) {
  if (!data.status) {
    data = {
      status: 'running',
      ...data
    };
  }
  console.log(JSON.stringify(data));
}

// Tables to always protect from being dropped
const PROTECTED_TABLES = [
  'users',
  'permissions',
  'user_permissions',
  'calculate_history',
  'import_history',
  'ai_prompts',
  'ai_validation_performance',
  'templates',
  'reusable_images'
];

// Split SQL into individual statements
function splitSQLStatements(sql) {
  sql = sql.replace(/\r\n/g, '\n');
  let statements = [];
  let currentStatement = '';
  let inString = false;
  let stringChar = '';

  for (let i = 0; i < sql.length; i++) {
    const char = sql[i];
    const nextChar = sql[i + 1] || '';

    if ((char === "'" || char === '"') && sql[i - 1] !== '\\') {
      if (!inString) {
        inString = true;
        stringChar = char;
      } else if (char === stringChar) {
        inString = false;
      }
    }

    if (!inString && char === '-' && nextChar === '-') {
      while (i < sql.length && sql[i] !== '\n') i++;
      continue;
    }

    if (!inString && char === '/' && nextChar === '*') {
      i += 2;
      while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
      i++;
      continue;
    }

    if (!inString && char === ';') {
      if (currentStatement.trim()) {
        statements.push(currentStatement.trim());
      }
      currentStatement = '';
    } else {
      currentStatement += char;
    }
  }

  if (currentStatement.trim()) {
    statements.push(currentStatement.trim());
  }

  return statements;
}

async function resetMetrics() {
  let client;
  try {
    outputProgress({
      operation: 'Starting metrics reset',
      message: 'Connecting to database...'
    });

    client = new Client(dbConfig);
    await client.connect();

    // Get metrics tables from the schema file by looking for CREATE TABLE statements
    const schemaPath = path.resolve(__dirname, '../db/metrics-schema-new.sql');
    if (!fs.existsSync(schemaPath)) {
      throw new Error(`Schema file not found at: ${schemaPath}`);
    }

    const schemaSQL = fs.readFileSync(schemaPath, 'utf8');
    const createTableRegex = /create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(?:public\.)?(\w+)["]?/gi;
    let metricsTables = [];
    let match;

    while ((match = createTableRegex.exec(schemaSQL)) !== null) {
      if (match[1] && !PROTECTED_TABLES.includes(match[1])) {
        metricsTables.push(match[1]);
      }
    }

    if (metricsTables.length === 0) {
      throw new Error('No tables found in the schema file');
    }

    outputProgress({
      operation: 'Schema analysis',
      message: `Found ${metricsTables.length} metrics tables in schema: ${metricsTables.join(', ')}`
    });

    // Explicitly begin a transaction
    await client.query('BEGIN');

    // First verify current state
    const initialTables = await client.query(`
      SELECT tablename as name
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
      AND tablename NOT IN (SELECT unnest($2::text[]))
    `, [metricsTables, PROTECTED_TABLES]);

    outputProgress({
      operation: 'Initial state',
      message: `Found ${initialTables.rows.length} existing metrics tables: ${initialTables.rows.map(t => t.name).join(', ')}`
    });

    // Disable foreign key checks at the start
    await client.query('SET session_replication_role = \'replica\'');

    // Drop all metrics tables in reverse order to handle dependencies
    outputProgress({
      operation: 'Dropping metrics tables',
      message: 'Removing existing metrics tables...'
    });

    // Reverse the array to handle dependencies properly
    for (const table of [...metricsTables].reverse()) {
      // Skip protected tables (redundant check)
      if (PROTECTED_TABLES.includes(table)) {
        outputProgress({
          operation: 'Protected table',
          message: `Skipping protected table: ${table}`
        });
        continue;
      }

      try {
        // Use NOWAIT to avoid hanging if there's a lock
        await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);

        // Verify the table was actually dropped
        const checkDrop = await client.query(`
          SELECT COUNT(*) as count
          FROM pg_tables
          WHERE schemaname = 'public'
          AND tablename = $1
        `, [table]);

        if (parseInt(checkDrop.rows[0].count) > 0) {
          throw new Error(`Failed to drop table ${table} - table still exists`);
        }

        outputProgress({
          operation: 'Table dropped',
          message: `Successfully dropped table: ${table}`
        });

        // Commit after each table drop to ensure locks are released
        await client.query('COMMIT');
        // Start a new transaction for the next table
        await client.query('BEGIN');
        // Re-disable foreign key constraints for the new transaction
        await client.query('SET session_replication_role = \'replica\'');
      } catch (err) {
        outputProgress({
          status: 'error',
          operation: 'Drop table error',
          message: `Error dropping table ${table}: ${err.message}`
        });
        await client.query('ROLLBACK');
        // Re-start transaction for next table
        await client.query('BEGIN');
        await client.query('SET session_replication_role = \'replica\'');
      }
    }

    // Verify all tables were dropped
    const afterDrop = await client.query(`
      SELECT tablename as name
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
    `, [metricsTables]);

    if (afterDrop.rows.length > 0) {
      throw new Error(`Failed to drop all tables. Remaining tables: ${afterDrop.rows.map(t => t.name).join(', ')}`);
    }

    // Make sure we have a fresh transaction here
    await client.query('COMMIT');
    await client.query('BEGIN');
    await client.query('SET session_replication_role = \'replica\'');

    // Read metrics schema
    outputProgress({
      operation: 'Reading schema',
      message: 'Loading metrics schema file...'
    });

    const statements = splitSQLStatements(schemaSQL);

    outputProgress({
      operation: 'Schema loaded',
      message: `Found ${statements.length} SQL statements to execute`
    });

    // Execute schema statements
    for (let i = 0; i < statements.length; i++) {
      const stmt = statements[i];
      try {
        const result = await client.query(stmt);

        // If this is a CREATE TABLE statement, verify the table was created
        if (stmt.trim().toLowerCase().startsWith('create table')) {
          const tableName = stmt.match(/create\s+table\s+(?:if\s+not\s+exists\s+)?["]?(?:public\.)?(\w+)["]?/i)?.[1];
          if (tableName) {
            const checkCreate = await client.query(`
              SELECT tablename as name
              FROM pg_tables
              WHERE schemaname = 'public'
              AND tablename = $1
            `, [tableName]);

            if (checkCreate.rows.length === 0) {
              throw new Error(`Failed to create table ${tableName} - table does not exist after CREATE statement`);
            }

            outputProgress({
              operation: 'Table created',
              message: `Successfully created table: ${tableName}`
            });
          }
        }

        outputProgress({
          operation: 'SQL Progress',
          message: {
            statement: i + 1,
            total: statements.length,
            preview: stmt.substring(0, 100) + (stmt.length > 100 ? '...' : ''),
            rowCount: result.rowCount
          }
        });

        // Commit every 10 statements to avoid long-running transactions
        if (i > 0 && i % 10 === 0) {
          await client.query('COMMIT');
          await client.query('BEGIN');
          await client.query('SET session_replication_role = \'replica\'');
        }
      } catch (sqlError) {
        outputProgress({
          status: 'error',
          operation: 'SQL Error',
          message: {
            error: sqlError.message,
            statement: stmt,
            statementNumber: i + 1
          }
        });
        await client.query('ROLLBACK');
        throw sqlError;
      }
    }

    // Final commit for any pending statements
    await client.query('COMMIT');

    // Start new transaction for final checks
    await client.query('BEGIN');

    // Re-enable foreign key checks after all tables are created
    await client.query('SET session_replication_role = \'origin\'');

    // Verify metrics tables were created
    outputProgress({
      operation: 'Verifying metrics tables',
      message: 'Checking all metrics tables were created...'
    });

    const metricsTablesResult = await client.query(`
      SELECT tablename as name
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
    `, [metricsTables]);

    outputProgress({
      operation: 'Tables found',
      message: `Found ${metricsTablesResult.rows.length} tables: ${metricsTablesResult.rows.map(t => t.name).join(', ')}`
    });

    const existingMetricsTables = metricsTablesResult.rows.map(t => t.name);
    const missingMetricsTables = metricsTables.filter(t => !existingMetricsTables.includes(t));

    if (missingMetricsTables.length > 0) {
      // Do one final check of the actual tables
      const finalCheck = await client.query(`
        SELECT tablename as name
        FROM pg_tables
        WHERE schemaname = 'public'
      `);
      outputProgress({
        operation: 'Final table check',
        message: `All database tables: ${finalCheck.rows.map(t => t.name).join(', ')}`
      });
      await client.query('ROLLBACK');
      throw new Error(`Failed to create metrics tables: ${missingMetricsTables.join(', ')}`);
    }

    // Commit final transaction
    await client.query('COMMIT');

    outputProgress({
      status: 'complete',
      operation: 'Reset complete',
      message: 'All metrics tables have been reset successfully'
    });
  } catch (error) {
    outputProgress({
      status: 'error',
      operation: 'Reset failed',
      message: error.message,
      stack: error.stack
    });

    if (client) {
      try {
        await client.query('ROLLBACK');
      } catch (rollbackError) {
        console.error('Error during rollback:', rollbackError);
      }
      // Make sure to re-enable foreign key checks even if there's an error
      await client.query('SET session_replication_role = \'origin\'').catch(() => {});
    }
    throw error;
  } finally {
    if (client) {
      // One final attempt to ensure foreign key checks are enabled
      await client.query('SET session_replication_role = \'origin\'').catch(() => {});
      await client.end();
    }
  }
}

// Export if required as a module
if (typeof module !== 'undefined' && module.exports) {
  module.exports = resetMetrics;
}

// Run if called from command line
if (require.main === module) {
  resetMetrics().catch(error => {
    console.error('Error:', error);
    process.exit(1);
  });
}
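The statement splitter above walks the SQL character by character, so semicolons inside string literals, -- line comments, and /* */ block comments never terminate a statement. A quick sanity check (hypothetical snippet — the file only exports resetMetrics, so assume splitSQLStatements is exported or copied for the test):

// Hypothetical check, assuming splitSQLStatements is made available for testing.
const sample = `
  -- this comment (and its ; semicolon) is skipped
  INSERT INTO settings_global (setting_key, setting_value)
  VALUES ('greeting', 'hello; world');   /* the quoted ; does not split */
  SELECT 1;
`;
console.log(splitSQLStatements(sample).length); // 2 — the INSERT and the SELECT

Thanks to the require.main guard at the bottom, the script also runs standalone from a shell: node inventory-server/scripts/reset-metrics-new.js (the .env path is resolved relative to the script, not the working directory).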
@@ -39,6 +39,19 @@ const METRICS_TABLES = [
  'vendor_details'
];

// Tables to always protect from being dropped
const PROTECTED_TABLES = [
  'users',
  'permissions',
  'user_permissions',
  'calculate_history',
  'import_history',
  'ai_prompts',
  'ai_validation_performance',
  'templates',
  'reusable_images'
];

// Split SQL into individual statements
function splitSQLStatements(sql) {
  sql = sql.replace(/\r\n/g, '\n');
@@ -109,7 +122,8 @@ async function resetMetrics() {
      FROM pg_tables
      WHERE schemaname = 'public'
      AND tablename = ANY($1)
    `, [METRICS_TABLES]);
      AND tablename NOT IN (SELECT unnest($2::text[]))
    `, [METRICS_TABLES, PROTECTED_TABLES]);

    outputProgress({
      operation: 'Initial state',
@@ -126,6 +140,15 @@ async function resetMetrics() {
    });

    for (const table of [...METRICS_TABLES].reverse()) {
      // Skip protected tables
      if (PROTECTED_TABLES.includes(table)) {
        outputProgress({
          operation: 'Protected table',
          message: `Skipping protected table: ${table}`
        });
        continue;
      }

      try {
        // Use NOWAIT to avoid hanging if there's a lock
        await client.query(`DROP TABLE IF EXISTS "${table}" CASCADE`);

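Both reset scripts lean on the same PostgreSQL trick: session_replication_role = 'replica' suppresses foreign-key triggers so tables can be dropped and recreated without fighting constraint ordering. A plain SET is reverted if its transaction rolls back, which is why the scripts re-issue it after every BEGIN. A minimal sketch of the pattern (assumes an already-connected pg Client, inside an async function; the table name is illustrative):

// Sketch only — not a line from the diff.
await client.query('BEGIN');
await client.query("SET session_replication_role = 'replica'"); // FK triggers off
await client.query('DROP TABLE IF EXISTS "some_metrics_table" CASCADE');
await client.query('COMMIT');                                   // the setting survives a commit
await client.query("SET session_replication_role = 'origin'");  // restore normal enforcement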
@@ -779,10 +779,16 @@ router.get('/history/calculate', async (req, res) => {
        id,
        start_time,
        end_time,
        duration_minutes,
        status,
        error_message,
        modules_processed::integer,
        total_modules::integer
        total_products,
        total_orders,
        total_purchase_orders,
        processed_products,
        processed_orders,
        processed_purchase_orders,
        additional_info
      FROM calculate_history
      ORDER BY start_time DESC
      LIMIT 20
@@ -830,4 +836,58 @@ router.get('/status/tables', async (req, res) => {
  }
});

// GET /status/table-counts - Get record counts for all tables
router.get('/status/table-counts', async (req, res) => {
  try {
    const pool = req.app.locals.pool;
    const tables = [
      // Core tables
      'products', 'categories', 'product_categories', 'orders', 'purchase_orders',
      // Metrics tables
      'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics',
      'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
      'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts',
      // Config tables
      'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
      'abc_classification_config', 'safety_stock_config', 'turnover_config',
      'sales_seasonality', 'financial_calc_config'
    ];

    const counts = await Promise.all(
      tables.map(table =>
        pool.query(`SELECT COUNT(*) as count FROM ${table}`)
          .then(result => ({
            table_name: table,
            count: parseInt(result.rows[0].count)
          }))
          .catch(err => ({
            table_name: table,
            count: null,
            error: err.message
          }))
      )
    );

    // Group tables by type
    const groupedCounts = {
      core: counts.filter(c => ['products', 'categories', 'product_categories', 'orders', 'purchase_orders'].includes(c.table_name)),
      metrics: counts.filter(c => [
        'product_metrics', 'product_time_aggregates', 'vendor_metrics', 'category_metrics',
        'vendor_time_metrics', 'category_time_metrics', 'category_sales_metrics',
        'brand_metrics', 'brand_time_metrics', 'sales_forecasts', 'category_forecasts'
      ].includes(c.table_name)),
      config: counts.filter(c => [
        'stock_thresholds', 'lead_time_thresholds', 'sales_velocity_config',
        'abc_classification_config', 'safety_stock_config', 'turnover_config',
        'sales_seasonality', 'financial_calc_config'
      ].includes(c.table_name))
    };

    res.json(groupedCounts);
  } catch (error) {
    console.error('Error fetching table counts:', error);
    res.status(500).json({ error: error.message });
  }
});

module.exports = router;
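For reference, the grouped payload from /status/table-counts comes back shaped roughly like this (counts invented; a table that cannot be counted reports count: null plus the error message):

{
  "core": [
    { "table_name": "products", "count": 15230 },
    { "table_name": "orders", "count": 482911 }
  ],
  "metrics": [
    { "table_name": "product_metrics", "count": 15230 }
  ],
  "config": [
    { "table_name": "turnover_config", "count": null, "error": "relation \"turnover_config\" does not exist" }
  ]
}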
@@ -102,35 +102,40 @@ router.get('/stock/metrics', async (req, res) => {
// Returns purchase order metrics by vendor
router.get('/purchase/metrics', async (req, res) => {
  try {
    // First check if there are any purchase orders in the database
    const { rows: [poCount] } = await executeQuery(`
      SELECT COUNT(*) as count FROM purchase_orders
    `);

    const { rows: [poMetrics] } = await executeQuery(`
      SELECT
        COALESCE(COUNT(DISTINCT CASE
          WHEN po.receiving_status < $1
          WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
          THEN po.po_id
        END), 0)::integer as active_pos,
        COALESCE(COUNT(DISTINCT CASE
          WHEN po.receiving_status < $1
          WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
          AND po.expected_date < CURRENT_DATE
          THEN po.po_id
        END), 0)::integer as overdue_pos,
        COALESCE(SUM(CASE
          WHEN po.receiving_status < $1
          WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
          THEN po.ordered
          ELSE 0
        END), 0)::integer as total_units,
        ROUND(COALESCE(SUM(CASE
          WHEN po.receiving_status < $1
          WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
          THEN po.ordered * po.cost_price
          ELSE 0
        END), 0)::numeric, 3) as total_cost,
        ROUND(COALESCE(SUM(CASE
          WHEN po.receiving_status < $1
          WHEN po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
          THEN po.ordered * p.price
          ELSE 0
        END), 0)::numeric, 3) as total_retail
      FROM purchase_orders po
      JOIN products p ON po.pid = p.pid
    `, [ReceivingStatus.PartialReceived]);
    `);

    const { rows: vendorOrders } = await executeQuery(`
      SELECT
@@ -141,15 +146,15 @@ router.get('/purchase/metrics', async (req, res) => {
        ROUND(COALESCE(SUM(po.ordered * p.price), 0)::numeric, 3) as retail
      FROM purchase_orders po
      JOIN products p ON po.pid = p.pid
      WHERE po.receiving_status < $1
      WHERE po.receiving_status NOT IN ('partial_received', 'full_received', 'paid')
      GROUP BY po.vendor
      HAVING ROUND(COALESCE(SUM(po.ordered * po.cost_price), 0)::numeric, 3) > 0
      ORDER BY cost DESC
    `, [ReceivingStatus.PartialReceived]);
    `);

    // If no data or missing metrics, provide dummy data
    if (!poMetrics || vendorOrders.length === 0) {
      console.log('No purchase metrics found, returning dummy data');
    // If no purchase orders exist at all in the database, return dummy data
    if (parseInt(poCount.count) === 0) {
      console.log('No purchase orders found in database, returning dummy data');

      return res.json({
        activePurchaseOrders: 12,
@@ -165,6 +170,20 @@ router.get('/purchase/metrics', async (req, res) => {
      });
    }

    // If no active purchase orders match the criteria, return zeros instead of dummy data
    if (vendorOrders.length === 0) {
      console.log('No active purchase orders matching criteria, returning zeros');

      return res.json({
        activePurchaseOrders: parseInt(poMetrics.active_pos) || 0,
        overduePurchaseOrders: parseInt(poMetrics.overdue_pos) || 0,
        onOrderUnits: parseInt(poMetrics.total_units) || 0,
        onOrderCost: parseFloat(poMetrics.total_cost) || 0,
        onOrderRetail: parseFloat(poMetrics.total_retail) || 0,
        vendorOrders: []
      });
    }

    // Format response to match PurchaseMetricsData interface
    const response = {
      activePurchaseOrders: parseInt(poMetrics.active_pos) || 0,
@@ -184,19 +203,15 @@ router.get('/purchase/metrics', async (req, res) => {
    res.json(response);
  } catch (err) {
    console.error('Error fetching purchase metrics:', err);

    // Return dummy data on error
    res.json({
      activePurchaseOrders: 12,
      overduePurchaseOrders: 3,
      onOrderUnits: 1250,
      onOrderCost: 12500,
      onOrderRetail: 25000,
      vendorOrders: [
        { vendor: "Test Vendor 1", orders: 5, units: 500, cost: 5000, retail: 10000 },
        { vendor: "Test Vendor 2", orders: 4, units: 400, cost: 4000, retail: 8000 },
        { vendor: "Test Vendor 3", orders: 3, units: 350, cost: 3500, retail: 7000 }
      ]
    res.status(500).json({
      error: 'Failed to fetch purchase metrics',
      details: err.message,
      activePurchaseOrders: 0,
      overduePurchaseOrders: 0,
      onOrderUnits: 0,
      onOrderCost: 0,
      onOrderRetail: 0,
      vendorOrders: []
    });
  }
});
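The substance of this hunk is the status test: the numeric receiving_status < $1 comparison is replaced by an explicit name blacklist, so a PO counts as active until it is partially received, fully received, or paid. The same rule as a tiny predicate (hypothetical helper, not part of the diff):

// Hypothetical helper expressing the SQL blacklist above.
const SETTLED = ['partial_received', 'full_received', 'paid'];
const isActivePo = po => !SETTLED.includes(po.receiving_status);

console.log(isActivePo({ receiving_status: 'ordered' }));       // true  -> counted as active
console.log(isActivePo({ receiving_status: 'full_received' })); // false -> excluded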
@@ -1018,17 +1033,17 @@ router.get('/vendor/performance', async (req, res) => {
          THEN EXTRACT(EPOCH FROM (po.received_date - po.date))/86400
          ELSE NULL END)::numeric, 2), 0) as avg_lead_time,
        COALESCE(ROUND(SUM(CASE
          WHEN po.status = 'completed' AND po.received_date <= po.expected_date
          WHEN po.status = 'done' AND po.received_date <= po.expected_date
          THEN 1
          ELSE 0
        END)::numeric * 100.0 / NULLIF(COUNT(*)::numeric, 0), 2), 0) as on_time_delivery_rate,
        COALESCE(ROUND(AVG(CASE
          WHEN po.status = 'completed'
          WHEN po.status = 'done'
          THEN po.received::numeric / NULLIF(po.ordered::numeric, 0) * 100
          ELSE NULL
        END)::numeric, 2), 0) as avg_fill_rate,
        COUNT(CASE WHEN po.status = 'open' THEN 1 END)::integer as active_orders,
        COUNT(CASE WHEN po.status = 'open' AND po.expected_date < CURRENT_DATE THEN 1 END)::integer as overdue_orders
        COUNT(CASE WHEN po.status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') THEN 1 END)::integer as active_orders,
        COUNT(CASE WHEN po.status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') AND po.expected_date < CURRENT_DATE THEN 1 END)::integer as overdue_orders
      FROM purchase_orders po
      WHERE po.date >= CURRENT_DATE - INTERVAL '180 days'
      GROUP BY po.vendor
@@ -1165,7 +1180,7 @@ router.get('/key-metrics', async (req, res) => {
      SELECT
        COUNT(DISTINCT po_id) as total_pos,
        SUM(ordered * cost_price) as total_po_value,
        COUNT(CASE WHEN status = 'open' THEN 1 END) as open_pos
        COUNT(CASE WHEN status IN ('created', 'electronically_ready_send', 'ordered', 'preordered', 'electronically_sent', 'receiving_started') THEN 1 END) as open_pos
      FROM purchase_orders
      WHERE order_date >= CURRENT_DATE - INTERVAL '${days} days'
    )

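Same idea in these hunks: the generic 'completed'/'open' labels give way to the real status vocabulary ('done' for finished orders, the six created-through-receiving_started names for open ones). The on-time delivery rate itself is just on-time completed orders over all orders in the 180-day window; a worked example with made-up numbers:

// Hypothetical vendor: 20 POs in the window, 13 arrived by their expected date.
const totalPos = 20;   // COUNT(*) for the vendor
const onTime = 13;     // status = 'done' AND received_date <= expected_date
const rate = totalPos === 0 ? 0 : +(onTime * 100.0 / totalPos).toFixed(2);
console.log(rate);     // 65 — mirrors the SQL's NULLIF-guarded division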
@@ -8,7 +8,9 @@ const fs = require('fs');

// Create uploads directory if it doesn't exist
const uploadsDir = path.join('/var/www/html/inventory/uploads/products');
const reusableUploadsDir = path.join('/var/www/html/inventory/uploads/reusable');
fs.mkdirSync(uploadsDir, { recursive: true });
fs.mkdirSync(reusableUploadsDir, { recursive: true });

// Create a Map to track image upload times and their scheduled deletion
const imageUploadMap = new Map();
@@ -35,6 +37,12 @@ const connectionCache = {

// Function to schedule image deletion after 24 hours
const scheduleImageDeletion = (filename, filePath) => {
  // Only schedule deletion for images in the products folder
  if (!filePath.includes('/uploads/products/')) {
    console.log(`Skipping deletion for non-product image: ${filename}`);
    return;
  }

  // Delete any existing timeout for this file
  if (imageUploadMap.has(filename)) {
    clearTimeout(imageUploadMap.get(filename).timeoutId);
@@ -407,6 +415,14 @@ router.delete('/delete-image', (req, res) => {
      return res.status(404).json({ error: 'File not found' });
    }

    // Only allow deletion of images in the products folder
    if (!filePath.includes('/uploads/products/')) {
      return res.status(403).json({
        error: 'Cannot delete images outside the products folder',
        message: 'This image is in a protected folder and cannot be deleted through this endpoint'
      });
    }

    // Delete the file
    fs.unlinkSync(filePath);

@@ -641,11 +657,19 @@ router.get('/check-file/:filename', (req, res) => {
    return res.status(400).json({ error: 'Invalid filename' });
  }

  const filePath = path.join(uploadsDir, filename);
  // First check in products directory
  let filePath = path.join(uploadsDir, filename);
  let exists = fs.existsSync(filePath);

  // If not found in products, check in reusable directory
  if (!exists) {
    filePath = path.join(reusableUploadsDir, filename);
    exists = fs.existsSync(filePath);
  }

  try {
    // Check if file exists
    if (!fs.existsSync(filePath)) {
    if (!exists) {
      return res.status(404).json({
        error: 'File not found',
        path: filePath,
@@ -685,13 +709,23 @@ router.get('/check-file/:filename', (req, res) => {
// List all files in uploads directory
router.get('/list-uploads', (req, res) => {
  try {
    if (!fs.existsSync(uploadsDir)) {
      return res.status(404).json({ error: 'Uploads directory not found', path: uploadsDir });
    const { directory = 'products' } = req.query;

    // Determine which directory to list
    let targetDir;
    if (directory === 'reusable') {
      targetDir = reusableUploadsDir;
    } else {
      targetDir = uploadsDir; // default to products
    }

    const files = fs.readdirSync(uploadsDir);
    if (!fs.existsSync(targetDir)) {
      return res.status(404).json({ error: 'Uploads directory not found', path: targetDir });
    }

    const files = fs.readdirSync(targetDir);
    const fileDetails = files.map(file => {
      const filePath = path.join(uploadsDir, file);
      const filePath = path.join(targetDir, file);
      try {
        const stats = fs.statSync(filePath);
        return {
@@ -709,12 +743,13 @@ router.get('/list-uploads', (req, res) => {
    });

    return res.json({
      directory: uploadsDir,
      directory: targetDir,
      type: directory,
      count: files.length,
      files: fileDetails
    });
  } catch (error) {
    return res.status(500).json({ error: error.message, path: uploadsDir });
    return res.status(500).json({ error: error.message });
  }
});

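With the directory query parameter, one route now lists either folder; anything other than 'reusable' falls back to products. A hypothetical client call (the mount path of this router is an assumption, not taken from the diff):

// Assumed mount point '/api/images'; adjust to wherever the router is app.use()'d.
const res = await fetch('/api/images/list-uploads?directory=reusable');
const { directory, type, count, files } = await res.json();
console.log(type, count, files.length); // e.g. "reusable" 12 12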
@@ -1,62 +1,345 @@
const express = require('express');
const router = express.Router();
const { Pool } = require('pg'); // Assuming pg driver

// Get key metrics trends (revenue, inventory value, GMROI)
router.get('/trends', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const { rows } = await pool.query(`
      WITH MonthlyMetrics AS (
        SELECT
          make_date(pta.year, pta.month, 1) as date,
          ROUND(COALESCE(SUM(pta.total_revenue), 0)::numeric, 3) as revenue,
          ROUND(COALESCE(SUM(pta.total_cost), 0)::numeric, 3) as cost,
          ROUND(COALESCE(SUM(pm.inventory_value), 0)::numeric, 3) as inventory_value,
          CASE
            WHEN SUM(pm.inventory_value) > 0
            THEN ROUND((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value) * 100)::numeric, 3)
            ELSE 0
          END as gmroi
        FROM product_time_aggregates pta
        JOIN product_metrics pm ON pta.pid = pm.pid
        WHERE (pta.year * 100 + pta.month) >=
          EXTRACT(YEAR FROM CURRENT_DATE - INTERVAL '12 months')::integer * 100 +
          EXTRACT(MONTH FROM CURRENT_DATE - INTERVAL '12 months')::integer
        GROUP BY pta.year, pta.month
        ORDER BY date ASC
      )
      SELECT
        to_char(date, 'Mon YY') as date,
        revenue,
        inventory_value,
        gmroi
      FROM MonthlyMetrics
    `);
// --- Configuration & Helpers ---

    console.log('Raw metrics trends data:', rows);
const DEFAULT_PAGE_LIMIT = 50;
const MAX_PAGE_LIMIT = 200; // Prevent excessive data requests

    // Transform the data into the format expected by the frontend
    const transformedData = {
      revenue: rows.map(row => ({
        date: row.date,
        value: parseFloat(row.revenue)
      })),
      inventory_value: rows.map(row => ({
        date: row.date,
        value: parseFloat(row.inventory_value)
      })),
      gmroi: rows.map(row => ({
        date: row.date,
        value: parseFloat(row.gmroi)
      }))
    };
/**
 * Maps user-friendly query parameter keys (camelCase) to database column names.
 * Also validates if the column is safe for sorting or filtering.
 * Add ALL columns from product_metrics that should be filterable/sortable.
 */
const COLUMN_MAP = {
  // Product Info
  pid: { dbCol: 'pm.pid', type: 'number' },
  sku: { dbCol: 'pm.sku', type: 'string' },
  title: { dbCol: 'pm.title', type: 'string' },
  brand: { dbCol: 'pm.brand', type: 'string' },
  vendor: { dbCol: 'pm.vendor', type: 'string' },
  imageUrl: { dbCol: 'pm.image_url', type: 'string' },
  isVisible: { dbCol: 'pm.is_visible', type: 'boolean' },
  isReplenishable: { dbCol: 'pm.is_replenishable', type: 'boolean' },
  // Current Status
  currentPrice: { dbCol: 'pm.current_price', type: 'number' },
  currentRegularPrice: { dbCol: 'pm.current_regular_price', type: 'number' },
  currentCostPrice: { dbCol: 'pm.current_cost_price', type: 'number' },
  currentLandingCostPrice: { dbCol: 'pm.current_landing_cost_price', type: 'number' },
  currentStock: { dbCol: 'pm.current_stock', type: 'number' },
  currentStockCost: { dbCol: 'pm.current_stock_cost', type: 'number' },
  currentStockRetail: { dbCol: 'pm.current_stock_retail', type: 'number' },
  currentStockGross: { dbCol: 'pm.current_stock_gross', type: 'number' },
  onOrderQty: { dbCol: 'pm.on_order_qty', type: 'number' },
  onOrderCost: { dbCol: 'pm.on_order_cost', type: 'number' },
  onOrderRetail: { dbCol: 'pm.on_order_retail', type: 'number' },
  earliestExpectedDate: { dbCol: 'pm.earliest_expected_date', type: 'date' },
  // Historical Dates
  dateCreated: { dbCol: 'pm.date_created', type: 'date' },
  dateFirstReceived: { dbCol: 'pm.date_first_received', type: 'date' },
  dateLastReceived: { dbCol: 'pm.date_last_received', type: 'date' },
  dateFirstSold: { dbCol: 'pm.date_first_sold', type: 'date' },
  dateLastSold: { dbCol: 'pm.date_last_sold', type: 'date' },
  ageDays: { dbCol: 'pm.age_days', type: 'number' },
  // Rolling Period Metrics
  sales7d: { dbCol: 'pm.sales_7d', type: 'number' }, revenue7d: { dbCol: 'pm.revenue_7d', type: 'number' },
  sales14d: { dbCol: 'pm.sales_14d', type: 'number' }, revenue14d: { dbCol: 'pm.revenue_14d', type: 'number' },
  sales30d: { dbCol: 'pm.sales_30d', type: 'number' }, revenue30d: { dbCol: 'pm.revenue_30d', type: 'number' },
  cogs30d: { dbCol: 'pm.cogs_30d', type: 'number' }, profit30d: { dbCol: 'pm.profit_30d', type: 'number' },
  returnsUnits30d: { dbCol: 'pm.returns_units_30d', type: 'number' }, returnsRevenue30d: { dbCol: 'pm.returns_revenue_30d', type: 'number' },
  discounts30d: { dbCol: 'pm.discounts_30d', type: 'number' }, grossRevenue30d: { dbCol: 'pm.gross_revenue_30d', type: 'number' },
  grossRegularRevenue30d: { dbCol: 'pm.gross_regular_revenue_30d', type: 'number' },
  stockoutDays30d: { dbCol: 'pm.stockout_days_30d', type: 'number' },
  sales365d: { dbCol: 'pm.sales_365d', type: 'number' }, revenue365d: { dbCol: 'pm.revenue_365d', type: 'number' },
  avgStockUnits30d: { dbCol: 'pm.avg_stock_units_30d', type: 'number' }, avgStockCost30d: { dbCol: 'pm.avg_stock_cost_30d', type: 'number' },
  avgStockRetail30d: { dbCol: 'pm.avg_stock_retail_30d', type: 'number' }, avgStockGross30d: { dbCol: 'pm.avg_stock_gross_30d', type: 'number' },
  receivedQty30d: { dbCol: 'pm.received_qty_30d', type: 'number' }, receivedCost30d: { dbCol: 'pm.received_cost_30d', type: 'number' },
  // Lifetime Metrics
  lifetimeSales: { dbCol: 'pm.lifetime_sales', type: 'number' }, lifetimeRevenue: { dbCol: 'pm.lifetime_revenue', type: 'number' },
  // First Period Metrics
  first7DaysSales: { dbCol: 'pm.first_7_days_sales', type: 'number' }, first7DaysRevenue: { dbCol: 'pm.first_7_days_revenue', type: 'number' },
  first30DaysSales: { dbCol: 'pm.first_30_days_sales', type: 'number' }, first30DaysRevenue: { dbCol: 'pm.first_30_days_revenue', type: 'number' },
  first60DaysSales: { dbCol: 'pm.first_60_days_sales', type: 'number' }, first60DaysRevenue: { dbCol: 'pm.first_60_days_revenue', type: 'number' },
  first90DaysSales: { dbCol: 'pm.first_90_days_sales', type: 'number' }, first90DaysRevenue: { dbCol: 'pm.first_90_days_revenue', type: 'number' },
  // Calculated KPIs
  asp30d: { dbCol: 'pm.asp_30d', type: 'number' }, acp30d: { dbCol: 'pm.acp_30d', type: 'number' }, avgRos30d: { dbCol: 'pm.avg_ros_30d', type: 'number' },
  avgSalesPerDay30d: { dbCol: 'pm.avg_sales_per_day_30d', type: 'number' }, avgSalesPerMonth30d: { dbCol: 'pm.avg_sales_per_month_30d', type: 'number' },
  margin30d: { dbCol: 'pm.margin_30d', type: 'number' }, markup30d: { dbCol: 'pm.markup_30d', type: 'number' }, gmroi30d: { dbCol: 'pm.gmroi_30d', type: 'number' },
  stockturn30d: { dbCol: 'pm.stockturn_30d', type: 'number' }, returnRate30d: { dbCol: 'pm.return_rate_30d', type: 'number' },
  discountRate30d: { dbCol: 'pm.discount_rate_30d', type: 'number' }, stockoutRate30d: { dbCol: 'pm.stockout_rate_30d', type: 'number' },
  markdown30d: { dbCol: 'pm.markdown_30d', type: 'number' }, markdownRate30d: { dbCol: 'pm.markdown_rate_30d', type: 'number' },
  sellThrough30d: { dbCol: 'pm.sell_through_30d', type: 'number' }, avgLeadTimeDays: { dbCol: 'pm.avg_lead_time_days', type: 'number' },
  // Forecasting & Replenishment
  abcClass: { dbCol: 'pm.abc_class', type: 'string' }, salesVelocityDaily: { dbCol: 'pm.sales_velocity_daily', type: 'number' },
  configLeadTime: { dbCol: 'pm.config_lead_time', type: 'number' }, configDaysOfStock: { dbCol: 'pm.config_days_of_stock', type: 'number' },
  configSafetyStock: { dbCol: 'pm.config_safety_stock', type: 'number' }, planningPeriodDays: { dbCol: 'pm.planning_period_days', type: 'number' },
  leadTimeForecastUnits: { dbCol: 'pm.lead_time_forecast_units', type: 'number' }, daysOfStockForecastUnits: { dbCol: 'pm.days_of_stock_forecast_units', type: 'number' },
  planningPeriodForecastUnits: { dbCol: 'pm.planning_period_forecast_units', type: 'number' }, leadTimeClosingStock: { dbCol: 'pm.lead_time_closing_stock', type: 'number' },
  daysOfStockClosingStock: { dbCol: 'pm.days_of_stock_closing_stock', type: 'number' }, replenishmentNeededRaw: { dbCol: 'pm.replenishment_needed_raw', type: 'number' },
  replenishmentUnits: { dbCol: 'pm.replenishment_units', type: 'number' }, replenishmentCost: { dbCol: 'pm.replenishment_cost', type: 'number' },
  replenishmentRetail: { dbCol: 'pm.replenishment_retail', type: 'number' }, replenishmentProfit: { dbCol: 'pm.replenishment_profit', type: 'number' },
  toOrderUnits: { dbCol: 'pm.to_order_units', type: 'number' }, forecastLostSalesUnits: { dbCol: 'pm.forecast_lost_sales_units', type: 'number' },
  forecastLostRevenue: { dbCol: 'pm.forecast_lost_revenue', type: 'number' }, stockCoverInDays: { dbCol: 'pm.stock_cover_in_days', type: 'number' },
  poCoverInDays: { dbCol: 'pm.po_cover_in_days', type: 'number' }, sellsOutInDays: { dbCol: 'pm.sells_out_in_days', type: 'number' },
  replenishDate: { dbCol: 'pm.replenish_date', type: 'date' }, overstockedUnits: { dbCol: 'pm.overstocked_units', type: 'number' },
  overstockedCost: { dbCol: 'pm.overstocked_cost', type: 'number' }, overstockedRetail: { dbCol: 'pm.overstocked_retail', type: 'number' },
  isOldStock: { dbCol: 'pm.is_old_stock', type: 'boolean' },
  // Yesterday
  yesterdaySales: { dbCol: 'pm.yesterday_sales', type: 'number' },
};

    console.log('Transformed metrics data:', transformedData);
    res.json(transformedData);
  } catch (error) {
    console.error('Error fetching metrics trends:', error);
    res.status(500).json({ error: 'Failed to fetch metrics trends' });
  }
function getSafeColumnInfo(queryParamKey) {
  return COLUMN_MAP[queryParamKey] || null;
}

// --- Route Handlers ---

// GET /metrics/filter-options - Provide distinct values for filter dropdowns
router.get('/filter-options', async (req, res) => {
  const pool = req.app.locals.pool;
  console.log('GET /metrics/filter-options');
  try {
    const [vendorRes, brandRes, abcClassRes] = await Promise.all([
      pool.query(`SELECT DISTINCT vendor FROM public.product_metrics WHERE vendor IS NOT NULL AND vendor <> '' ORDER BY vendor`),
      pool.query(`SELECT DISTINCT COALESCE(brand, 'Unbranded') as brand FROM public.product_metrics WHERE brand IS NOT NULL AND brand <> '' ORDER BY brand`),
      pool.query(`SELECT DISTINCT abc_class FROM public.product_metrics WHERE abc_class IS NOT NULL ORDER BY abc_class`)
      // Add queries for other distinct options if needed (e.g., categories if stored on pm)
    ]);

    res.json({
      vendors: vendorRes.rows.map(r => r.vendor),
      brands: brandRes.rows.map(r => r.brand),
      abcClasses: abcClassRes.rows.map(r => r.abc_class),
    });
  } catch (error) {
    console.error('Error fetching filter options:', error);
    res.status(500).json({ error: 'Failed to fetch filter options' });
  }
});


// GET /metrics/ - List all product metrics with filtering, sorting, pagination
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool; // Get pool from app instance
  console.log('GET /metrics received query:', req.query);

  try {
    // --- Pagination ---
    let page = parseInt(req.query.page, 10);
    let limit = parseInt(req.query.limit, 10);
    if (isNaN(page) || page < 1) page = 1;
    if (isNaN(limit) || limit < 1) limit = DEFAULT_PAGE_LIMIT;
    limit = Math.min(limit, MAX_PAGE_LIMIT); // Cap the limit
    const offset = (page - 1) * limit;

    // --- Sorting ---
    const sortQueryKey = req.query.sort || 'title'; // Default sort field key
    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
    const sortColumn = sortColumnInfo ? sortColumnInfo.dbCol : 'pm.title'; // Default DB column
    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
    const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST'); // Consistent null handling

    // --- Filtering ---
    const conditions = [];
    const params = [];
    let paramCounter = 1;

    // Add default visibility/replenishable filters unless overridden
    if (req.query.showInvisible !== 'true') conditions.push(`pm.is_visible = true`);
    if (req.query.showNonReplenishable !== 'true') conditions.push(`pm.is_replenishable = true`);

    // Process other filters from query parameters
    for (const key in req.query) {
      if (['page', 'limit', 'sort', 'order', 'showInvisible', 'showNonReplenishable'].includes(key)) continue; // Skip control params

      let filterKey = key;
      let operator = '='; // Default operator
      let value = req.query[key];

      // Check for operator suffixes (e.g., sales30d_gt, title_like)
      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
      if (operatorMatch) {
        filterKey = operatorMatch[1]; // e.g., "sales30d"
        operator = operatorMatch[2]; // e.g., "gt"
      }

      const columnInfo = getSafeColumnInfo(filterKey);
      if (!columnInfo) {
        console.warn(`Invalid filter key ignored: ${key}`);
        continue; // Skip if the key doesn't map to a known column
      }

      const dbColumn = columnInfo.dbCol;
      const valueType = columnInfo.type;

      // --- Build WHERE clause fragment ---
      // Declared outside the try so the catch block below can still reference them
      let conditionFragment = '';
      let needsParam = true; // Most operators need a parameter

      try {
        switch (operator.toLowerCase()) {
          case 'eq': operator = '='; break;
          case 'ne': operator = '<>'; break;
          case 'gt': operator = '>'; break;
          case 'gte': operator = '>='; break;
          case 'lt': operator = '<'; break;
          case 'lte': operator = '<='; break;
          case 'like': operator = 'LIKE'; value = `%${value}%`; break; // Add wildcards for LIKE
          case 'ilike': operator = 'ILIKE'; value = `%${value}%`; break; // Add wildcards for ILIKE
          case 'between':
            const [val1, val2] = String(value).split(',');
            if (val1 !== undefined && val2 !== undefined) {
              conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
              params.push(parseValue(val1, valueType), parseValue(val2, valueType));
              needsParam = false; // Params added manually
            } else {
              console.warn(`Invalid 'between' value for ${key}: ${value}`);
              continue; // Skip this filter
            }
            break;
          case 'in':
            const inValues = String(value).split(',');
            if (inValues.length > 0) {
              const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
              conditionFragment = `${dbColumn} IN (${placeholders})`;
              params.push(...inValues.map(v => parseValue(v, valueType))); // Add all parsed values
              needsParam = false; // Params added manually
            } else {
              console.warn(`Invalid 'in' value for ${key}: ${value}`);
              continue; // Skip this filter
            }
            break;
          // Add other operators as needed (IS NULL, IS NOT NULL, etc.)
          case '=': // Keep default '='
          default: operator = '='; break; // Ensure default is handled
        }

        if (needsParam) {
          conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
          params.push(parseValue(value, valueType));
        }

        if (conditionFragment) {
          conditions.push(`(${conditionFragment})`); // Wrap condition in parentheses
        }

      } catch (parseError) {
        console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
        // Decrement counter if param wasn't actually used due to error
        if (needsParam) paramCounter--;
      }
    }

    // --- Construct and Execute Queries ---
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Count Query
    const countSql = `SELECT COUNT(*) AS total FROM public.product_metrics pm ${whereClause}`;
    console.log('Executing Count Query:', countSql, params);
    const countPromise = pool.query(countSql, params);

    // Data Query (Select all columns from metrics table for now)
    const dataSql = `
      SELECT pm.*
      FROM public.product_metrics pm
      ${whereClause}
      ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}
      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
    `;
    const dataParams = [...params, limit, offset];
    console.log('Executing Data Query:', dataSql, dataParams);
    const dataPromise = pool.query(dataSql, dataParams);

    // Execute queries in parallel
    const [countResult, dataResult] = await Promise.all([countPromise, dataPromise]);

    const total = parseInt(countResult.rows[0].total, 10);
    const metrics = dataResult.rows;
    console.log(`Total: ${total}, Fetched: ${metrics.length} for page ${page}`);

    // --- Respond ---
    res.json({
      metrics,
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit,
      },
      // Optionally include applied filters/sort for frontend confirmation
      appliedQuery: {
        filters: req.query, // Send back raw query filters
        sort: sortQueryKey,
        order: sortDirection.toLowerCase()
      }
    });

  } catch (error) {
    console.error('Error fetching metrics list:', error);
    res.status(500).json({ error: 'Failed to fetch product metrics list.' });
  }
});

// GET /metrics/:pid - Get metrics for a single product
router.get('/:pid', async (req, res) => {
  const pool = req.app.locals.pool;
  const pid = parseInt(req.params.pid, 10);

  if (isNaN(pid)) {
    return res.status(400).json({ error: 'Invalid Product ID.' });
  }

  console.log(`GET /metrics/${pid}`);
  try {
    const { rows } = await pool.query(
      `SELECT * FROM public.product_metrics WHERE pid = $1`,
      [pid]
    );

    if (rows.length === 0) {
      console.log(`Metrics not found for PID: ${pid}`);
      return res.status(404).json({ error: 'Metrics not found for this product.' });
    }

    console.log(`Metrics found for PID: ${pid}`);
    // Data is pre-calculated, return the first (only) row
    res.json(rows[0]);

  } catch (error) {
    console.error(`Error fetching metrics for PID ${pid}:`, error);
    res.status(500).json({ error: 'Failed to fetch product metrics.' });
  }
});


/**
 * Parses a value based on its expected type.
 * Throws error for invalid formats.
 */
function parseValue(value, type) {
  if (value === null || value === undefined || value === '') return null; // Allow empty strings? Or handle differently?

  switch (type) {
    case 'number':
      const num = parseFloat(value);
      if (isNaN(num)) throw new Error(`Invalid number format: "${value}"`);
      return num;
    case 'boolean':
      if (String(value).toLowerCase() === 'true') return true;
      if (String(value).toLowerCase() === 'false') return false;
      throw new Error(`Invalid boolean format: "${value}"`);
    case 'date':
      // Basic validation, rely on DB to handle actual date conversion
      if (!String(value).match(/^\d{4}-\d{2}-\d{2}$/)) {
        // Allow full timestamps too? Adjust regex if needed
        // console.warn(`Potentially invalid date format: "${value}"`); // Warn instead of throwing?
      }
      return String(value); // Send as string, let DB handle it
    case 'string':
    default:
      return String(value);
  }
}

module.exports = router;
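Putting the pieces together: any key in COLUMN_MAP can carry an operator suffix, and sort/order/page/limit ride alongside as control parameters. A hypothetical request against this router (assuming it is mounted at /metrics):

// sales30d_gt=10   -> (pm.sales_30d > $1)
// title_ilike=clip -> (pm.title ILIKE $2) with the value '%clip%'
// abcClass_in=A,B  -> (pm.abc_class IN ($3, $4))
const url = '/metrics?sales30d_gt=10&title_ilike=clip&abcClass_in=A,B'
          + '&sort=revenue30d&order=desc&page=1&limit=50';
const { metrics, pagination } = await (await fetch(url)).json();
console.log(pagination); // { total, pages, currentPage: 1, limit: 50 }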
@@ -65,6 +65,68 @@ router.get('/', async (req, res) => {
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
// Handle text filters for specific fields
|
||||
if (req.query.barcode) {
|
||||
conditions.push(`p.barcode ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.barcode}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.vendor_reference) {
|
||||
conditions.push(`p.vendor_reference ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.vendor_reference}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
// Add new text filters for the additional fields
|
||||
if (req.query.description) {
|
||||
conditions.push(`p.description ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.description}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.harmonized_tariff_code) {
|
||||
conditions.push(`p.harmonized_tariff_code ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.harmonized_tariff_code}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.notions_reference) {
|
||||
conditions.push(`p.notions_reference ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.notions_reference}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.line) {
|
||||
conditions.push(`p.line ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.line}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.subline) {
|
||||
conditions.push(`p.subline ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.subline}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.artist) {
|
||||
conditions.push(`p.artist ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.artist}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.country_of_origin) {
|
||||
conditions.push(`p.country_of_origin ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.country_of_origin}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
if (req.query.location) {
|
||||
conditions.push(`p.location ILIKE $${paramCounter}`);
|
||||
params.push(`%${req.query.location}%`);
|
||||
paramCounter++;
|
||||
}
|
||||
|
||||
// Handle numeric filters with operators
|
||||
const numericFields = {
|
||||
stock: 'p.stock_quantity',
|
||||
@@ -74,11 +136,31 @@ router.get('/', async (req, res) => {
|
||||
dailySalesAvg: 'pm.daily_sales_avg',
|
||||
weeklySalesAvg: 'pm.weekly_sales_avg',
|
||||
monthlySalesAvg: 'pm.monthly_sales_avg',
|
||||
avgQuantityPerOrder: 'pm.avg_quantity_per_order',
|
||||
numberOfOrders: 'pm.number_of_orders',
|
||||
margin: 'pm.avg_margin_percent',
|
||||
gmroi: 'pm.gmroi',
|
||||
inventoryValue: 'pm.inventory_value',
|
||||
costOfGoodsSold: 'pm.cost_of_goods_sold',
|
||||
grossProfit: 'pm.gross_profit',
|
||||
turnoverRate: 'pm.turnover_rate',
|
||||
leadTime: 'pm.current_lead_time',
|
||||
currentLeadTime: 'pm.current_lead_time',
|
||||
targetLeadTime: 'pm.target_lead_time',
|
||||
stockCoverage: 'pm.days_of_inventory',
|
||||
daysOfStock: 'pm.days_of_inventory'
|
||||
daysOfStock: 'pm.days_of_inventory',
|
||||
weeksOfStock: 'pm.weeks_of_inventory',
|
||||
reorderPoint: 'pm.reorder_point',
|
||||
safetyStock: 'pm.safety_stock',
|
||||
// Add new numeric fields
|
||||
preorderCount: 'p.preorder_count',
|
||||
notionsInvCount: 'p.notions_inv_count',
|
||||
rating: 'p.rating',
|
||||
reviews: 'p.reviews',
|
||||
weight: 'p.weight',
|
||||
totalSold: 'p.total_sold',
|
||||
baskets: 'p.baskets',
|
||||
notifies: 'p.notifies'
|
||||
};
|
||||
|
||||
Object.entries(req.query).forEach(([key, value]) => {
|
||||
@@ -102,6 +184,24 @@ router.get('/', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Handle date filters
|
||||
const dateFields = {
|
||||
firstSaleDate: 'pm.first_sale_date',
|
||||
lastSaleDate: 'pm.last_sale_date',
|
||||
lastPurchaseDate: 'pm.last_purchase_date',
|
||||
firstReceivedDate: 'pm.first_received_date',
|
||||
lastReceivedDate: 'pm.last_received_date'
|
||||
};
|
||||
|
||||
Object.entries(req.query).forEach(([key, value]) => {
|
||||
const field = dateFields[key];
|
||||
if (field) {
|
||||
conditions.push(`${field}::TEXT LIKE $${paramCounter}`);
|
||||
params.push(`${value}%`); // Format like '2023-01%' to match by month or '2023-01-01' for exact date
|
||||
paramCounter++;
|
||||
}
|
||||
});
|
||||

  // Handle select filters
  if (req.query.vendor) {
    conditions.push(`p.vendor = $${paramCounter}`);
@@ -256,7 +356,8 @@ router.get('/', async (req, res) => {
      pm.last_received_date,
      pm.abc_class,
      pm.stock_status,
-     pm.turnover_rate
+     pm.turnover_rate,
+     p.date_last_sold
    FROM products p
    LEFT JOIN product_metrics pm ON p.pid = pm.pid
    LEFT JOIN product_categories pc ON p.pid = pc.pid
@@ -473,6 +574,29 @@ router.get('/:id', async (req, res) => {
      uom: parseInt(productRows[0].uom),
      managing_stock: Boolean(productRows[0].managing_stock),
      replenishable: Boolean(productRows[0].replenishable),
      // Format new fields
      preorder_count: parseInt(productRows[0].preorder_count || 0),
      notions_inv_count: parseInt(productRows[0].notions_inv_count || 0),
      harmonized_tariff_code: productRows[0].harmonized_tariff_code || '',
      notions_reference: productRows[0].notions_reference || '',
      line: productRows[0].line || '',
      subline: productRows[0].subline || '',
      artist: productRows[0].artist || '',
      rating: parseFloat(productRows[0].rating || 0),
      reviews: parseInt(productRows[0].reviews || 0),
      weight: parseFloat(productRows[0].weight || 0),
      dimensions: {
        length: parseFloat(productRows[0].length || 0),
        width: parseFloat(productRows[0].width || 0),
        height: parseFloat(productRows[0].height || 0),
      },
      country_of_origin: productRows[0].country_of_origin || '',
      location: productRows[0].location || '',
      total_sold: parseInt(productRows[0].total_sold || 0),
      baskets: parseInt(productRows[0].baskets || 0),
      notifies: parseInt(productRows[0].notifies || 0),
      date_last_sold: productRows[0].date_last_sold || null,
      // Format existing analytics fields
      daily_sales_avg: parseFloat(productRows[0].daily_sales_avg) || 0,
      weekly_sales_avg: parseFloat(productRows[0].weekly_sales_avg) || 0,
      monthly_sales_avg: parseFloat(productRows[0].monthly_sales_avg) || 0,
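An aside on the two fallback styles mixed above: both return 0 for NULL columns, but they behave differently on malformed input (plain illustration, not from the diff):

const bad: any = 'n/a';
const a = parseFloat(bad || 0);  // NaN -- 'n/a' is truthy, so parseFloat('n/a') runs
const b = parseFloat(bad) || 0;  // 0   -- NaN is falsy, so the || 0 fallback applies

The `parseFloat(x) || 0` form used for the analytics fields is the safer of the two.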
@@ -2,9 +2,12 @@
<html lang="en">
  <head>
    <meta charset="UTF-8" />
-   <link rel="icon" type="image/svg+xml" href="/box.svg" />
+   <link rel="icon" type="image/x-icon" href="/cherrybottom.ico" />
+   <link rel="preconnect" href="https://fonts.googleapis.com">
+   <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
+   <link href="https://fonts.googleapis.com/css2?family=Inter:wght@100..900&display=swap" rel="stylesheet">
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-   <title>Inventory Manager</title>
+   <title>A Cherry On Bottom</title>
  </head>
  <body>
    <div id="root"></div>

@@ -1 +0,0 @@ (deleted SVG favicon; before: 340 B)
-<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><g fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"><path d="M21 8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16Z"/><path d="m3.3 7l8.7 5l8.7-5M12 22V12"/></g></svg>

BIN  inventory/public/cherrybottom.ico  (new binary file, 30 KiB; not shown)
BIN  inventory/public/cherrybottom.png  (new binary file, 21 KiB; not shown)
@@ -1,42 +1,3 @@
#root {
  max-width: 1800px;
  margin: 0 auto;
  padding: 2rem;
  text-align: center;
}

.logo {
  height: 6em;
  padding: 1.5em;
  will-change: filter;
  transition: filter 300ms;
}
.logo:hover {
  filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
  filter: drop-shadow(0 0 2em #61dafbaa);
}

@keyframes logo-spin {
  from {
    transform: rotate(0deg);
  }
  to {
    transform: rotate(360deg);
  }
}

@media (prefers-reduced-motion: no-preference) {
  a:nth-of-type(2) .logo {
    animation: logo-spin infinite 20s linear;
  }
}

.card {
  padding: 2em;
}

.read-the-docs {
  color: #888;
  font-family: 'Inter', sans-serif;
}

@@ -1,4 +1,3 @@
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { ResponsiveContainer, BarChart, Bar, XAxis, YAxis, Tooltip, ScatterChart, Scatter, ZAxis } from 'recharts';
import config from '../../config';
inventory/src/components/dashboard/Overview.tsx (new file, 66 lines)
@@ -0,0 +1,66 @@
import { useQuery } from '@tanstack/react-query';
import { Line, LineChart, ResponsiveContainer, Tooltip, XAxis, YAxis } from 'recharts';
import config from '../../config';

interface SalesData {
  date: string;
  total: number;
}

export function Overview() {
  const { data, isLoading, error } = useQuery<SalesData[]>({
    queryKey: ['sales-overview'],
    queryFn: async () => {
      const response = await fetch(`${config.apiUrl}/dashboard/sales-overview`);
      if (!response.ok) {
        throw new Error('Failed to fetch sales overview');
      }
      const rawData = await response.json();
      return rawData.map((item: SalesData) => ({
        ...item,
        total: parseFloat(item.total.toString()),
        date: new Date(item.date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' })
      }));
    },
  });

  if (isLoading) {
    return <div>Loading chart...</div>;
  }

  if (error) {
    return <div className="text-red-500">Error loading sales overview</div>;
  }

  return (
    <ResponsiveContainer width="100%" height={350}>
      <LineChart data={data}>
        <XAxis
          dataKey="date"
          stroke="#888888"
          fontSize={12}
          tickLine={false}
          axisLine={false}
        />
        <YAxis
          stroke="#888888"
          fontSize={12}
          tickLine={false}
          axisLine={false}
          tickFormatter={(value) => `$${value.toLocaleString()}`}
        />
        <Tooltip
          formatter={(value: number) => [`$${value.toLocaleString()}`, 'Sales']}
          labelFormatter={(label) => `Date: ${label}`}
        />
        <Line
          type="monotone"
          dataKey="total"
          stroke="hsl(var(--primary))"
          strokeWidth={2}
          dot={false}
        />
      </LineChart>
    </ResponsiveContainer>
  );
}
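Overview is self-contained, so a dashboard page only needs to mount it inside a card. A hypothetical usage (the `SalesCard` wrapper is not in the diff):

import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Overview } from '@/components/dashboard/Overview';

export function SalesCard() {
  return (
    <Card>
      <CardHeader>
        <CardTitle>Sales Overview</CardTitle>
      </CardHeader>
      <CardContent>
        <Overview />
      </CardContent>
    </Card>
  );
}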
inventory/src/components/dashboard/VendorPerformance.tsx (new file, 79 lines)
@@ -0,0 +1,79 @@
import { useQuery } from "@tanstack/react-query"
import { CardHeader, CardTitle, CardContent } from "@/components/ui/card"
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from "@/components/ui/table"
import { Progress } from "@/components/ui/progress"
import config from "@/config"

interface VendorMetrics {
  vendor: string
  avg_lead_time: number
  on_time_delivery_rate: number
  avg_fill_rate: number
  total_orders: number
  active_orders: number
  overdue_orders: number
}

export function VendorPerformance() {
  const { data: vendors } = useQuery<VendorMetrics[]>({
    queryKey: ["vendor-metrics"],
    queryFn: async () => {
      const response = await fetch(`${config.apiUrl}/dashboard/vendor/performance`)
      if (!response.ok) {
        throw new Error("Failed to fetch vendor metrics")
      }
      return response.json()
    },
  })

  // Sort vendors by on-time delivery rate
  const sortedVendors = vendors
    ?.sort((a, b) => b.on_time_delivery_rate - a.on_time_delivery_rate)

  return (
    <>
      <CardHeader>
        <CardTitle className="text-lg font-medium">Top Vendor Performance</CardTitle>
      </CardHeader>
      <CardContent className="max-h-[400px] overflow-auto">
        <Table>
          <TableHeader>
            <TableRow>
              <TableHead>Vendor</TableHead>
              <TableHead>On-Time</TableHead>
              <TableHead className="text-right">Fill Rate</TableHead>
            </TableRow>
          </TableHeader>
          <TableBody>
            {sortedVendors?.map((vendor) => (
              <TableRow key={vendor.vendor}>
                <TableCell className="font-medium">{vendor.vendor}</TableCell>
                <TableCell>
                  <div className="flex items-center gap-2">
                    <Progress
                      value={vendor.on_time_delivery_rate}
                      className="h-2"
                    />
                    <span className="w-10 text-sm">
                      {vendor.on_time_delivery_rate.toFixed(0)}%
                    </span>
                  </div>
                </TableCell>
                <TableCell className="text-right">
                  {vendor.avg_fill_rate.toFixed(0)}%
                </TableCell>
              </TableRow>
            ))}
          </TableBody>
        </Table>
      </CardContent>
    </>
  )
}
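One aside on the sort above: Array.prototype.sort mutates its receiver, and `vendors` here is the array held in the React Query cache, so the cached data is reordered as a side effect. A copy-first variant avoids that:

const sortedVendors = vendors
  ? [...vendors].sort((a, b) => b.on_time_delivery_rate - a.on_time_delivery_rate)
  : undefined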
@@ -3,7 +3,6 @@ import {
  Package,
  BarChart2,
  Settings,
  Box,
  ClipboardList,
  LogOut,
  Users,
@@ -22,6 +21,7 @@ import {
  SidebarMenuButton,
  SidebarMenuItem,
  SidebarSeparator,
  useSidebar
} from "@/components/ui/sidebar";
import { useLocation, useNavigate, Link } from "react-router-dom";
import { Protected } from "@/components/auth/Protected";
@@ -80,6 +80,7 @@ const items = [
export function AppSidebar() {
  const location = useLocation();
  const navigate = useNavigate();
  useSidebar();

  const handleLogout = () => {
    localStorage.removeItem('token');
@@ -90,11 +91,19 @@ export function AppSidebar() {
  return (
    <Sidebar collapsible="icon" variant="sidebar">
      <SidebarHeader>
-       <div className="p-4 flex items-center gap-2 group-data-[collapsible=icon]:justify-center">
-         <Box className="h-6 w-6 shrink-0" />
-         <h2 className="text-lg font-semibold group-data-[collapsible=icon]:hidden">
-           Inventory Manager
-         </h2>
+       <div className="py-1 flex justify-center items-center">
+         <div className="flex items-center">
+           <div className="flex-shrink-0 w-8 h-8 relative flex items-center justify-center">
+             <img
+               src="/cherrybottom.png"
+               alt="Cherry Bottom"
+               className="w-6 h-6 object-contain -rotate-12 transform hover:rotate-0 transition-transform ease-in-out duration-300"
+             />
+           </div>
+           <div className="ml-2 transition-all duration-200 whitespace-nowrap group-[.group[data-state=collapsed]]:hidden">
+             <span className="font-bold text-lg">A Cherry On Bottom</span>
+           </div>
+         </div>
        </div>
      </SidebarHeader>
      <SidebarSeparator />

@@ -95,12 +95,8 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
  isChangeReverted,
  getFieldDisplayValueWithHighlight,
  fields,
  debugData,
}) => {
  const [costPerMillionTokens, setCostPerMillionTokens] = useState(2.5); // Default cost
  const hasCompanyPrompts =
    currentPrompt.debugData?.promptSources?.companyPrompts &&
    currentPrompt.debugData.promptSources.companyPrompts.length > 0;

  // Create our own state to track changes
  const [localReversionState, setLocalReversionState] = useState<
@@ -157,17 +153,6 @@ export const AiValidationDialogs: React.FC<AiValidationDialogsProps> = ({
    return !!localReversionState[key];
  };

  // Use "full" as the default tab
  const defaultTab = "full";
  const [activeTab, setActiveTab] = useState(defaultTab);

  // Update activeTab when the dialog is opened with new data
  React.useEffect(() => {
    if (currentPrompt.isOpen) {
      setActiveTab("full");
    }
  }, [currentPrompt.isOpen]);

  // Format time helper
  const formatTime = (seconds: number): string => {
    if (seconds < 60) {
@@ -125,6 +125,11 @@ interface Product {
  }>;

  category_paths?: Record<string, string>;

  description?: string;

  preorder_count: number;
  notions_inv_count: number;
}

interface ProductDetailProps {
@@ -225,6 +230,7 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
  <TabsTrigger value="purchase">Purchase History</TabsTrigger>
  <TabsTrigger value="financial">Financial</TabsTrigger>
  <TabsTrigger value="vendor">Vendor</TabsTrigger>
  <TabsTrigger value="details">Additional Info</TabsTrigger>
</TabsList>
</div>

@@ -255,6 +261,12 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
  <dt className="text-sm text-muted-foreground">UPC</dt>
  <dd>{product?.barcode || "N/A"}</dd>
</div>
{product?.description && (
  <div>
    <dt className="text-sm text-muted-foreground">Description</dt>
    <dd>{product.description}</dd>
  </div>
)}
<div>
  <dt className="text-sm text-muted-foreground">Categories</dt>
  <dd className="flex flex-col gap-2">
@@ -359,6 +371,51 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
  </div>
</Card>

<Card className="p-4">
  <h3 className="font-semibold mb-2">Customer Engagement</h3>
  <dl className="grid grid-cols-3 gap-4">
    {product?.total_sold > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Total Sold</dt>
        <dd className="text-2xl font-semibold">{product.total_sold}</dd>
      </div>
    )}
    {product?.rating > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Rating</dt>
        <dd className="text-2xl font-semibold flex items-center">
          {product.rating.toFixed(1)}
          <span className="ml-1 text-yellow-500">★</span>
        </dd>
      </div>
    )}
    {product?.reviews > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Reviews</dt>
        <dd className="text-2xl font-semibold">{product.reviews}</dd>
      </div>
    )}
    {product?.baskets > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">In Baskets</dt>
        <dd className="text-2xl font-semibold">{product.baskets}</dd>
      </div>
    )}
    {product?.notifies > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Notify Requests</dt>
        <dd className="text-2xl font-semibold">{product.notifies}</dd>
      </div>
    )}
    {product?.date_last_sold && (
      <div>
        <dt className="text-sm text-muted-foreground">Last Sold</dt>
        <dd className="text-xl font-semibold">{formatDate(product.date_last_sold)}</dd>
      </div>
    )}
  </dl>
</Card>

<Card className="p-4">
  <h3 className="font-semibold mb-2">Financial Metrics</h3>
  <dl className="space-y-2">
@@ -426,6 +483,18 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
  <dt className="text-sm text-muted-foreground">Days of Inventory</dt>
  <dd className="text-2xl font-semibold">{product?.metrics?.days_of_inventory || 0}</dd>
</div>
{product?.preorder_count > 0 && (
  <div>
    <dt className="text-sm text-muted-foreground">Preorders</dt>
    <dd className="text-2xl font-semibold">{product?.preorder_count}</dd>
  </div>
)}
{product?.notions_inv_count > 0 && (
  <div>
    <dt className="text-sm text-muted-foreground">Notions Inventory</dt>
    <dd className="text-2xl font-semibold">{product?.notions_inv_count}</dd>
  </div>
)}
</dl>
</Card>

@@ -506,6 +575,51 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
  </ResponsiveContainer>
</div>
</Card>

<Card className="p-4">
  <h3 className="font-semibold mb-2">Customer Engagement</h3>
  <dl className="grid grid-cols-3 gap-4">
    {product?.total_sold > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Total Sold</dt>
        <dd className="text-2xl font-semibold">{product.total_sold}</dd>
      </div>
    )}
    {product?.rating > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Rating</dt>
        <dd className="text-2xl font-semibold flex items-center">
          {product.rating.toFixed(1)}
          <span className="ml-1 text-yellow-500">★</span>
        </dd>
      </div>
    )}
    {product?.reviews > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Reviews</dt>
        <dd className="text-2xl font-semibold">{product.reviews}</dd>
      </div>
    )}
    {product?.baskets > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">In Baskets</dt>
        <dd className="text-2xl font-semibold">{product.baskets}</dd>
      </div>
    )}
    {product?.notifies > 0 && (
      <div>
        <dt className="text-sm text-muted-foreground">Notify Requests</dt>
        <dd className="text-2xl font-semibold">{product.notifies}</dd>
      </div>
    )}
    {product?.date_last_sold && (
      <div>
        <dt className="text-sm text-muted-foreground">Last Sold</dt>
        <dd className="text-xl font-semibold">{formatDate(product.date_last_sold)}</dd>
      </div>
    )}
  </dl>
</Card>
</div>
)}
</TabsContent>
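The Customer Engagement card above is pasted verbatim into two tabs. A shared component would keep the copies from drifting; this sketch is editorial, not part of the commit, and `EngagementStat` is a hypothetical name:

import type { ReactNode } from "react";

// One <dt>/<dd> stat cell, matching the markup repeated in both cards above.
function EngagementStat({ label, value }: { label: string; value: ReactNode }) {
  return (
    <div>
      <dt className="text-sm text-muted-foreground">{label}</dt>
      <dd className="text-2xl font-semibold">{value}</dd>
    </div>
  );
}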
@@ -661,6 +775,123 @@ export function ProductDetail({ productId, onClose }: ProductDetailProps) {
  <div className="text-center text-muted-foreground">No vendor performance data available</div>
)}
</TabsContent>

<TabsContent value="details" className="p-4">
  {isLoading ? (
    <Skeleton className="h-48 w-full" />
  ) : (
    <div className="space-y-4">
      <Card className="p-4">
        <h3 className="font-semibold mb-2">Product Details</h3>
        <dl className="grid grid-cols-2 gap-4">
          {product?.description && (
            <div className="col-span-2">
              <dt className="text-sm text-muted-foreground">Description</dt>
              <dd>{product.description}</dd>
            </div>
          )}
          <div>
            <dt className="text-sm text-muted-foreground">Created Date</dt>
            <dd>{formatDate(product?.created_at)}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Last Updated</dt>
            <dd>{formatDate(product?.updated_at)}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Product ID</dt>
            <dd>{product?.pid}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Line</dt>
            <dd>{product?.line || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Subline</dt>
            <dd>{product?.subline || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Artist</dt>
            <dd>{product?.artist || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Country of Origin</dt>
            <dd>{product?.country_of_origin || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Location</dt>
            <dd>{product?.location || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">HTS Code</dt>
            <dd>{product?.harmonized_tariff_code || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Notions Reference</dt>
            <dd>{product?.notions_reference || 'N/A'}</dd>
          </div>
        </dl>
      </Card>

      <Card className="p-4">
        <h3 className="font-semibold mb-2">Physical Attributes</h3>
        <dl className="grid grid-cols-2 gap-4">
          <div>
            <dt className="text-sm text-muted-foreground">Weight</dt>
            <dd>{product?.weight ? `${product.weight} kg` : 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Dimensions</dt>
            <dd>
              {product?.dimensions
                ? `${product.dimensions.length} × ${product.dimensions.width} × ${product.dimensions.height} cm`
                : 'N/A'
              }
            </dd>
          </div>
        </dl>
      </Card>

      <Card className="p-4">
        <h3 className="font-semibold mb-2">Customer Metrics</h3>
        <dl className="grid grid-cols-2 gap-4">
          <div>
            <dt className="text-sm text-muted-foreground">Rating</dt>
            <dd className="flex items-center">
              {product?.rating
                ? <>
                    {product.rating.toFixed(1)}
                    <span className="ml-1 text-yellow-500">★</span>
                  </>
                : 'N/A'
              }
            </dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Review Count</dt>
            <dd>{product?.reviews || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Total Sold</dt>
            <dd>{product?.total_sold || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Currently in Baskets</dt>
            <dd>{product?.baskets || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Notify Requests</dt>
            <dd>{product?.notifies || 'N/A'}</dd>
          </div>
          <div>
            <dt className="text-sm text-muted-foreground">Date Last Sold</dt>
            <dd>{formatDate(product?.date_last_sold) || 'N/A'}</dd>
          </div>
        </dl>
      </Card>
    </div>
  )}
</TabsContent>
</Tabs>
</VaulDrawer.Content>
</VaulDrawer.Portal>
@@ -51,9 +51,28 @@ const FILTER_OPTIONS: FilterOption[] = [
  // Basic Info Group
  { id: "search", label: "Search", type: "text", group: "Basic Info" },
  { id: "sku", label: "SKU", type: "text", group: "Basic Info" },
  { id: "barcode", label: "UPC/Barcode", type: "text", group: "Basic Info" },
  { id: "vendor", label: "Vendor", type: "select", group: "Basic Info" },
  { id: "vendor_reference", label: "Supplier #", type: "text", group: "Basic Info" },
  { id: "brand", label: "Brand", type: "select", group: "Basic Info" },
  { id: "category", label: "Category", type: "select", group: "Basic Info" },
  { id: "description", label: "Description", type: "text", group: "Basic Info" },
  { id: "harmonized_tariff_code", label: "HTS Code", type: "text", group: "Basic Info" },
  { id: "notions_reference", label: "Notions Ref", type: "text", group: "Basic Info" },
  { id: "line", label: "Line", type: "text", group: "Basic Info" },
  { id: "subline", label: "Subline", type: "text", group: "Basic Info" },
  { id: "artist", label: "Artist", type: "text", group: "Basic Info" },
  { id: "country_of_origin", label: "Origin", type: "text", group: "Basic Info" },
  { id: "location", label: "Location", type: "text", group: "Basic Info" },

  // Physical Properties
  {
    id: "weight",
    label: "Weight",
    type: "number",
    group: "Physical Properties",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },

  // Inventory Group
  {
@@ -77,6 +96,20 @@ const FILTER_OPTIONS: FilterOption[] = [
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "preorderCount",
    label: "Preorder Count",
    type: "number",
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "notionsInvCount",
    label: "Notions Inventory",
    type: "number",
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "daysOfStock",
    label: "Days of Stock",
@@ -84,6 +117,27 @@ const FILTER_OPTIONS: FilterOption[] = [
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "weeksOfStock",
    label: "Weeks of Stock",
    type: "number",
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "reorderPoint",
    label: "Reorder Point",
    type: "number",
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "safetyStock",
    label: "Safety Stock",
    type: "number",
    group: "Inventory",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "replenishable",
    label: "Replenishable",
@@ -94,6 +148,17 @@ const FILTER_OPTIONS: FilterOption[] = [
    ],
    group: "Inventory",
  },
  {
    id: "abcClass",
    label: "ABC Class",
    type: "select",
    options: [
      { label: "A", value: "A" },
      { label: "B", value: "B" },
      { label: "C", value: "C" },
    ],
    group: "Inventory",
  },

  // Pricing Group
  {
@@ -140,6 +205,73 @@ const FILTER_OPTIONS: FilterOption[] = [
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "avgQuantityPerOrder",
    label: "Avg Qty/Order",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "numberOfOrders",
    label: "Order Count",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "firstSaleDate",
    label: "First Sale Date",
    type: "text",
    group: "Sales Metrics",
  },
  {
    id: "lastSaleDate",
    label: "Last Sale Date",
    type: "text",
    group: "Sales Metrics",
  },
  {
    id: "date_last_sold",
    label: "Date Last Sold",
    type: "text",
    group: "Sales Metrics",
  },
  {
    id: "total_sold",
    label: "Total Sold",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "baskets",
    label: "In Baskets",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "notifies",
    label: "Notifies",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "rating",
    label: "Rating",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "reviews",
    label: "Reviews",
    type: "number",
    group: "Sales Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },

  // Financial Metrics Group
  {
@@ -156,6 +288,34 @@ const FILTER_OPTIONS: FilterOption[] = [
    group: "Financial Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "inventoryValue",
    label: "Inventory Value",
    type: "number",
    group: "Financial Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "costOfGoodsSold",
    label: "COGS",
    type: "number",
    group: "Financial Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "grossProfit",
    label: "Gross Profit",
    type: "number",
    group: "Financial Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "turnoverRate",
    label: "Turnover Rate",
    type: "number",
    group: "Financial Metrics",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },

  // Lead Time & Stock Coverage Group
  {
@@ -165,6 +325,20 @@ const FILTER_OPTIONS: FilterOption[] = [
    group: "Lead Time & Coverage",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "currentLeadTime",
    label: "Current Lead Time",
    type: "number",
    group: "Lead Time & Coverage",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "targetLeadTime",
    label: "Target Lead Time",
    type: "number",
    group: "Lead Time & Coverage",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "leadTimeStatus",
    label: "Lead Time Status",
@@ -183,19 +357,26 @@ const FILTER_OPTIONS: FilterOption[] = [
    group: "Lead Time & Coverage",
    operators: ["=", ">", ">=", "<", "<=", "between"],
  },
  {
    id: "lastPurchaseDate",
    label: "Last Purchase Date",
    type: "text",
    group: "Lead Time & Coverage",
  },
  {
    id: "firstReceivedDate",
    label: "First Received Date",
    type: "text",
    group: "Lead Time & Coverage",
  },
  {
    id: "lastReceivedDate",
    label: "Last Received Date",
    type: "text",
    group: "Lead Time & Coverage",
  },

  // Classification Group
  {
    id: "abcClass",
    label: "ABC Class",
    type: "select",
    options: [
      { label: "A", value: "A" },
      { label: "B", value: "B" },
      { label: "C", value: "C" },
    ],
    group: "Classification",
  },
  {
    id: "managingStock",
    label: "Managing Stock",
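The entries above assume a FilterOption shape declared earlier in the same file, outside this hunk. A plausible reconstruction from the usage (hypothetical, for orientation only):

type FilterOperator = "=" | ">" | ">=" | "<" | "<=" | "between";

interface FilterOption {
  id: string;                                   // query-param name sent to the products API
  label: string;                                // text shown in the filter picker
  type: "text" | "number" | "select";           // widget kind
  group: string;                                // section heading in the picker
  options?: { label: string; value: string }[]; // only for type: "select"
  operators?: FilterOperator[];                 // only for numeric filters
}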
@@ -234,6 +234,11 @@ export function ProductTable({
      )) || '-'}
    </div>
  );
case 'dimensions':
  if (value) {
    return `${value.length}×${value.width}×${value.height}`;
  }
  return '-';
case 'stock_status':
  return getStockStatus(product.stock_status);
case 'abc_class':
@@ -252,6 +257,14 @@ export function ProductTable({
  ) : (
    <Badge variant="outline">Non-Replenishable</Badge>
  );
case 'rating':
  if (value === undefined || value === null) return '-';
  return (
    <div className="flex items-center">
      {value.toFixed(1)}
      <span className="ml-1 text-yellow-500">★</span>
    </div>
  );
default:
  if (columnDef?.format && value !== undefined && value !== null) {
    // For numeric formats (those using toFixed), ensure the value is a number

(File diff suppressed because it is too large.)
@@ -1,4 +1,4 @@
-import { useState, useMemo, useCallback, useRef, useEffect } from "react";
+import { useState, useMemo, useCallback, useEffect } from "react";
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { Button } from "@/components/ui/button";
import {
@@ -90,7 +90,7 @@ const ImageForm = ({
}: {
  editingImage: ReusableImage | null;
  formData: ImageFormData;
- setFormData: (data: ImageFormData) => void;
+ setFormData: (data: ImageFormData | ((prev: ImageFormData) => ImageFormData)) => void;
  onSubmit: (e: React.FormEvent) => void;
  onCancel: () => void;
  fieldOptions: FieldOptions | undefined;
@@ -99,11 +99,11 @@ const ImageForm = ({
  isDragActive: boolean;
}) => {
  const handleNameChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
-   setFormData(prev => ({ ...prev, name: e.target.value }));
+   setFormData((prev: ImageFormData) => ({ ...prev, name: e.target.value }));
  }, [setFormData]);

  const handleGlobalChange = useCallback((checked: boolean) => {
-   setFormData(prev => ({
+   setFormData((prev: ImageFormData) => ({
      ...prev,
      is_global: checked,
      company: checked ? null : prev.company
@@ -111,7 +111,7 @@ const ImageForm = ({
  }, [setFormData]);

  const handleCompanyChange = useCallback((value: string) => {
-   setFormData(prev => ({ ...prev, company: value }));
+   setFormData((prev: ImageFormData) => ({ ...prev, company: value }));
  }, [setFormData]);

  return (
@@ -738,12 +738,18 @@ export function ReusableImageManagement() {
  </DialogContent>
</Dialog>

-<style jsx global>{`
+{/* Add global styles for this component using regular style tag */}
+<style>{`
  .reusable-image-table thead tr th,
  .reusable-image-table tbody tr td {
    padding-left: 1rem;
    padding-right: 1rem;
  }
  .bg-checkerboard {
    background-image: linear-gradient(45deg, #f0f0f0 25%, transparent 25%),
-     linear-gradient(-45deg, #f0f0f0 25%, transparent 25%),
-     linear-gradient(45deg, transparent 75%, #f0f0f0 75%),
-     linear-gradient(-45deg, transparent 75%, #f0f0f0 75%);
+     linear-gradient(-45deg, #f0f0f0 25%, transparent 25%),
+     linear-gradient(45deg, transparent 75%, #f0f0f0 75%),
+     linear-gradient(-45deg, transparent 75%, #f0f0f0 75%);
    background-size: 20px 20px;
    background-position: 0 0, 0 10px, 10px -10px, -10px 0px;
  }

@@ -96,17 +96,15 @@
    @apply border-border;
  }
  body {
-   @apply bg-background text-foreground;
+   @apply bg-background text-foreground font-sans;
  }
}

@layer base {
  * {
    @apply border-border outline-ring/50;
  }
  body {
-   @apply bg-background text-foreground;
+   @apply bg-background text-foreground font-sans;
  }
}
@@ -1,6 +1,7 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import './App.css'
import App from './App.tsx'
import { BrowserRouter as Router } from 'react-router-dom'
@@ -1,25 +1,28 @@
import { useState, useContext } from "react";
import { useNavigate, useSearchParams } from "react-router-dom";
import { AuthContext } from "@/contexts/AuthContext";
import { toast } from "sonner";
import { cn } from "@/lib/utils";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { toast } from "sonner";
import { Loader2, Box } from "lucide-react";
import { motion } from "framer-motion";
import { AuthContext } from "@/contexts/AuthContext";
import { Label } from "@/components/ui/label";
import { motion } from "motion/react";

export function Login() {
  const [username, setUsername] = useState("");
  const [password, setPassword] = useState("");
  const [isLoading, setIsLoading] = useState(false);
  const navigate = useNavigate();
  const [searchParams] = useSearchParams();
  const { login } = useContext(AuthContext);

- const handleLogin = async (e: React.FormEvent) => {
+ const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault();
    setIsLoading(true);

    const formData = new FormData(e.currentTarget);
    const username = formData.get("username") as string;
    const password = formData.get("password") as string;

    try {
      await login(username, password);

@@ -36,70 +39,77 @@ export function Login() {
  };

  return (
    <motion.div
      layout
      className="min-h-screen bg-gradient-to-b from-slate-100 to-slate-200 dark:from-slate-900 dark:to-slate-800 antialiased"
    >
      <div className="flex flex-col gap-2 p-2 bg-primary">
        <div className="p-4 flex items-center gap-2 group-data-[collapsible=icon]:justify-center text-white">
          <Box className="h-6 w-6 shrink-0" />
          <h2 className="text-lg font-semibold group-data-[collapsible=icon]:hidden">
            Inventory Manager
          </h2>
    <motion.div className="flex min-h-svh flex-row items-center justify-center bg-muted p-6 md:p-10">
      <div className="fixed top-0 w-full backdrop-blur-sm bg-white/40 border-b shadow-sm z-10">
        <div className="mx-auto p-4 sm:p-6">
          <div className="flex items-center gap-2 font-medium text-3xl justify-center sm:justify-start">
            <div className="relative">
              <div className="absolute inset-0 "></div>
              <img
                src="/cherrybottom.png"
                alt="Cherry Bottom"
                className="h-12 w-12 object-contain -rotate-12 transform hover:rotate-0 transition-transform ease-in-out duration-300 relative z-10"
              />
            </div>
            <span className="font-bold font-text-primary">A Cherry On Bottom</span>
          </div>
          <p className="text-sm italic text-muted-foreground text-center sm:text-left ml-32 -mt-1">
            supporting the cherry on top
          </p>
        </div>
      </div>
      <motion.div
        initial={{ opacity: 0, scale: 0.95 }}
        animate={{ opacity: 1, scale: 1 }}
        transition={{ duration: 0.3, delay: 0.2 }}
        className="container relative flex min-h-[calc(100vh-4rem)] flex-col items-center justify-center"
      >
        <div className="mx-auto flex w-full flex-col justify-center space-y-6 sm:w-[350px]">
          <Card className="border-none shadow-xl">
            <CardHeader className="space-y-1">
              <div className="flex items-center justify-center mb-2">
                <Box className="h-10 w-10 text-primary" />
              </div>
              <CardTitle className="text-2xl text-center">
                Log in to continue
              </CardTitle>
            </CardHeader>
            <CardContent>
              <form onSubmit={handleLogin}>
                <div className="grid gap-4">
                  <div className="grid gap-2">
                    <Input
                      id="username"
                      placeholder="Username"
                      value={username}
                      onChange={(e) => setUsername(e.target.value)}
                      disabled={isLoading}
                      className="w-full"
                    />
                  </div>
                  <div className="grid gap-2">
                    <Input
                      id="password"
                      type="password"
                      placeholder="Password"
                      value={password}
                      onChange={(e) => setPassword(e.target.value)}
                      disabled={isLoading}
                      className="w-full"
                    />
                  </div>
                  <Button className="w-full" type="submit" disabled={isLoading}>
                    {isLoading && (
                      <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                    )}
                    Sign In
                  </Button>
                </div>
              </form>
            </CardContent>
          </Card>
        </div>
      </motion.div>
      <div className="w-full sm:w-[80%] max-w-sm mt-20">
        <LoginForm onSubmit={handleSubmit} isLoading={isLoading} />
      </div>

    </motion.div>
  );
}

interface LoginFormProps {
  className?: string;
  isLoading?: boolean;
  onSubmit: (e: React.FormEvent<HTMLFormElement>) => void;
}

function LoginForm({ className, isLoading, onSubmit, ...props }: LoginFormProps) {
  return (
    <motion.div className={cn("flex flex-col gap-6", className)} {...props}>
      <Card className="overflow-hidden rounded-lg shadow-lg">
        <CardHeader className="pb-0">
          <CardTitle className="text-2xl font-bold text-center">Log in to your account</CardTitle>
        </CardHeader>
        <CardContent className="grid p-0 h-full">
          <form className="p-6 md:p-8 flex flex-col gap-6" onSubmit={onSubmit}>
            <div className="grid gap-2">
              <Label htmlFor="username">Username</Label>
              <Input
                id="username"
                name="username"
                type="text"
                required
                disabled={isLoading}
              />
            </div>

            <div className="grid gap-2">
              <Label htmlFor="password">Password</Label>
              <Input
                id="password"
                name="password"
                type="password"
                required
                disabled={isLoading}
              />
            </div>

            <Button type="submit" className="w-full" disabled={isLoading}>
              {isLoading ? "Logging in..." : "Log In"}
            </Button>
          </form>

        </CardContent>
      </Card>
    </motion.div>
  );
}
@@ -52,30 +52,71 @@ const AVAILABLE_COLUMNS: ColumnDef[] = [
  { key: 'vendor', label: 'Supplier', group: 'Basic Info' },
  { key: 'vendor_reference', label: 'Supplier #', group: 'Basic Info' },
  { key: 'barcode', label: 'UPC', group: 'Basic Info' },
  { key: 'description', label: 'Description', group: 'Basic Info' },
  { key: 'created_at', label: 'Created', group: 'Basic Info' },
  { key: 'harmonized_tariff_code', label: 'HTS Code', group: 'Basic Info' },
  { key: 'notions_reference', label: 'Notions Ref', group: 'Basic Info' },
  { key: 'line', label: 'Line', group: 'Basic Info' },
  { key: 'subline', label: 'Subline', group: 'Basic Info' },
  { key: 'artist', label: 'Artist', group: 'Basic Info' },
  { key: 'country_of_origin', label: 'Origin', group: 'Basic Info' },
  { key: 'location', label: 'Location', group: 'Basic Info' },

  // Physical properties
  { key: 'weight', label: 'Weight', group: 'Physical', format: (v) => v?.toString() ?? '-' },
  { key: 'dimensions', label: 'Dimensions', group: 'Physical', format: (v) => v ? `${v.length}x${v.width}x${v.height}` : '-' },

  // Stock columns
  { key: 'stock_quantity', label: 'Shelf Count', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'stock_status', label: 'Stock Status', group: 'Stock' },
  { key: 'preorder_count', label: 'Preorders', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'notions_inv_count', label: 'Notions Inv', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'days_of_inventory', label: 'Days of Stock', group: 'Stock', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'weeks_of_inventory', label: 'Weeks of Stock', group: 'Stock', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'abc_class', label: 'ABC Class', group: 'Stock' },
  { key: 'replenishable', label: 'Replenishable', group: 'Stock' },
  { key: 'moq', label: 'MOQ', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'reorder_qty', label: 'Reorder Qty', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'reorder_point', label: 'Reorder Point', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'safety_stock', label: 'Safety Stock', group: 'Stock', format: (v) => v?.toString() ?? '-' },
  { key: 'overstocked_amt', label: 'Overstock Amt', group: 'Stock', format: (v) => v?.toString() ?? '-' },

  // Pricing columns
  { key: 'price', label: 'Price', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'regular_price', label: 'Default Price', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'cost_price', label: 'Cost', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'landing_cost_price', label: 'Landing Cost', group: 'Pricing', format: (v) => v?.toFixed(2) ?? '-' },

  // Sales columns
  { key: 'daily_sales_avg', label: 'Daily Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'weekly_sales_avg', label: 'Weekly Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'monthly_sales_avg', label: 'Monthly Sales', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'avg_quantity_per_order', label: 'Avg Qty/Order', group: 'Sales', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'number_of_orders', label: 'Order Count', group: 'Sales', format: (v) => v?.toString() ?? '-' },
  { key: 'first_sale_date', label: 'First Sale', group: 'Sales' },
  { key: 'last_sale_date', label: 'Last Sale', group: 'Sales' },
  { key: 'date_last_sold', label: 'Date Last Sold', group: 'Sales' },
  { key: 'total_sold', label: 'Total Sold', group: 'Sales', format: (v) => v?.toString() ?? '-' },
  { key: 'baskets', label: 'In Baskets', group: 'Sales', format: (v) => v?.toString() ?? '-' },
  { key: 'notifies', label: 'Notifies', group: 'Sales', format: (v) => v?.toString() ?? '-' },
  { key: 'rating', label: 'Rating', group: 'Sales', format: (v) => v ? v.toFixed(1) : '-' },
  { key: 'reviews', label: 'Reviews', group: 'Sales', format: (v) => v?.toString() ?? '-' },

  // Financial columns
  { key: 'gmroi', label: 'GMROI', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'turnover_rate', label: 'Turnover Rate', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'avg_margin_percent', label: 'Margin %', group: 'Financial', format: (v) => v ? `${v.toFixed(1)}%` : '-' },
  { key: 'inventory_value', label: 'Inventory Value', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'cost_of_goods_sold', label: 'COGS', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },
  { key: 'gross_profit', label: 'Gross Profit', group: 'Financial', format: (v) => v?.toFixed(2) ?? '-' },

  // Lead Time columns
  { key: 'current_lead_time', label: 'Current Lead Time', group: 'Lead Time', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'target_lead_time', label: 'Target Lead Time', group: 'Lead Time', format: (v) => v?.toFixed(1) ?? '-' },
  { key: 'lead_time_status', label: 'Lead Time Status', group: 'Lead Time' },
  { key: 'last_purchase_date', label: 'Last Purchase', group: 'Lead Time' },
  { key: 'first_received_date', label: 'First Received', group: 'Lead Time' },
  { key: 'last_received_date', label: 'Last Received', group: 'Lead Time' },
];
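ColumnDef itself is declared outside this hunk. A sketch consistent with the usage above (hypothetical; the real file likely narrows ColumnKey to a string-literal union):

interface ColumnDef {
  key: string;                     // field name on the product row
  label: string;                   // column header text
  group: string;                   // grouping in the column picker
  format?: (value: any) => string; // optional cell formatter, '-' for missing values
}
type ColumnKey = ColumnDef['key'];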
// Define default columns for each view
@@ -93,14 +134,17 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'daily_sales_avg',
    'weekly_sales_avg',
    'monthly_sales_avg',
    'inventory_value',
  ],
  critical: [
    'image',
    'title',
    'stock_quantity',
    'safety_stock',
    'daily_sales_avg',
    'weekly_sales_avg',
    'reorder_qty',
    'reorder_point',
    'vendor',
    'last_purchase_date',
    'current_lead_time',
@@ -109,11 +153,13 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'image',
    'title',
    'stock_quantity',
    'reorder_point',
    'daily_sales_avg',
    'weekly_sales_avg',
    'reorder_qty',
    'vendor',
    'last_purchase_date',
    'avg_lead_time_days',
  ],
  overstocked: [
    'image',
@@ -123,15 +169,19 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'weekly_sales_avg',
    'overstocked_amt',
    'days_of_inventory',
    'inventory_value',
    'turnover_rate',
  ],
  'at-risk': [
    'image',
    'title',
    'stock_quantity',
    'safety_stock',
    'daily_sales_avg',
    'weekly_sales_avg',
    'days_of_inventory',
    'last_sale_date',
    'current_lead_time',
  ],
  new: [
    'image',
@@ -141,6 +191,7 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'brand',
    'price',
    'regular_price',
    'first_received_date',
  ],
  healthy: [
    'image',
@@ -150,6 +201,8 @@ const VIEW_COLUMNS: Record<string, ColumnKey[]> = {
    'weekly_sales_avg',
    'monthly_sales_avg',
    'days_of_inventory',
    'gross_profit',
    'gmroi',
  ],
};

@@ -23,6 +23,30 @@ export interface Product {
  created_at: string;
  updated_at: string;

  // New fields
  description?: string;
  preorder_count?: number;
  notions_inv_count?: number;
  harmonized_tariff_code?: string;
  notions_reference?: string;
  line?: string;
  subline?: string;
  artist?: string;
  rating?: number;
  reviews?: number;
  weight?: number;
  dimensions?: {
    length: number;
    width: number;
    height: number;
  };
  country_of_origin?: string;
  location?: string;
  total_sold?: number;
  baskets?: number;
  notifies?: number;
  date_last_sold?: string;

  // Metrics
  daily_sales_avg?: string; // numeric(15,3)
  weekly_sales_avg?: string; // numeric(15,3)
@@ -43,6 +67,7 @@ export interface Product {
  gross_profit?: string; // numeric(15,3)
  gmroi?: string; // numeric(15,3)
  avg_lead_time_days?: string; // numeric(15,3)
  first_received_date?: string;
  last_received_date?: string;
  abc_class?: string;
  stock_status?: string;
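The numeric(15,3) metrics are typed as strings because node-postgres returns NUMERIC columns as text to avoid floating-point precision loss. A small parsing helper for display math (the name `num` is illustrative, not from the commit):

function num(value: string | undefined): number {
  const parsed = parseFloat(value ?? '');
  return Number.isNaN(parsed) ? 0 : parsed;
}

// e.g. margin ratio: num(product.gross_profit) / Math.max(num(product.inventory_value), 1)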
@@ -14,6 +14,9 @@ export default {
      }
    },
    extend: {
      fontFamily: {
        sans: ['Inter', 'sans-serif'],
      },
      colors: {
        border: 'hsl(var(--border))',
        input: 'hsl(var(--input))',
File diff suppressed because one or more lines are too long