Fix incorrect columns in import scripts
@@ -1,5 +1,23 @@
-- Configuration tables schema

-- Create function for updating timestamps if it doesn't exist
CREATE OR REPLACE FUNCTION update_updated_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

-- Create function for updating updated_at timestamps
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';

-- Stock threshold configurations
CREATE TABLE stock_thresholds (
id INTEGER NOT NULL,
@@ -10,13 +28,18 @@ CREATE TABLE stock_thresholds (
overstock_days INTEGER NOT NULL DEFAULT 90,
low_stock_threshold INTEGER NOT NULL DEFAULT 5,
min_reorder_quantity INTEGER NOT NULL DEFAULT 1,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE (category_id, vendor)
);

CREATE TRIGGER update_stock_thresholds_updated
BEFORE UPDATE ON stock_thresholds
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

CREATE INDEX idx_st_metrics ON stock_thresholds(category_id, vendor);

-- Lead time threshold configurations
@@ -27,13 +50,18 @@ CREATE TABLE lead_time_thresholds (
target_days INTEGER NOT NULL DEFAULT 14,
warning_days INTEGER NOT NULL DEFAULT 21,
critical_days INTEGER NOT NULL DEFAULT 30,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE (category_id, vendor)
);

CREATE TRIGGER update_lead_time_thresholds_updated
BEFORE UPDATE ON lead_time_thresholds
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Sales velocity window configurations
CREATE TABLE sales_velocity_config (
id INTEGER NOT NULL,
@@ -42,13 +70,18 @@ CREATE TABLE sales_velocity_config (
daily_window_days INTEGER NOT NULL DEFAULT 30,
weekly_window_days INTEGER NOT NULL DEFAULT 7,
monthly_window_days INTEGER NOT NULL DEFAULT 90,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE (category_id, vendor)
);

CREATE TRIGGER update_sales_velocity_config_updated
BEFORE UPDATE ON sales_velocity_config
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

CREATE INDEX idx_sv_metrics ON sales_velocity_config(category_id, vendor);

-- ABC Classification configurations
@@ -57,10 +90,15 @@ CREATE TABLE abc_classification_config (
a_threshold DECIMAL(5,2) NOT NULL DEFAULT 20.0,
b_threshold DECIMAL(5,2) NOT NULL DEFAULT 50.0,
classification_period_days INTEGER NOT NULL DEFAULT 90,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE TRIGGER update_abc_classification_config_updated
BEFORE UPDATE ON abc_classification_config
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Safety stock configurations
CREATE TABLE safety_stock_config (
id INTEGER NOT NULL,
@@ -68,13 +106,18 @@ CREATE TABLE safety_stock_config (
vendor VARCHAR(100), -- NULL means applies to all vendors
coverage_days INTEGER NOT NULL DEFAULT 14,
service_level DECIMAL(5,2) NOT NULL DEFAULT 95.0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE (category_id, vendor)
);

CREATE TRIGGER update_safety_stock_config_updated
BEFORE UPDATE ON safety_stock_config
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

CREATE INDEX idx_ss_metrics ON safety_stock_config(category_id, vendor);

-- Turnover rate configurations
@@ -84,23 +127,33 @@ CREATE TABLE turnover_config (
vendor VARCHAR(100), -- NULL means applies to all vendors
calculation_period_days INTEGER NOT NULL DEFAULT 30,
target_rate DECIMAL(10,2) NOT NULL DEFAULT 1.0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (id),
FOREIGN KEY (category_id) REFERENCES categories(cat_id) ON DELETE CASCADE,
UNIQUE (category_id, vendor)
);

CREATE TRIGGER update_turnover_config_updated
BEFORE UPDATE ON turnover_config
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Create table for sales seasonality factors
CREATE TABLE sales_seasonality (
month INTEGER NOT NULL,
seasonality_factor DECIMAL(5,3) DEFAULT 0,
last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (month),
CONSTRAINT month_range CHECK (month BETWEEN 1 AND 12),
CONSTRAINT seasonality_range CHECK (seasonality_factor BETWEEN -1.0 AND 1.0)
);

CREATE TRIGGER update_sales_seasonality_updated
BEFORE UPDATE ON sales_seasonality
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

-- Insert default global thresholds
INSERT INTO stock_thresholds (id, category_id, vendor, critical_days, reorder_days, overstock_days)
VALUES (1, NULL, NULL, 7, 14, 90)
@@ -178,8 +231,8 @@ ORDER BY
-- History and status tables
CREATE TABLE IF NOT EXISTS calculate_history (
id BIGSERIAL PRIMARY KEY,
start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
total_products INTEGER DEFAULT 0,
@@ -195,20 +248,20 @@ CREATE TABLE IF NOT EXISTS calculate_history (

CREATE TABLE IF NOT EXISTS calculate_status (
module_name module_name PRIMARY KEY,
last_calculation_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
last_calculation_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS sync_status (
table_name VARCHAR(50) PRIMARY KEY,
last_sync_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_sync_id BIGINT
);

CREATE TABLE IF NOT EXISTS import_history (
id BIGSERIAL PRIMARY KEY,
table_name VARCHAR(50) NOT NULL,
start_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
end_time TIMESTAMP WITH TIME ZONE NULL,
duration_seconds INTEGER,
duration_minutes DECIMAL(10,2) GENERATED ALWAYS AS (duration_seconds::decimal / 60.0) STORED,
records_added INTEGER DEFAULT 0,

@@ -1,14 +1,22 @@
-- Enable strict error reporting
SET session_replication_role = 'replica'; -- Disable foreign key checks temporarily

-- Create function for updating timestamps
CREATE OR REPLACE FUNCTION update_updated_column() RETURNS TRIGGER AS $func$
BEGIN
NEW.updated = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$func$ language plpgsql;

-- Create tables
CREATE TABLE products (
pid BIGINT NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT,
SKU VARCHAR(50) NOT NULL,
created_at TIMESTAMP,
first_received TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE,
first_received TIMESTAMP WITH TIME ZONE,
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
@@ -18,7 +26,7 @@ CREATE TABLE products (
landing_cost_price DECIMAL(10, 3),
barcode VARCHAR(50),
harmonized_tariff_code VARCHAR(20),
updated_at TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE,
visible BOOLEAN DEFAULT true,
managing_stock BOOLEAN DEFAULT true,
replenishable BOOLEAN DEFAULT true,
@@ -50,10 +58,16 @@ CREATE TABLE products (
baskets INTEGER DEFAULT 0,
notifies INTEGER DEFAULT 0,
date_last_sold DATE,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (pid)
);

-- Create trigger for products
CREATE TRIGGER update_products_updated
BEFORE UPDATE ON products
FOR EACH ROW
EXECUTE FUNCTION update_updated_column();

-- Create indexes for products table
CREATE INDEX idx_products_sku ON products(SKU);
CREATE INDEX idx_products_vendor ON products(vendor);
@@ -70,12 +84,18 @@ CREATE TABLE categories (
type SMALLINT NOT NULL,
parent_id BIGINT,
description TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
status VARCHAR(20) DEFAULT 'active',
FOREIGN KEY (parent_id) REFERENCES categories(cat_id)
);

-- Create trigger for categories
CREATE TRIGGER update_categories_updated_at
BEFORE UPDATE ON categories
FOR EACH ROW
EXECUTE FUNCTION update_updated_column();

COMMENT ON COLUMN categories.type IS '10=section, 11=category, 12=subcategory, 13=subsubcategory, 1=company, 2=line, 3=subline, 40=artist';

CREATE INDEX idx_categories_parent ON categories(parent_id);
@@ -113,10 +133,16 @@ CREATE TABLE orders (
customer_name VARCHAR(100),
status VARCHAR(20) DEFAULT 'pending',
canceled BOOLEAN DEFAULT false,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE (order_number, pid)
);

-- Create trigger for orders
CREATE TRIGGER update_orders_updated
BEFORE UPDATE ON orders
FOR EACH ROW
EXECUTE FUNCTION update_updated_column();

CREATE INDEX idx_orders_number ON orders(order_number);
CREATE INDEX idx_orders_pid ON orders(pid);
CREATE INDEX idx_orders_customer ON orders(customer);
@@ -147,11 +173,17 @@ CREATE TABLE purchase_orders (
last_received_date DATE,
received_by VARCHAR(100),
receiving_history JSONB,
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (pid) REFERENCES products(pid),
UNIQUE (po_id, pid)
);

-- Create trigger for purchase_orders
CREATE TRIGGER update_purchase_orders_updated
BEFORE UPDATE ON purchase_orders
FOR EACH ROW
EXECUTE FUNCTION update_updated_column();

COMMENT ON COLUMN purchase_orders.name IS 'Product name from products.description';
COMMENT ON COLUMN purchase_orders.po_cost_price IS 'Original cost from PO, before receiving adjustments';
COMMENT ON COLUMN purchase_orders.status IS '0=canceled,1=created,10=electronically_ready_send,11=ordered,12=preordered,13=electronically_sent,15=receiving_started,50=done';

@@ -10,9 +10,9 @@ const importPurchaseOrders = require('./import/purchase-orders');
dotenv.config({ path: path.join(__dirname, "../.env") });

// Constants to control which imports run
const IMPORT_CATEGORIES = false;
const IMPORT_PRODUCTS = false;
const IMPORT_ORDERS = false;
const IMPORT_CATEGORIES = true;
const IMPORT_PRODUCTS = true;
const IMPORT_ORDERS = true;
const IMPORT_PURCHASE_ORDERS = true;

// Add flag for incremental updates
@@ -284,16 +284,23 @@ async function main() {
throw error;
} finally {
if (connections) {
await closeConnections(connections);
await closeConnections(connections).catch(err => {
console.error("Error closing connections:", err);
});
}
}
}

// Run the import only if this is the main module
if (require.main === module) {
main().catch((error) => {
main().then((results) => {
console.log('Import completed successfully:', results);
// Force exit after a small delay to ensure all logs are written
setTimeout(() => process.exit(0), 500);
}).catch((error) => {
console.error("Unhandled error in main process:", error);
process.exit(1);
// Force exit with error code after a small delay
setTimeout(() => process.exit(1), 500);
});
}

@@ -332,11 +332,12 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
const processCostsBatch = async (batchIds) => {
const [costs] = await prodConnection.query(`
SELECT
oi.order_id,
oi.prod_pid as pid,
oi.prod_price as costeach
FROM order_items oi
WHERE oi.order_id IN (?)
oc.orderid as order_id,
oc.pid,
oc.costeach
FROM order_costs oc
WHERE oc.orderid IN (?)
AND oc.pending = 0
`, [batchIds]);

if (costs.length === 0) return;
@@ -414,7 +415,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
oi.order_id,
oi.pid,
SUM(COALESCE(od.discount, 0)) as promo_discount,
COALESCE(ot.tax, 0) as total_tax
COALESCE(ot.tax, 0) as total_tax,
COALESCE(oi.price * 0.5, 0) as costeach
FROM debug_order_items oi
LEFT JOIN debug_order_discounts od ON oi.order_id = od.order_id AND oi.pid = od.pid
LEFT JOIN debug_order_taxes ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
@@ -441,7 +443,7 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
om.customer_name,
om.status,
om.canceled,
COALESCE(oc.costeach, oi.price)::DECIMAL(10,3) as costeach
COALESCE(ot.costeach, oi.price * 0.5)::DECIMAL(10,3) as costeach
FROM (
SELECT DISTINCT ON (order_id, pid)
order_id, pid, SKU, price, quantity, base_discount
@@ -451,7 +453,6 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
) oi
JOIN debug_order_meta om ON oi.order_id = om.order_id
LEFT JOIN order_totals ot ON oi.order_id = ot.order_id AND oi.pid = ot.pid
LEFT JOIN debug_order_costs oc ON oi.order_id = oc.order_id AND oi.pid = oc.pid
ORDER BY oi.order_id, oi.pid
`, [subBatchIds]);

@@ -477,8 +478,8 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
const subBatch = validOrders.slice(k, k + FINAL_BATCH_SIZE);

const placeholders = subBatch.map((_, idx) => {
const base = idx * 15;
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14}, $${base + 15})`;
const base = idx * 14; // 14 columns (removed updated)
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8}, $${base + 9}, $${base + 10}, $${base + 11}, $${base + 12}, $${base + 13}, $${base + 14})`;
}).join(',');

const batchValues = subBatch.flatMap(o => [
@@ -495,20 +496,19 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
o.customer,
o.customer_name,
o.status,
o.canceled,
o.costeach
o.canceled
]);

const [result] = await localConnection.query(`
WITH inserted_orders AS (
INSERT INTO orders (
order_number, pid, SKU, date, price, quantity, discount,
order_number, pid, sku, date, price, quantity, discount,
tax, tax_included, shipping, customer, customer_name,
status, canceled, costeach
status, canceled
)
VALUES ${placeholders}
ON CONFLICT (order_number, pid) DO UPDATE SET
SKU = EXCLUDED.SKU,
sku = EXCLUDED.sku,
date = EXCLUDED.date,
price = EXCLUDED.price,
quantity = EXCLUDED.quantity,
@@ -519,13 +519,12 @@ async function importOrders(prodConnection, localConnection, incrementalUpdate =
customer = EXCLUDED.customer,
customer_name = EXCLUDED.customer_name,
status = EXCLUDED.status,
canceled = EXCLUDED.canceled,
costeach = EXCLUDED.costeach
RETURNING xmax, xmin
canceled = EXCLUDED.canceled
RETURNING xmax = 0 as inserted
)
SELECT
COUNT(*) FILTER (WHERE xmax = 0) as inserted,
COUNT(*) FILTER (WHERE xmax <> 0) as updated
COUNT(*) FILTER (WHERE inserted) as inserted,
COUNT(*) FILTER (WHERE NOT inserted) as updated
FROM inserted_orders
`, batchValues);

@@ -55,7 +55,7 @@ async function setupTemporaryTables(connection) {
pid BIGINT NOT NULL,
title VARCHAR(255),
description TEXT,
SKU VARCHAR(50),
sku VARCHAR(50),
stock_quantity INTEGER DEFAULT 0,
preorder_count INTEGER DEFAULT 0,
notions_inv_count INTEGER DEFAULT 0,
@@ -130,7 +130,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
p.pid,
p.description AS title,
p.notes AS description,
p.itemnumber AS SKU,
p.itemnumber AS sku,
p.date_created,
p.datein AS first_received,
p.location,
@@ -283,7 +283,7 @@ async function importMissingProducts(prodConnection, localConnection, missingPid
const [result] = await localConnection.query(`
WITH inserted_products AS (
INSERT INTO products (
pid, title, description, SKU, stock_quantity, preorder_count, notions_inv_count,
pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
brand, line, subline, artist, categories, created_at, first_received,
landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
@@ -325,7 +325,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen
p.pid,
p.description AS title,
p.notes AS description,
p.itemnumber AS SKU,
p.itemnumber AS sku,
p.date_created,
p.datein AS first_received,
p.location,
@@ -481,7 +481,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen

await localConnection.query(`
INSERT INTO temp_products (
pid, title, description, SKU, stock_quantity, preorder_count, notions_inv_count,
pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
brand, line, subline, artist, categories, created_at, first_received,
landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
@@ -492,7 +492,7 @@ async function materializeCalculations(prodConnection, localConnection, incremen
ON CONFLICT (pid) DO UPDATE SET
title = EXCLUDED.title,
description = EXCLUDED.description,
SKU = EXCLUDED.SKU,
sku = EXCLUDED.sku,
stock_quantity = EXCLUDED.stock_quantity,
preorder_count = EXCLUDED.preorder_count,
notions_inv_count = EXCLUDED.notions_inv_count,
@@ -535,6 +535,8 @@ async function materializeCalculations(prodConnection, localConnection, incremen
image_full = EXCLUDED.image_full,
options = EXCLUDED.options,
tags = EXCLUDED.tags
RETURNING
xmax = 0 as inserted
`, values);
}, `Error inserting batch ${i} to ${i + batch.length}`);

@@ -588,7 +590,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
t.pid,
t.title,
t.description,
t.SKU,
t.sku,
t.stock_quantity,
t.preorder_count,
t.notions_inv_count,
@@ -653,7 +655,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
row.pid,
row.title,
row.description,
row.SKU || '',
row.sku || '',
row.stock_quantity > 5000 ? 0 : Math.max(0, row.stock_quantity),
row.preorder_count,
row.notions_inv_count,
@@ -703,7 +705,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
const [result] = await localConnection.query(`
WITH upserted AS (
INSERT INTO products (
pid, title, description, SKU, stock_quantity, preorder_count, notions_inv_count,
pid, title, description, sku, stock_quantity, preorder_count, notions_inv_count,
price, regular_price, cost_price, vendor, vendor_reference, notions_reference,
brand, line, subline, artist, categories, created_at, first_received,
landing_cost_price, barcode, harmonized_tariff_code, updated_at, visible,
@@ -715,7 +717,7 @@ async function importProducts(prodConnection, localConnection, incrementalUpdate
ON CONFLICT (pid) DO UPDATE SET
title = EXCLUDED.title,
description = EXCLUDED.description,
SKU = EXCLUDED.SKU,
sku = EXCLUDED.sku,
stock_quantity = EXCLUDED.stock_quantity,
preorder_count = EXCLUDED.preorder_count,
notions_inv_count = EXCLUDED.notions_inv_count,

@@ -25,8 +25,8 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
sku VARCHAR(50),
name VARCHAR(255),
vendor VARCHAR(255),
date DATE,
expected_date DATE,
date TIMESTAMP WITH TIME ZONE,
expected_date TIMESTAMP WITH TIME ZONE,
status INTEGER,
notes TEXT,
ordered INTEGER,
@@ -40,7 +40,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
receiving_id INTEGER NOT NULL,
qty_each INTEGER,
cost_each DECIMAL(10,3),
received_date TIMESTAMP,
received_date TIMESTAMP WITH TIME ZONE,
received_by INTEGER,
received_by_name VARCHAR(255),
is_alt_po INTEGER,
@@ -130,12 +130,12 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
) as vendor,
CASE
WHEN p.po_id IS NOT NULL THEN
DATE(COALESCE(
COALESCE(
NULLIF(p.date_ordered, '0000-00-00 00:00:00'),
p.date_created
))
)
WHEN r.receiving_id IS NOT NULL THEN
DATE(r.date_created)
r.date_created
END as date,
CASE
WHEN p.date_estin = '0000-00-00' THEN NULL
@@ -213,7 +213,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
pop.pid,
pr.itemnumber as sku,
pr.description as name,
pop.cost_each,
pop.cost_each as cost_price,
pop.qty_each as ordered
FROM po_products pop
USE INDEX (PRIMARY)
@@ -320,7 +320,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
po.status,
po.notes || po.long_note,
product.ordered,
product.cost_each
product.cost_price
);

const offset = idx * 11; // Updated to match 11 fields
@@ -424,6 +424,7 @@ async function importPurchaseOrders(prodConnection, localConnection, incremental
SELECT cost_each
FROM temp_po_receivings r2
WHERE r2.pid = po.pid
AND r2.po_id = po.po_id
AND r2.is_alt_po = 0
AND r2.cost_each > 0
ORDER BY r2.received_date

@@ -54,14 +54,44 @@ function splitSQLStatements(sql) {
let currentStatement = '';
let inString = false;
let stringChar = '';
let inDollarQuote = false;
let dollarQuoteTag = '';

// Process character by character
for (let i = 0; i < sql.length; i++) {
const char = sql[i];
const nextChar = sql[i + 1] || '';

// Handle string literals
if ((char === "'" || char === '"') && sql[i - 1] !== '\\') {
// Handle dollar quotes
if (char === '$' && !inString) {
// Look ahead to find the dollar quote tag
let tag = '$';
let j = i + 1;
while (j < sql.length && sql[j] !== '$') {
tag += sql[j];
j++;
}
tag += '$';

if (j < sql.length) { // Found closing $
if (!inDollarQuote) {
inDollarQuote = true;
dollarQuoteTag = tag;
currentStatement += tag;
i = j;
continue;
} else if (sql.substring(i, j + 1) === dollarQuoteTag) {
inDollarQuote = false;
dollarQuoteTag = '';
currentStatement += tag;
i = j;
continue;
}
}
}

// Handle string literals (only if not in dollar quote)
if (!inDollarQuote && (char === "'" || char === '"') && sql[i - 1] !== '\\') {
if (!inString) {
inString = true;
stringChar = char;
@@ -70,23 +100,25 @@ function splitSQLStatements(sql) {
}
}

// Handle comments
if (!inString && char === '-' && nextChar === '-') {
// Handle comments (only if not in string or dollar quote)
if (!inString && !inDollarQuote) {
if (char === '-' && nextChar === '-') {
// Skip to end of line
while (i < sql.length && sql[i] !== '\n') i++;
continue;
}

if (!inString && char === '/' && nextChar === '*') {
if (char === '/' && nextChar === '*') {
// Skip until closing */
i += 2;
while (i < sql.length && (sql[i] !== '*' || sql[i + 1] !== '/')) i++;
i++; // Skip the closing /
continue;
}
}

// Handle statement boundaries
if (!inString && char === ';') {
// Handle statement boundaries (only if not in string or dollar quote)
if (!inString && !inDollarQuote && char === ';') {
if (currentStatement.trim()) {
statements.push(currentStatement.trim());
}