From 5636a78589f27025d1cc2a30363758549d4387aa Mon Sep 17 00:00:00 2001
From: Matt
Date: Thu, 9 Jan 2025 01:17:26 -0500
Subject: [PATCH] Add script to import data

---
 inventory-server/db/procedures.sql     | 127 ++++++++++++++
 inventory-server/db/schema.sql         |  75 ++++++++
 inventory-server/package-lock.json     |   6 +-
 inventory-server/package.json          |   6 +-
 inventory-server/scripts/import-csv.js | 226 +++++++++++++++++++++++++
 inventory-server/src/db/schema.sql     |  36 ----
 6 files changed, 434 insertions(+), 42 deletions(-)
 create mode 100644 inventory-server/db/procedures.sql
 create mode 100644 inventory-server/db/schema.sql
 create mode 100644 inventory-server/scripts/import-csv.js
 delete mode 100755 inventory-server/src/db/schema.sql

diff --git a/inventory-server/db/procedures.sql b/inventory-server/db/procedures.sql
new file mode 100644
index 0000000..75e95af
--- /dev/null
+++ b/inventory-server/db/procedures.sql
@@ -0,0 +1,127 @@
+CREATE PROCEDURE import_product(
+    IN p_product_id BIGINT,
+    IN p_title VARCHAR(255),
+    IN p_SKU VARCHAR(50),
+    IN p_created_at TIMESTAMP,
+    IN p_stock_quantity INT,
+    IN p_price DECIMAL(10, 3),
+    IN p_regular_price DECIMAL(10, 3),
+    IN p_cost_price DECIMAL(10, 3),
+    IN p_landing_cost_price DECIMAL(10, 3),
+    IN p_barcode VARCHAR(50),
+    IN p_updated_at TIMESTAMP,
+    IN p_visible BOOLEAN,
+    IN p_managing_stock BOOLEAN,
+    IN p_replenishable BOOLEAN,
+    IN p_vendor VARCHAR(100),
+    IN p_vendor_reference VARCHAR(100),
+    IN p_permalink VARCHAR(255),
+    IN p_categories TEXT,
+    IN p_image VARCHAR(255),
+    IN p_brand VARCHAR(100),
+    IN p_options TEXT,
+    IN p_tags TEXT,
+    IN p_moq INT,
+    IN p_uom INT
+)
+BEGIN
+    INSERT INTO products
+    VALUES (
+        p_product_id, p_title, p_SKU, p_created_at, p_stock_quantity,
+        p_price, p_regular_price, p_cost_price, p_landing_cost_price,
+        p_barcode, p_updated_at, p_visible, p_managing_stock,
+        p_replenishable, p_vendor, p_vendor_reference, p_permalink,
+        p_categories, p_image, p_brand, p_options, p_tags, p_moq, p_uom
+    )
+    ON DUPLICATE KEY UPDATE
+        title = p_title,
+        stock_quantity = p_stock_quantity,
+        price = p_price,
+        regular_price = p_regular_price,
+        cost_price = p_cost_price,
+        landing_cost_price = p_landing_cost_price,
+        barcode = p_barcode,
+        updated_at = p_updated_at,
+        visible = p_visible,
+        managing_stock = p_managing_stock,
+        replenishable = p_replenishable,
+        vendor = p_vendor,
+        vendor_reference = p_vendor_reference,
+        permalink = p_permalink,
+        categories = p_categories,
+        image = p_image,
+        brand = p_brand,
+        options = p_options,
+        tags = p_tags,
+        moq = p_moq,
+        uom = p_uom;
+END;
+
+CREATE PROCEDURE import_order(
+    IN p_order_number VARCHAR(50),
+    IN p_product_id BIGINT,
+    IN p_SKU VARCHAR(50),
+    IN p_date DATE,
+    IN p_price DECIMAL(10, 3),
+    IN p_quantity INT,
+    IN p_discount DECIMAL(10, 3),
+    IN p_tax DECIMAL(10, 3),
+    IN p_tax_included BOOLEAN,
+    IN p_shipping DECIMAL(10, 3),
+    IN p_customer VARCHAR(50),
+    IN p_canceled BOOLEAN
+)
+BEGIN
+    INSERT INTO orders (
+        order_number, product_id, SKU, date, price, quantity,
+        discount, tax, tax_included, shipping, customer, canceled
+    )
+    VALUES (
+        p_order_number, p_product_id, p_SKU, p_date, p_price,
+        p_quantity, p_discount, p_tax, p_tax_included, p_shipping,
+        p_customer, p_canceled
+    )
+    ON DUPLICATE KEY UPDATE
+        price = p_price,
+        quantity = p_quantity,
+        discount = p_discount,
+        tax = p_tax,
+        tax_included = p_tax_included,
+        shipping = p_shipping,
+        canceled = p_canceled;
+END;
+
+CREATE PROCEDURE import_purchase_order(
+    IN p_po_id VARCHAR(50),
+    IN p_vendor VARCHAR(100),
+    IN p_date DATE,
+    IN p_expected_date DATE,
+    IN p_product_id BIGINT,
+    IN p_sku VARCHAR(50),
+    IN p_cost_price DECIMAL(10, 3),
+    IN p_status VARCHAR(20),
+    IN p_notes TEXT,
+    IN p_ordered INT,
+    IN p_received INT,
+    IN p_received_date DATE
+)
+BEGIN
+    INSERT INTO purchase_orders (
+        po_id, vendor, date, expected_date, product_id, sku,
+        cost_price, status, notes, ordered, received, received_date
+    )
+    VALUES (
+        p_po_id, p_vendor, p_date, p_expected_date, p_product_id,
+        p_sku, p_cost_price, p_status, p_notes, p_ordered,
+        p_received, p_received_date
+    )
+    ON DUPLICATE KEY UPDATE
+        vendor = p_vendor,
+        expected_date = p_expected_date,
+        cost_price = p_cost_price,
+        status = p_status,
+        notes = p_notes,
+        ordered = p_ordered,
+        received = p_received,
+        received_date = p_received_date;
+END;
\ No newline at end of file
diff --git a/inventory-server/db/schema.sql b/inventory-server/db/schema.sql
new file mode 100644
index 0000000..6fb44ad
--- /dev/null
+++ b/inventory-server/db/schema.sql
@@ -0,0 +1,75 @@
+-- Create tables if they don't exist
+CREATE TABLE IF NOT EXISTS products (
+    product_id BIGINT NOT NULL,
+    title VARCHAR(255) NOT NULL,
+    SKU VARCHAR(50) NOT NULL,
+    created_at TIMESTAMP NULL,
+    stock_quantity INT DEFAULT 0,
+    price DECIMAL(10, 3) NOT NULL,
+    regular_price DECIMAL(10, 3) NOT NULL,
+    cost_price DECIMAL(10, 3),
+    landing_cost_price DECIMAL(10, 3),
+    barcode VARCHAR(50),
+    updated_at TIMESTAMP,
+    visible BOOLEAN DEFAULT true,
+    managing_stock BOOLEAN DEFAULT true,
+    replenishable BOOLEAN DEFAULT true,
+    vendor VARCHAR(100),
+    vendor_reference VARCHAR(100),
+    permalink VARCHAR(255),
+    categories TEXT,
+    image VARCHAR(255),
+    brand VARCHAR(100),
+    options TEXT,
+    tags TEXT,
+    moq INT DEFAULT 1,
+    uom INT DEFAULT 1,
+    PRIMARY KEY (product_id),
+    UNIQUE KEY unique_sku (SKU),
+    INDEX idx_vendor (vendor),
+    INDEX idx_brand (brand)
+);
+
+CREATE TABLE IF NOT EXISTS orders (
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    order_number VARCHAR(50) NOT NULL,
+    product_id BIGINT NOT NULL,
+    SKU VARCHAR(50) NOT NULL,
+    date DATE NOT NULL,
+    price DECIMAL(10, 3) NOT NULL,
+    quantity INT NOT NULL,
+    discount DECIMAL(10, 3) DEFAULT 0,
+    tax DECIMAL(10, 3) DEFAULT 0,
+    tax_included BOOLEAN DEFAULT false,
+    shipping DECIMAL(10, 3) DEFAULT 0,
+    customer VARCHAR(50) NOT NULL,
+    canceled BOOLEAN DEFAULT false,
+    FOREIGN KEY (product_id) REFERENCES products(product_id),
+    FOREIGN KEY (SKU) REFERENCES products(SKU),
+    INDEX idx_order_number (order_number),
+    INDEX idx_customer (customer),
+    INDEX idx_date (date),
+    UNIQUE KEY unique_order_product (order_number, product_id)
+);
+
+CREATE TABLE IF NOT EXISTS purchase_orders (
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    po_id VARCHAR(50) NOT NULL,
+    vendor VARCHAR(100) NOT NULL,
+    date DATE NOT NULL,
+    expected_date DATE,
+    product_id BIGINT NOT NULL,
+    sku VARCHAR(50) NOT NULL,
+    cost_price DECIMAL(10, 3) NOT NULL,
+    status VARCHAR(20) DEFAULT 'pending',
+    notes TEXT,
+    ordered INT NOT NULL,
+    received INT DEFAULT 0,
+    received_date DATE,
+    FOREIGN KEY (product_id) REFERENCES products(product_id),
+    FOREIGN KEY (sku) REFERENCES products(SKU),
+    INDEX idx_po_id (po_id),
+    INDEX idx_vendor (vendor),
+    INDEX idx_status (status),
+    UNIQUE KEY unique_po_product (po_id, product_id)
+);
\ No newline at end of file
diff --git a/inventory-server/package-lock.json b/inventory-server/package-lock.json
index 30b2efc..7c959f4 100755
--- a/inventory-server/package-lock.json
+++ b/inventory-server/package-lock.json
@@ -10,11 +10,11 @@
       "license": "ISC",
       "dependencies": {
         "cors": "^2.8.5",
-        "csv-parse": "^5.5.3",
-        "dotenv": "^16.3.1",
+        "csv-parse": "^5.6.0",
+        "dotenv": "^16.4.7",
         "express": "^4.18.2",
         "multer": "^1.4.5-lts.1",
-        "mysql2": "^3.7.0",
+        "mysql2": "^3.12.0",
         "pm2": "^5.3.0",
         "uuid": "^9.0.1"
       },
diff --git a/inventory-server/package.json b/inventory-server/package.json
index 554a5c5..16071ce 100755
--- a/inventory-server/package.json
+++ b/inventory-server/package.json
@@ -19,11 +19,11 @@
   "license": "ISC",
   "dependencies": {
     "cors": "^2.8.5",
-    "csv-parse": "^5.5.3",
-    "dotenv": "^16.3.1",
+    "csv-parse": "^5.6.0",
+    "dotenv": "^16.4.7",
     "express": "^4.18.2",
     "multer": "^1.4.5-lts.1",
-    "mysql2": "^3.7.0",
+    "mysql2": "^3.12.0",
     "pm2": "^5.3.0",
     "uuid": "^9.0.1"
   },
diff --git a/inventory-server/scripts/import-csv.js b/inventory-server/scripts/import-csv.js
new file mode 100644
index 0000000..cec02eb
--- /dev/null
+++ b/inventory-server/scripts/import-csv.js
@@ -0,0 +1,226 @@
+const fs = require('fs');
+const path = require('path');
+const csv = require('csv-parse');
+const mysql = require('mysql2/promise');
+const dotenv = require('dotenv');
+
+// For testing purposes, limit the number of rows to import
+const TEST_ROW_LIMIT = 5000;
+
+dotenv.config({ path: path.join(__dirname, '../.env') });
+
+const dbConfig = {
+  host: process.env.DB_HOST,
+  user: process.env.DB_USER,
+  password: process.env.DB_PASSWORD,
+  database: process.env.DB_NAME,
+  multipleStatements: true
+};
+
+async function importProducts(connection, filePath) {
+  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
+
+  function convertDate(dateStr) {
+    if (!dateStr) return null;
+    const [day, month, year] = dateStr.split('-');
+    return `${year}-${month}-${day}`;
+  }
+
+  let updated = 0;
+  let added = 0;
+  let rowCount = 0;
+
+  for await (const record of parser) {
+    // if (rowCount >= TEST_ROW_LIMIT) {
+    //   console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
+    //   break;
+    // }
+    rowCount++;
+
+    // Check if product exists
+    const [existing] = await connection.query('SELECT product_id FROM products WHERE product_id = ?', [record.product_id]);
+
+    try {
+      await connection.query('INSERT INTO products VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE title = VALUES(title), stock_quantity = VALUES(stock_quantity), price = VALUES(price), regular_price = VALUES(regular_price), cost_price = VALUES(cost_price), landing_cost_price = VALUES(landing_cost_price), barcode = VALUES(barcode), updated_at = VALUES(updated_at), visible = VALUES(visible), managing_stock = VALUES(managing_stock), replenishable = VALUES(replenishable), vendor = VALUES(vendor), vendor_reference = VALUES(vendor_reference), permalink = VALUES(permalink), categories = VALUES(categories), image = VALUES(image), brand = VALUES(brand), options = VALUES(options), tags = VALUES(tags), moq = VALUES(moq), uom = VALUES(uom)', [
+        record.product_id,
+        record.title,
+        record.SKU,
+        convertDate(record.created_at),
+        parseInt(record.stock_quantity) || 0,
+        parseFloat(record.price) || 0,
+        parseFloat(record.regular_price) || 0,
+        parseFloat(record.cost_price) || null,
+        parseFloat(record.landing_cost_price) || null,
+        record.barcode,
+        convertDate(record.updated_at),
+        record.visible === '1',
+        record.managing_stock === '1',
+        record.replenishable === '1',
+        record.vendor,
+        record.vendor_reference,
+        record.permalink,
+        record.categories,
+        record.image,
+        record.brand,
+        record.options,
+        record.tags,
+        parseInt(record.moq) || 1,
+        parseInt(record.uom) || 1
+      ]);
+      existing.length ? updated++ : added++;
+    } catch (error) {
+      console.error(`Error importing product ${record.product_id}:`, error.message);
+    }
+  }
+  console.log(`Products import completed: ${added} added, ${updated} updated (processed ${rowCount} rows)`);
+}
+
+async function importOrders(connection, filePath) {
+  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
+
+  function convertDate(dateStr) {
+    if (!dateStr) return null;
+    const [day, month, year] = dateStr.split('-');
+    return `${year}-${month}-${day}`;
+  }
+
+  // First, get all valid product IDs
+  const [rows] = await connection.query('SELECT product_id FROM products');
+  const validProductIds = new Set(rows.map(row => row.product_id.toString()));
+
+  let skipped = 0;
+  let updated = 0;
+  let added = 0;
+  let rowCount = 0;
+
+  for await (const record of parser) {
+    if (rowCount >= TEST_ROW_LIMIT) {
+      console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
+      break;
+    }
+    rowCount++;
+
+    if (!validProductIds.has(record.product_id)) {
+      skipped++;
+      continue;
+    }
+
+    try {
+      // Check if order exists
+      const [existing] = await connection.query(
+        'SELECT id FROM orders WHERE order_number = ? AND product_id = ?',
+        [record.order_number, record.product_id]
+      );
+
+      await connection.query('INSERT INTO orders (order_number, product_id, SKU, date, price, quantity, discount, tax, tax_included, shipping, customer, canceled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE price = VALUES(price), quantity = VALUES(quantity), discount = VALUES(discount), tax = VALUES(tax), tax_included = VALUES(tax_included), shipping = VALUES(shipping), canceled = VALUES(canceled)', [
+        record.order_number,
+        record.product_id,
+        record.SKU,
+        convertDate(record.date),
+        parseFloat(record.price) || 0,
+        parseInt(record.quantity) || 0,
+        parseFloat(record.discount) || 0,
+        parseFloat(record.tax) || 0,
+        record.tax_included === '1',
+        parseFloat(record.shipping) || 0,
+        record.customer,
+        record.canceled === '1'
+      ]);
+      existing.length ? updated++ : added++;
+    } catch (error) {
+      console.error(`Error importing order ${record.order_number}, product ${record.product_id}:`, error.message);
+      skipped++;
+    }
+  }
+  console.log(`Orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
+}
+
+async function importPurchaseOrders(connection, filePath) {
+  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
+
+  function convertDate(dateStr) {
+    if (!dateStr) return null;
+    const [day, month, year] = dateStr.split('-');
+    return `${year}-${month}-${day}`;
+  }
+
+  // First, get all valid product IDs
+  const [rows] = await connection.query('SELECT product_id FROM products');
+  const validProductIds = new Set(rows.map(row => row.product_id.toString()));
+
+  let skipped = 0;
+  let updated = 0;
+  let added = 0;
+  let rowCount = 0;
+
+  for await (const record of parser) {
+    if (rowCount >= TEST_ROW_LIMIT) {
+      console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
+      break;
+    }
+    rowCount++;
+
+    if (!validProductIds.has(record.product_id)) {
+      skipped++;
+      continue;
+    }
+
+    try {
+      // Check if PO exists
+      const [existing] = await connection.query(
+        'SELECT id FROM purchase_orders WHERE po_id = ? AND product_id = ?',
+        [record.po_id, record.product_id]
+      );
+
+      await connection.query('INSERT INTO purchase_orders (po_id, vendor, date, expected_date, product_id, sku, cost_price, status, notes, ordered, received, received_date) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE vendor = VALUES(vendor), expected_date = VALUES(expected_date), cost_price = VALUES(cost_price), status = VALUES(status), notes = VALUES(notes), ordered = VALUES(ordered), received = VALUES(received), received_date = VALUES(received_date)', [
+        record.po_id,
+        record.vendor,
+        convertDate(record.date),
+        convertDate(record.expected_date),
+        record.product_id,
+        record.sku,
+        parseFloat(record.cost_price) || 0,
+        record.status || 'pending',
+        record.notes,
+        parseInt(record.ordered) || 0,
+        parseInt(record.received) || 0,
+        convertDate(record.received_date)
+      ]);
+      existing.length ? updated++ : added++;
+    } catch (error) {
+      console.error(`Error importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
+      skipped++;
+    }
+  }
+  console.log(`Purchase orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
+}
+
+async function main() {
+  const connection = await mysql.createConnection(dbConfig);
+
+  try {
+    // Check if tables exist, if not create them
+    console.log('Checking database schema...');
+    const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
+    await connection.query(schemaSQL);
+
+    // Import products first since they're referenced by other tables
+    console.log('Importing products...');
+    await importProducts(connection, path.join(__dirname, '../csv/39f2x83-products.csv'));
+
+    console.log('Importing orders...');
+    await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));
+
+    console.log('Importing purchase orders...');
+    await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
+
+    console.log('All imports completed successfully');
+  } catch (error) {
+    console.error('Error during import:', error);
+    process.exit(1);
+  } finally {
+    await connection.end();
+  }
+}
+
+main();
\ No newline at end of file
diff --git a/inventory-server/src/db/schema.sql b/inventory-server/src/db/schema.sql
deleted file mode 100755
index 3d5a706..0000000
--- a/inventory-server/src/db/schema.sql
+++ /dev/null
@@ -1,36 +0,0 @@
-CREATE DATABASE IF NOT EXISTS inventory_db;
-USE inventory_db;
-
-CREATE TABLE IF NOT EXISTS products (
-    id VARCHAR(36) PRIMARY KEY,
-    sku VARCHAR(50) UNIQUE NOT NULL,
-    name VARCHAR(255) NOT NULL,
-    description TEXT,
-    category VARCHAR(100),
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
-);
-
-CREATE TABLE IF NOT EXISTS inventory_levels (
-    id VARCHAR(36) PRIMARY KEY,
-    product_id VARCHAR(36) NOT NULL,
-    quantity INT NOT NULL DEFAULT 0,
-    reorder_point INT,
-    reorder_quantity INT,
-    last_reorder_date TIMESTAMP,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-    FOREIGN KEY (product_id) REFERENCES products(id)
-);
-
-CREATE TABLE IF NOT EXISTS inventory_transactions (
-    id VARCHAR(36) PRIMARY KEY,
-    product_id VARCHAR(36) NOT NULL,
-    transaction_type ENUM('purchase', 'sale', 'adjustment') NOT NULL,
-    quantity INT NOT NULL,
-    transaction_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    reference_number VARCHAR(100),
-    notes TEXT,
-    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    FOREIGN KEY (product_id) REFERENCES products(id)
-);
\ No newline at end of file
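
Usage note: the import_* procedures in db/procedures.sql wrap the same INSERT ... ON DUPLICATE KEY UPDATE upserts that scripts/import-csv.js issues inline, so a direct call is a one-liner. A minimal sketch, with purely hypothetical values (these are not taken from the patch); re-running it with the same order_number and product_id updates the existing row through the unique_order_product key rather than inserting a duplicate:

    -- hypothetical example values; argument order follows import_order's signature
    CALL import_order('1001', 123456789, 'SKU-001', '2025-01-09',
                      19.990, 2, 0.000, 0.000, FALSE, 5.000, 'CUST-42', FALSE);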