// inventory/inventory-server/scripts/import-csv.js

const fs = require('fs');
const path = require('path');
const csv = require('csv-parse');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');
// For testing purposes, limit the number of rows to import
const TEST_ROW_LIMIT = 5000;
dotenv.config({ path: path.join(__dirname, '../.env') });
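// DB connection settings are read from inventory-server/.env (path resolved above)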
const dbConfig = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  multipleStatements: true
};
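
// Import the products CSV. Each row is upserted into the products table:
// new product_ids are inserted, existing ones are updated via ON DUPLICATE KEY UPDATE.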
async function importProducts(connection, filePath) {
  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));

  // Convert DD-MM-YYYY dates from the CSV to MySQL's YYYY-MM-DD format
  function convertDate(dateStr) {
    if (!dateStr) return null;
    const [day, month, year] = dateStr.split('-');
    return `${year}-${month}-${day}`;
  }

  let updated = 0;
  let added = 0;
  let rowCount = 0;

  for await (const record of parser) {
    // if (rowCount >= TEST_ROW_LIMIT) {
    //   console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
    //   break;
    // }
    rowCount++;

    try {
      // Check if product exists (only used to report added vs. updated in the summary)
      const [existing] = await connection.query('SELECT product_id FROM products WHERE product_id = ?', [record.product_id]);

      await connection.query('INSERT INTO products VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE title = VALUES(title), stock_quantity = VALUES(stock_quantity), price = VALUES(price), regular_price = VALUES(regular_price), cost_price = VALUES(cost_price), landing_cost_price = VALUES(landing_cost_price), barcode = VALUES(barcode), updated_at = VALUES(updated_at), visible = VALUES(visible), managing_stock = VALUES(managing_stock), replenishable = VALUES(replenishable), vendor = VALUES(vendor), vendor_reference = VALUES(vendor_reference), permalink = VALUES(permalink), categories = VALUES(categories), image = VALUES(image), brand = VALUES(brand), options = VALUES(options), tags = VALUES(tags), moq = VALUES(moq), uom = VALUES(uom)', [
        record.product_id,
        record.title,
        record.SKU,
        convertDate(record.created_at),
        parseInt(record.stock_quantity) || 0,
        parseFloat(record.price) || 0,
        parseFloat(record.regular_price) || 0,
        parseFloat(record.cost_price) || null,
        parseFloat(record.landing_cost_price) || null,
        record.barcode,
        convertDate(record.updated_at),
        record.visible === '1',
        record.managing_stock === '1',
        record.replenishable === '1',
        record.vendor,
        record.vendor_reference,
        record.permalink,
        record.categories,
        record.image,
        record.brand,
        record.options,
        record.tags,
        parseInt(record.moq) || 1,
        parseInt(record.uom) || 1
      ]);
      existing.length ? updated++ : added++;
    } catch (error) {
      console.error(`Error importing product ${record.product_id}:`, error.message);
    }
  }

  console.log(`Products import completed: ${added} added, ${updated} updated (processed ${rowCount} rows)`);
}
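
// Import the orders CSV. Rows whose product_id is not already in the products
// table are skipped, so products must be imported first.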
async function importOrders(connection, filePath) {
  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));

  function convertDate(dateStr) {
    if (!dateStr) return null;
    const [day, month, year] = dateStr.split('-');
    return `${year}-${month}-${day}`;
  }

  // First, get all valid product IDs
  const [rows] = await connection.query('SELECT product_id FROM products');
  const validProductIds = new Set(rows.map(row => row.product_id.toString()));

  let skipped = 0;
  let updated = 0;
  let added = 0;
  let rowCount = 0;

  for await (const record of parser) {
    if (rowCount >= TEST_ROW_LIMIT) {
      console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
      break;
    }
    rowCount++;

    if (!validProductIds.has(record.product_id)) {
      skipped++;
      continue;
    }

    try {
      // Check if order exists
      const [existing] = await connection.query(
        'SELECT id FROM orders WHERE order_number = ? AND product_id = ?',
        [record.order_number, record.product_id]
      );

      await connection.query('INSERT INTO orders (order_number, product_id, SKU, date, price, quantity, discount, tax, tax_included, shipping, customer, canceled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE price = VALUES(price), quantity = VALUES(quantity), discount = VALUES(discount), tax = VALUES(tax), tax_included = VALUES(tax_included), shipping = VALUES(shipping), canceled = VALUES(canceled)', [
        record.order_number,
        record.product_id,
        record.SKU,
        convertDate(record.date),
        parseFloat(record.price) || 0,
        parseInt(record.quantity) || 0,
        parseFloat(record.discount) || 0,
        parseFloat(record.tax) || 0,
        record.tax_included === '1',
        parseFloat(record.shipping) || 0,
        record.customer,
        record.canceled === '1'
      ]);
      existing.length ? updated++ : added++;
    } catch (error) {
      console.error(`Error importing order ${record.order_number}, product ${record.product_id}:`, error.message);
      skipped++;
    }
  }

  console.log(`Orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
}
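
// Import the purchase orders CSV. Like orders, rows referencing unknown
// product_ids are skipped; duplicates are handled via ON DUPLICATE KEY UPDATE.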
async function importPurchaseOrders(connection, filePath) {
  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));

  function convertDate(dateStr) {
    if (!dateStr) return null;
    const [day, month, year] = dateStr.split('-');
    return `${year}-${month}-${day}`;
  }

  // First, get all valid product IDs
  const [rows] = await connection.query('SELECT product_id FROM products');
  const validProductIds = new Set(rows.map(row => row.product_id.toString()));

  let skipped = 0;
  let updated = 0;
  let added = 0;
  let rowCount = 0;

  for await (const record of parser) {
    if (rowCount >= TEST_ROW_LIMIT) {
      console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
      break;
    }
    rowCount++;

    if (!validProductIds.has(record.product_id)) {
      skipped++;
      continue;
    }

    try {
      // Check if PO exists
      const [existing] = await connection.query(
        'SELECT id FROM purchase_orders WHERE po_id = ? AND product_id = ?',
        [record.po_id, record.product_id]
      );

      await connection.query('INSERT INTO purchase_orders (po_id, vendor, date, expected_date, product_id, sku, cost_price, status, notes, ordered, received, received_date) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE vendor = VALUES(vendor), expected_date = VALUES(expected_date), cost_price = VALUES(cost_price), status = VALUES(status), notes = VALUES(notes), ordered = VALUES(ordered), received = VALUES(received), received_date = VALUES(received_date)', [
        record.po_id,
        record.vendor,
        convertDate(record.date),
        convertDate(record.expected_date),
        record.product_id,
        record.sku,
        parseFloat(record.cost_price) || 0,
        record.status || 'pending',
        record.notes,
        parseInt(record.ordered) || 0,
        parseInt(record.received) || 0,
        convertDate(record.received_date)
      ]);
      existing.length ? updated++ : added++;
    } catch (error) {
      console.error(`Error importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
      skipped++;
    }
  }

  console.log(`Purchase orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);
}
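
// Entry point: applies db/schema.sql (expected to contain idempotent CREATE
// statements), then imports products, orders, and purchase orders in that order.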
async function main() {
  const connection = await mysql.createConnection(dbConfig);
  try {
    // Check if tables exist, if not create them
    console.log('Checking database schema...');
    const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
    await connection.query(schemaSQL);

    // Import products first since they're referenced by other tables
    console.log('Importing products...');
    await importProducts(connection, path.join(__dirname, '../csv/39f2x83-products.csv'));

    console.log('Importing orders...');
    await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));

    console.log('Importing purchase orders...');
    await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));

    console.log('All imports completed successfully');
  } catch (error) {
    console.error('Error during import:', error);
    // Set exitCode instead of calling process.exit() so the finally block can still close the connection
    process.exitCode = 1;
  } finally {
    await connection.end();
  }
}

main().catch((error) => {
  // Connection failures happen before the try block above, so catch them here
  console.error('Failed to run import:', error);
  process.exit(1);
});