Add categories table and update/optimize import script
@@ -78,4 +78,23 @@ CREATE TABLE IF NOT EXISTS purchase_orders (
    INDEX idx_vendor (vendor),
    INDEX idx_status (status),
    UNIQUE KEY unique_po_product (po_id, product_id)
);

-- Create categories table
CREATE TABLE IF NOT EXISTS categories (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(100) NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    UNIQUE KEY unique_name (name)
);

-- Create product_categories junction table
CREATE TABLE IF NOT EXISTS product_categories (
    product_id BIGINT NOT NULL,
    category_id BIGINT NOT NULL,
    PRIMARY KEY (product_id, category_id),
    FOREIGN KEY (product_id) REFERENCES products(product_id) ON DELETE CASCADE,
    FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE,
    INDEX idx_category (category_id),
    INDEX idx_product (product_id)
);
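With the junction table in place, a product's categories come from a two-table join rather than the comma-separated categories column. A minimal sketch of such a lookup, written against the mysql2 pool the import script creates below; the helper name is hypothetical and only the table and column names above come from the schema:

// Hypothetical helper: list the category names attached to one product
// through the new product_categories junction table.
async function categoryNamesFor(pool, productId) {
    const [rows] = await pool.query(
        `SELECT c.name
         FROM categories c
         JOIN product_categories pc ON pc.category_id = c.id
         WHERE pc.product_id = ?
         ORDER BY c.name`,
        [productId]
    );
    return rows.map(row => row.name);
}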
@@ -16,7 +16,11 @@ const dbConfig = {
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME,
    multipleStatements: true
    multipleStatements: true,
    waitForConnections: true,
    connectionLimit: 10,
    queueLimit: 0,
    namedPlaceholders: true
};

// Helper function to output progress in JSON format
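The same config object now feeds a mysql2 connection pool in main(). Per mysql2's documented options, waitForConnections with queueLimit: 0 makes callers queue (without limit) for a free connection instead of erroring once all 10 connections (connectionLimit) are busy, and namedPlaceholders enables :name-style parameter binding. A small illustrative sketch of the latter, assuming products columns used elsewhere in the script; the vendor value is made up:

// Illustrative only: with namedPlaceholders enabled, parameters can be
// bound by name instead of by position.
const [rows] = await pool.execute(
    'SELECT product_id, title, price FROM products WHERE vendor = :vendor AND visible = :visible',
    { vendor: 'Acme Supplies', visible: 1 }
);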
@@ -68,7 +72,46 @@ function updateProgress(current, total, operation, startTime) {
    });
}

async function importProducts(connection, filePath) {
// Helper function to handle category normalization
async function handleCategories(connection, productId, categoriesStr) {
    if (!categoriesStr) {
        // If no categories, remove all existing relationships
        await connection.query(
            'DELETE FROM product_categories WHERE product_id = ?',
            [productId]
        );
        return;
    }

    // Split categories and clean them
    const categories = categoriesStr.split(',')
        .map(cat => cat.trim())
        .filter(cat => cat.length > 0);

    // Remove existing category relationships for this product
    await connection.query(
        'DELETE FROM product_categories WHERE product_id = ?',
        [productId]
    );

    // Insert categories and create relationships
    for (const category of categories) {
        // Insert category if it doesn't exist
        await connection.query(
            'INSERT IGNORE INTO categories (name) VALUES (?)',
            [category]
        );

        // Get category ID and create relationship in one query to avoid race conditions
        await connection.query(`
            INSERT IGNORE INTO product_categories (product_id, category_id)
            SELECT ?, id FROM categories WHERE name = ?`,
            [productId, category]
        );
    }
}

async function importProducts(pool, filePath) {
    const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
    const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
    const startTime = Date.now();
@@ -91,8 +134,17 @@ async function importProducts(connection, filePath) {
    let rowCount = 0;
    let lastUpdate = Date.now();

    // Batch processing variables
    const BATCH_SIZE = 100;
    let batch = [];
    let categoryUpdates = new Map(); // Store category updates for batch processing

    for await (const record of parser) {
        if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
            // Process remaining batch
            if (batch.length > 0) {
                await processBatch(batch, categoryUpdates);
            }
            outputProgress({
                operation: 'Products import',
                message: `Reached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`,
@@ -110,40 +162,50 @@ async function importProducts(connection, filePath) {
            lastUpdate = now;
        }

        // Check if product exists
        const [existing] = await connection.query('SELECT product_id FROM products WHERE product_id = ?', [record.product_id]);
        // Add to batch
        batch.push({
            product_id: record.product_id,
            title: record.title,
            SKU: record.SKU,
            created_at: convertDate(record.created_at),
            stock_quantity: parseInt(record.stock_quantity) || 0,
            price: parseFloat(record.price) || 0,
            regular_price: parseFloat(record.regular_price) || 0,
            cost_price: parseFloat(record.cost_price) || null,
            landing_cost_price: parseFloat(record.landing_cost_price) || null,
            barcode: record.barcode,
            updated_at: convertDate(record.updated_at),
            visible: record.visible === '1',
            managing_stock: record.managing_stock === '1',
            replenishable: record.replenishable === '1',
            vendor: record.vendor,
            vendor_reference: record.vendor_reference,
            permalink: record.permalink,
            categories: record.categories,
            image: record.image,
            brand: record.brand,
            options: record.options,
            tags: record.tags,
            moq: parseInt(record.moq) || 1,
            uom: parseInt(record.uom) || 1
        });

        try {
            await connection.query('INSERT INTO products VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE title = VALUES(title), stock_quantity = VALUES(stock_quantity), price = VALUES(price), regular_price = VALUES(regular_price), cost_price = VALUES(cost_price), landing_cost_price = VALUES(landing_cost_price), barcode = VALUES(barcode), updated_at = VALUES(updated_at), visible = VALUES(visible), managing_stock = VALUES(managing_stock), replenishable = VALUES(replenishable), vendor = VALUES(vendor), vendor_reference = VALUES(vendor_reference), permalink = VALUES(permalink), categories = VALUES(categories), image = VALUES(image), brand = VALUES(brand), options = VALUES(options), tags = VALUES(tags), moq = VALUES(moq), uom = VALUES(uom)', [
                record.product_id,
                record.title,
                record.SKU,
                convertDate(record.created_at),
                parseInt(record.stock_quantity) || 0,
                parseFloat(record.price) || 0,
                parseFloat(record.regular_price) || 0,
                parseFloat(record.cost_price) || null,
                parseFloat(record.landing_cost_price) || null,
                record.barcode,
                convertDate(record.updated_at),
                record.visible === '1',
                record.managing_stock === '1',
                record.replenishable === '1',
                record.vendor,
                record.vendor_reference,
                record.permalink,
                record.categories,
                record.image,
                record.brand,
                record.options,
                record.tags,
                parseInt(record.moq) || 1,
                parseInt(record.uom) || 1
            ]);
            existing.length ? updated++ : added++;
        } catch (error) {
            console.error(`\nError importing product ${record.product_id}:`, error.message);
        // Store category updates for later
        if (record.categories) {
            categoryUpdates.set(record.product_id, record.categories);
        }

        // Process batch if it reaches BATCH_SIZE
        if (batch.length >= BATCH_SIZE) {
            await processBatch(batch, categoryUpdates);
            batch = [];
            categoryUpdates.clear();
        }
    }

    // Process any remaining records in the final batch
    if (batch.length > 0) {
        await processBatch(batch, categoryUpdates);
    }

    outputProgress({
@@ -156,9 +218,80 @@ async function importProducts(connection, filePath) {
        duration: formatDuration((Date.now() - startTime) / 1000),
        percentage: '100'
    });

    // Helper function to process a batch of records
    async function processBatch(records, categoryUpdates) {
        if (records.length === 0) return;

        const connection = await pool.getConnection();
        try {
            await connection.beginTransaction();
            try {
                // Create the batch insert/update query
                const values = records.map(r => [
                    r.product_id, r.title, r.SKU, r.created_at, r.stock_quantity,
                    r.price, r.regular_price, r.cost_price, r.landing_cost_price,
                    r.barcode, r.updated_at, r.visible, r.managing_stock,
                    r.replenishable, r.vendor, r.vendor_reference, r.permalink,
                    r.categories, r.image, r.brand, r.options, r.tags, r.moq, r.uom
                ]);

                const placeholders = records.map(() =>
                    '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
                ).join(',');

                const sql = `INSERT INTO products VALUES ${placeholders}
                    ON DUPLICATE KEY UPDATE
                    title = VALUES(title),
                    stock_quantity = VALUES(stock_quantity),
                    price = VALUES(price),
                    regular_price = VALUES(regular_price),
                    cost_price = VALUES(cost_price),
                    landing_cost_price = VALUES(landing_cost_price),
                    barcode = VALUES(barcode),
                    updated_at = VALUES(updated_at),
                    visible = VALUES(visible),
                    managing_stock = VALUES(managing_stock),
                    replenishable = VALUES(replenishable),
                    vendor = VALUES(vendor),
                    vendor_reference = VALUES(vendor_reference),
                    permalink = VALUES(permalink),
                    categories = VALUES(categories),
                    image = VALUES(image),
                    brand = VALUES(brand),
                    options = VALUES(options),
                    tags = VALUES(tags),
                    moq = VALUES(moq),
                    uom = VALUES(uom)`;

                const [result] = await connection.query(sql, values.flat());

                // Update stats
                if (result.affectedRows > 0) {
                    updated += result.affectedRows - result.insertId;
                    added += result.insertId;
                }

                // Process categories within the same transaction
                for (const [productId, categories] of categoryUpdates) {
                    await handleCategories(connection, productId, categories);
                }

                await connection.commit();
            } catch (error) {
                await connection.rollback();
                throw error;
            }
        } catch (error) {
            console.error(`\nError processing batch:`, error.message);
            // Continue with next batch instead of failing completely
        } finally {
            connection.release();
        }
    }
}

async function importOrders(connection, filePath) {
async function importOrders(pool, filePath) {
    const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
    const totalRows = ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), ORDERS_TEST_LIMIT) : await countRows(filePath);
    const startTime = Date.now();
@@ -177,8 +310,14 @@ async function importOrders(connection, filePath) {
    }

    // First, get all valid product IDs
    const [rows] = await connection.query('SELECT product_id FROM products');
    const validProductIds = new Set(rows.map(row => row.product_id.toString()));
    const connection = await pool.getConnection();
    let validProductIds;
    try {
        const [rows] = await connection.query('SELECT product_id FROM products');
        validProductIds = new Set(rows.map(row => row.product_id.toString()));
    } finally {
        connection.release();
    }

    let skipped = 0;
    let updated = 0;
@@ -186,8 +325,16 @@ async function importOrders(connection, filePath) {
    let rowCount = 0;
    let lastUpdate = Date.now();

    // Batch processing variables
    const BATCH_SIZE = 500;
    let batch = [];

    for await (const record of parser) {
        if (ORDERS_TEST_LIMIT > 0 && rowCount >= ORDERS_TEST_LIMIT) {
            // Process remaining batch
            if (batch.length > 0) {
                await processBatch(batch);
            }
            outputProgress({
                operation: 'Orders import',
                message: `Reached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`,
@@ -210,33 +357,33 @@ async function importOrders(connection, filePath) {
            continue;
        }

        try {
            // Check if order exists
            const [existing] = await connection.query(
                'SELECT id FROM orders WHERE order_number = ? AND product_id = ?',
                [record.order_number, record.product_id]
            );

            await connection.query('INSERT INTO orders (order_number, product_id, SKU, date, price, quantity, discount, tax, tax_included, shipping, customer, canceled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE price = VALUES(price), quantity = VALUES(quantity), discount = VALUES(discount), tax = VALUES(tax), tax_included = VALUES(tax_included), shipping = VALUES(shipping), canceled = VALUES(canceled)', [
                record.order_number,
                record.product_id,
                record.SKU,
                convertDate(record.date),
                parseFloat(record.price) || 0,
                parseInt(record.quantity) || 0,
                parseFloat(record.discount) || 0,
                parseFloat(record.tax) || 0,
                record.tax_included === '1',
                parseFloat(record.shipping) || 0,
                record.customer,
                record.canceled === '1'
            ]);
            existing.length ? updated++ : added++;
        } catch (error) {
            console.error(`\nError importing order ${record.order_number}, product ${record.product_id}:`, error.message);
            skipped++;
        // Add to batch
        batch.push({
            order_number: record.order_number,
            product_id: record.product_id,
            SKU: record.SKU,
            date: convertDate(record.date),
            price: parseFloat(record.price) || 0,
            quantity: parseInt(record.quantity) || 0,
            discount: parseFloat(record.discount) || 0,
            tax: parseFloat(record.tax) || 0,
            tax_included: record.tax_included === '1',
            shipping: parseFloat(record.shipping) || 0,
            customer: record.customer,
            canceled: record.canceled === '1'
        });

        // Process batch if it reaches BATCH_SIZE
        if (batch.length >= BATCH_SIZE) {
            await processBatch(batch);
            batch = [];
        }
    }

    // Process any remaining records in the final batch
    if (batch.length > 0) {
        await processBatch(batch);
    }

    outputProgress({
        status: 'running',
@@ -249,9 +396,54 @@ async function importOrders(connection, filePath) {
        duration: formatDuration((Date.now() - startTime) / 1000),
        percentage: '100'
    });

    // Helper function to process a batch of records
    async function processBatch(records) {
        if (records.length === 0) return;

        const connection = await pool.getConnection();
        try {
            // Create the batch insert/update query
            const values = records.map(r => [
                r.order_number, r.product_id, r.SKU, r.date, r.price,
                r.quantity, r.discount, r.tax, r.tax_included, r.shipping,
                r.customer, r.canceled
            ]);

            const placeholders = records.map(() =>
                '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            ).join(',');

            const sql = `INSERT INTO orders (order_number, product_id, SKU, date, price,
                quantity, discount, tax, tax_included, shipping, customer, canceled)
                VALUES ${placeholders}
                ON DUPLICATE KEY UPDATE
                price = VALUES(price),
                quantity = VALUES(quantity),
                discount = VALUES(discount),
                tax = VALUES(tax),
                tax_included = VALUES(tax_included),
                shipping = VALUES(shipping),
                canceled = VALUES(canceled)`;

            const [result] = await connection.query(sql, values.flat());

            // Update stats
            if (result.affectedRows > 0) {
                updated += result.affectedRows - result.insertId;
                added += result.insertId;
            }
        } catch (error) {
            console.error(`\nError processing batch:`, error.message);
            // Continue with next batch instead of failing completely
            skipped += records.length;
        } finally {
            connection.release();
        }
    }
}

async function importPurchaseOrders(connection, filePath) {
async function importPurchaseOrders(pool, filePath) {
    const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
    const totalRows = PURCHASE_ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PURCHASE_ORDERS_TEST_LIMIT) : await countRows(filePath);
    const startTime = Date.now();
@@ -270,8 +462,14 @@ async function importPurchaseOrders(connection, filePath) {
    }

    // First, get all valid product IDs
    const [rows] = await connection.query('SELECT product_id FROM products');
    const validProductIds = new Set(rows.map(row => row.product_id.toString()));
    const connection = await pool.getConnection();
    let validProductIds;
    try {
        const [rows] = await connection.query('SELECT product_id FROM products');
        validProductIds = new Set(rows.map(row => row.product_id.toString()));
    } finally {
        connection.release();
    }

    let skipped = 0;
    let updated = 0;
@@ -279,8 +477,16 @@ async function importPurchaseOrders(connection, filePath) {
    let rowCount = 0;
    let lastUpdate = Date.now();

    // Batch processing variables
    const BATCH_SIZE = 500;
    let batch = [];

    for await (const record of parser) {
        if (PURCHASE_ORDERS_TEST_LIMIT > 0 && rowCount >= PURCHASE_ORDERS_TEST_LIMIT) {
            // Process remaining batch
            if (batch.length > 0) {
                await processBatch(batch);
            }
            outputProgress({
                operation: 'Purchase orders import',
                message: `Reached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`,
@@ -303,33 +509,33 @@ async function importPurchaseOrders(connection, filePath) {
            continue;
        }

        try {
            // Check if PO exists
            const [existing] = await connection.query(
                'SELECT id FROM purchase_orders WHERE po_id = ? AND product_id = ?',
                [record.po_id, record.product_id]
            );

            await connection.query('INSERT INTO purchase_orders (po_id, vendor, date, expected_date, product_id, sku, cost_price, status, notes, ordered, received, received_date) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE vendor = VALUES(vendor), expected_date = VALUES(expected_date), cost_price = VALUES(cost_price), status = VALUES(status), notes = VALUES(notes), ordered = VALUES(ordered), received = VALUES(received), received_date = VALUES(received_date)', [
                record.po_id,
                record.vendor,
                convertDate(record.date),
                convertDate(record.expected_date),
                record.product_id,
                record.sku,
                parseFloat(record.cost_price) || 0,
                record.status || 'pending',
                record.notes,
                parseInt(record.ordered) || 0,
                parseInt(record.received) || 0,
                convertDate(record.received_date)
            ]);
            existing.length ? updated++ : added++;
        } catch (error) {
            console.error(`\nError importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
            skipped++;
        // Add to batch
        batch.push({
            po_id: record.po_id,
            vendor: record.vendor,
            date: convertDate(record.date),
            expected_date: convertDate(record.expected_date),
            product_id: record.product_id,
            sku: record.sku,
            cost_price: parseFloat(record.cost_price) || 0,
            status: record.status || 'pending',
            notes: record.notes,
            ordered: parseInt(record.ordered) || 0,
            received: parseInt(record.received) || 0,
            received_date: convertDate(record.received_date)
        });

        // Process batch if it reaches BATCH_SIZE
        if (batch.length >= BATCH_SIZE) {
            await processBatch(batch);
            batch = [];
        }
    }

    // Process any remaining records in the final batch
    if (batch.length > 0) {
        await processBatch(batch);
    }

    outputProgress({
        status: 'running',
@@ -342,16 +548,62 @@ async function importPurchaseOrders(connection, filePath) {
        duration: formatDuration((Date.now() - startTime) / 1000),
        percentage: '100'
    });

    // Helper function to process a batch of records
    async function processBatch(records) {
        if (records.length === 0) return;

        const connection = await pool.getConnection();
        try {
            // Create the batch insert/update query
            const values = records.map(r => [
                r.po_id, r.vendor, r.date, r.expected_date, r.product_id,
                r.sku, r.cost_price, r.status, r.notes, r.ordered,
                r.received, r.received_date
            ]);

            const placeholders = records.map(() =>
                '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
            ).join(',');

            const sql = `INSERT INTO purchase_orders (po_id, vendor, date, expected_date,
                product_id, sku, cost_price, status, notes, ordered, received, received_date)
                VALUES ${placeholders}
                ON DUPLICATE KEY UPDATE
                vendor = VALUES(vendor),
                expected_date = VALUES(expected_date),
                cost_price = VALUES(cost_price),
                status = VALUES(status),
                notes = VALUES(notes),
                ordered = VALUES(ordered),
                received = VALUES(received),
                received_date = VALUES(received_date)`;

            const [result] = await connection.query(sql, values.flat());

            // Update stats
            if (result.affectedRows > 0) {
                updated += result.affectedRows - result.insertId;
                added += result.insertId;
            }
        } catch (error) {
            console.error(`\nError processing batch:`, error.message);
            // Continue with next batch instead of failing completely
            skipped += records.length;
        } finally {
            connection.release();
        }
    }
}

async function main() {
    outputProgress({
        operation: 'Starting import process',
        message: 'Connecting to database...'
        message: 'Creating connection pool...'
    });

    const startTime = Date.now();
    const connection = await mysql.createConnection(dbConfig);
    const pool = mysql.createPool(dbConfig);

    try {
        // Check if tables exist, if not create them
@@ -361,12 +613,12 @@ async function main() {
        });

        const schemaSQL = fs.readFileSync(path.join(__dirname, '../db/schema.sql'), 'utf8');
        await connection.query(schemaSQL);
        await pool.query(schemaSQL);

        // Import products first since they're referenced by other tables
        await importProducts(connection, path.join(__dirname, '../csv/39f2x83-products.csv'));
        await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));
        await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));
        await importProducts(pool, path.join(__dirname, '../csv/39f2x83-products.csv'));
        await importOrders(pool, path.join(__dirname, '../csv/39f2x83-orders.csv'));
        await importPurchaseOrders(pool, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));

        outputProgress({
            status: 'complete',
@@ -380,7 +632,7 @@ async function main() {
        });
        process.exit(1);
    } finally {
        await connection.end();
        await pool.end();
    }
}

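One note on the added/updated counters kept by the batched upserts above: for a multi-row INSERT ... ON DUPLICATE KEY UPDATE, MySQL reports affectedRows as 1 per newly inserted row and 2 per row changed by the UPDATE clause, while insertId is only the auto-increment id of the first inserted row, not a count. A hedged sketch of deriving the two counters from that convention; the upsertStats helper is hypothetical and assumes every row in the batch was either inserted or changed:

// Sketch: derive insert/update counts from a batched
// INSERT ... ON DUPLICATE KEY UPDATE result, assuming each row either
// inserted (counts 1 in affectedRows) or hit the duplicate-key path (counts 2).
function upsertStats(result, batchSize) {
    const updatedRows = result.affectedRows - batchSize; // rows updated in place
    const addedRows = batchSize - updatedRows;           // rows inserted as new
    return { addedRows, updatedRows };
}
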
@@ -54,8 +54,8 @@ export function Settings() {
    const [eventSource, setEventSource] = useState<EventSource | null>(null);
    const [limits, setLimits] = useState<ImportLimits>({
        products: 0,
        orders: 10000,
        purchaseOrders: 10000
        orders: 0,
        purchaseOrders: 0
    });

    // Helper to connect to event source