Add route and frontend button to run import from prod script
@@ -72,6 +72,17 @@ function updateProgress(current, total, operation, startTime) {
   });
 }
+
+let isImportCancelled = false;
+
+// Add cancel function
+function cancelImport() {
+  isImportCancelled = true;
+  outputProgress({
+    status: 'cancelled',
+    operation: 'Import cancelled'
+  });
+}
 
 async function setupSshTunnel() {
   return new Promise((resolve, reject) => {
     const ssh = new Client();
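The cancellation added here is cooperative: `cancelImport()` only flips a module-level flag, and nothing actually stops until the import flow next checks that flag between awaited steps, so a statement already in flight runs to completion. A minimal sketch of the pattern (the `runSteps` driver is hypothetical, not from this commit):

```js
// Cooperative cancellation: a module-level flag checked between async steps.
let isImportCancelled = false;

function cancelImport() {
  isImportCancelled = true;
}

// Hypothetical driver: bail out at the first checkpoint after cancel.
async function runSteps(steps) {
  for (const step of steps) {
    if (isImportCancelled) throw new Error('Import cancelled');
    await step(); // an in-flight step still finishes before the next check
  }
}
```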
@@ -100,8 +111,8 @@ async function importCategories(prodConnection, localConnection) {
   const startTime = Date.now();
 
   try {
-    // Get only categories that are associated with products we're importing
-    const [rows] = await prodConnection.query(`
+    // First get all categories that we need
+    const [allRows] = await prodConnection.query(`
       SELECT DISTINCT
         pc.cat_id as id,
         pc.name,
@@ -114,23 +125,54 @@ async function importCategories(prodConnection, localConnection) {
       INNER JOIN products p ON pci.pid = p.pid
       WHERE pc.hidden = 0
         AND p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
-      ORDER BY pc.type, pc.cat_id
     `);
 
-    let current = 0;
-    const total = rows.length;
+    // Separate into root and child categories
+    const rootCategories = allRows.filter(row => !row.parent_id || row.parent_id === 0);
+    const childCategories = allRows.filter(row => row.parent_id && row.parent_id > 0);
 
-    // Process in batches
+    const total = allRows.length;
+    let current = 0;
+
+    // First insert root categories
+    if (rootCategories.length > 0) {
+      const placeholders = rootCategories.map(() =>
+        '(?, ?, ?, NULL, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
+      ).join(',');
+
+      const values = rootCategories.flatMap(row => [
+        row.id,
+        row.name,
+        row.type,
+        row.description,
+        row.status
+      ]);
+
+      await localConnection.query(`
+        INSERT INTO categories (id, name, type, parent_id, description, status, created_at, updated_at)
+        VALUES ${placeholders}
+        ON DUPLICATE KEY UPDATE
+          name = VALUES(name),
+          type = VALUES(type),
+          parent_id = NULL,
+          description = VALUES(description),
+          status = VALUES(status),
+          updated_at = CURRENT_TIMESTAMP
+      `, values);
+
+      current += rootCategories.length;
+      updateProgress(current, total, 'Categories import (root categories)', startTime);
+    }
+
+    // Then insert child categories in batches
     const BATCH_SIZE = 100;
-    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
-      const batch = rows.slice(i, i + BATCH_SIZE);
+    for (let i = 0; i < childCategories.length; i += BATCH_SIZE) {
+      const batch = childCategories.slice(i, i + BATCH_SIZE);
 
-      // Create placeholders for batch insert
       const placeholders = batch.map(() =>
         '(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
       ).join(',');
 
-      // Flatten values for batch insert
      const values = batch.flatMap(row => [
         row.id,
         row.name,
@@ -153,7 +195,7 @@ async function importCategories(prodConnection, localConnection) {
       `, values);
 
       current += batch.length;
-      updateProgress(current, total, 'Categories import', startTime);
+      updateProgress(current, total, 'Categories import (child categories)', startTime);
     }
 
     outputProgress({
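Both category passes use the same batched-upsert shape: one `(?, ...)` placeholder group per row, all values flattened in row order, and `ON DUPLICATE KEY UPDATE` so re-imports are idempotent. A self-contained sketch of that shape with `mysql2/promise` (table and column names are illustrative, not from the repo):

```js
// Minimal batched upsert with mysql2/promise.
const mysql = require('mysql2/promise');

async function upsertBatch(pool, rows) {
  if (rows.length === 0) return;
  // One "(?, ?)" group per row; values flattened in the same order.
  const placeholders = rows.map(() => '(?, ?)').join(',');
  const values = rows.flatMap(r => [r.id, r.name]);
  await pool.query(
    `INSERT INTO categories (id, name)
     VALUES ${placeholders}
     ON DUPLICATE KEY UPDATE name = VALUES(name)`,
    values
  );
}
```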
@@ -171,254 +213,184 @@ async function importCategories(prodConnection, localConnection) {
 
 async function importProducts(prodConnection, localConnection) {
   outputProgress({
-    operation: 'Starting products import',
+    operation: 'Starting products and categories import',
     status: 'running'
   });
 
   const startTime = Date.now();
 
   try {
-    // Get products from production with all required fields
+    // First get all products with their categories
     const [rows] = await prodConnection.query(`
-      SELECT
-        p.pid AS product_id,
-        p.description AS title,
-        p.notes AS description,
-        p.itemnumber AS SKU,
-        p.date_created AS created_at,
-        p.datein AS first_received,
-        COALESCE((
-          SELECT
-            i.available_local - COALESCE(
-              (
-                SELECT
-                  SUM(oi.qty_ordered - oi.qty_placed)
-                FROM
-                  order_items oi
-                  JOIN _order o ON oi.order_id = o.order_id
-                WHERE
-                  oi.prod_pid = i.pid
-                  AND o.date_placed != '0000-00-00 00:00:00'
-                  AND o.date_shipped = '0000-00-00 00:00:00'
-                  AND oi.pick_finished = 0
-                  AND oi.qty_back = 0
-                  AND o.order_status != 15
-                  AND o.order_status < 90
-                  AND oi.qty_ordered >= oi.qty_placed
-                  AND oi.qty_ordered > 0
-              ),
-              0
-            )
-          FROM
-            shop_inventory i
-          WHERE
-            i.pid = p.pid
-            AND i.store = 0
-            AND i.show + i.buyable > 0
-          LIMIT 1
-        ), 0) AS stock_quantity,
-        COALESCE((
-          SELECT
-            price_each
-          FROM
-            product_current_prices
-          WHERE
-            pid = p.pid
-            AND active = 1
-          ORDER BY
-            qty_buy ASC
-          LIMIT 1
-        ), 0) AS price,
-        COALESCE(p.sellingprice, 0) AS regular_price,
-        COALESCE((
-          SELECT
-            ROUND(AVG(costeach), 5)
-          FROM
-            product_inventory
-          WHERE
-            pid = p.pid
-            AND COUNT > 0
-        ), NULL) AS cost_price,
-        NULL AS landing_cost_price,
-        p.upc AS barcode,
-        p.harmonized_tariff_code,
-        p.stamp AS updated_at,
-        CASE
-          WHEN si.show + si.buyable > 0 THEN 1
-          ELSE 0
-        END AS visible,
-        1 AS managing_stock,
-        CASE
-          WHEN p.reorder = 127 THEN 1
-          WHEN p.reorder = 0 THEN 1
-          ELSE 0
-        END AS replenishable,
-        s.companyname AS vendor,
-        sid.supplier_itemnumber AS vendor_reference,
-        sid.notions_itemnumber AS notions_reference,
-        CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
-        (
-          SELECT
-            CONCAT('https://sbing.com/i/products/0000/', SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', p.pid, '-t-', PI.iid, '.jpg')
-          FROM
-            product_images PI
-          WHERE
-            PI.pid = p.pid
-            AND PI.hidden = 0
-          ORDER BY
-            PI.order DESC,
-            PI.iid
-          LIMIT 1
-        ) AS image,
-        (
-          SELECT
-            CONCAT('https://sbing.com/i/products/0000/', SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', p.pid, '-175x175-', PI.iid, '.jpg')
-          FROM
-            product_images PI
-          WHERE
-            PI.pid = p.pid
-            AND PI.hidden = 0
-            AND PI.width = 175
-          ORDER BY
-            PI.order DESC,
-            PI.iid
-          LIMIT 1
-        ) AS image_175,
-        (
-          SELECT
-            CONCAT('https://sbing.com/i/products/0000/', SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', p.pid, '-o-', PI.iid, '.jpg')
-          FROM
-            product_images PI
-          WHERE
-            PI.pid = p.pid
-            AND PI.hidden = 0
-          ORDER BY
-            PI.width DESC,
-            PI.height DESC,
-            PI.iid
-          LIMIT 1
-        ) AS image_full,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.company
-        ) AS brand,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.line
-        ) AS line,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.subline
-        ) AS subline,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.artist
-        ) AS artist,
-        NULL AS options,
-        NULL AS tags,
-        COALESCE(
-          CASE
-            WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
-            ELSE sid.supplier_qty_per_unit
-          END,
-          sid.notions_qty_per_unit,
-          1
-        ) AS moq,
-        1 AS uom,
-        p.rating,
-        p.rating_votes AS reviews,
-        p.weight,
-        p.length,
-        p.width,
-        p.height,
-        p.country_of_origin,
-        CONCAT_WS('-', NULLIF(p.aisle, ''), NULLIF(p.rack, ''), NULLIF(p.hook, '')) AS location,
-        p.totalsold AS total_sold,
-        (
-          SELECT COUNT(*)
-          FROM mybasket mb
-          WHERE mb.item = p.pid AND mb.qty > 0
-        ) AS baskets,
-        (
-          SELECT COUNT(*)
-          FROM product_notify pn
-          WHERE pn.pid = p.pid
-        ) AS notifies,
-        pls.date_sold as date_last_sold
-      FROM
-        products p
-        LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
-        LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
-        LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
-        LEFT JOIN product_last_sold pls ON p.pid = pls.pid
-      GROUP BY
-        p.pid
+      WITH RECURSIVE category_hierarchy AS (
+        -- Get all categories and their full hierarchy
+        SELECT
+          c.cat_id,
+          c.name,
+          c.type,
+          c.master_cat_id,
+          c.combined_name,
+          1 as level
+        FROM product_categories c
+        WHERE c.master_cat_id = 0 OR c.master_cat_id IS NULL
+
+        UNION ALL
+
+        SELECT
+          c.cat_id,
+          c.name,
+          c.type,
+          c.master_cat_id,
+          c.combined_name,
+          h.level + 1
+        FROM product_categories c
+        INNER JOIN category_hierarchy h ON c.master_cat_id = h.cat_id
+      )
+      SELECT
+        p.*,
+        GROUP_CONCAT(DISTINCT
+          CONCAT_WS(':',
+            ch.cat_id,
+            ch.name,
+            ch.type,
+            ch.master_cat_id,
+            ch.combined_name,
+            ch.level
+          )
+          ORDER BY ch.level
+        ) as categories
+      FROM products p
+      LEFT JOIN product_category_index pci ON p.pid = pci.pid
+      LEFT JOIN category_hierarchy ch ON pci.cat_id = ch.cat_id
+      WHERE p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+      GROUP BY p.pid
     `);
 
     let current = 0;
     const total = rows.length;
 
-    // Process in batches
+    // Track categories we need to insert
+    const categories = new Map();
+
+    // First pass: collect all categories
+    rows.forEach(row => {
+      if (row.categories) {
+        row.categories.split(',').forEach(catStr => {
+          const [id, name, type, parentId, description, level] = catStr.split(':');
+          categories.set(id, {
+            id: parseInt(id),
+            name,
+            type,
+            parent_id: parentId === '0' ? null : parseInt(parentId),
+            description,
+            level: parseInt(level),
+            status: 'active'
+          });
+        });
+      }
+    });
+
+    // Sort categories by level to ensure parents are inserted first
+    const sortedCategories = Array.from(categories.values())
+      .sort((a, b) => a.level - b.level);
+
+    // Insert categories level by level
+    const levels = [...new Set(sortedCategories.map(c => c.level))];
+
+    outputProgress({
+      status: 'running',
+      operation: 'Importing categories by level',
+      current: 0,
+      total: sortedCategories.length
+    });
+
+    let insertedCategories = 0;
+    for (const level of levels) {
+      const levelCategories = sortedCategories.filter(c => c.level === level);
+
+      if (levelCategories.length > 0) {
+        const placeholders = levelCategories.map(() =>
+          '(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
+        ).join(',');
+
+        const values = levelCategories.flatMap(cat => [
+          cat.id,
+          cat.name,
+          cat.type,
+          cat.parent_id,
+          cat.description,
+          cat.status
+        ]);
+
+        await localConnection.query(`
+          INSERT INTO categories (id, name, type, parent_id, description, status, created_at, updated_at)
+          VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE
+            name = VALUES(name),
+            type = VALUES(type),
+            parent_id = VALUES(parent_id),
+            description = VALUES(description),
+            status = VALUES(status),
+            updated_at = CURRENT_TIMESTAMP
+        `, values);
+
+        insertedCategories += levelCategories.length;
+        updateProgress(insertedCategories, sortedCategories.length, 'Categories import', startTime);
+      }
+    }
+
+    // Now import products in batches
     const BATCH_SIZE = 100;
     for (let i = 0; i < rows.length; i += BATCH_SIZE) {
       const batch = rows.slice(i, i + BATCH_SIZE);
 
-      // Create placeholders for batch insert
       const placeholders = batch.map(() =>
-        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
+        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
       ).join(',');
 
-      // Flatten values for batch insert
       const values = batch.flatMap(row => [
-        row.product_id,
+        row.pid,
         row.title,
-        row.description,
-        row.SKU,
-        row.created_at,
-        row.first_received,
-        row.stock_quantity,
-        row.price,
-        row.regular_price,
-        row.cost_price,
-        row.landing_cost_price,
-        row.barcode,
-        row.harmonized_tariff_code,
-        row.updated_at,
-        row.visible,
-        row.managing_stock,
-        row.replenishable,
-        row.vendor,
-        row.vendor_reference,
-        row.notions_reference,
-        row.permalink,
-        null, // categories - handled separately
-        row.image,
-        row.image_175,
-        row.image_full,
-        row.brand,
-        row.line,
-        row.subline,
-        row.artist,
-        row.options,
-        row.tags,
-        row.moq,
-        row.uom,
-        row.rating,
-        row.reviews,
-        row.weight,
-        row.length,
-        row.width,
-        row.height,
-        row.country_of_origin,
-        row.location,
-        row.total_sold,
-        row.baskets,
-        row.notifies,
-        row.date_last_sold
+        row.description || null,
+        row.itemnumber,
+        row.date_created,
+        row.stock_quantity || 0,
+        row.price || 0,
+        row.price_reg || 0,
+        row.cost_each || null,
+        row.cost_landed || null,
+        row.barcode || null,
+        row.harmonized_tariff_code || null,
+        row.visible === 1,
+        row.managing_stock === 1,
+        row.replenishable === 1,
+        row.supplier_name || null,
+        row.supplier_reference || null,
+        row.notions_reference || null,
+        row.permalink || null,
+        row.image || null,
+        row.image_175 || null,
+        row.image_full || null,
+        row.brand || null,
+        row.line || null,
+        row.subline || null,
+        row.artist || null,
+        row.options || null,
+        row.tags || null,
+        row.moq || 1,
+        row.uom || 1,
+        row.rating || null,
+        row.reviews || null,
+        row.weight || null,
+        row.length || null,
+        row.width || null,
+        row.height || null,
+        row.country_of_origin || null,
+        row.location || null,
+        row.total_sold || 0,
+        row.baskets || 0,
+        row.notifies || 0,
+        row.date_last_sold || null
       ]);
 
       await localConnection.query(`
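The rewritten query leans on a recursive CTE: the anchor member selects root categories (`master_cat_id` 0 or NULL) at level 1, and the recursive member repeatedly joins children onto rows already in the hierarchy, incrementing `level` as it goes. A stripped-down sketch of the same walk against a hypothetical `categories(id, parent_id, name)` table, using the same mysql2 style as the script (MySQL 8+):

```js
// Minimal sketch of the recursive-CTE hierarchy walk used above.
async function fetchCategoryTree(pool) {
  const [rows] = await pool.query(`
    WITH RECURSIVE tree AS (
      SELECT id, parent_id, name, 1 AS level
      FROM categories
      WHERE parent_id IS NULL                    -- anchor: roots at level 1
      UNION ALL
      SELECT c.id, c.parent_id, c.name, t.level + 1
      FROM categories c
      INNER JOIN tree t ON c.parent_id = t.id    -- recurse: attach children
    )
    SELECT * FROM tree ORDER BY level
  `);
  return rows; // ordered so parents always precede their children
}
```

Ordering by `level` is what lets the importer insert parents before children without a topological sort in JavaScript.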
@@ -472,13 +444,13 @@ async function importProducts(prodConnection, localConnection) {
 
     outputProgress({
       status: 'complete',
-      operation: 'Products import completed',
+      operation: 'Products and categories import completed',
       current: total,
       total,
       duration: formatDuration((Date.now() - startTime) / 1000)
     });
   } catch (error) {
-    console.error('Error importing products:', error);
+    console.error('Error importing products and categories:', error);
     throw error;
   }
 }
@@ -773,6 +745,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
   }
 }
 
+// Modify main function to handle cancellation and avoid process.exit
 async function main() {
   let ssh;
   let prodConnection;
@@ -780,6 +753,7 @@ async function main() {
 
   try {
     outputProgress({
+      status: 'running',
       operation: 'Starting import process',
       message: 'Setting up connections...'
     });
@@ -796,27 +770,37 @@ async function main() {
     // Set up local database connection
     localConnection = await mysql.createPool(localDbConfig);
 
-    // Import data
-    await importCategories(prodConnection, localConnection);
+    // Check for cancellation after connections
+    if (isImportCancelled) {
+      throw new Error('Import cancelled');
+    }
+
+    // Import products (and categories)
     await importProducts(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
+
     await importProductCategories(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
+
     await importOrders(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
+
     await importPurchaseOrders(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
 
     outputProgress({
       status: 'complete',
-      operation: 'Import process completed',
-      duration: formatDuration((Date.now() - startTime) / 1000)
+      operation: 'Import process completed'
     });
 
   } catch (error) {
-    console.error('Fatal error during import process:', error);
+    console.error('Error during import process:', error);
     outputProgress({
-      status: 'error',
+      status: error.message === 'Import cancelled' ? 'cancelled' : 'error',
       operation: 'Import process',
       error: error.message
     });
-    process.exit(1);
+    throw error; // Re-throw to be handled by caller
   } finally {
     if (prodConnection) await prodConnection.end();
     if (localConnection) await localConnection.end();
@@ -824,8 +808,17 @@ async function main() {
   }
 }
 
-// Run the import
-main().catch(error => {
-  console.error('Unhandled error in main process:', error);
-  process.exit(1);
-});
+// Run the import only if this is the main module
+if (require.main === module) {
+  main().catch(error => {
+    console.error('Unhandled error in main process:', error);
+    process.exit(1);
+  });
+}
+
+// Export the functions needed by the route
+module.exports = {
+  main,
+  outputProgress,
+  cancelImport
+};
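Exporting `main` behind a `require.main === module` guard lets the same file run as a CLI script or be driven in-process by the API. One subtlety: the route below reassigns `importFromProd.outputProgress`, which only has an effect if the script emits progress through the exported reference rather than a closed-over local function. A minimal sketch of that dual-use shape, assuming the script routes all progress through its own exports:

```js
// importer.js — dual-use: CLI entry point or in-process library (sketch).
function defaultProgress(data) {
  console.log(JSON.stringify(data));
}

module.exports = {
  outputProgress: defaultProgress,
  async main() {
    // Read the current handler off exports so caller overrides apply.
    module.exports.outputProgress({ status: 'running', operation: 'step 1' });
  }
};

if (require.main === module) {
  module.exports.main().catch(err => {
    console.error(err);
    process.exit(1);
  });
}
```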
@@ -376,19 +376,20 @@ router.post('/cancel', (req, res) => {
   }
 
   try {
-    // Kill the process with SIGTERM signal
-    activeImport.kill('SIGTERM');
-
-    // Clean up
-    activeImport = null;
-    importProgress = null;
+    // If it's the prod import module, call its cancel function
+    if (typeof activeImport.cancelImport === 'function') {
+      activeImport.cancelImport();
+    } else {
+      // Otherwise it's a child process
+      activeImport.kill('SIGTERM');
+    }
 
     // Get the operation type from the request
     const { operation } = req.query;
 
     // Send cancel message only to the appropriate client set
     const cancelMessage = {
-      status: 'complete',
+      status: 'cancelled',
       operation: 'Operation cancelled'
     };
 
@@ -669,4 +670,45 @@ router.post('/calculate-metrics', async (req, res) => {
   }
 });
 
+// Route to import from production database
+router.post('/import-from-prod', async (req, res) => {
+  if (activeImport) {
+    return res.status(409).json({ error: 'Import already in progress' });
+  }
+
+  try {
+    const importFromProd = require('../../scripts/import-from-prod');
+
+    // Set up progress handler
+    const progressHandler = (data) => {
+      importProgress = data;
+      sendProgressToClients(importClients, data);
+    };
+
+    // Start the import process
+    importFromProd.outputProgress = progressHandler;
+    activeImport = importFromProd; // Store the module for cancellation
+
+    // Run the import in the background
+    importFromProd.main().catch(error => {
+      console.error('Error in import process:', error);
+      activeImport = null;
+      importProgress = {
+        status: error.message === 'Import cancelled' ? 'cancelled' : 'error',
+        operation: 'Import process',
+        error: error.message
+      };
+      sendProgressToClients(importClients, importProgress);
+    }).finally(() => {
+      activeImport = null;
+    });
+
+    res.json({ message: 'Import from production started' });
+  } catch (error) {
+    console.error('Error starting production import:', error);
+    activeImport = null;
+    res.status(500).json({ error: error.message || 'Failed to start production import' });
+  }
+});
+
 module.exports = router;
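The route responds immediately and streams progress out of band. `sendProgressToClients` and `importClients` are defined elsewhere in this router; a minimal sketch of the server-sent-events fan-out they imply (an assumed shape, not the repo's exact code):

```js
// Assumed SSE fan-out: each subscriber holds an open response object.
const importClients = new Set();

// GET /progress — the frontend subscribes with EventSource.
router.get('/progress', (req, res) => {
  res.set({
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  res.flushHeaders();
  importClients.add(res);
  req.on('close', () => importClients.delete(res));
});

// Push one progress payload to every connected client.
function sendProgressToClients(clients, data) {
  const payload = `data: ${JSON.stringify(data)}\n\n`;
  for (const res of clients) res.write(payload);
}
```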
@@ -44,7 +44,8 @@ interface ImportLimits {
 
 export function DataManagement() {
   const [isUpdating, setIsUpdating] = useState(false);
-  const [isImporting, setIsImporting] = useState(false);
+  const [isImportingCSV, setIsImportingCSV] = useState(false);
+  const [isImportingProd, setIsImportingProd] = useState(false);
   const [isResetting, setIsResetting] = useState(false);
   const [updateProgress, setUpdateProgress] = useState<ImportProgress | null>(null);
   const [importProgress, setImportProgress] = useState<ImportProgress | null>(null);
@@ -76,7 +77,7 @@ export function DataManagement() {
 
   // Helper to check if any operation is running
   const isAnyOperationRunning = () => {
-    return isUpdating || isImporting || isResetting || isResettingMetrics || isCalculatingMetrics;
+    return isUpdating || isImportingCSV || isImportingProd || isTestingConnection || isResetting || isCalculatingMetrics;
   };
 
   // Helper function to get progress bar color based on status
@@ -256,7 +257,7 @@ export function DataManagement() {
     // Try to reconnect via status check if the operation might still be running
     if (
       (type === 'calculate-metrics' && isCalculatingMetrics) ||
-      (type === 'import' && isImporting) ||
+      (type === 'import' && isImportingCSV) ||
       (type === 'update' && isUpdating) ||
       (type === 'reset' && isResetting) ||
       (type === 'reset-metrics' && isResettingMetrics)
@@ -341,7 +342,8 @@ export function DataManagement() {
     if (!otherProgress || otherProgress.status === 'complete' || otherProgress.status === 'error' || otherProgress.status === 'cancelled') {
       source.close();
       setEventSource(null);
-      setIsImporting(false);
+      setIsImportingCSV(false);
+      setIsImportingProd(false);
 
       // Show appropriate toast based on final status
       if (progressData.status === 'complete') {
@@ -433,60 +435,30 @@ export function DataManagement() {
     }
   };
 
-  const handleCancel = async (type: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => {
+  const handleCancel = async (operation: 'update' | 'import' | 'reset' | 'calculate-metrics') => {
     try {
-      // Mark this operation as cancelled
-      setCancelledOperations(prev => new Set(prev).add(type));
-
-      // First close any existing event source
-      if (eventSource) {
-        eventSource.close();
-        setEventSource(null);
-      }
-
-      // Send cancel request with the correct endpoint format
-      const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${type}`, {
+      const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${operation}`, {
         method: 'POST',
         credentials: 'include'
       });
 
-      // Set cancelled state immediately
-      switch (type) {
-        case 'import':
-          setLastImportStatus({ ...importProgress, status: 'cancelled' });
-          setImportProgress(null);
-          setIsImporting(false);
-          break;
-        case 'update':
-          setLastUpdateStatus({ ...updateProgress, status: 'cancelled' });
-          setUpdateProgress(null);
-          setIsUpdating(false);
-          break;
-        case 'reset':
-          setLastResetStatus({ ...resetProgress, status: 'cancelled' });
-          setResetProgress(null);
-          setIsResetting(false);
-          break;
-        case 'reset-metrics':
-          setLastResetMetricsStatus({ ...resetMetricsProgress, status: 'cancelled' });
-          setResetMetricsProgress(null);
-          setIsResettingMetrics(false);
-          break;
-        case 'calculate-metrics':
-          setLastMetricsStatus({ ...metricsProgress, status: 'cancelled' });
-          setMetricsProgress(null);
-          setIsCalculatingMetrics(false);
-          break;
-      }
-
-      toast.warning(`${type.charAt(0).toUpperCase() + type.slice(1).replace('-', ' ')} cancelled`);
-
       if (!response.ok) {
-        const data = await response.json().catch(() => ({}));
-        console.error(`Failed to cancel ${type}:`, data.error || 'Unknown error');
+        throw new Error('Failed to cancel operation');
       }
+
+      // Reset the appropriate state
+      if (operation === 'import') {
+        setIsImportingCSV(false);
+        setIsImportingProd(false);
+        setImportProgress(null);
+        setPurchaseOrdersProgress(null);
+      } else if (operation === 'update') {
+        setIsUpdating(false);
+        setUpdateProgress(null);
+      }
+      // ... other operation states ...
     } catch (error) {
-      console.error(`Error cancelling ${type}:`, error);
+      toast.error(`Failed to cancel operation: ${error instanceof Error ? error.message : 'Unknown error'}`);
     }
   };
 
@@ -539,7 +511,7 @@ export function DataManagement() {
 
       if (operation.includes('import')) {
         console.log('Import is running');
-        setIsImporting(true);
+        setIsImportingCSV(true);
         if (operation.includes('purchase orders')) {
           setPurchaseOrdersProgress(importData.progress || importData);
         } else {
@@ -628,8 +600,8 @@ export function DataManagement() {
   };
 
   const handleImportCSV = async () => {
-    setIsImporting(true);
-    setImportProgress({ status: 'running', operation: 'Starting import process' });
+    setIsImportingCSV(true);
+    setImportProgress({ status: 'running', operation: 'Starting CSV import' });
 
     try {
       connectToEventSource('import');
@@ -650,40 +622,18 @@ export function DataManagement() {
       // Start new import
       const response = await fetch(`${config.apiUrl}/csv/import`, {
         method: 'POST',
-        headers: {
-          'Content-Type': 'application/json'
-        },
-        credentials: 'include',
-        body: JSON.stringify(limits)
-      }).catch(error => {
-        // Ignore network errors as the import might still be running
-        console.log('Import request error (may be timeout):', error);
-        return null;
+        credentials: 'include'
       });
 
-      // If we got no response but have progress, assume it's still running
-      if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) {
-        console.log('No response but import appears to be running, continuing...');
-        return;
-      }
-
-      // If we got a response, check if it indicates an actual error
-      if (response) {
-        const data = await response.json().catch(() => null);
-        if (!response.ok && data?.error && !data.error.includes('already in progress')) {
-          throw new Error(data.error || 'Failed to start CSV import');
-        }
+      const data = await response.json();
+      if (!response.ok) {
+        throw new Error(data.error || 'Failed to start CSV import');
       }
     } catch (error) {
-      // Only handle actual errors, not timeouts or connection issues
-      if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
-        toast.error(`CSV import failed: ${error.message}`);
-        setIsImporting(false);
-        setImportProgress(null);
-        setPurchaseOrdersProgress(null);
-      } else {
-        console.log('Ignoring network error, import may still be running:', error);
-      }
+      toast.error(`CSV import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
+      setIsImportingCSV(false);
+      setImportProgress(null);
+      setPurchaseOrdersProgress(null);
     }
   };
 
@@ -853,6 +803,61 @@ export function DataManagement() {
     }
   };
 
+  const handleImportFromProd = async () => {
+    setIsImportingProd(true);
+    setImportProgress({ status: 'running', operation: 'Starting import from production' });
+
+    try {
+      connectToEventSource('import');
+
+      // First check if import is already running
+      const statusResponse = await fetch(`${config.apiUrl}/csv/status`, {
+        credentials: 'include'
+      }).catch(() => null);
+
+      if (statusResponse) {
+        const statusData = await statusResponse.json().catch(() => null);
+        if (statusData?.active && statusData?.progress) {
+          console.log('Import already running, connecting to existing process');
+          return;
+        }
+      }
+
+      // Start new import
+      const response = await fetch(`${config.apiUrl}/csv/import-from-prod`, {
+        method: 'POST',
+        credentials: 'include'
+      }).catch(error => {
+        console.log('Import request error (may be timeout):', error);
+        return null;
+      });
+
+      // If we got no response but have progress, assume it's still running
+      if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) {
+        console.log('No response but import appears to be running, continuing...');
+        return;
+      }
+
+      // If we got a response, check if it indicates an actual error
+      if (response) {
+        const data = await response.json().catch(() => null);
+        if (!response.ok && data?.error && !data.error.includes('already in progress')) {
+          throw new Error(data.error || 'Failed to start production import');
+        }
+      }
+    } catch (error) {
+      // Only handle actual errors, not timeouts or connection issues
+      if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
+        toast.error(`Production import failed: ${error.message}`);
+        setIsImportingProd(false);
+        setImportProgress(null);
+        setPurchaseOrdersProgress(null);
+      } else {
+        console.log('Ignoring network error, import may still be running:', error);
+      }
+    }
+  };
+
   return (
     <div className="max-w-[400px] space-y-4">
       {/* Test Production Connection Card */}
@@ -926,29 +931,47 @@ export function DataManagement() {
       <Card>
         <CardHeader>
           <CardTitle>Import Data</CardTitle>
-          <CardDescription>Import current CSV files into database</CardDescription>
+          <CardDescription>Import data from CSV files or production database</CardDescription>
         </CardHeader>
         <CardContent className="space-y-6">
           <div className="flex gap-2">
             <Button
-              className="flex-1"
+              className="flex-1 min-w-0"
               onClick={handleImportCSV}
               disabled={isAnyOperationRunning()}
             >
-              {isImporting ? (
-                <>
+              {isImportingCSV ? (
+                <div className="flex items-center justify-center">
                   <Loader2 className="mr-2 h-4 w-4 animate-spin" />
-                  Importing Data...
-                </>
+                  <span className="truncate">Importing CSV...</span>
+                </div>
               ) : (
-                <>
+                <div className="flex items-center justify-center">
                   <Upload className="mr-2 h-4 w-4" />
-                  Import Data
-                </>
+                  <span>Import from CSV</span>
+                </div>
               )}
             </Button>
 
-            {isImporting && (
+            <Button
+              className="flex-1 min-w-0"
+              onClick={handleImportFromProd}
+              disabled={isAnyOperationRunning()}
+            >
+              {isImportingProd ? (
+                <div className="flex items-center justify-center">
+                  <Loader2 className="mr-2 h-4 w-4 animate-spin" />
+                  <span className="truncate">Importing Prod...</span>
+                </div>
+              ) : (
+                <div className="flex items-center justify-center">
+                  <Database className="mr-2 h-4 w-4" />
+                  <span>Import from Prod</span>
+                </div>
+              )}
+            </Button>
+
+            {(isImportingCSV || isImportingProd) && (
               <Button
                 variant="destructive"
                 onClick={() => handleCancel('import')}
@@ -958,7 +981,7 @@ export function DataManagement() {
             )}
           </div>
 
-          {(isImporting || lastImportStatus) && (
+          {(isImportingCSV || isImportingProd || lastImportStatus) && (
             <div className="space-y-4">
               {renderProgress(importProgress || lastImportStatus, 'import')}
               {renderProgress(purchaseOrdersProgress, 'import')}