Add route and frontend button to run import from prod script
@@ -72,6 +72,17 @@ function updateProgress(current, total, operation, startTime) {
   });
 }
 
+let isImportCancelled = false;
+
+// Add cancel function
+function cancelImport() {
+  isImportCancelled = true;
+  outputProgress({
+    status: 'cancelled',
+    operation: 'Import cancelled'
+  });
+}
+
 async function setupSshTunnel() {
   return new Promise((resolve, reject) => {
     const ssh = new Client();
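The cancellation added here is cooperative rather than forceful: nothing is killed, and each import stage re-checks `isImportCancelled` between awaits (see the `main()` hunks further down). A minimal sketch of the pattern; `runStages` is an illustrative name, not part of the commit:

    // Cooperative cancellation: poll the flag between units of work so a
    // cancel request takes effect at the next stage boundary.
    async function runStages(stages) {
      for (const stage of stages) {
        if (isImportCancelled) throw new Error('Import cancelled');
        await stage();
      }
    }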
@@ -100,8 +111,8 @@ async function importCategories(prodConnection, localConnection) {
   const startTime = Date.now();
 
   try {
-    // Get only categories that are associated with products we're importing
-    const [rows] = await prodConnection.query(`
+    // First get all categories that we need
+    const [allRows] = await prodConnection.query(`
       SELECT DISTINCT
         pc.cat_id as id,
         pc.name,
@@ -114,23 +125,54 @@ async function importCategories(prodConnection, localConnection) {
       INNER JOIN products p ON pci.pid = p.pid
       WHERE pc.hidden = 0
         AND p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
       ORDER BY pc.type, pc.cat_id
     `);
 
+    // Separate into root and child categories
+    const rootCategories = allRows.filter(row => !row.parent_id || row.parent_id === 0);
+    const childCategories = allRows.filter(row => row.parent_id && row.parent_id > 0);
+
+    const total = allRows.length;
     let current = 0;
-    const total = rows.length;
 
-    // Process in batches
+    // First insert root categories
+    if (rootCategories.length > 0) {
+      const placeholders = rootCategories.map(() =>
+        '(?, ?, ?, NULL, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
+      ).join(',');
+
+      const values = rootCategories.flatMap(row => [
+        row.id,
+        row.name,
+        row.type,
+        row.description,
+        row.status
+      ]);
+
+      await localConnection.query(`
+        INSERT INTO categories (id, name, type, parent_id, description, status, created_at, updated_at)
+        VALUES ${placeholders}
+        ON DUPLICATE KEY UPDATE
+          name = VALUES(name),
+          type = VALUES(type),
+          parent_id = NULL,
+          description = VALUES(description),
+          status = VALUES(status),
+          updated_at = CURRENT_TIMESTAMP
+      `, values);
+
+      current += rootCategories.length;
+      updateProgress(current, total, 'Categories import (root categories)', startTime);
+    }
+
+    // Then insert child categories in batches
     const BATCH_SIZE = 100;
-    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
-      const batch = rows.slice(i, i + BATCH_SIZE);
+    for (let i = 0; i < childCategories.length; i += BATCH_SIZE) {
+      const batch = childCategories.slice(i, i + BATCH_SIZE);
 
       // Create placeholders for batch insert
       const placeholders = batch.map(() =>
         '(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
       ).join(',');
 
       // Flatten values for batch insert
       const values = batch.flatMap(row => [
         row.id,
         row.name,
@@ -153,7 +195,7 @@ async function importCategories(prodConnection, localConnection) {
       `, values);
 
       current += batch.length;
-      updateProgress(current, total, 'Categories import', startTime);
+      updateProgress(current, total, 'Categories import (child categories)', startTime);
     }
 
     outputProgress({
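Inserting root categories (with `parent_id = NULL`) before child categories means a child row never lands before its parent, which matters if `categories.parent_id` is backed by a foreign key. A hedged sanity check one could run after the import; the query is illustrative and assumes only the `categories` columns shown in the INSERT above:

    // Hypothetical post-import check: child categories whose parent is missing.
    const [orphans] = await localConnection.query(`
      SELECT c.id, c.parent_id
      FROM categories c
      LEFT JOIN categories p ON c.parent_id = p.id
      WHERE c.parent_id IS NOT NULL AND p.id IS NULL
    `);
    if (orphans.length > 0) console.warn('Orphaned categories:', orphans);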
@@ -171,254 +213,184 @@ async function importCategories(prodConnection, localConnection) {
 
 async function importProducts(prodConnection, localConnection) {
   outputProgress({
-    operation: 'Starting products import',
+    operation: 'Starting products and categories import',
     status: 'running'
   });
 
   const startTime = Date.now();
 
   try {
-    // Get products from production with all required fields
+    // First get all products with their categories
     const [rows] = await prodConnection.query(`
+      WITH RECURSIVE category_hierarchy AS (
+        -- Get all categories and their full hierarchy
+        SELECT
+          c.cat_id,
+          c.name,
+          c.type,
+          c.master_cat_id,
+          c.combined_name,
+          1 as level
+        FROM product_categories c
+        WHERE c.master_cat_id = 0 OR c.master_cat_id IS NULL
+
+        UNION ALL
+
+        SELECT
+          c.cat_id,
+          c.name,
+          c.type,
+          c.master_cat_id,
+          c.combined_name,
+          h.level + 1
+        FROM product_categories c
+        INNER JOIN category_hierarchy h ON c.master_cat_id = h.cat_id
+      )
       SELECT
-        p.pid AS product_id,
-        p.description AS title,
-        p.notes AS description,
-        p.itemnumber AS SKU,
-        p.date_created AS created_at,
-        p.datein AS first_received,
-        COALESCE((
-          SELECT
-            i.available_local - COALESCE(
-              (
-                SELECT
-                  SUM(oi.qty_ordered - oi.qty_placed)
-                FROM
-                  order_items oi
-                  JOIN _order o ON oi.order_id = o.order_id
-                WHERE
-                  oi.prod_pid = i.pid
-                  AND o.date_placed != '0000-00-00 00:00:00'
-                  AND o.date_shipped = '0000-00-00 00:00:00'
-                  AND oi.pick_finished = 0
-                  AND oi.qty_back = 0
-                  AND o.order_status != 15
-                  AND o.order_status < 90
-                  AND oi.qty_ordered >= oi.qty_placed
-                  AND oi.qty_ordered > 0
-              ),
-              0
-            )
-          FROM
-            shop_inventory i
-          WHERE
-            i.pid = p.pid
-            AND i.store = 0
-            AND i.show + i.buyable > 0
-          LIMIT 1
-        ), 0) AS stock_quantity,
-        COALESCE((
-          SELECT
-            price_each
-          FROM
-            product_current_prices
-          WHERE
-            pid = p.pid
-            AND active = 1
-          ORDER BY
-            qty_buy ASC
-          LIMIT 1
-        ), 0) AS price,
-        COALESCE(p.sellingprice, 0) AS regular_price,
-        COALESCE((
-          SELECT
-            ROUND(AVG(costeach), 5)
-          FROM
-            product_inventory
-          WHERE
-            pid = p.pid
-            AND COUNT > 0
-        ), NULL) AS cost_price,
-        NULL AS landing_cost_price,
-        p.upc AS barcode,
-        p.harmonized_tariff_code,
-        p.stamp AS updated_at,
-        CASE
-          WHEN si.show + si.buyable > 0 THEN 1
-          ELSE 0
-        END AS visible,
-        1 AS managing_stock,
-        CASE
-          WHEN p.reorder = 127 THEN 1
-          WHEN p.reorder = 0 THEN 1
-          ELSE 0
-        END AS replenishable,
-        s.companyname AS vendor,
-        sid.supplier_itemnumber AS vendor_reference,
-        sid.notions_itemnumber AS notions_reference,
-        CONCAT('https://www.acherryontop.com/shop/product/', p.pid) AS permalink,
-        (
-          SELECT
-            CONCAT('https://sbing.com/i/products/0000/', SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', p.pid, '-t-', PI.iid, '.jpg')
-          FROM
-            product_images PI
-          WHERE
-            PI.pid = p.pid
-            AND PI.hidden = 0
-          ORDER BY
-            PI.order DESC,
-            PI.iid
-          LIMIT 1
-        ) AS image,
-        (
-          SELECT
-            CONCAT('https://sbing.com/i/products/0000/', SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', p.pid, '-175x175-', PI.iid, '.jpg')
-          FROM
-            product_images PI
-          WHERE
-            PI.pid = p.pid
-            AND PI.hidden = 0
-            AND PI.width = 175
-          ORDER BY
-            PI.order DESC,
-            PI.iid
-          LIMIT 1
-        ) AS image_175,
-        (
-          SELECT
-            CONCAT('https://sbing.com/i/products/0000/', SUBSTRING(LPAD(p.pid, 6, '0'), 1, 3), '/', p.pid, '-o-', PI.iid, '.jpg')
-          FROM
-            product_images PI
-          WHERE
-            PI.pid = p.pid
-            AND PI.hidden = 0
-          ORDER BY
-            PI.width DESC,
-            PI.height DESC,
-            PI.iid
-          LIMIT 1
-        ) AS image_full,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.company
-        ) AS brand,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.line
-        ) AS line,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.subline
-        ) AS subline,
-        (
-          SELECT name
-          FROM product_categories
-          WHERE cat_id = p.artist
-        ) AS artist,
-        NULL AS options,
-        NULL AS tags,
-        COALESCE(
-          CASE
-            WHEN sid.supplier_id = 92 THEN sid.notions_qty_per_unit
-            ELSE sid.supplier_qty_per_unit
-          END,
-          sid.notions_qty_per_unit,
-          1
-        ) AS moq,
-        1 AS uom,
-        p.rating,
-        p.rating_votes AS reviews,
-        p.weight,
-        p.length,
-        p.width,
-        p.height,
-        p.country_of_origin,
-        CONCAT_WS('-', NULLIF(p.aisle, ''), NULLIF(p.rack, ''), NULLIF(p.hook, '')) AS location,
-        p.totalsold AS total_sold,
-        (
-          SELECT COUNT(*)
-          FROM mybasket mb
-          WHERE mb.item = p.pid AND mb.qty > 0
-        ) AS baskets,
-        (
-          SELECT COUNT(*)
-          FROM product_notify pn
-          WHERE pn.pid = p.pid
-        ) AS notifies,
-        pls.date_sold as date_last_sold
-      FROM
-        products p
-        LEFT JOIN shop_inventory si ON p.pid = si.pid AND si.store = 0
-        LEFT JOIN supplier_item_data sid ON p.pid = sid.pid
-        LEFT JOIN suppliers s ON sid.supplier_id = s.supplierid
-        LEFT JOIN product_last_sold pls ON p.pid = pls.pid
-      GROUP BY
-        p.pid
+        p.*,
+        GROUP_CONCAT(DISTINCT
+          CONCAT_WS(':',
+            ch.cat_id,
+            ch.name,
+            ch.type,
+            ch.master_cat_id,
+            ch.combined_name,
+            ch.level
+          )
+          ORDER BY ch.level
+        ) as categories
+      FROM products p
+      LEFT JOIN product_category_index pci ON p.pid = pci.pid
+      LEFT JOIN category_hierarchy ch ON pci.cat_id = ch.cat_id
+      WHERE p.date_created >= DATE_SUB(CURRENT_DATE, INTERVAL 2 YEAR)
+      GROUP BY p.pid
     `);
 
     let current = 0;
     const total = rows.length;
 
-    // Process in batches
+    // Track categories we need to insert
+    const categories = new Map();
+
+    // First pass: collect all categories
+    rows.forEach(row => {
+      if (row.categories) {
+        row.categories.split(',').forEach(catStr => {
+          const [id, name, type, parentId, description, level] = catStr.split(':');
+          categories.set(id, {
+            id: parseInt(id),
+            name,
+            type,
+            parent_id: parentId === '0' ? null : parseInt(parentId),
+            description,
+            level: parseInt(level),
+            status: 'active'
+          });
+        });
+      }
+    });
+
+    // Sort categories by level to ensure parents are inserted first
+    const sortedCategories = Array.from(categories.values())
+      .sort((a, b) => a.level - b.level);
+
+    // Insert categories level by level
+    const levels = [...new Set(sortedCategories.map(c => c.level))];
+
+    outputProgress({
+      status: 'running',
+      operation: 'Importing categories by level',
+      current: 0,
+      total: sortedCategories.length
+    });
+
+    let insertedCategories = 0;
+    for (const level of levels) {
+      const levelCategories = sortedCategories.filter(c => c.level === level);
+
+      if (levelCategories.length > 0) {
+        const placeholders = levelCategories.map(() =>
+          '(?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
+        ).join(',');
+
+        const values = levelCategories.flatMap(cat => [
+          cat.id,
+          cat.name,
+          cat.type,
+          cat.parent_id,
+          cat.description,
+          cat.status
+        ]);
+
+        await localConnection.query(`
+          INSERT INTO categories (id, name, type, parent_id, description, status, created_at, updated_at)
+          VALUES ${placeholders}
+          ON DUPLICATE KEY UPDATE
+            name = VALUES(name),
+            type = VALUES(type),
+            parent_id = VALUES(parent_id),
+            description = VALUES(description),
+            status = VALUES(status),
+            updated_at = CURRENT_TIMESTAMP
+        `, values);
+
+        insertedCategories += levelCategories.length;
+        updateProgress(insertedCategories, sortedCategories.length, 'Categories import', startTime);
+      }
+    }
+
+    // Now import products in batches
     const BATCH_SIZE = 100;
     for (let i = 0; i < rows.length; i += BATCH_SIZE) {
       const batch = rows.slice(i, i + BATCH_SIZE);
 
       // Create placeholders for batch insert
       const placeholders = batch.map(() =>
-        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
+        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)'
       ).join(',');
 
       // Flatten values for batch insert
       const values = batch.flatMap(row => [
-        row.product_id,
+        row.pid,
         row.title,
-        row.description,
-        row.SKU,
-        row.created_at,
-        row.first_received,
-        row.stock_quantity,
-        row.price,
-        row.regular_price,
-        row.cost_price,
-        row.landing_cost_price,
-        row.barcode,
-        row.harmonized_tariff_code,
-        row.updated_at,
-        row.visible,
-        row.managing_stock,
-        row.replenishable,
-        row.vendor,
-        row.vendor_reference,
-        row.notions_reference,
-        row.permalink,
-        null, // categories - handled separately
-        row.image,
-        row.image_175,
-        row.image_full,
-        row.brand,
-        row.line,
-        row.subline,
-        row.artist,
-        row.options,
-        row.tags,
-        row.moq,
-        row.uom,
-        row.rating,
-        row.reviews,
-        row.weight,
-        row.length,
-        row.width,
-        row.height,
-        row.country_of_origin,
-        row.location,
-        row.total_sold,
-        row.baskets,
-        row.notifies,
-        row.date_last_sold
+        row.description || null,
+        row.itemnumber,
+        row.date_created,
+        row.stock_quantity || 0,
+        row.price || 0,
+        row.price_reg || 0,
+        row.cost_each || null,
+        row.cost_landed || null,
+        row.barcode || null,
+        row.harmonized_tariff_code || null,
+        row.visible === 1,
+        row.managing_stock === 1,
+        row.replenishable === 1,
+        row.supplier_name || null,
+        row.supplier_reference || null,
+        row.notions_reference || null,
+        row.permalink || null,
+        row.image || null,
+        row.image_175 || null,
+        row.image_full || null,
+        row.brand || null,
+        row.line || null,
+        row.subline || null,
+        row.artist || null,
+        row.options || null,
+        row.tags || null,
+        row.moq || 1,
+        row.uom || 1,
+        row.rating || null,
+        row.reviews || null,
+        row.weight || null,
+        row.length || null,
+        row.width || null,
+        row.height || null,
+        row.country_of_origin || null,
+        row.location || null,
+        row.total_sold || 0,
+        row.baskets || 0,
+        row.notifies || 0,
+        row.date_last_sold || null
       ]);
 
       await localConnection.query(`
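One practical caveat with the `GROUP_CONCAT(...) as categories` approach: MySQL truncates the concatenated result at `group_concat_max_len` (1024 bytes by default), which would silently drop categories for heavily categorized products, and the `split(',')`/`split(':')` parsing assumes category names contain neither delimiter. A hedged mitigation for the truncation, run on the same connection before the big query; the 1 MB figure is an arbitrary generous choice:

    // Raise the session limit so the concatenated category strings
    // are not cut off mid-record.
    await prodConnection.query('SET SESSION group_concat_max_len = 1048576');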
@@ -472,13 +444,13 @@ async function importProducts(prodConnection, localConnection) {
 
     outputProgress({
       status: 'complete',
-      operation: 'Products import completed',
+      operation: 'Products and categories import completed',
       current: total,
       total,
       duration: formatDuration((Date.now() - startTime) / 1000)
     });
   } catch (error) {
-    console.error('Error importing products:', error);
+    console.error('Error importing products and categories:', error);
     throw error;
   }
 }
@@ -773,6 +745,7 @@ async function importPurchaseOrders(prodConnection, localConnection) {
   }
 }
 
+// Modify main function to handle cancellation and avoid process.exit
 async function main() {
   let ssh;
   let prodConnection;
@@ -780,6 +753,7 @@ async function main() {
 
   try {
     outputProgress({
+      status: 'running',
       operation: 'Starting import process',
       message: 'Setting up connections...'
     });
@@ -796,27 +770,37 @@ async function main() {
     // Set up local database connection
     localConnection = await mysql.createPool(localDbConfig);
 
-    // Import data
-    await importCategories(prodConnection, localConnection);
+    // Check for cancellation after connections
+    if (isImportCancelled) {
+      throw new Error('Import cancelled');
+    }
+
+    // Import products (and categories)
     await importProducts(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
 
     await importProductCategories(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
 
     await importOrders(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
 
     await importPurchaseOrders(prodConnection, localConnection);
+    if (isImportCancelled) throw new Error('Import cancelled');
 
     outputProgress({
       status: 'complete',
-      operation: 'Import process completed',
-      duration: formatDuration((Date.now() - startTime) / 1000)
+      operation: 'Import process completed'
     });
 
   } catch (error) {
-    console.error('Fatal error during import process:', error);
+    console.error('Error during import process:', error);
     outputProgress({
-      status: 'error',
+      status: error.message === 'Import cancelled' ? 'cancelled' : 'error',
       operation: 'Import process',
       error: error.message
     });
-    process.exit(1);
+    throw error; // Re-throw to be handled by caller
   } finally {
     if (prodConnection) await prodConnection.end();
     if (localConnection) await localConnection.end();
@@ -824,8 +808,17 @@ async function main() {
   }
 }
 
-// Run the import
-main().catch(error => {
-  console.error('Unhandled error in main process:', error);
-  process.exit(1);
-});
+// Run the import only if this is the main module
+if (require.main === module) {
+  main().catch(error => {
+    console.error('Unhandled error in main process:', error);
+    process.exit(1);
+  });
+}
+
+// Export the functions needed by the route
+module.exports = {
+  main,
+  outputProgress,
+  cancelImport
+};
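Note that the route below installs its progress handler by assigning `importFromProd.outputProgress = progressHandler`. In CommonJS that reassigns the property on the exports object only; any internal call bound directly to the local `outputProgress` declaration keeps using the original implementation. A sketch of one way to make the handoff explicit; `progressSink` and `setProgressHandler` are illustrative names, not part of the commit:

    // Default sink; replaced by the route when it wants progress events.
    let progressSink = (data) => console.log(JSON.stringify(data));
    // Internal calls go through the mutable sink, so a late override takes effect.
    function outputProgress(data) { progressSink(data); }
    // The route would call this instead of reassigning the export.
    module.exports.setProgressHandler = (fn) => { progressSink = fn; };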
@@ -376,19 +376,20 @@ router.post('/cancel', (req, res) => {
   }
 
   try {
-    // Kill the process with SIGTERM signal
-    activeImport.kill('SIGTERM');
+    // If it's the prod import module, call its cancel function
+    if (typeof activeImport.cancelImport === 'function') {
+      activeImport.cancelImport();
+    } else {
+      // Otherwise it's a child process
+      activeImport.kill('SIGTERM');
 
-    // Clean up
-    activeImport = null;
-    importProgress = null;
+      // Clean up
+      activeImport = null;
+      importProgress = null;
+    }
 
     // Get the operation type from the request
     const { operation } = req.query;
 
     // Send cancel message only to the appropriate client set
     const cancelMessage = {
-      status: 'complete',
+      status: 'cancelled',
       operation: 'Operation cancelled'
     };
 
@@ -669,4 +670,45 @@ router.post('/calculate-metrics', async (req, res) => {
   }
 });
 
+// Route to import from production database
+router.post('/import-from-prod', async (req, res) => {
+  if (activeImport) {
+    return res.status(409).json({ error: 'Import already in progress' });
+  }
+
+  try {
+    const importFromProd = require('../../scripts/import-from-prod');
+
+    // Set up progress handler
+    const progressHandler = (data) => {
+      importProgress = data;
+      sendProgressToClients(importClients, data);
+    };
+
+    // Start the import process
+    importFromProd.outputProgress = progressHandler;
+    activeImport = importFromProd; // Store the module for cancellation
+
+    // Run the import in the background
+    importFromProd.main().catch(error => {
+      console.error('Error in import process:', error);
+      activeImport = null;
+      importProgress = {
+        status: error.message === 'Import cancelled' ? 'cancelled' : 'error',
+        operation: 'Import process',
+        error: error.message
+      };
+      sendProgressToClients(importClients, importProgress);
+    }).finally(() => {
+      activeImport = null;
+    });
+
+    res.json({ message: 'Import from production started' });
+  } catch (error) {
+    console.error('Error starting production import:', error);
+    activeImport = null;
+    res.status(500).json({ error: error.message || 'Failed to start production import' });
+  }
+});
+
 module.exports = router;
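For manual testing, the new endpoint mirrors the existing CSV import surface: start it with a POST, watch progress on the existing status/SSE channel, and cancel through the shared cancel route. A sketch; the base URL is an assumption standing in for wherever `config.apiUrl` points:

    const base = 'http://localhost:3000/api'; // assumption: value of config.apiUrl
    // Kick off the production import; responds 409 if one is already running.
    await fetch(`${base}/csv/import-from-prod`, { method: 'POST', credentials: 'include' });
    // Request cancellation through the shared cancel route.
    await fetch(`${base}/csv/cancel?operation=import`, { method: 'POST', credentials: 'include' });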
@@ -44,7 +44,8 @@ interface ImportLimits {
 
 export function DataManagement() {
   const [isUpdating, setIsUpdating] = useState(false);
-  const [isImporting, setIsImporting] = useState(false);
+  const [isImportingCSV, setIsImportingCSV] = useState(false);
+  const [isImportingProd, setIsImportingProd] = useState(false);
   const [isResetting, setIsResetting] = useState(false);
   const [updateProgress, setUpdateProgress] = useState<ImportProgress | null>(null);
   const [importProgress, setImportProgress] = useState<ImportProgress | null>(null);
@@ -76,7 +77,7 @@ export function DataManagement() {
 
   // Helper to check if any operation is running
   const isAnyOperationRunning = () => {
-    return isUpdating || isImporting || isResetting || isResettingMetrics || isCalculatingMetrics;
+    return isUpdating || isImportingCSV || isImportingProd || isTestingConnection || isResetting || isCalculatingMetrics;
   };
 
   // Helper function to get progress bar color based on status
@@ -256,7 +257,7 @@ export function DataManagement() {
     // Try to reconnect via status check if the operation might still be running
     if (
       (type === 'calculate-metrics' && isCalculatingMetrics) ||
-      (type === 'import' && isImporting) ||
+      (type === 'import' && isImportingCSV) ||
       (type === 'update' && isUpdating) ||
       (type === 'reset' && isResetting) ||
       (type === 'reset-metrics' && isResettingMetrics)
@@ -341,7 +342,8 @@ export function DataManagement() {
       if (!otherProgress || otherProgress.status === 'complete' || otherProgress.status === 'error' || otherProgress.status === 'cancelled') {
         source.close();
         setEventSource(null);
-        setIsImporting(false);
+        setIsImportingCSV(false);
+        setIsImportingProd(false);
 
         // Show appropriate toast based on final status
         if (progressData.status === 'complete') {
@@ -433,60 +435,30 @@ export function DataManagement() {
     }
   };
 
-  const handleCancel = async (type: 'update' | 'import' | 'reset' | 'reset-metrics' | 'calculate-metrics') => {
+  const handleCancel = async (operation: 'update' | 'import' | 'reset' | 'calculate-metrics') => {
     try {
-      // Mark this operation as cancelled
-      setCancelledOperations(prev => new Set(prev).add(type));
-
-      // First close any existing event source
-      if (eventSource) {
-        eventSource.close();
-        setEventSource(null);
-      }
-
-      // Send cancel request with the correct endpoint format
-      const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${type}`, {
+      const response = await fetch(`${config.apiUrl}/csv/cancel?operation=${operation}`, {
         method: 'POST',
         credentials: 'include'
       });
 
-      // Set cancelled state immediately
-      switch (type) {
-        case 'import':
-          setLastImportStatus({ ...importProgress, status: 'cancelled' });
-          setImportProgress(null);
-          setIsImporting(false);
-          break;
-        case 'update':
-          setLastUpdateStatus({ ...updateProgress, status: 'cancelled' });
-          setUpdateProgress(null);
-          setIsUpdating(false);
-          break;
-        case 'reset':
-          setLastResetStatus({ ...resetProgress, status: 'cancelled' });
-          setResetProgress(null);
-          setIsResetting(false);
-          break;
-        case 'reset-metrics':
-          setLastResetMetricsStatus({ ...resetMetricsProgress, status: 'cancelled' });
-          setResetMetricsProgress(null);
-          setIsResettingMetrics(false);
-          break;
-        case 'calculate-metrics':
-          setLastMetricsStatus({ ...metricsProgress, status: 'cancelled' });
-          setMetricsProgress(null);
-          setIsCalculatingMetrics(false);
-          break;
-      }
-
-      toast.warning(`${type.charAt(0).toUpperCase() + type.slice(1).replace('-', ' ')} cancelled`);
-
       if (!response.ok) {
-        const data = await response.json().catch(() => ({}));
-        console.error(`Failed to cancel ${type}:`, data.error || 'Unknown error');
         throw new Error('Failed to cancel operation');
       }
+
+      // Reset the appropriate state
+      if (operation === 'import') {
+        setIsImportingCSV(false);
+        setIsImportingProd(false);
+        setImportProgress(null);
+        setPurchaseOrdersProgress(null);
+      } else if (operation === 'update') {
+        setIsUpdating(false);
+        setUpdateProgress(null);
+      }
+      // ... other operation states ...
     } catch (error) {
-      console.error(`Error cancelling ${type}:`, error);
+      console.error(`Error cancelling ${operation}:`, error);
       toast.error(`Failed to cancel operation: ${error instanceof Error ? error.message : 'Unknown error'}`);
     }
   };
 
@@ -539,7 +511,7 @@ export function DataManagement() {
 
       if (operation.includes('import')) {
         console.log('Import is running');
-        setIsImporting(true);
+        setIsImportingCSV(true);
         if (operation.includes('purchase orders')) {
           setPurchaseOrdersProgress(importData.progress || importData);
         } else {
@@ -628,8 +600,8 @@ export function DataManagement() {
   };
 
   const handleImportCSV = async () => {
-    setIsImporting(true);
-    setImportProgress({ status: 'running', operation: 'Starting import process' });
+    setIsImportingCSV(true);
+    setImportProgress({ status: 'running', operation: 'Starting CSV import' });
 
     try {
       connectToEventSource('import');
@@ -650,40 +622,18 @@ export function DataManagement() {
       // Start new import
       const response = await fetch(`${config.apiUrl}/csv/import`, {
         method: 'POST',
-        headers: {
-          'Content-Type': 'application/json'
-        },
-        credentials: 'include',
-        body: JSON.stringify(limits)
-      }).catch(error => {
-        // Ignore network errors as the import might still be running
-        console.log('Import request error (may be timeout):', error);
-        return null;
+        credentials: 'include'
       });
 
-      // If we got no response but have progress, assume it's still running
-      if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) {
-        console.log('No response but import appears to be running, continuing...');
-        return;
-      }
-
-      // If we got a response, check if it indicates an actual error
-      if (response) {
-        const data = await response.json().catch(() => null);
-        if (!response.ok && data?.error && !data.error.includes('already in progress')) {
-          throw new Error(data.error || 'Failed to start CSV import');
-        }
-      }
+      const data = await response.json();
+      if (!response.ok) {
+        throw new Error(data.error || 'Failed to start CSV import');
+      }
     } catch (error) {
-      // Only handle actual errors, not timeouts or connection issues
-      if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
-        toast.error(`CSV import failed: ${error.message}`);
-        setIsImporting(false);
-        setImportProgress(null);
-        setPurchaseOrdersProgress(null);
-      } else {
-        console.log('Ignoring network error, import may still be running:', error);
-      }
+      toast.error(`CSV import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
+      setIsImportingCSV(false);
+      setImportProgress(null);
+      setPurchaseOrdersProgress(null);
     }
   };
 
@@ -853,6 +803,61 @@ export function DataManagement() {
     }
   };
 
+  const handleImportFromProd = async () => {
+    setIsImportingProd(true);
+    setImportProgress({ status: 'running', operation: 'Starting import from production' });
+
+    try {
+      connectToEventSource('import');
+
+      // First check if import is already running
+      const statusResponse = await fetch(`${config.apiUrl}/csv/status`, {
+        credentials: 'include'
+      }).catch(() => null);
+
+      if (statusResponse) {
+        const statusData = await statusResponse.json().catch(() => null);
+        if (statusData?.active && statusData?.progress) {
+          console.log('Import already running, connecting to existing process');
+          return;
+        }
+      }
+
+      // Start new import
+      const response = await fetch(`${config.apiUrl}/csv/import-from-prod`, {
+        method: 'POST',
+        credentials: 'include'
+      }).catch(error => {
+        console.log('Import request error (may be timeout):', error);
+        return null;
+      });
+
+      // If we got no response but have progress, assume it's still running
+      if (!response && (importProgress?.current || purchaseOrdersProgress?.current)) {
+        console.log('No response but import appears to be running, continuing...');
+        return;
+      }
+
+      // If we got a response, check if it indicates an actual error
+      if (response) {
+        const data = await response.json().catch(() => null);
+        if (!response.ok && data?.error && !data.error.includes('already in progress')) {
+          throw new Error(data.error || 'Failed to start production import');
+        }
+      }
+    } catch (error) {
+      // Only handle actual errors, not timeouts or connection issues
+      if (error instanceof Error && !error.message.includes('NetworkError') && !error.message.includes('Failed to fetch')) {
+        toast.error(`Production import failed: ${error.message}`);
+        setIsImportingProd(false);
+        setImportProgress(null);
+        setPurchaseOrdersProgress(null);
+      } else {
+        console.log('Ignoring network error, import may still be running:', error);
+      }
+    }
+  };
+
   return (
     <div className="max-w-[400px] space-y-4">
       {/* Test Production Connection Card */}
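The deliberately lenient error handling in `handleImportFromProd` reflects that the POST can time out at a proxy while the server-side import keeps running; the SSE connection opened by `connectToEventSource('import')` remains the source of truth for progress. The same idea condensed into a helper; `postTolerant` is an illustrative name, not part of the commit:

    // Treat network-level failures as "possibly still running" and return null,
    // letting the progress stream decide whether the operation actually failed.
    async function postTolerant(url) {
      try {
        return await fetch(url, { method: 'POST', credentials: 'include' });
      } catch (error) {
        console.log('Request error (may be timeout):', error);
        return null;
      }
    }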
@@ -926,29 +931,47 @@ export function DataManagement() {
           <Card>
             <CardHeader>
               <CardTitle>Import Data</CardTitle>
-              <CardDescription>Import current CSV files into database</CardDescription>
+              <CardDescription>Import data from CSV files or production database</CardDescription>
             </CardHeader>
             <CardContent className="space-y-6">
               <div className="flex gap-2">
                 <Button
-                  className="flex-1"
+                  className="flex-1 min-w-0"
                   onClick={handleImportCSV}
                   disabled={isAnyOperationRunning()}
                 >
-                  {isImporting ? (
-                    <>
+                  {isImportingCSV ? (
+                    <div className="flex items-center justify-center">
                       <Loader2 className="mr-2 h-4 w-4 animate-spin" />
-                      Importing Data...
-                    </>
+                      <span className="truncate">Importing CSV...</span>
+                    </div>
                   ) : (
-                    <>
+                    <div className="flex items-center justify-center">
                       <Upload className="mr-2 h-4 w-4" />
-                      Import Data
-                    </>
+                      <span>Import from CSV</span>
+                    </div>
                   )}
                 </Button>
 
-                {isImporting && (
+                <Button
+                  className="flex-1 min-w-0"
+                  onClick={handleImportFromProd}
+                  disabled={isAnyOperationRunning()}
+                >
+                  {isImportingProd ? (
+                    <div className="flex items-center justify-center">
+                      <Loader2 className="mr-2 h-4 w-4 animate-spin" />
+                      <span className="truncate">Importing Prod...</span>
+                    </div>
+                  ) : (
+                    <div className="flex items-center justify-center">
+                      <Database className="mr-2 h-4 w-4" />
+                      <span>Import from Prod</span>
+                    </div>
+                  )}
+                </Button>
+
+                {(isImportingCSV || isImportingProd) && (
                   <Button
                     variant="destructive"
                     onClick={() => handleCancel('import')}
@@ -958,7 +981,7 @@ export function DataManagement() {
                 )}
               </div>
 
-              {(isImporting || lastImportStatus) && (
+              {(isImportingCSV || isImportingProd || lastImportStatus) && (
                 <div className="space-y-4">
                   {renderProgress(importProgress || lastImportStatus, 'import')}
                   {renderProgress(purchaseOrdersProgress, 'import')}