Enhance metrics calculation scripts with improved progress tracking and cancellation support
@@ -1,167 +1,180 @@
-const fs = require('fs');
 const path = require('path');
-const https = require('https');
+const fs = require('fs');
+const axios = require('axios');
+const { outputProgress, formatElapsedTime, estimateRemaining, calculateRate } = require('./metrics/utils/progress');

 // Change working directory to script directory
 process.chdir(path.dirname(__filename));

+require('dotenv').config({ path: path.resolve(__dirname, '..', '.env') });
+
 // Configuration
 const FILES = [
-  {
-    name: '39f2x83-products.csv',
-    url: 'https://feeds.acherryontop.com/39f2x83-products.csv'
-  },
-  {
-    name: '39f2x83-orders.csv',
-    url: 'https://feeds.acherryontop.com/39f2x83-orders.csv'
-  },
-  {
-    name: '39f2x83-purchase_orders.csv',
-    url: 'https://feeds.acherryontop.com/39f2x83-purchase_orders.csv'
-  }
+  {
+    name: '39f2x83-products.csv',
+    url: process.env.PRODUCTS_CSV_URL
+  },
+  {
+    name: '39f2x83-orders.csv',
+    url: process.env.ORDERS_CSV_URL
+  },
+  {
+    name: '39f2x83-purchase_orders.csv',
+    url: process.env.PURCHASE_ORDERS_CSV_URL
+  }
 ];

-const CSV_DIR = path.join(__dirname, '..', 'csv');
+let isCancelled = false;

-// Ensure CSV directory exists
-if (!fs.existsSync(CSV_DIR)) {
-  fs.mkdirSync(CSV_DIR, { recursive: true });
-}
+function cancelUpdate() {
+  isCancelled = true;
+  outputProgress({
+    status: 'cancelled',
+    operation: 'CSV update cancelled',
+    current: 0,
+    total: FILES.length,
+    elapsed: null,
+    remaining: null,
+    rate: 0
+  });
+}

 // Function to download a file
-function downloadFile(url, filePath) {
-  return new Promise((resolve, reject) => {
-    const file = fs.createWriteStream(filePath);
+async function downloadFile(file, index, startTime) {
+  if (isCancelled) return;

+  const csvDir = path.join(__dirname, '../csv');
+  if (!fs.existsSync(csvDir)) {
+    fs.mkdirSync(csvDir, { recursive: true });
+  }

+  const writer = fs.createWriteStream(path.join(csvDir, file.name));

-    https.get(url, response => {
-      if (response.statusCode !== 200) {
-        reject(new Error(`Failed to download: ${response.statusCode} ${response.statusMessage}`));
-        return;
-      }
+  try {
+    const response = await axios({
+      url: file.url,
+      method: 'GET',
+      responseType: 'stream'
+    });

-      const totalSize = parseInt(response.headers['content-length'], 10);
-      let downloadedSize = 0;
-      let lastProgressUpdate = Date.now();
-      const startTime = Date.now();
+    const totalLength = response.headers['content-length'];
+    let downloadedLength = 0;
+    let lastProgressUpdate = Date.now();
+    const PROGRESS_INTERVAL = 1000; // Update progress every second

-      response.on('data', chunk => {
-        downloadedSize += chunk.length;
-        const now = Date.now();
-        // Update progress at most every 100ms to avoid console flooding
-        if (now - lastProgressUpdate > 100) {
-          const elapsed = (now - startTime) / 1000;
-          const rate = downloadedSize / elapsed;
-          const remaining = (totalSize - downloadedSize) / rate;

-          console.log(JSON.stringify({
-            status: 'running',
-            operation: `Downloading ${path.basename(filePath)}`,
-            current: downloadedSize,
-            total: totalSize,
-            rate: (rate / 1024 / 1024).toFixed(2), // MB/s
-            elapsed: formatDuration(elapsed),
-            remaining: formatDuration(remaining),
-            percentage: ((downloadedSize / totalSize) * 100).toFixed(1)
-          }));
-          lastProgressUpdate = now;
-        }
-      });
+    response.data.on('data', (chunk) => {
+      if (isCancelled) {
+        writer.end();
+        return;
+      }

-      response.pipe(file);
+      downloadedLength += chunk.length;

+      // Update progress based on time interval
+      const now = Date.now();
+      if (now - lastProgressUpdate >= PROGRESS_INTERVAL) {
+        const progress = (downloadedLength / totalLength) * 100;
+        outputProgress({
+          status: 'running',
+          operation: `Downloading ${file.name}`,
+          current: index + (downloadedLength / totalLength),
+          total: FILES.length,
+          elapsed: formatElapsedTime(startTime),
+          remaining: estimateRemaining(startTime, index + (downloadedLength / totalLength), FILES.length),
+          rate: calculateRate(startTime, index + (downloadedLength / totalLength)),
+          percentage: progress.toFixed(1),
+          file_progress: {
+            name: file.name,
+            downloaded: downloadedLength,
+            total: totalLength,
+            percentage: progress.toFixed(1)
+          }
+        });
+        lastProgressUpdate = now;
+      }
+    });

-      file.on('finish', () => {
-        console.log(JSON.stringify({
-          status: 'running',
-          operation: `Completed ${path.basename(filePath)}`,
-          current: totalSize,
-          total: totalSize,
-          percentage: '100'
-        }));
-        file.close();
-        resolve();
-      });
-    }).on('error', error => {
-      fs.unlink(filePath, () => {}); // Delete the file if download failed
-      reject(error);
-    });
+    response.data.pipe(writer);

-    file.on('error', error => {
-      fs.unlink(filePath, () => {}); // Delete the file if there was an error
-      reject(error);
-    });
-  });
-}

-// Helper function to format duration
-function formatDuration(seconds) {
-  if (seconds < 60) return `${Math.round(seconds)}s`;
-  const minutes = Math.floor(seconds / 60);
-  seconds = Math.round(seconds % 60);
-  return `${minutes}m ${seconds}s`;
-}
+    return new Promise((resolve, reject) => {
+      writer.on('finish', resolve);
+      writer.on('error', reject);
+    });
+  } catch (error) {
+    fs.unlinkSync(path.join(csvDir, file.name));
+    throw error;
+  }
+}

 // Main function to update all files
 async function updateFiles() {
-  console.log(JSON.stringify({
-    status: 'running',
-    operation: 'Starting CSV file updates',
-    total: FILES.length,
-    current: 0
-  }));

-  for (let i = 0; i < FILES.length; i++) {
-    const file = FILES[i];
-    const filePath = path.join(CSV_DIR, file.name);
+  const startTime = Date.now();

+  outputProgress({
+    status: 'running',
+    operation: 'Starting CSV update',
+    current: 0,
+    total: FILES.length,
+    elapsed: '0s',
+    remaining: null,
+    rate: 0,
+    percentage: '0'
+  });

   try {
-      // Delete existing file if it exists
-      if (fs.existsSync(filePath)) {
-        console.log(JSON.stringify({
-          status: 'running',
-          operation: `Removing existing file: ${file.name}`,
-          current: i,
-          total: FILES.length,
-          percentage: ((i / FILES.length) * 100).toFixed(1)
-        }));
-        fs.unlinkSync(filePath);
-      }
+    for (let i = 0; i < FILES.length; i++) {
+      if (isCancelled) {
+        return;
+      }

-      // Download new file
-      console.log(JSON.stringify({
-        status: 'running',
-        operation: `Starting download: ${file.name}`,
-        current: i,
-        total: FILES.length,
-        percentage: ((i / FILES.length) * 100).toFixed(1)
-      }));
-      await downloadFile(file.url, filePath);
-      console.log(JSON.stringify({
-        status: 'running',
-        operation: `Successfully updated ${file.name}`,
-        current: i + 1,
-        total: FILES.length,
-        percentage: (((i + 1) / FILES.length) * 100).toFixed(1)
-      }));
+      const file = FILES[i];
+      await downloadFile(file, i, startTime);

+      outputProgress({
+        status: 'running',
+        operation: 'CSV update in progress',
+        current: i + 1,
+        total: FILES.length,
+        elapsed: formatElapsedTime(startTime),
+        remaining: estimateRemaining(startTime, i + 1, FILES.length),
+        rate: calculateRate(startTime, i + 1),
+        percentage: (((i + 1) / FILES.length) * 100).toFixed(1)
+      });
+    }

+    outputProgress({
+      status: 'complete',
+      operation: 'CSV update complete',
+      current: FILES.length,
+      total: FILES.length,
+      elapsed: formatElapsedTime(startTime),
+      remaining: '0s',
+      rate: calculateRate(startTime, FILES.length),
+      percentage: '100'
+    });
   } catch (error) {
-    console.error(JSON.stringify({
-      status: 'error',
-      operation: `Error updating ${file.name}`,
-      error: error.message
-    }));
-    throw error;
+    outputProgress({
+      status: 'error',
+      operation: 'CSV update failed',
+      error: error.message,
+      current: 0,
+      total: FILES.length,
+      elapsed: formatElapsedTime(startTime),
+      remaining: null,
+      rate: 0
+    });
+    throw error;
   }
 }

-  console.log(JSON.stringify({
-    status: 'complete',
-    operation: 'CSV file update complete',
-    current: FILES.length,
-    total: FILES.length,
-    percentage: '100'
-  }));
-}

-// Run the update
-updateFiles().catch(error => {
-  console.error(JSON.stringify({
-    error: `Update failed: ${error.message}`
-  }));
-  process.exit(1);
-});
+// Run the update only if this is the main module
+if (require.main === module) {
+  updateFiles().catch((error) => {
+    console.error('Error updating CSV files:', error);
+    process.exit(1);
+  });
+}

+// Export the functions needed by the route
+module.exports = {
+  updateFiles,
+  cancelUpdate
+};
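
Note on configuration: the rewritten FILES entries read their URLs from PRODUCTS_CSV_URL, ORDERS_CSV_URL, and PURCHASE_ORDERS_CSV_URL, loaded from the ../.env file at the top of the script; presumably these now hold the feed URLs that were previously hardcoded (e.g. https://feeds.acherryontop.com/39f2x83-products.csv). If one of them is unset, file.url is undefined and the failure only surfaces when axios attempts the download. A small guard placed after the FILES definition could catch that earlier; this is only a sketch, not part of the commit:

// Hypothetical startup check (not in this commit): fail fast if any CSV URL is missing.
const missing = FILES.filter((file) => !file.url).map((file) => file.name);
if (missing.length > 0) {
  outputProgress({
    status: 'error',
    operation: `Missing CSV URL environment variable for: ${missing.join(', ')}`,
    current: 0,
    total: FILES.length
  });
  process.exit(1);
}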
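
The script now delegates all progress reporting to outputProgress, formatElapsedTime, estimateRemaining, and calculateRate from ./metrics/utils/progress, which is not included in this diff. A minimal sketch consistent with how they are called here might look as follows; the function names come from the import, the bodies are assumptions, and formatDuration mirrors the helper the old version of the script defined inline:

// metrics/utils/progress.js — hypothetical sketch, not the actual module from the repository.

// Format a duration in seconds as "42s" or "2m 5s" (same shape as the old formatDuration helper).
function formatDuration(seconds) {
  if (seconds < 60) return `${Math.round(seconds)}s`;
  const minutes = Math.floor(seconds / 60);
  return `${minutes}m ${Math.round(seconds % 60)}s`;
}

// Emit one JSON object per line so a parent process can parse progress updates from stdout.
function outputProgress(progress) {
  console.log(JSON.stringify(progress));
}

// Elapsed time since a Date.now() timestamp, formatted for display.
function formatElapsedTime(startTime) {
  return formatDuration((Date.now() - startTime) / 1000);
}

// Average number of completed items per second since startTime.
function calculateRate(startTime, completed) {
  const elapsedSeconds = (Date.now() - startTime) / 1000;
  return elapsedSeconds > 0 ? completed / elapsedSeconds : 0;
}

// Rough time remaining, assuming the average rate so far holds for the rest of the items.
function estimateRemaining(startTime, completed, total) {
  const rate = calculateRate(startTime, completed);
  if (rate <= 0) return null;
  return formatDuration((total - completed) / rate);
}

module.exports = { outputProgress, formatElapsedTime, estimateRemaining, calculateRate };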
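
The exports comment says updateFiles and cancelUpdate are "needed by the route", but that route is not part of this commit. One plausible wiring, assuming an Express router and assuming the script lives at scripts/update-csv.js (both are guesses, not taken from the diff):

// Hypothetical route wiring — the consuming route is not shown in this commit.
const express = require('express');
const { updateFiles, cancelUpdate } = require('../scripts/update-csv'); // assumed path

const router = express.Router();

router.post('/metrics/csv/update', (req, res) => {
  // Start the update in the background; progress is streamed to stdout via outputProgress,
  // so the response only acknowledges that the update has started.
  updateFiles().catch((error) => {
    console.error('CSV update failed:', error.message);
  });
  res.status(202).json({ started: true });
});

router.post('/metrics/csv/cancel', (req, res) => {
  // Flips the isCancelled flag; in-flight downloads stop at the next data chunk.
  cancelUpdate();
  res.json({ cancelled: true });
});

module.exports = router;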