Add acot-server for getting data directly

2025-05-24 19:18:00 -04:00
parent 06aa2372e4
commit 9db11531d6
13 changed files with 2754 additions and 86 deletions


@@ -0,0 +1,239 @@
const { Client } = require('ssh2');
const mysql = require('mysql2/promise');
const fs = require('fs');

// Connection pooling and cache configuration
const connectionCache = {
  ssh: null,
  dbConnection: null,
  lastUsed: 0,
  isConnecting: false,
  connectionPromise: null,
  // Cache expiration time in milliseconds (5 minutes)
  expirationTime: 5 * 60 * 1000,
  // Cache for query results (key: query string, value: {data, timestamp})
  queryCache: new Map(),
  // Cache duration for different query types in milliseconds
  cacheDuration: {
    'field-options': 30 * 60 * 1000, // 30 minutes for field options
    'product-lines': 10 * 60 * 1000, // 10 minutes for product lines
    'sublines': 10 * 60 * 1000,      // 10 minutes for sublines
    'taxonomy': 30 * 60 * 1000,      // 30 minutes for taxonomy data
    'default': 60 * 1000             // 1 minute default
  }
};
/**
 * Get a database connection with connection pooling
 * @returns {Promise<{ssh: object, connection: object}>} The SSH and database connection
 */
async function getDbConnection() {
  const now = Date.now();

  // Check if we need to refresh the connection due to inactivity
  const needsRefresh = !connectionCache.ssh ||
    !connectionCache.dbConnection ||
    (now - connectionCache.lastUsed > connectionCache.expirationTime);

  // If connection is still valid, update last used time and return existing connection
  if (!needsRefresh) {
    connectionCache.lastUsed = now;
    return {
      ssh: connectionCache.ssh,
      connection: connectionCache.dbConnection
    };
  }

  // If another request is already establishing a connection, wait for that promise
  if (connectionCache.isConnecting && connectionCache.connectionPromise) {
    try {
      await connectionCache.connectionPromise;
      return {
        ssh: connectionCache.ssh,
        connection: connectionCache.dbConnection
      };
    } catch (error) {
      // If that connection attempt failed, we'll try again below
      console.error('Error waiting for existing connection:', error);
    }
  }

  // Close existing connections if they exist
  if (connectionCache.dbConnection) {
    try {
      await connectionCache.dbConnection.end();
    } catch (error) {
      console.error('Error closing existing database connection:', error);
    }
  }
  if (connectionCache.ssh) {
    try {
      connectionCache.ssh.end();
    } catch (error) {
      console.error('Error closing existing SSH connection:', error);
    }
  }

  // Mark that we're establishing a new connection
  connectionCache.isConnecting = true;

  // Create a new promise for this connection attempt
  connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
    const { ssh, stream, dbConfig } = tunnel;
    return mysql.createConnection({
      ...dbConfig,
      stream
    }).then(connection => {
      // Store the new connections
      connectionCache.ssh = ssh;
      connectionCache.dbConnection = connection;
      connectionCache.lastUsed = Date.now();
      connectionCache.isConnecting = false;
      return {
        ssh,
        connection
      };
    });
  }).catch(error => {
    connectionCache.isConnecting = false;
    throw error;
  });

  // Wait for the connection to be established
  return connectionCache.connectionPromise;
}
/**
 * Get cached query results or execute query if not cached
 * @param {string} cacheKey - Unique key to identify the query
 * @param {string} queryType - Type of query (field-options, product-lines, etc.)
 * @param {Function} queryFn - Function to execute if cache miss
 * @returns {Promise<any>} The query result
 */
async function getCachedQuery(cacheKey, queryType, queryFn) {
  // Get cache duration based on query type
  const cacheDuration = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;

  // Check if we have a valid cached result
  const cachedResult = connectionCache.queryCache.get(cacheKey);
  const now = Date.now();
  if (cachedResult && (now - cachedResult.timestamp < cacheDuration)) {
    console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
    return cachedResult.data;
  }

  // No valid cache found, execute the query
  console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
  const result = await queryFn();

  // Cache the result
  connectionCache.queryCache.set(cacheKey, {
    data: result,
    timestamp: now
  });
  return result;
}
/**
 * Setup SSH tunnel to production database
 * @private - Should only be used by getDbConnection
 * @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
 */
async function setupSshTunnel() {
  const sshConfig = {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true
  };
  const dbConfig = {
    host: process.env.PROD_DB_HOST || 'localhost',
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: 'Z'
  };
  return new Promise((resolve, reject) => {
    const ssh = new Client();
    ssh.on('error', (err) => {
      console.error('SSH connection error:', err);
      reject(err);
    });
    ssh.on('ready', () => {
      ssh.forwardOut(
        '127.0.0.1',
        0,
        dbConfig.host,
        dbConfig.port,
        (err, stream) => {
          // Return early on error so we don't also resolve with an undefined stream
          if (err) return reject(err);
          resolve({ ssh, stream, dbConfig });
        }
      );
    }).connect(sshConfig);
  });
}
/**
 * Clear cached query results
 * @param {string} [cacheKey] - Specific cache key to clear (clears all if not provided)
 */
function clearQueryCache(cacheKey) {
  if (cacheKey) {
    connectionCache.queryCache.delete(cacheKey);
    console.log(`Cleared cache for key: ${cacheKey}`);
  } else {
    connectionCache.queryCache.clear();
    console.log('Cleared all query cache');
  }
}

/**
 * Force close all active connections
 * Useful for server shutdown or manual connection reset
 */
async function closeAllConnections() {
  if (connectionCache.dbConnection) {
    try {
      await connectionCache.dbConnection.end();
      console.log('Closed database connection');
    } catch (error) {
      console.error('Error closing database connection:', error);
    }
    connectionCache.dbConnection = null;
  }
  if (connectionCache.ssh) {
    try {
      connectionCache.ssh.end();
      console.log('Closed SSH connection');
    } catch (error) {
      console.error('Error closing SSH connection:', error);
    }
    connectionCache.ssh = null;
  }
  connectionCache.lastUsed = 0;
  connectionCache.isConnecting = false;
  connectionCache.connectionPromise = null;
}

module.exports = {
  getDbConnection,
  getCachedQuery,
  clearQueryCache,
  closeAllConnections
};
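
A minimal sketch of how a route might consume this module. The cache key, the 'product-lines' query type, and the product_lines table are illustrative, and the require path is an assumption since the file's location isn't shown in this view:

// Hypothetical consumer of the pooling/cache module above (sketch only)
const { getDbConnection, getCachedQuery } = require('./db-connection'); // path is an assumption

async function getProductLines() {
  // 'product-lines' maps to the 10-minute TTL configured in cacheDuration above
  return getCachedQuery('product-lines:all', 'product-lines', async () => {
    const { connection } = await getDbConnection();
    // mysql2/promise returns [rows, fields]; the table name here is hypothetical
    const [rows] = await connection.query('SELECT id, name FROM product_lines');
    return rows;
  });
}

Because the callback only runs on a cache miss, repeated calls within the TTL never touch the SSH tunnel at all.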


@@ -0,0 +1,363 @@
const dotenv = require("dotenv");
const path = require("path");
const { outputProgress, formatElapsedTime } = require('./metrics-new/utils/progress');
const { setupConnections, closeConnections } = require('./import/utils');
const importCategories = require('./import/categories');
const { importProducts } = require('./import/products');
const importOrders = require('./import/orders');
const importPurchaseOrders = require('./import/purchase-orders');
const importHistoricalData = require('./import/historical-data');

dotenv.config({ path: path.join(__dirname, "../.env") });

// Constants to control which imports run
const IMPORT_CATEGORIES = true;
const IMPORT_PRODUCTS = true;
const IMPORT_ORDERS = true;
const IMPORT_PURCHASE_ORDERS = true;
const IMPORT_HISTORICAL_DATA = false;

// Flag for incremental updates; defaults to true unless explicitly set to 'false'
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false';

// SSH configuration
const sshConfig = {
  ssh: {
    host: process.env.PROD_SSH_HOST,
    port: process.env.PROD_SSH_PORT || 22,
    username: process.env.PROD_SSH_USER,
    privateKey: process.env.PROD_SSH_KEY_PATH
      ? require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH)
      : undefined,
    compress: true, // Enable SSH compression
  },
  prodDbConfig: {
    // MySQL config for production
    host: process.env.PROD_DB_HOST || "localhost",
    user: process.env.PROD_DB_USER,
    password: process.env.PROD_DB_PASSWORD,
    database: process.env.PROD_DB_NAME,
    port: process.env.PROD_DB_PORT || 3306,
    timezone: '-05:00', // Production DB always stores times in EST (UTC-5) regardless of DST
  },
  localDbConfig: {
    // PostgreSQL config for local
    host: process.env.DB_HOST,
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME,
    port: process.env.DB_PORT || 5432,
    ssl: process.env.DB_SSL === 'true',
    connectionTimeoutMillis: 60000,
    idleTimeoutMillis: 30000,
    max: 10 // connection pool max size
  }
};
let isImportCancelled = false;

// Cooperative cancellation: sets a flag that the import checks between steps
function cancelImport() {
  isImportCancelled = true;
  outputProgress({
    status: 'cancelled',
    operation: 'Import process',
    message: 'Import cancelled by user',
    current: 0,
    total: 0,
    elapsed: null,
    remaining: null,
    rate: 0
  });
}
async function main() {
  const startTime = Date.now();
  let connections;
  let completedSteps = 0;
  let importHistoryId;
  const totalSteps = [
    IMPORT_CATEGORIES,
    IMPORT_PRODUCTS,
    IMPORT_ORDERS,
    IMPORT_PURCHASE_ORDERS,
    IMPORT_HISTORICAL_DATA
  ].filter(Boolean).length;

  try {
    // Initial progress update
    outputProgress({
      status: "running",
      operation: "Import process",
      message: `Initializing SSH tunnel for ${INCREMENTAL_UPDATE ? 'incremental' : 'full'} import...`,
      current: completedSteps,
      total: totalSteps,
      elapsed: formatElapsedTime(startTime)
    });

    connections = await setupConnections(sshConfig);
    const { prodConnection, localConnection } = connections;
    if (isImportCancelled) throw new Error("Import cancelled");

    // Clean up any previously running imports that weren't completed
    await localConnection.query(`
      UPDATE import_history
      SET
        status = 'cancelled',
        end_time = NOW(),
        duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
        error_message = 'Previous import was not completed properly'
      WHERE status = 'running'
    `);

    // Create import history record for the overall session
    try {
      // pg returns a result object (not a [rows, fields] tuple), so no array destructuring here
      const historyResult = await localConnection.query(`
        INSERT INTO import_history (
          table_name,
          start_time,
          is_incremental,
          status,
          additional_info
        ) VALUES (
          'all_tables',
          NOW(),
          $1::boolean,
          'running',
          jsonb_build_object(
            'categories_enabled', $2::boolean,
            'products_enabled', $3::boolean,
            'orders_enabled', $4::boolean,
            'purchase_orders_enabled', $5::boolean,
            'historical_data_enabled', $6::boolean
          )
        ) RETURNING id
      `, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_HISTORICAL_DATA]);
      importHistoryId = historyResult.rows[0].id;
    } catch (error) {
      console.error("Error creating import history record:", error);
      outputProgress({
        status: "error",
        operation: "Import process",
        message: "Failed to create import history record",
        error: error.message
      });
      throw error;
    }

    const results = {
      categories: null,
      products: null,
      orders: null,
      purchaseOrders: null,
      historicalData: null
    };
    let totalRecordsAdded = 0;
    let totalRecordsUpdated = 0;

    // Run each import based on constants
    if (IMPORT_CATEGORIES) {
      results.categories = await importCategories(prodConnection, localConnection);
      if (isImportCancelled) throw new Error("Import cancelled");
      completedSteps++;
      console.log('Categories import result:', results.categories);
      totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0, 10);
      totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0, 10);
    }
    if (IMPORT_PRODUCTS) {
      results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
      if (isImportCancelled) throw new Error("Import cancelled");
      completedSteps++;
      console.log('Products import result:', results.products);
      totalRecordsAdded += parseInt(results.products?.recordsAdded || 0, 10);
      totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0, 10);
    }
    if (IMPORT_ORDERS) {
      results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
      if (isImportCancelled) throw new Error("Import cancelled");
      completedSteps++;
      console.log('Orders import result:', results.orders);
      totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0, 10);
      totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0, 10);
    }
    if (IMPORT_PURCHASE_ORDERS) {
      try {
        results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
        if (isImportCancelled) throw new Error("Import cancelled");
        completedSteps++;
        console.log('Purchase orders import result:', results.purchaseOrders);
        // Handle potential error status
        if (results.purchaseOrders?.status === 'error') {
          console.error('Purchase orders import had an error:', results.purchaseOrders.error);
        } else {
          totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0, 10);
          totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0, 10);
        }
      } catch (error) {
        // Let cancellation abort the whole run instead of being swallowed here
        if (error.message === "Import cancelled") throw error;
        console.error('Error during purchase orders import:', error);
        // Continue with other imports, don't fail the whole process
        results.purchaseOrders = {
          status: 'error',
          error: error.message,
          recordsAdded: 0,
          recordsUpdated: 0
        };
      }
    }
    if (IMPORT_HISTORICAL_DATA) {
      try {
        results.historicalData = await importHistoricalData(prodConnection, localConnection, INCREMENTAL_UPDATE);
        if (isImportCancelled) throw new Error("Import cancelled");
        completedSteps++;
        console.log('Historical data import result:', results.historicalData);
        // Handle potential error status
        if (results.historicalData?.status === 'error') {
          console.error('Historical data import had an error:', results.historicalData.error);
        } else {
          totalRecordsAdded += parseInt(results.historicalData?.recordsAdded || 0, 10);
          totalRecordsUpdated += parseInt(results.historicalData?.recordsUpdated || 0, 10);
        }
      } catch (error) {
        // Let cancellation abort the whole run instead of being swallowed here
        if (error.message === "Import cancelled") throw error;
        console.error('Error during historical data import:', error);
        // Continue with other imports, don't fail the whole process
        results.historicalData = {
          status: 'error',
          error: error.message,
          recordsAdded: 0,
          recordsUpdated: 0
        };
      }
    }

    const endTime = Date.now();
    const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);

    // Update import history with final stats
    await localConnection.query(`
      UPDATE import_history
      SET
        end_time = NOW(),
        duration_seconds = $1,
        records_added = $2,
        records_updated = $3,
        status = 'completed',
        additional_info = jsonb_build_object(
          'categories_enabled', $4::boolean,
          'products_enabled', $5::boolean,
          'orders_enabled', $6::boolean,
          'purchase_orders_enabled', $7::boolean,
          'historical_data_enabled', $8::boolean,
          'categories_result', COALESCE($9::jsonb, 'null'::jsonb),
          'products_result', COALESCE($10::jsonb, 'null'::jsonb),
          'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
          'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
          'historical_data_result', COALESCE($13::jsonb, 'null'::jsonb)
        )
      WHERE id = $14
    `, [
      totalElapsedSeconds,
      totalRecordsAdded,
      totalRecordsUpdated,
      IMPORT_CATEGORIES,
      IMPORT_PRODUCTS,
      IMPORT_ORDERS,
      IMPORT_PURCHASE_ORDERS,
      IMPORT_HISTORICAL_DATA,
      JSON.stringify(results.categories),
      JSON.stringify(results.products),
      JSON.stringify(results.orders),
      JSON.stringify(results.purchaseOrders),
      JSON.stringify(results.historicalData),
      importHistoryId
    ]);

    outputProgress({
      status: "complete",
      operation: "Import process",
      message: `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`,
      current: completedSteps,
      total: totalSteps,
      elapsed: formatElapsedTime(startTime),
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date(endTime).toISOString(),
        elapsed_time: formatElapsedTime(startTime),
        elapsed_seconds: totalElapsedSeconds,
        total_duration: formatElapsedTime(totalElapsedSeconds)
      },
      results
    });
    return results;
  } catch (error) {
    const endTime = Date.now();
    const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);

    // Update import history with the error; guard it so a failed update doesn't mask the original error
    if (importHistoryId && connections?.localConnection) {
      try {
        await connections.localConnection.query(`
          UPDATE import_history
          SET
            end_time = NOW(),
            duration_seconds = $1,
            status = $2,
            error_message = $3
          WHERE id = $4
        `, [totalElapsedSeconds, error.message === "Import cancelled" ? 'cancelled' : 'failed', error.message, importHistoryId]);
      } catch (updateError) {
        console.error("Error updating import history:", updateError);
      }
    }

    console.error("Error during import process:", error);
    outputProgress({
      status: error.message === "Import cancelled" ? "cancelled" : "error",
      operation: "Import process",
      message: error.message === "Import cancelled"
        ? `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}`
        : `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import failed after ${formatElapsedTime(totalElapsedSeconds)}`,
      error: error.message,
      current: completedSteps,
      total: totalSteps,
      elapsed: formatElapsedTime(startTime),
      timing: {
        start_time: new Date(startTime).toISOString(),
        end_time: new Date(endTime).toISOString(),
        elapsed_time: formatElapsedTime(startTime),
        elapsed_seconds: totalElapsedSeconds,
        total_duration: formatElapsedTime(totalElapsedSeconds)
      }
    });
    throw error;
  } finally {
    if (connections) {
      await closeConnections(connections).catch(err => {
        console.error("Error closing connections:", err);
      });
    }
  }
}
// Run the import only if this is the main module
if (require.main === module) {
  main().then((results) => {
    console.log('Import completed successfully:', results);
    // Force exit after a small delay to ensure all logs are written
    setTimeout(() => process.exit(0), 500);
  }).catch((error) => {
    console.error("Unhandled error in main process:", error);
    // Force exit with error code after a small delay
    setTimeout(() => process.exit(1), 500);
  });
}

// Export the functions needed by the route
module.exports = {
  main,
  cancelImport,
};
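
For context, a sketch of how a route might drive this script's exports, assuming an Express app; the endpoint paths, the require path, and the runningImport bookkeeping are illustrative, not part of this commit:

// Hypothetical route wiring for main/cancelImport (sketch only; assumes Express)
const express = require('express');
const { main, cancelImport } = require('./import-main'); // path is an assumption

const router = express.Router();
let runningImport = null; // tracks the in-flight import, if any

router.post('/import/start', (req, res) => {
  if (runningImport) return res.status(409).json({ error: 'Import already running' });
  runningImport = main()
    .catch(() => {}) // errors are already logged and recorded in import_history
    .finally(() => { runningImport = null; });
  res.status(202).json({ status: 'started' });
});

router.post('/import/cancel', (req, res) => {
  cancelImport(); // sets the flag; the import stops at its next cancellation checkpoint
  res.json({ status: 'cancelling' });
});

module.exports = router;

Because cancellation is cooperative, the cancel endpoint returns immediately while the import winds down at the next `isImportCancelled` check.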