Add acot-server for fetching production data directly

2025-05-24 19:18:00 -04:00
parent 06aa2372e4
commit 9db11531d6
13 changed files with 2754 additions and 86 deletions

View File

@@ -0,0 +1,238 @@
const { Client } = require('ssh2');
const mysql = require('mysql2/promise');
const fs = require('fs');
// Connection pooling and cache configuration
const connectionCache = {
ssh: null,
dbConnection: null,
lastUsed: 0,
isConnecting: false,
connectionPromise: null,
// Cache expiration time in milliseconds (5 minutes)
expirationTime: 5 * 60 * 1000,
// Cache for query results (key: query string, value: {data, timestamp})
queryCache: new Map(),
// Cache duration for different query types in milliseconds
cacheDuration: {
'stats': 60 * 1000, // 1 minute for stats
'products': 5 * 60 * 1000, // 5 minutes for products
'orders': 60 * 1000, // 1 minute for orders
'default': 60 * 1000 // 1 minute default
}
};
/**
* Get a database connection with connection pooling
* @returns {Promise<{ssh: object, connection: object}>} The SSH and database connection
*/
async function getDbConnection() {
const now = Date.now();
// Check if we need to refresh the connection due to inactivity
const needsRefresh = !connectionCache.ssh ||
!connectionCache.dbConnection ||
(now - connectionCache.lastUsed > connectionCache.expirationTime);
// If connection is still valid, update last used time and return existing connection
if (!needsRefresh) {
connectionCache.lastUsed = now;
return {
ssh: connectionCache.ssh,
connection: connectionCache.dbConnection
};
}
// If another request is already establishing a connection, wait for that promise
if (connectionCache.isConnecting && connectionCache.connectionPromise) {
try {
await connectionCache.connectionPromise;
return {
ssh: connectionCache.ssh,
connection: connectionCache.dbConnection
};
} catch (error) {
// If that connection attempt failed, we'll try again below
console.error('Error waiting for existing connection:', error);
}
}
// Close existing connections if they exist
if (connectionCache.dbConnection) {
try {
await connectionCache.dbConnection.end();
} catch (error) {
console.error('Error closing existing database connection:', error);
}
}
if (connectionCache.ssh) {
try {
connectionCache.ssh.end();
} catch (error) {
console.error('Error closing existing SSH connection:', error);
}
}
// Mark that we're establishing a new connection
connectionCache.isConnecting = true;
// Create a new promise for this connection attempt
connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
const { ssh, stream, dbConfig } = tunnel;
return mysql.createConnection({
...dbConfig,
stream
}).then(connection => {
// Store the new connections
connectionCache.ssh = ssh;
connectionCache.dbConnection = connection;
connectionCache.lastUsed = Date.now();
connectionCache.isConnecting = false;
return {
ssh,
connection
};
});
}).catch(error => {
connectionCache.isConnecting = false;
throw error;
});
// Wait for the connection to be established
return connectionCache.connectionPromise;
}
/**
* Get cached query results or execute query if not cached
* @param {string} cacheKey - Unique key to identify the query
* @param {string} queryType - Type of query (stats, products, orders, etc.)
* @param {Function} queryFn - Function to execute if cache miss
* @returns {Promise<any>} The query result
*/
async function getCachedQuery(cacheKey, queryType, queryFn) {
// Get cache duration based on query type
const cacheDuration = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;
// Check if we have a valid cached result
const cachedResult = connectionCache.queryCache.get(cacheKey);
const now = Date.now();
if (cachedResult && (now - cachedResult.timestamp < cacheDuration)) {
console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
return cachedResult.data;
}
// No valid cache found, execute the query
console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
const result = await queryFn();
// Cache the result
connectionCache.queryCache.set(cacheKey, {
data: result,
timestamp: now
});
return result;
}
/**
* Setup SSH tunnel to production database
* @private - Should only be used by getDbConnection
* @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
*/
async function setupSshTunnel() {
const sshConfig = {
host: process.env.PROD_SSH_HOST,
port: process.env.PROD_SSH_PORT || 22,
username: process.env.PROD_SSH_USER,
privateKey: process.env.PROD_SSH_KEY_PATH
? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
: undefined,
compress: true
};
const dbConfig = {
host: process.env.PROD_DB_HOST || 'localhost',
user: process.env.PROD_DB_USER,
password: process.env.PROD_DB_PASSWORD,
database: process.env.PROD_DB_NAME,
port: process.env.PROD_DB_PORT || 3306,
timezone: 'Z'
};
return new Promise((resolve, reject) => {
const ssh = new Client();
ssh.on('error', (err) => {
console.error('SSH connection error:', err);
reject(err);
});
ssh.on('ready', () => {
ssh.forwardOut(
'127.0.0.1',
0,
dbConfig.host,
dbConfig.port,
(err, stream) => {
if (err) return reject(err);
resolve({ ssh, stream, dbConfig });
}
);
}).connect(sshConfig);
});
}
/**
* Clear cached query results
* @param {string} [cacheKey] - Specific cache key to clear (clears all if not provided)
*/
function clearQueryCache(cacheKey) {
if (cacheKey) {
connectionCache.queryCache.delete(cacheKey);
console.log(`Cleared cache for key: ${cacheKey}`);
} else {
connectionCache.queryCache.clear();
console.log('Cleared all query cache');
}
}
/**
* Force close all active connections
* Useful for server shutdown or manual connection reset
*/
async function closeAllConnections() {
if (connectionCache.dbConnection) {
try {
await connectionCache.dbConnection.end();
console.log('Closed database connection');
} catch (error) {
console.error('Error closing database connection:', error);
}
connectionCache.dbConnection = null;
}
if (connectionCache.ssh) {
try {
connectionCache.ssh.end();
console.log('Closed SSH connection');
} catch (error) {
console.error('Error closing SSH connection:', error);
}
connectionCache.ssh = null;
}
connectionCache.lastUsed = 0;
connectionCache.isConnecting = false;
connectionCache.connectionPromise = null;
}
module.exports = {
getDbConnection,
getCachedQuery,
clearQueryCache,
closeAllConnections
};
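
For reference, a minimal sketch of how a route might consume this module; the 'product' table and cache key below are illustrative, not part of the commit:

const { getDbConnection, getCachedQuery } = require('./db/connection');

// Hypothetical consumer -- caches under the 'products' type, so the
// result is reused for 5 minutes before the query runs again.
async function getProductCount() {
  const { connection } = await getDbConnection();
  return getCachedQuery('product-count', 'products', async () => {
    const [rows] = await connection.execute('SELECT COUNT(*) AS count FROM product');
    return rows[0].count;
  });
}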

File diff suppressed because it is too large

View File

@@ -0,0 +1,22 @@
{
"name": "acot-server",
"version": "1.0.0",
"description": "A Cherry On Top production database server",
"main": "server.js",
"scripts": {
"start": "node server.js",
"dev": "nodemon server.js"
},
"dependencies": {
"express": "^4.18.2",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"morgan": "^1.10.0",
"ssh2": "^1.14.0",
"mysql2": "^3.6.5",
"compression": "^1.7.4"
},
"devDependencies": {
"nodemon": "^3.0.1"
}
}

View File

@@ -0,0 +1,57 @@
const express = require('express');
const router = express.Router();
const { getDbConnection, getCachedQuery } = require('../db/connection');
// Test endpoint to count orders
router.get('/order-count', async (req, res) => {
try {
const { connection } = await getDbConnection();
// Simple query to count orders from _order table
const queryFn = async () => {
const [rows] = await connection.execute('SELECT COUNT(*) as count FROM _order');
return rows[0].count;
};
const cacheKey = 'order-count';
const count = await getCachedQuery(cacheKey, 'default', queryFn);
res.json({
success: true,
data: {
orderCount: count,
timestamp: new Date().toISOString()
}
});
} catch (error) {
console.error('Error fetching order count:', error);
res.status(500).json({
success: false,
error: error.message
});
}
});
// Test connection endpoint
router.get('/test-connection', async (req, res) => {
try {
const { connection } = await getDbConnection();
// Test the connection with a simple query
const [rows] = await connection.execute('SELECT 1 as test');
res.json({
success: true,
message: 'Database connection successful',
data: rows[0]
});
} catch (error) {
console.error('Error testing connection:', error);
res.status(500).json({
success: false,
error: error.message
});
}
});
module.exports = router;
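
These endpoints can be smoke-tested directly against the server; a sketch, assuming the default port from server.js below:

const axios = require('axios');
const BASE = 'http://localhost:3012'; // default ACOT_PORT in server.js

async function smokeTest() {
  const ping = await axios.get(`${BASE}/api/acot/test/test-connection`);
  console.log(ping.data.message); // 'Database connection successful'
  const { data } = await axios.get(`${BASE}/api/acot/test/order-count`);
  console.log(`Orders: ${data.data.orderCount}`);
}

smokeTest().catch((err) => console.error(err.message));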

View File

@@ -0,0 +1,97 @@
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const morgan = require('morgan');
const compression = require('compression');
const fs = require('fs');
const path = require('path');
const { closeAllConnections } = require('./db/connection');
const app = express();
const PORT = process.env.ACOT_PORT || 3012;
// Create logs directory if it doesn't exist
const logDir = path.join(__dirname, 'logs/app');
if (!fs.existsSync(logDir)) {
fs.mkdirSync(logDir, { recursive: true });
}
// Create a write stream for access logs
const accessLogStream = fs.createWriteStream(
path.join(logDir, 'access.log'),
{ flags: 'a' }
);
// Middleware
app.use(compression());
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Logging middleware
if (process.env.NODE_ENV === 'production') {
app.use(morgan('combined', { stream: accessLogStream }));
} else {
app.use(morgan('dev'));
}
// Health check endpoint
app.get('/health', (req, res) => {
res.json({
status: 'healthy',
service: 'acot-server',
timestamp: new Date().toISOString(),
uptime: process.uptime()
});
});
// Routes
app.use('/api/acot/test', require('./routes/test'));
// Error handling middleware
app.use((err, req, res, next) => {
console.error('Unhandled error:', err);
res.status(500).json({
success: false,
error: process.env.NODE_ENV === 'production'
? 'Internal server error'
: err.message
});
});
// 404 handler
app.use((req, res) => {
res.status(404).json({
success: false,
error: 'Route not found'
});
});
// Start server
const server = app.listen(PORT, () => {
console.log(`ACOT Server running on port ${PORT}`);
console.log(`Environment: ${process.env.NODE_ENV}`);
});
// Graceful shutdown
// Node passes the signal name ('SIGTERM' or 'SIGINT') to the listener
const gracefulShutdown = async (signal) => {
console.log(`${signal} signal received: closing HTTP server`);
server.close(async () => {
console.log('HTTP server closed');
// Close database connections
try {
await closeAllConnections();
console.log('Database connections closed');
} catch (error) {
console.error('Error closing database connections:', error);
}
process.exit(0);
});
};
process.on('SIGTERM', gracefulShutdown);
process.on('SIGINT', gracefulShutdown);
module.exports = app;
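
The server is configured entirely through environment variables. A small preflight check like the following (an editorial sketch, not part of the commit) fails fast when one is missing; the names are the ones server.js and db/connection.js actually read:

// Sketch: verify required environment variables before starting
const REQUIRED = [
  'PROD_SSH_HOST', 'PROD_SSH_USER', 'PROD_SSH_KEY_PATH',
  'PROD_DB_USER', 'PROD_DB_PASSWORD', 'PROD_DB_NAME'
];
const missing = REQUIRED.filter((name) => !process.env[name]);
if (missing.length > 0) {
  console.error(`Missing required environment variables: ${missing.join(', ')}`);
  process.exit(1);
}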

View File

@@ -161,7 +161,6 @@ const DashboardLayout = () => {
<Navigation />
<div className="p-4 space-y-4">
<div className="grid grid-cols-1 xl:grid-cols-6 gap-4">
<div className="xl:col-span-4 col-span-6">
<div className="space-y-4 h-full w-full">

View File

@@ -0,0 +1,133 @@
import React, { useState, useEffect } from "react";
import axios from "axios";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Loader2, AlertCircle, CheckCircle, RefreshCw } from "lucide-react";
const AcotTest = () => {
const [loading, setLoading] = useState(false);
const [error, setError] = useState(null);
const [data, setData] = useState(null);
const [connectionStatus, setConnectionStatus] = useState(null);
const testConnection = async () => {
setLoading(true);
setError(null);
try {
const response = await axios.get("/api/acot/test/test-connection");
setConnectionStatus(response.data);
return response.data;
} catch (err) {
setError(err.response?.data?.error || err.message);
setConnectionStatus(null);
} finally {
setLoading(false);
}
};
const fetchOrderCount = async () => {
setLoading(true);
setError(null);
try {
const response = await axios.get("/api/acot/test/order-count");
setData(response.data.data);
} catch (err) {
setError(err.response?.data?.error || err.message);
setData(null);
} finally {
setLoading(false);
}
};
useEffect(() => {
testConnection();
}, []);
return (
<Card className="w-full max-w-md">
<CardHeader>
<CardTitle className="flex items-center justify-between">
ACOT Server Test
<Button
size="icon"
variant="outline"
onClick={async () => {
// connectionStatus is stale inside this closure, so use the
// status returned by testConnection instead
const status = await testConnection();
if (status?.success) {
fetchOrderCount();
}
}}
disabled={loading}
>
{loading ? (
<Loader2 className="h-4 w-4 animate-spin" />
) : (
<RefreshCw className="h-4 w-4" />
)}
</Button>
</CardTitle>
</CardHeader>
<CardContent className="space-y-4">
{/* Connection Status */}
<div className="space-y-2">
<h3 className="text-sm font-medium">Connection Status</h3>
{connectionStatus?.success ? (
<Alert className="bg-green-50 border-green-200">
<CheckCircle className="h-4 w-4 text-green-600" />
<AlertTitle className="text-green-800">Connected</AlertTitle>
<AlertDescription className="text-green-700">
{connectionStatus.message}
</AlertDescription>
</Alert>
) : error ? (
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertTitle>Connection Failed</AlertTitle>
<AlertDescription>{error}</AlertDescription>
</Alert>
) : (
<div className="text-sm text-muted-foreground">
Testing connection...
</div>
)}
</div>
{/* Order Count */}
{connectionStatus?.success && (
<div className="space-y-2">
<Button
onClick={fetchOrderCount}
disabled={loading}
className="w-full"
>
{loading ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Loading...
</>
) : (
"Fetch Order Count"
)}
</Button>
{data && (
<div className="p-4 bg-muted rounded-lg">
<div className="text-sm text-muted-foreground">
Total Orders in Database
</div>
<div className="text-2xl font-bold">
{data.orderCount?.toLocaleString()}
</div>
<div className="text-xs text-muted-foreground mt-1">
Last updated: {new Date(data.timestamp).toLocaleTimeString()}
</div>
</div>
)}
</div>
)}
</CardContent>
</Card>
);
};
export default AcotTest;

View File

@@ -0,0 +1,24 @@
import React from 'react';
import DashboardLayout from '@/components/DashboardLayout';
import AcotTest from '@/components/dashboard/AcotTest';
const TestAcotPage = () => {
return (
<DashboardLayout>
<div className="p-6">
<div className="mb-6">
<h1 className="text-2xl font-bold">ACOT Server Test</h1>
<p className="text-muted-foreground mt-1">
Test connection to production database through ACOT server
</p>
</div>
<div className="flex justify-center">
<AcotTest />
</div>
</div>
</DashboardLayout>
);
};
export default TestAcotPage;

View File

@@ -203,6 +203,42 @@ export default defineConfig(({ mode }) => {
});
});
},
},
"/api/acot": {
target: "https://dashboard.kent.pw",
changeOrigin: true,
secure: false,
rewrite: (path) => path.replace(/^\/api\/acot/, "/api/acot"),
configure: (proxy, _options) => {
proxy.on("error", (err, req, res) => {
console.error("ACOT proxy error:", err);
res.writeHead(500, {
"Content-Type": "application/json",
});
res.end(
JSON.stringify({
error: "Proxy Error",
message: err.message,
details: err.stack
})
);
});
proxy.on("proxyReq", (proxyReq, req, _res) => {
console.log("Outgoing ACOT request:", {
method: req.method,
url: req.url,
path: proxyReq.path,
headers: proxyReq.getHeaders(),
});
});
proxy.on("proxyRes", (proxyRes, req, _res) => {
console.log("ACOT proxy response:", {
statusCode: proxyRes.statusCode,
url: req.url,
headers: proxyRes.headers,
});
});
},
}
},
},
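
Note that the rewrite above maps /api/acot to itself, so it is a pass-through; a minimal equivalent entry without the debug logging would be (a sketch using Vite's standard server.proxy options):

// Sketch: same proxy behavior minus the rewrite no-op and logging
"/api/acot": {
  target: "https://dashboard.kent.pw",
  changeOrigin: true,
  secure: false,
},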

View File

@@ -0,0 +1,239 @@
const { Client } = require('ssh2');
const mysql = require('mysql2/promise');
const fs = require('fs');
// Connection pooling and cache configuration
const connectionCache = {
ssh: null,
dbConnection: null,
lastUsed: 0,
isConnecting: false,
connectionPromise: null,
// Cache expiration time in milliseconds (5 minutes)
expirationTime: 5 * 60 * 1000,
// Cache for query results (key: query string, value: {data, timestamp})
queryCache: new Map(),
// Cache duration for different query types in milliseconds
cacheDuration: {
'field-options': 30 * 60 * 1000, // 30 minutes for field options
'product-lines': 10 * 60 * 1000, // 10 minutes for product lines
'sublines': 10 * 60 * 1000, // 10 minutes for sublines
'taxonomy': 30 * 60 * 1000, // 30 minutes for taxonomy data
'default': 60 * 1000 // 1 minute default
}
};
/**
* Get a database connection with connection pooling
* @returns {Promise<{ssh: object, connection: object}>} The SSH and database connection
*/
async function getDbConnection() {
const now = Date.now();
// Check if we need to refresh the connection due to inactivity
const needsRefresh = !connectionCache.ssh ||
!connectionCache.dbConnection ||
(now - connectionCache.lastUsed > connectionCache.expirationTime);
// If connection is still valid, update last used time and return existing connection
if (!needsRefresh) {
connectionCache.lastUsed = now;
return {
ssh: connectionCache.ssh,
connection: connectionCache.dbConnection
};
}
// If another request is already establishing a connection, wait for that promise
if (connectionCache.isConnecting && connectionCache.connectionPromise) {
try {
await connectionCache.connectionPromise;
return {
ssh: connectionCache.ssh,
connection: connectionCache.dbConnection
};
} catch (error) {
// If that connection attempt failed, we'll try again below
console.error('Error waiting for existing connection:', error);
}
}
// Close existing connections if they exist
if (connectionCache.dbConnection) {
try {
await connectionCache.dbConnection.end();
} catch (error) {
console.error('Error closing existing database connection:', error);
}
}
if (connectionCache.ssh) {
try {
connectionCache.ssh.end();
} catch (error) {
console.error('Error closing existing SSH connection:', error);
}
}
// Mark that we're establishing a new connection
connectionCache.isConnecting = true;
// Create a new promise for this connection attempt
connectionCache.connectionPromise = setupSshTunnel().then(tunnel => {
const { ssh, stream, dbConfig } = tunnel;
return mysql.createConnection({
...dbConfig,
stream
}).then(connection => {
// Store the new connections
connectionCache.ssh = ssh;
connectionCache.dbConnection = connection;
connectionCache.lastUsed = Date.now();
connectionCache.isConnecting = false;
return {
ssh,
connection
};
});
}).catch(error => {
connectionCache.isConnecting = false;
throw error;
});
// Wait for the connection to be established
return connectionCache.connectionPromise;
}
/**
* Get cached query results or execute query if not cached
* @param {string} cacheKey - Unique key to identify the query
* @param {string} queryType - Type of query (field-options, product-lines, etc.)
* @param {Function} queryFn - Function to execute if cache miss
* @returns {Promise<any>} The query result
*/
async function getCachedQuery(cacheKey, queryType, queryFn) {
// Get cache duration based on query type
const cacheDuration = connectionCache.cacheDuration[queryType] || connectionCache.cacheDuration.default;
// Check if we have a valid cached result
const cachedResult = connectionCache.queryCache.get(cacheKey);
const now = Date.now();
if (cachedResult && (now - cachedResult.timestamp < cacheDuration)) {
console.log(`Cache hit for ${queryType} query: ${cacheKey}`);
return cachedResult.data;
}
// No valid cache found, execute the query
console.log(`Cache miss for ${queryType} query: ${cacheKey}`);
const result = await queryFn();
// Cache the result
connectionCache.queryCache.set(cacheKey, {
data: result,
timestamp: now
});
return result;
}
/**
* Setup SSH tunnel to production database
* @private - Should only be used by getDbConnection
* @returns {Promise<{ssh: object, stream: object, dbConfig: object}>}
*/
async function setupSshTunnel() {
const sshConfig = {
host: process.env.PROD_SSH_HOST,
port: process.env.PROD_SSH_PORT || 22,
username: process.env.PROD_SSH_USER,
privateKey: process.env.PROD_SSH_KEY_PATH
? fs.readFileSync(process.env.PROD_SSH_KEY_PATH)
: undefined,
compress: true
};
const dbConfig = {
host: process.env.PROD_DB_HOST || 'localhost',
user: process.env.PROD_DB_USER,
password: process.env.PROD_DB_PASSWORD,
database: process.env.PROD_DB_NAME,
port: process.env.PROD_DB_PORT || 3306,
timezone: 'Z'
};
return new Promise((resolve, reject) => {
const ssh = new Client();
ssh.on('error', (err) => {
console.error('SSH connection error:', err);
reject(err);
});
ssh.on('ready', () => {
ssh.forwardOut(
'127.0.0.1',
0,
dbConfig.host,
dbConfig.port,
(err, stream) => {
if (err) return reject(err);
resolve({ ssh, stream, dbConfig });
}
);
}).connect(sshConfig);
});
}
/**
* Clear cached query results
* @param {string} [cacheKey] - Specific cache key to clear (clears all if not provided)
*/
function clearQueryCache(cacheKey) {
if (cacheKey) {
connectionCache.queryCache.delete(cacheKey);
console.log(`Cleared cache for key: ${cacheKey}`);
} else {
connectionCache.queryCache.clear();
console.log('Cleared all query cache');
}
}
/**
* Force close all active connections
* Useful for server shutdown or manual connection reset
*/
async function closeAllConnections() {
if (connectionCache.dbConnection) {
try {
await connectionCache.dbConnection.end();
console.log('Closed database connection');
} catch (error) {
console.error('Error closing database connection:', error);
}
connectionCache.dbConnection = null;
}
if (connectionCache.ssh) {
try {
connectionCache.ssh.end();
console.log('Closed SSH connection');
} catch (error) {
console.error('Error closing SSH connection:', error);
}
connectionCache.ssh = null;
}
connectionCache.lastUsed = 0;
connectionCache.isConnecting = false;
connectionCache.connectionPromise = null;
}
module.exports = {
getDbConnection,
getCachedQuery,
clearQueryCache,
closeAllConnections
};
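
A write path should invalidate the relevant cache entry so stale data is not served for up to the configured duration; a sketch, where the product_line table and the cache key are illustrative assumptions:

const { getDbConnection, clearQueryCache } = require('./db/connection');

// Hypothetical write path -- table and key names are illustrative
async function renameProductLine(id, name) {
  const { connection } = await getDbConnection();
  await connection.execute('UPDATE product_line SET name = ? WHERE id = ?', [name, id]);
  // Drop the cached list so the next 'product-lines' read is fresh
  clearQueryCache('product-lines:all');
}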

View File

@@ -0,0 +1,363 @@
const dotenv = require("dotenv");
const path = require("path");
const { outputProgress, formatElapsedTime } = require('./metrics-new/utils/progress');
const { setupConnections, closeConnections } = require('./import/utils');
const importCategories = require('./import/categories');
const { importProducts } = require('./import/products');
const importOrders = require('./import/orders');
const importPurchaseOrders = require('./import/purchase-orders');
const importHistoricalData = require('./import/historical-data');
dotenv.config({ path: path.join(__dirname, "../.env") });
// Constants to control which imports run
const IMPORT_CATEGORIES = true;
const IMPORT_PRODUCTS = true;
const IMPORT_ORDERS = true;
const IMPORT_PURCHASE_ORDERS = true;
const IMPORT_HISTORICAL_DATA = false;
// Add flag for incremental updates
const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
// SSH configuration
const sshConfig = {
ssh: {
host: process.env.PROD_SSH_HOST,
port: process.env.PROD_SSH_PORT || 22,
username: process.env.PROD_SSH_USER,
privateKey: process.env.PROD_SSH_KEY_PATH
? require("fs").readFileSync(process.env.PROD_SSH_KEY_PATH)
: undefined,
compress: true, // Enable SSH compression
},
prodDbConfig: {
// MySQL config for production
host: process.env.PROD_DB_HOST || "localhost",
user: process.env.PROD_DB_USER,
password: process.env.PROD_DB_PASSWORD,
database: process.env.PROD_DB_NAME,
port: process.env.PROD_DB_PORT || 3306,
timezone: '-05:00', // Production DB always stores times in EST (UTC-5) regardless of DST
},
localDbConfig: {
// PostgreSQL config for local
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
port: process.env.DB_PORT || 5432,
ssl: process.env.DB_SSL === 'true',
connectionTimeoutMillis: 60000,
idleTimeoutMillis: 30000,
max: 10 // connection pool max size
}
};
let isImportCancelled = false;
// Add cancel function
function cancelImport() {
isImportCancelled = true;
outputProgress({
status: 'cancelled',
operation: 'Import process',
message: 'Import cancelled by user',
current: 0,
total: 0,
elapsed: null,
remaining: null,
rate: 0
});
}
async function main() {
const startTime = Date.now();
let connections;
let completedSteps = 0;
let importHistoryId;
const totalSteps = [
IMPORT_CATEGORIES,
IMPORT_PRODUCTS,
IMPORT_ORDERS,
IMPORT_PURCHASE_ORDERS,
IMPORT_HISTORICAL_DATA
].filter(Boolean).length;
try {
// Initial progress update
outputProgress({
status: "running",
operation: "Import process",
message: `Initializing SSH tunnel for ${INCREMENTAL_UPDATE ? 'incremental' : 'full'} import...`,
current: completedSteps,
total: totalSteps,
elapsed: formatElapsedTime(startTime)
});
connections = await setupConnections(sshConfig);
const { prodConnection, localConnection } = connections;
if (isImportCancelled) throw new Error("Import cancelled");
// Clean up any previously running imports that weren't completed
await localConnection.query(`
UPDATE import_history
SET
status = 'cancelled',
end_time = NOW(),
duration_seconds = EXTRACT(EPOCH FROM (NOW() - start_time))::INTEGER,
error_message = 'Previous import was not completed properly'
WHERE status = 'running'
`);
// Create import history record for the overall session
try {
const [historyResult] = await localConnection.query(`
INSERT INTO import_history (
table_name,
start_time,
is_incremental,
status,
additional_info
) VALUES (
'all_tables',
NOW(),
$1::boolean,
'running',
jsonb_build_object(
'categories_enabled', $2::boolean,
'products_enabled', $3::boolean,
'orders_enabled', $4::boolean,
'purchase_orders_enabled', $5::boolean,
'historical_data_enabled', $6::boolean
)
) RETURNING id
`, [INCREMENTAL_UPDATE, IMPORT_CATEGORIES, IMPORT_PRODUCTS, IMPORT_ORDERS, IMPORT_PURCHASE_ORDERS, IMPORT_HISTORICAL_DATA]);
importHistoryId = historyResult.rows[0].id;
} catch (error) {
console.error("Error creating import history record:", error);
outputProgress({
status: "error",
operation: "Import process",
message: "Failed to create import history record",
error: error.message
});
throw error;
}
const results = {
categories: null,
products: null,
orders: null,
purchaseOrders: null,
historicalData: null
};
let totalRecordsAdded = 0;
let totalRecordsUpdated = 0;
// Run each import based on constants
if (IMPORT_CATEGORIES) {
results.categories = await importCategories(prodConnection, localConnection);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Categories import result:', results.categories);
totalRecordsAdded += parseInt(results.categories?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.categories?.recordsUpdated || 0);
}
if (IMPORT_PRODUCTS) {
results.products = await importProducts(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Products import result:', results.products);
totalRecordsAdded += parseInt(results.products?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.products?.recordsUpdated || 0);
}
if (IMPORT_ORDERS) {
results.orders = await importOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Orders import result:', results.orders);
totalRecordsAdded += parseInt(results.orders?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.orders?.recordsUpdated || 0);
}
if (IMPORT_PURCHASE_ORDERS) {
try {
results.purchaseOrders = await importPurchaseOrders(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Purchase orders import result:', results.purchaseOrders);
// Handle potential error status
if (results.purchaseOrders?.status === 'error') {
console.error('Purchase orders import had an error:', results.purchaseOrders.error);
} else {
totalRecordsAdded += parseInt(results.purchaseOrders?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.purchaseOrders?.recordsUpdated || 0);
}
} catch (error) {
console.error('Error during purchase orders import:', error);
// Continue with other imports, don't fail the whole process
results.purchaseOrders = {
status: 'error',
error: error.message,
recordsAdded: 0,
recordsUpdated: 0
};
}
}
if (IMPORT_HISTORICAL_DATA) {
try {
results.historicalData = await importHistoricalData(prodConnection, localConnection, INCREMENTAL_UPDATE);
if (isImportCancelled) throw new Error("Import cancelled");
completedSteps++;
console.log('Historical data import result:', results.historicalData);
// Handle potential error status
if (results.historicalData?.status === 'error') {
console.error('Historical data import had an error:', results.historicalData.error);
} else {
totalRecordsAdded += parseInt(results.historicalData?.recordsAdded || 0);
totalRecordsUpdated += parseInt(results.historicalData?.recordsUpdated || 0);
}
} catch (error) {
console.error('Error during historical data import:', error);
// Continue with other imports, don't fail the whole process
results.historicalData = {
status: 'error',
error: error.message,
recordsAdded: 0,
recordsUpdated: 0
};
}
}
const endTime = Date.now();
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
// Update import history with final stats
await localConnection.query(`
UPDATE import_history
SET
end_time = NOW(),
duration_seconds = $1,
records_added = $2,
records_updated = $3,
status = 'completed',
additional_info = jsonb_build_object(
'categories_enabled', $4::boolean,
'products_enabled', $5::boolean,
'orders_enabled', $6::boolean,
'purchase_orders_enabled', $7::boolean,
'historical_data_enabled', $8::boolean,
'categories_result', COALESCE($9::jsonb, 'null'::jsonb),
'products_result', COALESCE($10::jsonb, 'null'::jsonb),
'orders_result', COALESCE($11::jsonb, 'null'::jsonb),
'purchase_orders_result', COALESCE($12::jsonb, 'null'::jsonb),
'historical_data_result', COALESCE($13::jsonb, 'null'::jsonb)
)
WHERE id = $14
`, [
totalElapsedSeconds,
parseInt(totalRecordsAdded),
parseInt(totalRecordsUpdated),
IMPORT_CATEGORIES,
IMPORT_PRODUCTS,
IMPORT_ORDERS,
IMPORT_PURCHASE_ORDERS,
IMPORT_HISTORICAL_DATA,
JSON.stringify(results.categories),
JSON.stringify(results.products),
JSON.stringify(results.orders),
JSON.stringify(results.purchaseOrders),
JSON.stringify(results.historicalData),
importHistoryId
]);
outputProgress({
status: "complete",
operation: "Import process",
message: `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import completed successfully in ${formatElapsedTime(totalElapsedSeconds)}`,
current: completedSteps,
total: totalSteps,
elapsed: formatElapsedTime(startTime),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date(endTime).toISOString(),
elapsed_time: formatElapsedTime(startTime),
elapsed_seconds: totalElapsedSeconds,
total_duration: formatElapsedTime(totalElapsedSeconds)
},
results
});
return results;
} catch (error) {
const endTime = Date.now();
const totalElapsedSeconds = Math.round((endTime - startTime) / 1000);
// Update import history with error
if (importHistoryId && connections?.localConnection) {
await connections.localConnection.query(`
UPDATE import_history
SET
end_time = NOW(),
duration_seconds = $1,
status = $2,
error_message = $3
WHERE id = $4
`, [totalElapsedSeconds, error.message === "Import cancelled" ? 'cancelled' : 'failed', error.message, importHistoryId]);
}
console.error("Error during import process:", error);
outputProgress({
status: error.message === "Import cancelled" ? "cancelled" : "error",
operation: "Import process",
message: error.message === "Import cancelled"
? `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import cancelled by user after ${formatElapsedTime(totalElapsedSeconds)}`
: `${INCREMENTAL_UPDATE ? 'Incremental' : 'Full'} import failed after ${formatElapsedTime(totalElapsedSeconds)}`,
error: error.message,
current: completedSteps,
total: totalSteps,
elapsed: formatElapsedTime(startTime),
timing: {
start_time: new Date(startTime).toISOString(),
end_time: new Date(endTime).toISOString(),
elapsed_time: formatElapsedTime(startTime),
elapsed_seconds: totalElapsedSeconds,
total_duration: formatElapsedTime(totalElapsedSeconds)
}
});
throw error;
} finally {
if (connections) {
await closeConnections(connections).catch(err => {
console.error("Error closing connections:", err);
});
}
}
}
// Run the import only if this is the main module
if (require.main === module) {
main().then((results) => {
console.log('Import completed successfully:', results);
// Force exit after a small delay to ensure all logs are written
setTimeout(() => process.exit(0), 500);
}).catch((error) => {
console.error("Unhandled error in main process:", error);
// Force exit with error code after a small delay
setTimeout(() => process.exit(1), 500);
});
}
// Export the functions needed by the route
module.exports = {
main,
cancelImport,
};
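
The closing comment says these exports are consumed by a route; a sketch of what that wiring might look like, where the route paths and the './import-all' filename are illustrative:

const express = require('express');
const { main, cancelImport } = require('./import-all'); // hypothetical filename
const router = express.Router();

router.post('/import/start', (req, res) => {
  // Fire and forget; progress is reported via outputProgress elsewhere
  main().catch((error) => console.error('Import failed:', error));
  res.json({ success: true, message: 'Import started' });
});

router.post('/import/cancel', (req, res) => {
  cancelImport();
  res.json({ success: true, message: 'Import cancellation requested' });
});

module.exports = router;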

View File

@@ -1,7 +1,7 @@
#!/bin/zsh
# Clear previous mount in case it's still there
-umount ~/Dev/dashboard/dashboard-server
+umount /Users/matt/Dev/dashboard/dashboard-server
# Mount
-sshfs matt@dashboard.kent.pw:/var/www/html/dashboard -p 22122 ~/Dev/dashboard/dashboard-server
+sshfs matt@dashboard.kent.pw:/var/www/html/dashboard -p 22122 /Users/matt/Dev/dashboard/dashboard-server

View File

@@ -1,83 +0,0 @@
# Gorgias API endpoints
location /api/gorgias/ {
proxy_pass http://localhost:3006/api/gorgias/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# CORS headers
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS, PUT, DELETE';
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization';
# Handle OPTIONS method
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS, PUT, DELETE';
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}
}
# Google Analytics API endpoints
location /api/analytics/ {
proxy_pass http://localhost:3007/api/analytics/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# CORS headers
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization';
# Handle OPTIONS method
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}
}
# Typeform API endpoints
location /api/typeform/ {
proxy_pass http://localhost:3008/api/typeform/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# CORS headers
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization';
# Handle OPTIONS method
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}
}