Add/update initial order components; add CSV update script; update import script
Three file diffs suppressed because they are too large.
@@ -43,12 +43,18 @@ CREATE TABLE IF NOT EXISTS orders (
  tax_included BOOLEAN DEFAULT false,
  shipping DECIMAL(10, 3) DEFAULT 0,
  customer VARCHAR(50) NOT NULL,
  status VARCHAR(20) DEFAULT 'pending',
  payment_method VARCHAR(50),
  shipping_method VARCHAR(50),
  shipping_address TEXT,
  billing_address TEXT,
  canceled BOOLEAN DEFAULT false,
  FOREIGN KEY (product_id) REFERENCES products(product_id),
  FOREIGN KEY (SKU) REFERENCES products(SKU),
  INDEX idx_order_number (order_number),
  INDEX idx_customer (customer),
  INDEX idx_date (date),
  INDEX idx_status (status),
  UNIQUE KEY unique_order_product (order_number, product_id)
);
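
Note on the schema hunk above: MySQL accepts FOREIGN KEY (SKU) REFERENCES products(SKU) only if products.SKU is indexed (for example declared UNIQUE). A minimal sketch of checking that precondition with the same mysql2 connection the import script opens below; the check is an editor's assumption, not part of this commit:

// Sketch (not in this diff): warn if products.SKU lacks the index that the
// orders-table foreign key above requires.
const [skuIndexes] = await connection.query(
  "SHOW INDEX FROM products WHERE Column_name = 'SKU'"
);
if (skuIndexes.length === 0) {
  console.warn('products.SKU is not indexed; the SKU foreign key will fail');
}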
@@ -4,8 +4,10 @@ const csv = require('csv-parse');
const mysql = require('mysql2/promise');
const dotenv = require('dotenv');

// For testing purposes, limit the number of rows to import
const TEST_ROW_LIMIT = 5000;
// For testing purposes, limit the number of rows to import (0 = no limit)
const PRODUCTS_TEST_LIMIT = 0;
const ORDERS_TEST_LIMIT = 5000;
const PURCHASE_ORDERS_TEST_LIMIT = 0;

dotenv.config({ path: path.join(__dirname, '../.env') });

@@ -17,8 +19,46 @@ const dbConfig = {
  multipleStatements: true
};

// Helper function to count total rows in a CSV file
async function countRows(filePath) {
  return new Promise((resolve, reject) => {
    let count = 0;
    fs.createReadStream(filePath)
      .pipe(csv.parse())
      .on('data', () => count++)
      .on('error', reject)
      .on('end', () => resolve(count - 1)); // Subtract 1 for header row
  });
}
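
// Editor's note (not part of this commit): countRows streams the whole file
// once just to size the progress output; standalone it behaves like this
// (path matches the products file imported in main() below):
// countRows(path.join(__dirname, '../csv/39f2x83-products.csv'))
//   .then(n => console.log(`${n.toLocaleString()} data rows (header excluded)`));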

// Helper function to format time duration
function formatDuration(seconds) {
  if (seconds < 60) return `${Math.round(seconds)}s`;
  const minutes = Math.floor(seconds / 60);
  seconds = Math.round(seconds % 60);
  return `${minutes}m ${seconds}s`;
}
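
// Editor's note (not part of this commit) - sample outputs from formatDuration:
//   formatDuration(45)    -> "45s"
//   formatDuration(125)   -> "2m 5s"
//   formatDuration(119.6) -> "1m 60s" (rounding edge case near a minute boundary)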

// Helper function to update progress with time estimate
function updateProgress(current, total, operation, startTime) {
  const percentage = ((current / total) * 100).toFixed(1);
  const elapsed = (Date.now() - startTime) / 1000;
  const rate = current / elapsed; // rows per second
  const remaining = (total - current) / rate;

  process.stdout.write(
    `\r${operation}: ${current.toLocaleString()}/${total.toLocaleString()} rows ` +
    `(${percentage}%) - Rate: ${Math.round(rate)}/s - ` +
    `Elapsed: ${formatDuration(elapsed)} - ` +
    `Est. remaining: ${formatDuration(remaining)}`
  );
}
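
// Editor's worked example (not part of this commit): 2,500 of 10,000 rows with
// startTime 5s in the past gives 25.0%, 500 rows/s, est. 15s remaining:
// updateProgress(2500, 10000, 'Demo', Date.now() - 5000);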

async function importProducts(connection, filePath) {
  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
  const totalRows = PRODUCTS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PRODUCTS_TEST_LIMIT) : await countRows(filePath);
  const startTime = Date.now();
  console.log(`\nStarting products import (${totalRows.toLocaleString()} total rows${PRODUCTS_TEST_LIMIT > 0 ? ` - limited to ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows` : ''})`);

  function convertDate(dateStr) {
    if (!dateStr) return null;
@@ -29,14 +69,22 @@ async function importProducts(connection, filePath) {
  let updated = 0;
  let added = 0;
  let rowCount = 0;
  let lastUpdate = Date.now();

  for await (const record of parser) {
    // if (rowCount >= TEST_ROW_LIMIT) {
    //   console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
    //   break;
    // }
    if (PRODUCTS_TEST_LIMIT > 0 && rowCount >= PRODUCTS_TEST_LIMIT) {
      console.log(`\nReached test limit of ${PRODUCTS_TEST_LIMIT.toLocaleString()} rows`);
      break;
    }
    rowCount++;

    // Update progress every 100ms to avoid console flooding
    const now = Date.now();
    if (now - lastUpdate > 100) {
      updateProgress(rowCount, totalRows, 'Products', startTime);
      lastUpdate = now;
    }

    // Check if product exists
    const [existing] = await connection.query('SELECT product_id FROM products WHERE product_id = ?', [record.product_id]);

@@ -69,14 +117,19 @@ async function importProducts(connection, filePath) {
      ]);
      existing.length ? updated++ : added++;
    } catch (error) {
      console.error(`Error importing product ${record.product_id}:`, error.message);
      console.error(`\nError importing product ${record.product_id}:`, error.message);
    }
  }
  console.log(`Products import completed: ${added} added, ${updated} updated (processed ${rowCount} rows)`);

  const duration = ((Date.now() - startTime) / 1000).toFixed(1);
  console.log(`\nProducts import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated (processed ${rowCount.toLocaleString()} rows)`);
}

async function importOrders(connection, filePath) {
  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
  const totalRows = ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), ORDERS_TEST_LIMIT) : await countRows(filePath);
  const startTime = Date.now();
  console.log(`\nStarting orders import (${totalRows.toLocaleString()} total rows${ORDERS_TEST_LIMIT > 0 ? ` - limited to ${ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);

  function convertDate(dateStr) {
    if (!dateStr) return null;
@@ -92,14 +145,22 @@ async function importOrders(connection, filePath) {
  let updated = 0;
  let added = 0;
  let rowCount = 0;
  let lastUpdate = Date.now();

  for await (const record of parser) {
    if (rowCount >= TEST_ROW_LIMIT) {
      console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
    if (ORDERS_TEST_LIMIT > 0 && rowCount >= ORDERS_TEST_LIMIT) {
      console.log(`\nReached test limit of ${ORDERS_TEST_LIMIT.toLocaleString()} rows`);
      break;
    }
    rowCount++;

    // Update progress every 100ms
    const now = Date.now();
    if (now - lastUpdate > 100) {
      updateProgress(rowCount, totalRows, 'Orders', startTime);
      lastUpdate = now;
    }

    if (!validProductIds.has(record.product_id)) {
      skipped++;
      continue;
@@ -128,15 +189,20 @@ async function importOrders(connection, filePath) {
      ]);
      existing.length ? updated++ : added++;
    } catch (error) {
      console.error(`Error importing order ${record.order_number}, product ${record.product_id}:`, error.message);
      console.error(`\nError importing order ${record.order_number}, product ${record.product_id}:`, error.message);
      skipped++;
    }
  }
  console.log(`Orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);

  const duration = ((Date.now() - startTime) / 1000).toFixed(1);
  console.log(`\nOrders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
}

async function importPurchaseOrders(connection, filePath) {
  const parser = fs.createReadStream(filePath).pipe(csv.parse({ columns: true, trim: true }));
  const totalRows = PURCHASE_ORDERS_TEST_LIMIT > 0 ? Math.min(await countRows(filePath), PURCHASE_ORDERS_TEST_LIMIT) : await countRows(filePath);
  const startTime = Date.now();
  console.log(`\nStarting purchase orders import (${totalRows.toLocaleString()} total rows${PURCHASE_ORDERS_TEST_LIMIT > 0 ? ` - limited to ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows` : ''})`);

  function convertDate(dateStr) {
    if (!dateStr) return null;
@@ -152,14 +218,22 @@ async function importPurchaseOrders(connection, filePath) {
  let updated = 0;
  let added = 0;
  let rowCount = 0;
  let lastUpdate = Date.now();

  for await (const record of parser) {
    if (rowCount >= TEST_ROW_LIMIT) {
      console.log(`Reached test limit of ${TEST_ROW_LIMIT} rows`);
    if (PURCHASE_ORDERS_TEST_LIMIT > 0 && rowCount >= PURCHASE_ORDERS_TEST_LIMIT) {
      console.log(`\nReached test limit of ${PURCHASE_ORDERS_TEST_LIMIT.toLocaleString()} rows`);
      break;
    }
    rowCount++;

    // Update progress every 100ms
    const now = Date.now();
    if (now - lastUpdate > 100) {
      updateProgress(rowCount, totalRows, 'Purchase Orders', startTime);
      lastUpdate = now;
    }

    if (!validProductIds.has(record.product_id)) {
      skipped++;
      continue;
@@ -188,14 +262,18 @@ async function importPurchaseOrders(connection, filePath) {
      ]);
      existing.length ? updated++ : added++;
    } catch (error) {
      console.error(`Error importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
      console.error(`\nError importing PO ${record.po_id}, product ${record.product_id}:`, error.message);
      skipped++;
    }
  }
  console.log(`Purchase orders import completed: ${added} added, ${updated} updated, ${skipped} skipped (processed ${rowCount} rows)`);

  const duration = ((Date.now() - startTime) / 1000).toFixed(1);
  console.log(`\nPurchase orders import completed in ${duration}s: ${added.toLocaleString()} added, ${updated.toLocaleString()} updated, ${skipped.toLocaleString()} skipped (processed ${rowCount.toLocaleString()} rows)`);
}

async function main() {
  console.log('Starting import process...');
  const startTime = Date.now();
  const connection = await mysql.createConnection(dbConfig);

  try {
@@ -205,18 +283,14 @@ async function main() {
    await connection.query(schemaSQL);

    // Import products first since they're referenced by other tables
    console.log('Importing products...');
    await importProducts(connection, path.join(__dirname, '../csv/39f2x83-products.csv'));

    console.log('Importing orders...');
    await importOrders(connection, path.join(__dirname, '../csv/39f2x83-orders.csv'));

    console.log('Importing purchase orders...');
    await importPurchaseOrders(connection, path.join(__dirname, '../csv/39f2x83-purchase_orders.csv'));

    console.log('All imports completed successfully');
    const duration = ((Date.now() - startTime) / 1000).toFixed(1);
    console.log(`\nAll imports completed successfully in ${duration} seconds`);
  } catch (error) {
    console.error('Error during import:', error);
    console.error('\nError during import:', error);
    process.exit(1);
  } finally {
    await connection.end();

@@ -24,10 +24,10 @@ async function setupDatabase() {
    console.log('Schema created successfully');

    // Create stored procedures
    console.log('Setting up stored procedures...');
    const proceduresSQL = fs.readFileSync(path.join(__dirname, '../db/procedures.sql'), 'utf8');
    await connection.query(proceduresSQL);
    console.log('Stored procedures created successfully');
    // console.log('Setting up stored procedures...');
    // const proceduresSQL = fs.readFileSync(path.join(__dirname, '../db/procedures.sql'), 'utf8');
    // await connection.query(proceduresSQL);
    // console.log('Stored procedures created successfully');

    console.log('Database setup completed successfully');
  } catch (error) {
inventory-server/scripts/update-csv.js (new file, 97 lines)
@@ -0,0 +1,97 @@
const fs = require('fs');
const path = require('path');
const https = require('https');

// Configuration
const FILES = [
  {
    name: '39f2x83-products.csv',
    url: 'https://feeds.acherryontop.com/39f2x83-products.csv'
  },
  {
    name: '39f2x83-orders.csv',
    url: 'https://feeds.acherryontop.com/39f2x83-orders.csv'
  },
  {
    name: '39f2x83-purchase_orders.csv',
    url: 'https://feeds.acherryontop.com/39f2x83-purchase_orders.csv'
  }
];

const CSV_DIR = path.join(__dirname, '..', 'csv');

// Ensure CSV directory exists
if (!fs.existsSync(CSV_DIR)) {
  fs.mkdirSync(CSV_DIR, { recursive: true });
}

// Function to download a file
function downloadFile(url, filePath) {
  return new Promise((resolve, reject) => {
    const file = fs.createWriteStream(filePath);

    https.get(url, response => {
      if (response.statusCode !== 200) {
        reject(new Error(`Failed to download: ${response.statusCode} ${response.statusMessage}`));
        return;
      }

      const totalSize = parseInt(response.headers['content-length'], 10);
      let downloadedSize = 0;

      response.on('data', chunk => {
        downloadedSize += chunk.length;
        const progress = (downloadedSize / totalSize * 100).toFixed(2);
        process.stdout.write(`\rDownloading ${path.basename(filePath)}: ${progress}%`);
      });

      response.pipe(file);

      file.on('finish', () => {
        process.stdout.write('\n');
        file.close();
        resolve();
      });
    }).on('error', error => {
      fs.unlink(filePath, () => {}); // Delete the file if download failed
      reject(error);
    });

    file.on('error', error => {
      fs.unlink(filePath, () => {}); // Delete the file if there was an error
      reject(error);
    });
  });
}
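
// Editor's sketch (not part of this commit): downloadFile can be exercised on
// its own; updateFiles() below is the script's real entry point. The target
// file name here is hypothetical.
// downloadFile(FILES[0].url, path.join(CSV_DIR, 'products-test.csv'))
//   .then(() => console.log('done'))
//   .catch(err => console.error('download failed:', err.message));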

// Main function to update all files
async function updateFiles() {
  console.log('Starting CSV file updates...');

  for (const file of FILES) {
    const filePath = path.join(CSV_DIR, file.name);

    try {
      // Delete existing file if it exists
      if (fs.existsSync(filePath)) {
        console.log(`Removing existing file: ${file.name}`);
        fs.unlinkSync(filePath);
      }

      // Download new file
      console.log(`Downloading ${file.name}...`);
      await downloadFile(file.url, filePath);
      console.log(`Successfully updated ${file.name}`);
    } catch (error) {
      console.error(`Error updating ${file.name}:`, error.message);
    }
  }

  console.log('CSV file update complete!');
}

// Run the update
updateFiles().catch(error => {
  console.error('Update failed:', error);
  process.exit(1);
});
inventory-server/src/app.js (new file, 35 lines)
@@ -0,0 +1,35 @@
const express = require('express');
const cors = require('cors');
const mysql = require('mysql2/promise');
const productsRouter = require('./routes/products');
const dashboardRouter = require('./routes/dashboard');
const ordersRouter = require('./routes/orders');

const app = express();

app.use(cors());
app.use(express.json());

// Database connection
const pool = mysql.createPool({
  host: 'localhost',
  user: 'root',
  password: '',
  database: 'inventory',
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0
});

// Make db pool available in routes
app.locals.pool = pool;

// Routes
app.use('/api/products', productsRouter);
app.use('/api/dashboard', dashboardRouter);
app.use('/api/orders', ordersRouter);

const PORT = process.env.PORT || 3001;
app.listen(PORT, () => {
  console.log(`Server is running on port ${PORT}`);
});
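
One caveat on the new app.js: the pool credentials are hard-coded, while scripts/import.js loads them from ../.env via dotenv. A minimal sketch of aligning the two (the DB_* variable names are assumptions, not part of this commit):

// Hypothetical env-driven pool, mirroring the dotenv setup in scripts/import.js.
const path = require('path');
require('dotenv').config({ path: path.join(__dirname, '../.env') });
const pool = mysql.createPool({
  host: process.env.DB_HOST || 'localhost',
  user: process.env.DB_USER || 'root',
  password: process.env.DB_PASSWORD || '',
  database: process.env.DB_NAME || 'inventory',
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0
});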

inventory-server/src/routes/orders.js (new file, 255 lines)
@@ -0,0 +1,255 @@
const express = require('express');
const router = express.Router();

// Get all orders with pagination, filtering, and sorting
router.get('/', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    const page = parseInt(req.query.page) || 1;
    const limit = parseInt(req.query.limit) || 50;
    const offset = (page - 1) * limit;
    const search = req.query.search || '';
    const status = req.query.status || 'all';
    const fromDate = req.query.fromDate ? new Date(req.query.fromDate) : null;
    const toDate = req.query.toDate ? new Date(req.query.toDate) : null;
    const minAmount = parseFloat(req.query.minAmount) || 0;
    const maxAmount = req.query.maxAmount ? parseFloat(req.query.maxAmount) : null;
    const sortColumn = req.query.sortColumn || 'date';
    const sortDirection = req.query.sortDirection === 'desc' ? 'DESC' : 'ASC';

    // Build the WHERE clause
    const conditions = ['o1.canceled = false'];
    const params = [];

    if (search) {
      conditions.push('(o1.order_number LIKE ? OR o1.customer LIKE ?)');
      params.push(`%${search}%`, `%${search}%`);
    }

    if (status !== 'all') {
      conditions.push('o1.status = ?');
      params.push(status);
    }

    if (fromDate) {
      conditions.push('DATE(o1.date) >= DATE(?)');
      params.push(fromDate.toISOString());
    }

    if (toDate) {
      conditions.push('DATE(o1.date) <= DATE(?)');
      params.push(toDate.toISOString());
    }

    if (minAmount > 0) {
      conditions.push('total_amount >= ?');
      params.push(minAmount);
    }

    if (maxAmount) {
      conditions.push('total_amount <= ?');
      params.push(maxAmount);
    }

    // Get total count for pagination
    const [countResult] = await pool.query(`
      SELECT COUNT(DISTINCT o1.order_number) as total
      FROM orders o1
      LEFT JOIN (
        SELECT order_number, SUM(price * quantity) as total_amount
        FROM orders
        GROUP BY order_number
      ) totals ON o1.order_number = totals.order_number
      WHERE ${conditions.join(' AND ')}
    `, params);

    const total = countResult[0].total;

    // Get paginated results
    const query = `
      SELECT
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        COUNT(o2.product_id) as items_count,
        SUM(o2.price * o2.quantity) as total_amount
      FROM orders o1
      JOIN orders o2 ON o1.order_number = o2.order_number
      WHERE ${conditions.join(' AND ')}
      GROUP BY
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method
      ORDER BY ${
        sortColumn === 'items_count' || sortColumn === 'total_amount'
          ? `${sortColumn} ${sortDirection}`
          : `o1.${sortColumn} ${sortDirection}`
      }
      LIMIT ? OFFSET ?
    `;

    const [rows] = await pool.query(query, [...params, limit, offset]);

    // Get order statistics
    const [stats] = await pool.query(`
      WITH CurrentStats AS (
        SELECT
          COUNT(DISTINCT order_number) as total_orders,
          SUM(price * quantity) as total_revenue
        FROM orders
        WHERE canceled = false
          AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      ),
      PreviousStats AS (
        SELECT
          COUNT(DISTINCT order_number) as prev_orders,
          SUM(price * quantity) as prev_revenue
        FROM orders
        WHERE canceled = false
          AND DATE(date) BETWEEN DATE_SUB(CURDATE(), INTERVAL 60 DAY) AND DATE_SUB(CURDATE(), INTERVAL 30 DAY)
      ),
      OrderValues AS (
        SELECT
          order_number,
          SUM(price * quantity) as order_value
        FROM orders
        WHERE canceled = false
          AND DATE(date) >= DATE_SUB(CURDATE(), INTERVAL 30 DAY)
        GROUP BY order_number
      )
      SELECT
        cs.total_orders,
        cs.total_revenue,
        CASE
          WHEN ps.prev_orders > 0
          THEN ((cs.total_orders - ps.prev_orders) / ps.prev_orders * 100)
          ELSE 0
        END as order_growth,
        CASE
          WHEN ps.prev_revenue > 0
          THEN ((cs.total_revenue - ps.prev_revenue) / ps.prev_revenue * 100)
          ELSE 0
        END as revenue_growth,
        CASE
          WHEN cs.total_orders > 0
          THEN (cs.total_revenue / cs.total_orders)
          ELSE 0
        END as average_order_value,
        CASE
          WHEN ps.prev_orders > 0
          THEN (ps.prev_revenue / ps.prev_orders)
          ELSE 0
        END as prev_average_order_value
      FROM CurrentStats cs
      CROSS JOIN PreviousStats ps
    `);

    const orderStats = stats[0];

    res.json({
      orders: rows.map(row => ({
        ...row,
        total_amount: parseFloat(row.total_amount) || 0,
        items_count: parseInt(row.items_count) || 0,
        date: row.date
      })),
      pagination: {
        total,
        pages: Math.ceil(total / limit),
        currentPage: page,
        limit
      },
      stats: {
        totalOrders: parseInt(orderStats.total_orders) || 0,
        totalRevenue: parseFloat(orderStats.total_revenue) || 0,
        orderGrowth: parseFloat(orderStats.order_growth) || 0,
        revenueGrowth: parseFloat(orderStats.revenue_growth) || 0,
        averageOrderValue: parseFloat(orderStats.average_order_value) || 0,
        aovGrowth: orderStats.prev_average_order_value > 0
          ? ((orderStats.average_order_value - orderStats.prev_average_order_value) / orderStats.prev_average_order_value * 100)
          : 0,
        conversionRate: 2.5, // Placeholder - would need actual visitor data
        conversionGrowth: 0.5 // Placeholder - would need actual visitor data
      }
    });
  } catch (error) {
    console.error('Error fetching orders:', error);
    res.status(500).json({ error: 'Failed to fetch orders' });
  }
});
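
// Editor's notes (not part of this commit):
// 1) Once app.js is listening on port 3001, this endpoint can be exercised with
//    e.g. GET /api/orders?page=2&limit=25&status=pending&sortColumn=total_amount&sortDirection=desc
//    and returns the { orders, pagination, stats } shape built above.
// 2) Caveat: minAmount/maxAmount push `total_amount >= ?` into WHERE. The count
//    query resolves that against the `totals` derived table, but the paginated
//    query appears to have no total_amount column in WHERE scope (it is only an
//    aggregate alias there), so MySQL would reject those filters; moving them
//    into a HAVING clause would be the usual fix.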

// Get a single order with its items
router.get('/:orderNumber', async (req, res) => {
  const pool = req.app.locals.pool;
  try {
    // Get order details
    const [orderRows] = await pool.query(`
      SELECT DISTINCT
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        o1.shipping_address,
        o1.billing_address,
        COUNT(o2.product_id) as items_count,
        SUM(o2.price * o2.quantity) as total_amount
      FROM orders o1
      JOIN orders o2 ON o1.order_number = o2.order_number
      WHERE o1.order_number = ? AND o1.canceled = false
      GROUP BY
        o1.order_number,
        o1.customer,
        o1.date,
        o1.status,
        o1.payment_method,
        o1.shipping_method,
        o1.shipping_address,
        o1.billing_address
    `, [req.params.orderNumber]);

    if (orderRows.length === 0) {
      return res.status(404).json({ error: 'Order not found' });
    }

    // Get order items
    const [itemRows] = await pool.query(`
      SELECT
        o.product_id,
        p.title,
        p.sku,
        o.quantity,
        o.price,
        (o.price * o.quantity) as total
      FROM orders o
      JOIN products p ON o.product_id = p.product_id
      WHERE o.order_number = ? AND o.canceled = false
    `, [req.params.orderNumber]);

    const order = {
      ...orderRows[0],
      total_amount: parseFloat(orderRows[0].total_amount) || 0,
      items_count: parseInt(orderRows[0].items_count) || 0,
      items: itemRows.map(item => ({
        ...item,
        price: parseFloat(item.price) || 0,
        total: parseFloat(item.total) || 0,
        quantity: parseInt(item.quantity) || 0
      }))
    };

    res.json(order);
  } catch (error) {
    console.error('Error fetching order:', error);
    res.status(500).json({ error: 'Failed to fetch order' });
  }
});

module.exports = router;
@@ -36,6 +36,7 @@ const cors = require('cors');
const mysql = require('mysql2/promise');
const productsRouter = require('./routes/products');
const dashboardRouter = require('./routes/dashboard');
const ordersRouter = require('./routes/orders');

// Ensure required directories exist
['logs', 'uploads'].forEach(dir => {
@@ -111,6 +112,7 @@ pool.getConnection()
// Routes
app.use('/api/products', productsRouter);
app.use('/api/dashboard', dashboardRouter);
app.use('/api/orders', ordersRouter);

// Basic health check route
app.get('/health', (req, res) => {