Add new metrics route
@@ -11,11 +11,11 @@ const importHistoricalData = require('./import/historical-data');
 dotenv.config({ path: path.join(__dirname, "../.env") });
 
 // Constants to control which imports run
-const IMPORT_CATEGORIES = false;
-const IMPORT_PRODUCTS = false;
-const IMPORT_ORDERS = false;
-const IMPORT_PURCHASE_ORDERS = false;
-const IMPORT_HISTORICAL_DATA = true;
+const IMPORT_CATEGORIES = true;
+const IMPORT_PRODUCTS = true;
+const IMPORT_ORDERS = true;
+const IMPORT_PURCHASE_ORDERS = true;
+const IMPORT_HISTORICAL_DATA = false;
 
 // Add flag for incremental updates
 const INCREMENTAL_UPDATE = process.env.INCREMENTAL_UPDATE !== 'false'; // Default to true unless explicitly set to false
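
The hunk above flips the one-off import toggles and leaves incremental updates controlled by the environment. A minimal sketch of how that flag resolves (illustrative only: resolveIncrementalUpdate is a hypothetical helper, not part of this commit; only the comparison against the literal string 'false' comes from the line above):

// Hypothetical helper mirroring the INCREMENTAL_UPDATE parsing above.
// Anything other than the literal string 'false' (including an unset variable) enables incremental updates.
function resolveIncrementalUpdate(env) {
  return env.INCREMENTAL_UPDATE !== 'false';
}

console.log(resolveIncrementalUpdate({}));                              // true  (unset -> default on)
console.log(resolveIncrementalUpdate({ INCREMENTAL_UPDATE: '0' }));     // true  ('0' is not the string 'false')
console.log(resolveIncrementalUpdate({ INCREMENTAL_UPDATE: 'false' })); // false
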
@@ -1,62 +1,345 @@
 const express = require('express');
 const router = express.Router();
+const { Pool } = require('pg'); // Assuming pg driver
 
-// Get key metrics trends (revenue, inventory value, GMROI)
-router.get('/trends', async (req, res) => {
-  const pool = req.app.locals.pool;
-  try {
-    const { rows } = await pool.query(`
-      WITH MonthlyMetrics AS (
-        SELECT
-          make_date(pta.year, pta.month, 1) as date,
-          ROUND(COALESCE(SUM(pta.total_revenue), 0)::numeric, 3) as revenue,
-          ROUND(COALESCE(SUM(pta.total_cost), 0)::numeric, 3) as cost,
-          ROUND(COALESCE(SUM(pm.inventory_value), 0)::numeric, 3) as inventory_value,
-          CASE
-            WHEN SUM(pm.inventory_value) > 0
-            THEN ROUND((SUM(pta.total_revenue - pta.total_cost) / SUM(pm.inventory_value) * 100)::numeric, 3)
-            ELSE 0
-          END as gmroi
-        FROM product_time_aggregates pta
-        JOIN product_metrics pm ON pta.pid = pm.pid
-        WHERE (pta.year * 100 + pta.month) >=
-          EXTRACT(YEAR FROM CURRENT_DATE - INTERVAL '12 months')::integer * 100 +
-          EXTRACT(MONTH FROM CURRENT_DATE - INTERVAL '12 months')::integer
-        GROUP BY pta.year, pta.month
-        ORDER BY date ASC
-      )
-      SELECT
-        to_char(date, 'Mon YY') as date,
-        revenue,
-        inventory_value,
-        gmroi
-      FROM MonthlyMetrics
-    `);
-
-    console.log('Raw metrics trends data:', rows);
-
-    // Transform the data into the format expected by the frontend
-    const transformedData = {
-      revenue: rows.map(row => ({
-        date: row.date,
-        value: parseFloat(row.revenue)
-      })),
-      inventory_value: rows.map(row => ({
-        date: row.date,
-        value: parseFloat(row.inventory_value)
-      })),
-      gmroi: rows.map(row => ({
-        date: row.date,
-        value: parseFloat(row.gmroi)
-      }))
-    };
-
-    console.log('Transformed metrics data:', transformedData);
-    res.json(transformedData);
-  } catch (error) {
-    console.error('Error fetching metrics trends:', error);
-    res.status(500).json({ error: 'Failed to fetch metrics trends' });
-  }
-});
+// --- Configuration & Helpers ---
+
+const DEFAULT_PAGE_LIMIT = 50;
+const MAX_PAGE_LIMIT = 200; // Prevent excessive data requests
+
+/**
+ * Maps user-friendly query parameter keys (camelCase) to database column names.
+ * Also validates if the column is safe for sorting or filtering.
+ * Add ALL columns from product_metrics that should be filterable/sortable.
+ */
+const COLUMN_MAP = {
+  // Product Info
+  pid: { dbCol: 'pm.pid', type: 'number' },
+  sku: { dbCol: 'pm.sku', type: 'string' },
+  title: { dbCol: 'pm.title', type: 'string' },
+  brand: { dbCol: 'pm.brand', type: 'string' },
+  vendor: { dbCol: 'pm.vendor', type: 'string' },
+  imageUrl: { dbCol: 'pm.image_url', type: 'string' },
+  isVisible: { dbCol: 'pm.is_visible', type: 'boolean' },
+  isReplenishable: { dbCol: 'pm.is_replenishable', type: 'boolean' },
+  // Current Status
+  currentPrice: { dbCol: 'pm.current_price', type: 'number' },
+  currentRegularPrice: { dbCol: 'pm.current_regular_price', type: 'number' },
+  currentCostPrice: { dbCol: 'pm.current_cost_price', type: 'number' },
+  currentLandingCostPrice: { dbCol: 'pm.current_landing_cost_price', type: 'number' },
+  currentStock: { dbCol: 'pm.current_stock', type: 'number' },
+  currentStockCost: { dbCol: 'pm.current_stock_cost', type: 'number' },
+  currentStockRetail: { dbCol: 'pm.current_stock_retail', type: 'number' },
+  currentStockGross: { dbCol: 'pm.current_stock_gross', type: 'number' },
+  onOrderQty: { dbCol: 'pm.on_order_qty', type: 'number' },
+  onOrderCost: { dbCol: 'pm.on_order_cost', type: 'number' },
+  onOrderRetail: { dbCol: 'pm.on_order_retail', type: 'number' },
+  earliestExpectedDate: { dbCol: 'pm.earliest_expected_date', type: 'date' },
+  // Historical Dates
+  dateCreated: { dbCol: 'pm.date_created', type: 'date' },
+  dateFirstReceived: { dbCol: 'pm.date_first_received', type: 'date' },
+  dateLastReceived: { dbCol: 'pm.date_last_received', type: 'date' },
+  dateFirstSold: { dbCol: 'pm.date_first_sold', type: 'date' },
+  dateLastSold: { dbCol: 'pm.date_last_sold', type: 'date' },
+  ageDays: { dbCol: 'pm.age_days', type: 'number' },
+  // Rolling Period Metrics
+  sales7d: { dbCol: 'pm.sales_7d', type: 'number' }, revenue7d: { dbCol: 'pm.revenue_7d', type: 'number' },
+  sales14d: { dbCol: 'pm.sales_14d', type: 'number' }, revenue14d: { dbCol: 'pm.revenue_14d', type: 'number' },
+  sales30d: { dbCol: 'pm.sales_30d', type: 'number' }, revenue30d: { dbCol: 'pm.revenue_30d', type: 'number' },
+  cogs30d: { dbCol: 'pm.cogs_30d', type: 'number' }, profit30d: { dbCol: 'pm.profit_30d', type: 'number' },
+  returnsUnits30d: { dbCol: 'pm.returns_units_30d', type: 'number' }, returnsRevenue30d: { dbCol: 'pm.returns_revenue_30d', type: 'number' },
+  discounts30d: { dbCol: 'pm.discounts_30d', type: 'number' }, grossRevenue30d: { dbCol: 'pm.gross_revenue_30d', type: 'number' },
+  grossRegularRevenue30d: { dbCol: 'pm.gross_regular_revenue_30d', type: 'number' },
+  stockoutDays30d: { dbCol: 'pm.stockout_days_30d', type: 'number' },
+  sales365d: { dbCol: 'pm.sales_365d', type: 'number' }, revenue365d: { dbCol: 'pm.revenue_365d', type: 'number' },
+  avgStockUnits30d: { dbCol: 'pm.avg_stock_units_30d', type: 'number' }, avgStockCost30d: { dbCol: 'pm.avg_stock_cost_30d', type: 'number' },
+  avgStockRetail30d: { dbCol: 'pm.avg_stock_retail_30d', type: 'number' }, avgStockGross30d: { dbCol: 'pm.avg_stock_gross_30d', type: 'number' },
+  receivedQty30d: { dbCol: 'pm.received_qty_30d', type: 'number' }, receivedCost30d: { dbCol: 'pm.received_cost_30d', type: 'number' },
+  // Lifetime Metrics
+  lifetimeSales: { dbCol: 'pm.lifetime_sales', type: 'number' }, lifetimeRevenue: { dbCol: 'pm.lifetime_revenue', type: 'number' },
+  // First Period Metrics
+  first7DaysSales: { dbCol: 'pm.first_7_days_sales', type: 'number' }, first7DaysRevenue: { dbCol: 'pm.first_7_days_revenue', type: 'number' },
+  first30DaysSales: { dbCol: 'pm.first_30_days_sales', type: 'number' }, first30DaysRevenue: { dbCol: 'pm.first_30_days_revenue', type: 'number' },
+  first60DaysSales: { dbCol: 'pm.first_60_days_sales', type: 'number' }, first60DaysRevenue: { dbCol: 'pm.first_60_days_revenue', type: 'number' },
+  first90DaysSales: { dbCol: 'pm.first_90_days_sales', type: 'number' }, first90DaysRevenue: { dbCol: 'pm.first_90_days_revenue', type: 'number' },
+  // Calculated KPIs
+  asp30d: { dbCol: 'pm.asp_30d', type: 'number' }, acp30d: { dbCol: 'pm.acp_30d', type: 'number' }, avgRos30d: { dbCol: 'pm.avg_ros_30d', type: 'number' },
+  avgSalesPerDay30d: { dbCol: 'pm.avg_sales_per_day_30d', type: 'number' }, avgSalesPerMonth30d: { dbCol: 'pm.avg_sales_per_month_30d', type: 'number' },
+  margin30d: { dbCol: 'pm.margin_30d', type: 'number' }, markup30d: { dbCol: 'pm.markup_30d', type: 'number' }, gmroi30d: { dbCol: 'pm.gmroi_30d', type: 'number' },
+  stockturn30d: { dbCol: 'pm.stockturn_30d', type: 'number' }, returnRate30d: { dbCol: 'pm.return_rate_30d', type: 'number' },
+  discountRate30d: { dbCol: 'pm.discount_rate_30d', type: 'number' }, stockoutRate30d: { dbCol: 'pm.stockout_rate_30d', type: 'number' },
+  markdown30d: { dbCol: 'pm.markdown_30d', type: 'number' }, markdownRate30d: { dbCol: 'pm.markdown_rate_30d', type: 'number' },
+  sellThrough30d: { dbCol: 'pm.sell_through_30d', type: 'number' }, avgLeadTimeDays: { dbCol: 'pm.avg_lead_time_days', type: 'number' },
+  // Forecasting & Replenishment
+  abcClass: { dbCol: 'pm.abc_class', type: 'string' }, salesVelocityDaily: { dbCol: 'pm.sales_velocity_daily', type: 'number' },
+  configLeadTime: { dbCol: 'pm.config_lead_time', type: 'number' }, configDaysOfStock: { dbCol: 'pm.config_days_of_stock', type: 'number' },
+  configSafetyStock: { dbCol: 'pm.config_safety_stock', type: 'number' }, planningPeriodDays: { dbCol: 'pm.planning_period_days', type: 'number' },
+  leadTimeForecastUnits: { dbCol: 'pm.lead_time_forecast_units', type: 'number' }, daysOfStockForecastUnits: { dbCol: 'pm.days_of_stock_forecast_units', type: 'number' },
+  planningPeriodForecastUnits: { dbCol: 'pm.planning_period_forecast_units', type: 'number' }, leadTimeClosingStock: { dbCol: 'pm.lead_time_closing_stock', type: 'number' },
+  daysOfStockClosingStock: { dbCol: 'pm.days_of_stock_closing_stock', type: 'number' }, replenishmentNeededRaw: { dbCol: 'pm.replenishment_needed_raw', type: 'number' },
+  replenishmentUnits: { dbCol: 'pm.replenishment_units', type: 'number' }, replenishmentCost: { dbCol: 'pm.replenishment_cost', type: 'number' },
+  replenishmentRetail: { dbCol: 'pm.replenishment_retail', type: 'number' }, replenishmentProfit: { dbCol: 'pm.replenishment_profit', type: 'number' },
+  toOrderUnits: { dbCol: 'pm.to_order_units', type: 'number' }, forecastLostSalesUnits: { dbCol: 'pm.forecast_lost_sales_units', type: 'number' },
+  forecastLostRevenue: { dbCol: 'pm.forecast_lost_revenue', type: 'number' }, stockCoverInDays: { dbCol: 'pm.stock_cover_in_days', type: 'number' },
+  poCoverInDays: { dbCol: 'pm.po_cover_in_days', type: 'number' }, sellsOutInDays: { dbCol: 'pm.sells_out_in_days', type: 'number' },
+  replenishDate: { dbCol: 'pm.replenish_date', type: 'date' }, overstockedUnits: { dbCol: 'pm.overstocked_units', type: 'number' },
+  overstockedCost: { dbCol: 'pm.overstocked_cost', type: 'number' }, overstockedRetail: { dbCol: 'pm.overstocked_retail', type: 'number' },
+  isOldStock: { dbCol: 'pm.is_old_stock', type: 'boolean' },
+  // Yesterday
+  yesterdaySales: { dbCol: 'pm.yesterday_sales', type: 'number' },
+};
+
+function getSafeColumnInfo(queryParamKey) {
+  return COLUMN_MAP[queryParamKey] || null;
+}
+
+// --- Route Handlers ---
+
+// GET /metrics/filter-options - Provide distinct values for filter dropdowns
+router.get('/filter-options', async (req, res) => {
+  const pool = req.app.locals.pool;
+  console.log('GET /metrics/filter-options');
+  try {
+    const [vendorRes, brandRes, abcClassRes] = await Promise.all([
+      pool.query(`SELECT DISTINCT vendor FROM public.product_metrics WHERE vendor IS NOT NULL AND vendor <> '' ORDER BY vendor`),
+      pool.query(`SELECT DISTINCT COALESCE(brand, 'Unbranded') as brand FROM public.product_metrics WHERE brand IS NOT NULL AND brand <> '' ORDER BY brand`),
+      pool.query(`SELECT DISTINCT abc_class FROM public.product_metrics WHERE abc_class IS NOT NULL ORDER BY abc_class`)
+      // Add queries for other distinct options if needed (e.g., categories if stored on pm)
+    ]);
+
+    res.json({
+      vendors: vendorRes.rows.map(r => r.vendor),
+      brands: brandRes.rows.map(r => r.brand),
+      abcClasses: abcClassRes.rows.map(r => r.abc_class),
+    });
+  } catch (error) {
+    console.error('Error fetching filter options:', error);
+    res.status(500).json({ error: 'Failed to fetch filter options' });
+  }
+});
+
+
+// GET /metrics/ - List all product metrics with filtering, sorting, pagination
+router.get('/', async (req, res) => {
+  const pool = req.app.locals.pool; // Get pool from app instance
+  console.log('GET /metrics received query:', req.query);
+
+  try {
+    // --- Pagination ---
+    let page = parseInt(req.query.page, 10);
+    let limit = parseInt(req.query.limit, 10);
+    if (isNaN(page) || page < 1) page = 1;
+    if (isNaN(limit) || limit < 1) limit = DEFAULT_PAGE_LIMIT;
+    limit = Math.min(limit, MAX_PAGE_LIMIT); // Cap the limit
+    const offset = (page - 1) * limit;
+
+    // --- Sorting ---
+    const sortQueryKey = req.query.sort || 'title'; // Default sort field key
+    const sortColumnInfo = getSafeColumnInfo(sortQueryKey);
+    const sortColumn = sortColumnInfo ? sortColumnInfo.dbCol : 'pm.title'; // Default DB column
+    const sortDirection = req.query.order?.toLowerCase() === 'desc' ? 'DESC' : 'ASC';
+    const nullsOrder = (sortDirection === 'ASC' ? 'NULLS FIRST' : 'NULLS LAST'); // Consistent null handling
+
+    // --- Filtering ---
+    const conditions = [];
+    const params = [];
+    let paramCounter = 1;
+
+    // Add default visibility/replenishable filters unless overridden
+    if (req.query.showInvisible !== 'true') conditions.push(`pm.is_visible = true`);
+    if (req.query.showNonReplenishable !== 'true') conditions.push(`pm.is_replenishable = true`);
+
+    // Process other filters from query parameters
+    for (const key in req.query) {
+      if (['page', 'limit', 'sort', 'order', 'showInvisible', 'showNonReplenishable'].includes(key)) continue; // Skip control params
+
+      let filterKey = key;
+      let operator = '='; // Default operator
+      let value = req.query[key];
+
+      // Check for operator suffixes (e.g., sales30d_gt, title_like)
+      const operatorMatch = key.match(/^(.*)_(eq|ne|gt|gte|lt|lte|like|ilike|between|in)$/);
+      if (operatorMatch) {
+        filterKey = operatorMatch[1]; // e.g., "sales30d"
+        operator = operatorMatch[2]; // e.g., "gt"
+      }
+
+      const columnInfo = getSafeColumnInfo(filterKey);
+      if (!columnInfo) {
+        console.warn(`Invalid filter key ignored: ${key}`);
+        continue; // Skip if the key doesn't map to a known column
+      }
+
+      const dbColumn = columnInfo.dbCol;
+      const valueType = columnInfo.type;
+
+      // --- Build WHERE clause fragment ---
+      try {
+        let conditionFragment = '';
+        let needsParam = true; // Most operators need a parameter
+
+        switch (operator.toLowerCase()) {
+          case 'eq': operator = '='; break;
+          case 'ne': operator = '<>'; break;
+          case 'gt': operator = '>'; break;
+          case 'gte': operator = '>='; break;
+          case 'lt': operator = '<'; break;
+          case 'lte': operator = '<='; break;
+          case 'like': operator = 'LIKE'; value = `%${value}%`; break; // Add wildcards for LIKE
+          case 'ilike': operator = 'ILIKE'; value = `%${value}%`; break; // Add wildcards for ILIKE
+          case 'between':
+            const [val1, val2] = String(value).split(',');
+            if (val1 !== undefined && val2 !== undefined) {
+              conditionFragment = `${dbColumn} BETWEEN $${paramCounter++} AND $${paramCounter++}`;
+              params.push(parseValue(val1, valueType), parseValue(val2, valueType));
+              needsParam = false; // Params added manually
+            } else {
+              console.warn(`Invalid 'between' value for ${key}: ${value}`);
+              continue; // Skip this filter
+            }
+            break;
+          case 'in':
+            const inValues = String(value).split(',');
+            if (inValues.length > 0) {
+              const placeholders = inValues.map(() => `$${paramCounter++}`).join(', ');
+              conditionFragment = `${dbColumn} IN (${placeholders})`;
+              params.push(...inValues.map(v => parseValue(v, valueType))); // Add all parsed values
+              needsParam = false; // Params added manually
+            } else {
+              console.warn(`Invalid 'in' value for ${key}: ${value}`);
+              continue; // Skip this filter
+            }
+            break;
+          // Add other operators as needed (IS NULL, IS NOT NULL, etc.)
+          case '=': // Keep default '='
+          default: operator = '='; break; // Ensure default is handled
+        }
+
+        if (needsParam) {
+          conditionFragment = `${dbColumn} ${operator} $${paramCounter++}`;
+          params.push(parseValue(value, valueType));
+        }
+
+        if (conditionFragment) {
+          conditions.push(`(${conditionFragment})`); // Wrap condition in parentheses
+        }
+
+      } catch (parseError) {
+        console.warn(`Skipping filter for key "${key}" due to parsing error: ${parseError.message}`);
+        // Decrement counter if param wasn't actually used due to error
+        if (needsParam) paramCounter--;
+      }
+    }
+
+    // --- Construct and Execute Queries ---
+    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
+
+    // Count Query
+    const countSql = `SELECT COUNT(*) AS total FROM public.product_metrics pm ${whereClause}`;
+    console.log('Executing Count Query:', countSql, params);
+    const countPromise = pool.query(countSql, params);
+
+    // Data Query (Select all columns from metrics table for now)
+    const dataSql = `
+      SELECT pm.*
+      FROM public.product_metrics pm
+      ${whereClause}
+      ORDER BY ${sortColumn} ${sortDirection} ${nullsOrder}
+      LIMIT $${paramCounter} OFFSET $${paramCounter + 1}
+    `;
+    const dataParams = [...params, limit, offset];
+    console.log('Executing Data Query:', dataSql, dataParams);
+    const dataPromise = pool.query(dataSql, dataParams);
+
+    // Execute queries in parallel
+    const [countResult, dataResult] = await Promise.all([countPromise, dataPromise]);
+
+    const total = parseInt(countResult.rows[0].total, 10);
+    const metrics = dataResult.rows;
+    console.log(`Total: ${total}, Fetched: ${metrics.length} for page ${page}`);
+
+    // --- Respond ---
+    res.json({
+      metrics,
+      pagination: {
+        total,
+        pages: Math.ceil(total / limit),
+        currentPage: page,
+        limit,
+      },
+      // Optionally include applied filters/sort for frontend confirmation
+      appliedQuery: {
+        filters: req.query, // Send back raw query filters
+        sort: sortQueryKey,
+        order: sortDirection.toLowerCase()
+      }
+    });
+
+  } catch (error) {
+    console.error('Error fetching metrics list:', error);
+    res.status(500).json({ error: 'Failed to fetch product metrics list.' });
+  }
+});
+
+// GET /metrics/:pid - Get metrics for a single product
+router.get('/:pid', async (req, res) => {
+  const pool = req.app.locals.pool;
+  const pid = parseInt(req.params.pid, 10);
+
+  if (isNaN(pid)) {
+    return res.status(400).json({ error: 'Invalid Product ID.' });
+  }
+
+  console.log(`GET /metrics/${pid}`);
+  try {
+    const { rows } = await pool.query(
+      `SELECT * FROM public.product_metrics WHERE pid = $1`,
+      [pid]
+    );
+
+    if (rows.length === 0) {
+      console.log(`Metrics not found for PID: ${pid}`);
+      return res.status(404).json({ error: 'Metrics not found for this product.' });
+    }
+
+    console.log(`Metrics found for PID: ${pid}`);
+    // Data is pre-calculated, return the first (only) row
+    res.json(rows[0]);
+
+  } catch (error) {
+    console.error(`Error fetching metrics for PID ${pid}:`, error);
+    res.status(500).json({ error: 'Failed to fetch product metrics.' });
+  }
+});
+
+
+/**
+ * Parses a value based on its expected type.
+ * Throws error for invalid formats.
+ */
+function parseValue(value, type) {
+  if (value === null || value === undefined || value === '') return null; // Allow empty strings? Or handle differently?
+
+  switch (type) {
+    case 'number':
+      const num = parseFloat(value);
+      if (isNaN(num)) throw new Error(`Invalid number format: "${value}"`);
+      return num;
+    case 'boolean':
+      if (String(value).toLowerCase() === 'true') return true;
+      if (String(value).toLowerCase() === 'false') return false;
+      throw new Error(`Invalid boolean format: "${value}"`);
+    case 'date':
+      // Basic validation, rely on DB to handle actual date conversion
+      if (!String(value).match(/^\d{4}-\d{2}-\d{2}$/)) {
+        // Allow full timestamps too? Adjust regex if needed
+        // console.warn(`Potentially invalid date format: "${value}"`); // Warn instead of throwing?
+      }
+      return String(value); // Send as string, let DB handle it
+    case 'string':
+    default:
+      return String(value);
+  }
+}
 
 module.exports = router;
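
For reference, a sketch of how a client might call the new listing endpoint using the query conventions defined above (operator suffixes such as _gt, _ilike and _in, plus sort, order, page and limit). The /metrics mount path, the http://localhost:3000 origin and the vendor names are assumptions for illustration; only the parameter names and response shape come from the route code in this commit.

// Sketch: query GET /metrics with filters, sorting and pagination (Node 18+, global fetch).
// Assumed: the router is mounted at /metrics on http://localhost:3000.
async function listMetrics() {
  const params = new URLSearchParams({
    sales30d_gt: '10',          // pm.sales_30d > 10
    vendor_in: 'Acme,Globex',   // pm.vendor IN ('Acme', 'Globex') -- hypothetical vendor names
    title_ilike: 'widget',      // case-insensitive substring match on pm.title
    sort: 'revenue30d',         // mapped via COLUMN_MAP to pm.revenue_30d
    order: 'desc',
    page: '2',
    limit: '100',               // capped server-side at MAX_PAGE_LIMIT (200)
  });

  const res = await fetch(`http://localhost:3000/metrics?${params}`);
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);

  const { metrics, pagination } = await res.json();
  console.log(`page ${pagination.currentPage}/${pagination.pages}, ${pagination.total} rows total`);
  return metrics;
}

listMetrics().catch(console.error);

The /filter-options and /:pid handlers take no query parameters; they return dropdown values and a single pre-calculated metrics row respectively, as shown in the diff above.
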