Initial setup, everything broken

2025-01-08 20:58:34 -05:00
parent 875a04d83e
commit ceaf5ae279
34 changed files with 9282 additions and 0 deletions

inventory-server/ecosystem.config.js Normal file
View File

@@ -0,0 +1,35 @@
module.exports = {
apps: [{
name: 'inventory-server',
script: 'src/server.js',
instances: 1,
exec_mode: 'fork',
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
PORT: 3010
},
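    // Note: core PM2 may ignore the rotation keys below; log rotation is usually
    // configured through the pm2-logrotate module (e.g. pm2 set pm2-logrotate:max_size 10M)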
log_rotate: true,
max_size: '10M',
retain: '10',
log_date_format: 'YYYY-MM-DD HH:mm:ss',
error_file: 'logs/pm2/err.log',
out_file: 'logs/pm2/out.log',
log_file: 'logs/pm2/combined.log',
time: true,
ignore_watch: [
'node_modules',
'logs',
'.git',
'*.log'
],
min_uptime: 5000,
max_restarts: 5,
restart_delay: 4000,
listen_timeout: 50000,
kill_timeout: 5000,
node_args: '--max-old-space-size=1536'
}]
};
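For reference, PM2 can also be driven programmatically through the pm2 package already listed in the dependencies; a minimal sketch (the subset of options mirrored here is illustrative, not part of this commit):

const pm2 = require('pm2');

// Connect to the local PM2 daemon, start the app, then disconnect so this script can exit
pm2.connect((err) => {
  if (err) {
    console.error('Failed to connect to the PM2 daemon:', err);
    process.exit(1);
  }
  pm2.start(
    {
      name: 'inventory-server',
      script: 'src/server.js',
      max_memory_restart: '1G',
      env: { NODE_ENV: 'production', PORT: 3010 }
    },
    (startErr) => {
      pm2.disconnect();
      if (startErr) {
        console.error('Failed to start inventory-server:', startErr);
        process.exit(1);
      }
      console.log('inventory-server started under PM2');
    }
  );
});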

3100
inventory-server/package-lock.json generated Normal file

File diff suppressed because it is too large.

inventory-server/package.json Normal file
View File

@@ -0,0 +1,33 @@
{
"name": "inventory-server",
"version": "1.0.0",
"description": "Backend server for inventory management system",
"main": "src/server.js",
"scripts": {
"start": "node src/server.js",
"dev": "nodemon src/server.js",
"prod": "pm2 start ecosystem.config.js",
"prod:stop": "pm2 stop inventory-server",
"prod:restart": "pm2 restart inventory-server",
"prod:logs": "pm2 logs inventory-server",
"prod:status": "pm2 status inventory-server",
"setup": "mkdir -p logs uploads",
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"cors": "^2.8.5",
"csv-parse": "^5.5.3",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"multer": "^1.4.5-lts.1",
"mysql2": "^3.7.0",
"pm2": "^5.3.0",
"uuid": "^9.0.1"
},
"devDependencies": {
"nodemon": "^3.0.2"
}
}

View File

@@ -0,0 +1,36 @@
CREATE DATABASE IF NOT EXISTS inventory_db;
USE inventory_db;
CREATE TABLE IF NOT EXISTS products (
id VARCHAR(36) PRIMARY KEY,
sku VARCHAR(50) UNIQUE NOT NULL,
name VARCHAR(255) NOT NULL,
description TEXT,
category VARCHAR(100),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS inventory_levels (
id VARCHAR(36) PRIMARY KEY,
product_id VARCHAR(36) NOT NULL,
quantity INT NOT NULL DEFAULT 0,
reorder_point INT,
reorder_quantity INT,
last_reorder_date TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (product_id) REFERENCES products(id)
);
CREATE TABLE IF NOT EXISTS inventory_transactions (
id VARCHAR(36) PRIMARY KEY,
product_id VARCHAR(36) NOT NULL,
transaction_type ENUM('purchase', 'sale', 'adjustment') NOT NULL,
quantity INT NOT NULL,
transaction_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
reference_number VARCHAR(100),
notes TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (product_id) REFERENCES products(id)
);
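With this schema, low-stock products can be found by joining products to inventory_levels on the reorder point. A minimal sketch using the mysql2 promise pool created in src/server.js (the query text is illustrative, not part of this commit):

// Returns products whose on-hand quantity has fallen to or below their reorder point
async function findProductsBelowReorderPoint(pool) {
  const [rows] = await pool.query(`
    SELECT p.id, p.sku, p.name, il.quantity, il.reorder_point, il.reorder_quantity
    FROM products p
    JOIN inventory_levels il ON il.product_id = p.id
    WHERE il.reorder_point IS NOT NULL
      AND il.quantity <= il.reorder_point
    ORDER BY il.quantity ASC
  `);
  return rows;
}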

inventory-server/src/routes/products.js Normal file
View File

@@ -0,0 +1,137 @@
const express = require('express');
const router = express.Router();
const { v4: uuidv4 } = require('uuid');
const { importProductsFromCSV } = require('../utils/csvImporter');
const multer = require('multer');
const fs = require('fs');

// Configure multer to stage uploaded files on disk before import
const upload = multer({ dest: 'uploads/' });
// Get all products with their current inventory levels
router.get('/', async (req, res) => {
const pool = req.app.locals.pool;
try {
const [rows] = await pool.query(`
SELECT p.*, il.quantity, il.reorder_point, il.reorder_quantity
FROM products p
LEFT JOIN inventory_levels il ON p.id = il.product_id
ORDER BY p.created_at DESC
`);
res.json(rows);
} catch (error) {
console.error('Error fetching products:', error);
res.status(500).json({ error: 'Failed to fetch products' });
}
});
// Get a single product with its inventory details
router.get('/:id', async (req, res) => {
const pool = req.app.locals.pool;
try {
const [rows] = await pool.query(`
SELECT p.*, il.quantity, il.reorder_point, il.reorder_quantity
FROM products p
LEFT JOIN inventory_levels il ON p.id = il.product_id
WHERE p.id = ?
`, [req.params.id]);
if (rows.length === 0) {
return res.status(404).json({ error: 'Product not found' });
}
res.json(rows[0]);
} catch (error) {
console.error('Error fetching product:', error);
res.status(500).json({ error: 'Failed to fetch product' });
}
});
// Create a new product
router.post('/', async (req, res) => {
const pool = req.app.locals.pool;
  const { sku, name, description, category } = req.body;
  if (!sku || !name) {
    return res.status(400).json({ error: 'sku and name are required' });
  }
  const id = uuidv4();
try {
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
await connection.query(
'INSERT INTO products (id, sku, name, description, category) VALUES (?, ?, ?, ?, ?)',
[id, sku, name, description, category]
);
await connection.query(
'INSERT INTO inventory_levels (id, product_id, quantity) VALUES (?, ?, 0)',
[uuidv4(), id]
);
await connection.commit();
res.status(201).json({ id, sku, name, description, category });
} catch (error) {
await connection.rollback();
throw error;
} finally {
connection.release();
}
} catch (error) {
console.error('Error creating product:', error);
res.status(500).json({ error: 'Failed to create product' });
}
});
// Import products from CSV
router.post('/import', upload.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }
  try {
    const result = await importProductsFromCSV(req.file.path, req.app.locals.pool);
    res.json(result);
  } catch (error) {
    console.error('Error importing products:', error);
    res.status(500).json({ error: 'Failed to import products' });
  } finally {
    // Clean up the uploaded file whether or not the import succeeded
    fs.unlink(req.file.path, () => {});
  }
});
// Update product inventory
router.post('/:id/inventory', async (req, res) => {
const pool = req.app.locals.pool;
const { quantity, type, notes } = req.body;
try {
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
// Create inventory transaction
await connection.query(
'INSERT INTO inventory_transactions (id, product_id, transaction_type, quantity, notes) VALUES (?, ?, ?, ?, ?)',
[uuidv4(), req.params.id, type, quantity, notes]
);
// Update inventory level
const quantityChange = type === 'sale' ? -quantity : quantity;
await connection.query(
'UPDATE inventory_levels SET quantity = quantity + ? WHERE product_id = ?',
[quantityChange, req.params.id]
);
await connection.commit();
res.json({ success: true });
} catch (error) {
await connection.rollback();
throw error;
} finally {
connection.release();
}
} catch (error) {
console.error('Error updating inventory:', error);
res.status(500).json({ error: 'Failed to update inventory' });
}
});
module.exports = router;
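For reference, a sketch of how a client might call these routes, using the global fetch available in Node 18+ (the base URL and field values are placeholders):

const BASE_URL = 'http://localhost:3010/api/products';

async function demo() {
  // Create a product (sku and name map to NOT NULL columns in the schema)
  const createRes = await fetch(BASE_URL, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      sku: 'WIDGET-001',
      name: 'Widget',
      description: 'Example widget',
      category: 'Widgets'
    })
  });
  const product = await createRes.json();

  // Record a purchase of 25 units against the new product
  await fetch(`${BASE_URL}/${product.id}/inventory`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ quantity: 25, type: 'purchase', notes: 'Initial stock' })
  });

  // Read the product back together with its inventory level
  const detail = await fetch(`${BASE_URL}/${product.id}`).then((r) => r.json());
  console.log(detail);
}

demo().catch(console.error);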

inventory-server/src/server.js Normal file
View File

@@ -0,0 +1,99 @@
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const mysql = require('mysql2/promise');
const productsRouter = require('./routes/products');
const path = require('path');
const fs = require('fs');
// Ensure required directories exist
['logs', 'uploads'].forEach(dir => {
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
});
const app = express();
// Middleware
app.use(cors({
origin: process.env.CORS_ORIGIN,
credentials: true
}));
app.use(express.json());
// Request logging middleware
app.use((req, res, next) => {
const start = Date.now();
res.on('finish', () => {
const duration = Date.now() - start;
console.log(
`[${new Date().toISOString()}] ${req.method} ${req.url} ${res.statusCode} ${duration}ms`
);
});
next();
});
// Database connection pool
const pool = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
waitForConnections: true,
connectionLimit: process.env.NODE_ENV === 'production' ? 20 : 10,
queueLimit: 0,
enableKeepAlive: true,
keepAliveInitialDelay: 0
});
// Make pool available to routes
app.locals.pool = pool;
// Test database connection
pool.getConnection()
.then(connection => {
console.log('[Database] Connected successfully');
connection.release();
})
.catch(err => {
console.error('[Database] Error connecting:', err);
process.exit(1); // Exit if we can't connect to the database
});
// Routes
app.use('/api/products', productsRouter);
// Basic health check route
app.get('/health', (req, res) => {
res.json({
status: 'ok',
timestamp: new Date().toISOString(),
environment: process.env.NODE_ENV
});
});
// Error handling middleware
app.use((err, req, res, next) => {
console.error(`[${new Date().toISOString()}] Error:`, err);
res.status(500).json({
error: process.env.NODE_ENV === 'production'
? 'An internal server error occurred'
: err.message
});
});
// Handle uncaught exceptions
process.on('uncaughtException', (err) => {
console.error(`[${new Date().toISOString()}] Uncaught Exception:`, err);
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
console.error(`[${new Date().toISOString()}] Unhandled Rejection at:`, promise, 'reason:', reason);
});
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
console.log(`[Server] Running in ${process.env.NODE_ENV || 'development'} mode on port ${PORT}`);
});
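src/server.js reads its settings through dotenv; a sample .env for local development, where only the variable names come from the code above and every value is a placeholder:

NODE_ENV=development
PORT=3010
DB_HOST=localhost
DB_USER=inventory_user
DB_PASSWORD=change-me
DB_NAME=inventory_db
CORS_ORIGIN=http://localhost:5173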

inventory-server/src/utils/csvImporter.js Normal file
View File

@@ -0,0 +1,63 @@
const fs = require('fs');
const { parse } = require('csv-parse');
const { v4: uuidv4 } = require('uuid');
async function importProductsFromCSV(filePath, pool) {
return new Promise((resolve, reject) => {
const products = [];
fs.createReadStream(filePath)
.pipe(parse({
columns: true,
skip_empty_lines: true
}))
.on('data', (row) => {
products.push({
id: uuidv4(),
sku: row.sku,
name: row.name,
description: row.description || null,
category: row.category || null
});
})
.on('end', async () => {
try {
const connection = await pool.getConnection();
try {
await connection.beginTransaction();
for (const product of products) {
await connection.query(
'INSERT INTO products (id, sku, name, description, category) VALUES (?, ?, ?, ?, ?)',
[product.id, product.sku, product.name, product.description, product.category]
);
// Initialize inventory level for the product
await connection.query(
'INSERT INTO inventory_levels (id, product_id, quantity) VALUES (?, ?, 0)',
[uuidv4(), product.id]
);
}
await connection.commit();
resolve({ imported: products.length });
} catch (error) {
await connection.rollback();
reject(error);
} finally {
connection.release();
}
} catch (error) {
reject(error);
}
})
.on('error', (error) => {
reject(error);
});
});
}
module.exports = {
importProductsFromCSV
};
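The importer expects a header row whose column names match those read in the data handler above; a small example CSV (the rows are invented for illustration, and description or category may be left blank):

sku,name,description,category
WIDGET-001,Widget,Basic widget,Widgets
GADGET-002,Gadget,,Gadgets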