From 451d5f0b3b5db56383996a3f3770f02ffc46ecd5 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 3 Oct 2025 13:14:22 -0400 Subject: [PATCH] Add ai supplemental fields to product import, fix image upload url, misc changes for netcup server --- inventory-server/chat/create-new-database.sql | 45 ++++ inventory-server/chat/export-chat-data.sh | 147 +++++++++++ inventory-server/chat/import-chat-data.sh | 167 +++++++++++++ .../chat/migrate-to-new-server.md | 86 +++++++ .../chat/update-config-template.env | 26 ++ inventory-server/chat/verify-migration.js | 231 ++++++++++++++++++ inventory-server/src/routes/import.js | 4 +- .../src/routes/reusable-images.js | 2 +- .../components/ProductCard/SortableImage.tsx | 2 +- .../hooks/useProductImagesInit.ts | 2 +- .../MatchColumnsStep/MatchColumnsStep.tsx | 141 ++++++++++- .../hooks/useValidationState.tsx | 1 + .../utils/aiValidationUtils.ts | 8 +- inventory/vite.config.ts | 26 +- 14 files changed, 860 insertions(+), 28 deletions(-) create mode 100644 inventory-server/chat/create-new-database.sql create mode 100755 inventory-server/chat/export-chat-data.sh create mode 100755 inventory-server/chat/import-chat-data.sh create mode 100644 inventory-server/chat/migrate-to-new-server.md create mode 100644 inventory-server/chat/update-config-template.env create mode 100755 inventory-server/chat/verify-migration.js diff --git a/inventory-server/chat/create-new-database.sql b/inventory-server/chat/create-new-database.sql new file mode 100644 index 0000000..00375e7 --- /dev/null +++ b/inventory-server/chat/create-new-database.sql @@ -0,0 +1,45 @@ +-- PostgreSQL Database Creation Script for New Server +-- Run as: sudo -u postgres psql -f create-new-database.sql + +-- Terminate all connections to the database (if it exists) +SELECT pg_terminate_backend(pid) +FROM pg_stat_activity +WHERE datname = 'rocketchat_converted' AND pid <> pg_backend_pid(); + +-- Drop the database if it exists +DROP DATABASE IF EXISTS rocketchat_converted; + +-- Create 
fresh database +CREATE DATABASE rocketchat_converted; + +-- Create user (if not exists) - UPDATE PASSWORD BEFORE RUNNING! +DO $$ +BEGIN + IF NOT EXISTS (SELECT FROM pg_user WHERE usename = 'rocketchat_user') THEN + CREATE USER rocketchat_user WITH PASSWORD 'CHANGE_ME_BEFORE_RUNNING'; + END IF; +END $$; + +-- Grant database privileges +GRANT CONNECT ON DATABASE rocketchat_converted TO rocketchat_user; +GRANT CREATE ON DATABASE rocketchat_converted TO rocketchat_user; + +-- Connect to the new database +\c rocketchat_converted; + +-- Grant schema privileges +GRANT CREATE ON SCHEMA public TO rocketchat_user; +GRANT USAGE ON SCHEMA public TO rocketchat_user; + +-- Grant privileges on all future tables and sequences +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO rocketchat_user; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE, SELECT ON SEQUENCES TO rocketchat_user; + +-- Display success message +\echo 'Database created successfully!' +\echo 'IMPORTANT: Update the password for rocketchat_user before proceeding' +\echo 'Next steps:' +\echo '1. Update the password in this file' +\echo '2. Run export-chat-data.sh on your current server' +\echo '3. Transfer the exported files to this server' +\echo '4. Run import-chat-data.sh on this server' diff --git a/inventory-server/chat/export-chat-data.sh b/inventory-server/chat/export-chat-data.sh new file mode 100755 index 0000000..d6ab0ef --- /dev/null +++ b/inventory-server/chat/export-chat-data.sh @@ -0,0 +1,147 @@ +#!/bin/bash + +# Chat Database Export Script +# This script exports the chat database schema and data for migration + +set -e  # Exit on any error + +echo "๐Ÿš€ Starting chat database export..." 
+ +# Configuration - Update these values for your setup +DB_HOST="${CHAT_DB_HOST:-localhost}" +DB_PORT="${CHAT_DB_PORT:-5432}" +DB_NAME="${CHAT_DB_NAME:-rocketchat_converted}" +DB_USER="${CHAT_DB_USER:-rocketchat_user}" + +# Check if database connection info is available +if [ -z "$CHAT_DB_PASSWORD" ]; then + echo "โš ๏ธ CHAT_DB_PASSWORD environment variable not set" + echo "Please set it with: export CHAT_DB_PASSWORD='your_password'" + exit 1 +fi + +echo "๐Ÿ“Š Database: $DB_NAME on $DB_HOST:$DB_PORT" + +# Create export directory +EXPORT_DIR="chat-migration-$(date +%Y%m%d-%H%M%S)" +mkdir -p "$EXPORT_DIR" + +echo "๐Ÿ“ Export directory: $EXPORT_DIR" + +# Export database schema +echo "๐Ÿ“‹ Exporting database schema..." +PGPASSWORD="$CHAT_DB_PASSWORD" pg_dump \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + --schema-only \ + --no-owner \ + --no-privileges \ + -f "$EXPORT_DIR/chat-schema.sql" + +if [ $? -eq 0 ]; then + echo "โœ… Schema exported successfully" +else + echo "โŒ Schema export failed" + exit 1 +fi + +# Export database data +echo "๐Ÿ’พ Exporting database data..." +PGPASSWORD="$CHAT_DB_PASSWORD" pg_dump \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + --data-only \ + --no-owner \ + --no-privileges \ + --disable-triggers \ + --column-inserts \ + -f "$EXPORT_DIR/chat-data.sql" + +if [ $? -eq 0 ]; then + echo "โœ… Data exported successfully" +else + echo "โŒ Data export failed" + exit 1 +fi + +# Export file uploads and avatars +echo "๐Ÿ“Ž Exporting chat files (uploads and avatars)..." +if [ -d "db-convert/db/files" ]; then + cd db-convert/db + tar -czf "../../$EXPORT_DIR/chat-files.tar.gz" files/ + cd ../.. + echo "โœ… Files exported successfully" +else + echo "โš ๏ธ No files directory found at db-convert/db/files" + echo " This is normal if you have no file uploads" + touch "$EXPORT_DIR/chat-files.tar.gz" +fi + +# Get table statistics for verification +echo "๐Ÿ“ˆ Generating export statistics..." 
+PGPASSWORD="$CHAT_DB_PASSWORD" psql \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + -c " + SELECT + schemaname, + tablename, + n_tup_ins as inserted_rows, + n_tup_upd as updated_rows, + n_tup_del as deleted_rows, + n_live_tup as live_rows, + n_dead_tup as dead_rows + FROM pg_stat_user_tables + ORDER BY n_live_tup DESC; + " > "$EXPORT_DIR/table-stats.txt" + +# Create export summary +cat > "$EXPORT_DIR/export-summary.txt" << EOF +Chat Database Export Summary +=========================== + +Export Date: $(date) +Database: $DB_NAME +Host: $DB_HOST:$DB_PORT +User: $DB_USER + +Files Generated: +- chat-schema.sql: Database schema (tables, indexes, constraints) +- chat-data.sql: All table data +- chat-files.tar.gz: Uploaded files and avatars +- table-stats.txt: Database statistics +- export-summary.txt: This summary + +Next Steps: +1. Transfer these files to your new server +2. Run create-new-database.sql on the new server first +3. Run import-chat-data.sh on the new server +4. Update your application configuration +5. Run verify-migration.js to validate the migration + +Important Notes: +- Keep these files secure as they contain your chat data +- Ensure the new server has enough disk space +- Plan for application downtime during the migration +EOF + +echo "" +echo "๐ŸŽ‰ Export completed successfully!" +echo "๐Ÿ“ Files are in: $EXPORT_DIR/" +echo "" +echo "๐Ÿ“‹ Export Summary:" +ls -lh "$EXPORT_DIR/" +echo "" +echo "๐Ÿšš Next steps:" +echo "1. Transfer the $EXPORT_DIR/ directory to your new server" +echo "2. Run create-new-database.sql on the new server (update password first!)" +echo "3. 
Run import-chat-data.sh on the new server" +echo "" +echo "๐Ÿ’ก To transfer files to new server:" +echo " scp -r $EXPORT_DIR/ user@new-server:/tmp/" diff --git a/inventory-server/chat/import-chat-data.sh b/inventory-server/chat/import-chat-data.sh new file mode 100755 index 0000000..eb8a123 --- /dev/null +++ b/inventory-server/chat/import-chat-data.sh @@ -0,0 +1,167 @@ +#!/bin/bash + +# Chat Database Import Script +# This script imports the chat database schema and data on the new server + +set -e # Exit on any error + +echo "๐Ÿš€ Starting chat database import..." + +# Configuration - Update these values for your new server +DB_HOST="${CHAT_DB_HOST:-localhost}" +DB_PORT="${CHAT_DB_PORT:-5432}" +DB_NAME="${CHAT_DB_NAME:-rocketchat_converted}" +DB_USER="${CHAT_DB_USER:-rocketchat_user}" + +# Check if database connection info is available +if [ -z "$CHAT_DB_PASSWORD" ]; then + echo "โš ๏ธ CHAT_DB_PASSWORD environment variable not set" + echo "Please set it with: export CHAT_DB_PASSWORD='your_password'" + exit 1 +fi + +# Find the migration directory +MIGRATION_DIR="" +if [ -d "/tmp" ]; then + MIGRATION_DIR=$(find /tmp -maxdepth 1 -name "chat-migration-*" -type d | head -1) +fi + +if [ -z "$MIGRATION_DIR" ]; then + echo "โŒ No migration directory found in /tmp/" + echo "Please specify the migration directory:" + read -p "Enter full path to migration directory: " MIGRATION_DIR +fi + +if [ ! -d "$MIGRATION_DIR" ]; then + echo "โŒ Migration directory not found: $MIGRATION_DIR" + exit 1 +fi + +echo "๐Ÿ“ Using migration directory: $MIGRATION_DIR" +echo "๐Ÿ“Š Target database: $DB_NAME on $DB_HOST:$DB_PORT" + +# Verify required files exist +REQUIRED_FILES=("chat-schema.sql" "chat-data.sql" "chat-files.tar.gz") +for file in "${REQUIRED_FILES[@]}"; do + if [ ! 
-f "$MIGRATION_DIR/$file" ]; then + echo "โŒ Required file not found: $MIGRATION_DIR/$file" + exit 1 + fi +done + +echo "โœ… All required files found" + +# Test database connection +echo "๐Ÿ”— Testing database connection..." +PGPASSWORD="$CHAT_DB_PASSWORD" psql \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + -c "SELECT version();" > /dev/null + +if [ $? -eq 0 ]; then + echo "โœ… Database connection successful" +else + echo "โŒ Database connection failed" + echo "Please ensure:" + echo " 1. PostgreSQL is running" + echo " 2. Database '$DB_NAME' exists" + echo " 3. User '$DB_USER' has access" + echo " 4. Password is correct" + exit 1 +fi + +# Import database schema +echo "๐Ÿ“‹ Importing database schema..." +PGPASSWORD="$CHAT_DB_PASSWORD" psql \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + -f "$MIGRATION_DIR/chat-schema.sql" + +if [ $? -eq 0 ]; then + echo "โœ… Schema imported successfully" +else + echo "โŒ Schema import failed" + exit 1 +fi + +# Import database data +echo "๐Ÿ’พ Importing database data..." +echo " This may take a while depending on data size..." + +PGPASSWORD="$CHAT_DB_PASSWORD" psql \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + -f "$MIGRATION_DIR/chat-data.sql" + +if [ $? -eq 0 ]; then + echo "โœ… Data imported successfully" +else + echo "โŒ Data import failed" + echo "Check the error messages above for details" + exit 1 +fi + +# Create files directory and import files +echo "๐Ÿ“Ž Setting up files directory..." +mkdir -p "db-convert/db" + +if [ -s "$MIGRATION_DIR/chat-files.tar.gz" ]; then + echo "๐Ÿ“‚ Extracting chat files..." + cd db-convert/db + tar -xzf "$MIGRATION_DIR/chat-files.tar.gz" + cd ../.. 
+ + # Set proper permissions + if [ -d "db-convert/db/files" ]; then + chmod -R 755 db-convert/db/files + echo "โœ… Files imported and permissions set" + else + echo "โš ๏ธ Files directory not created properly" + fi +else + echo "โ„น๏ธ No files to import (empty archive)" + mkdir -p "db-convert/db/files/uploads" + mkdir -p "db-convert/db/files/avatars" +fi + +# Get final table statistics +echo "๐Ÿ“ˆ Generating import statistics..." +PGPASSWORD="$CHAT_DB_PASSWORD" psql \ + -h "$DB_HOST" \ + -p "$DB_PORT" \ + -U "$DB_USER" \ + -d "$DB_NAME" \ + -c " + SELECT + tablename, + n_live_tup as row_count + FROM pg_stat_user_tables + WHERE schemaname = 'public' + ORDER BY n_live_tup DESC; + " + +# Create import summary +echo "" +echo "๐ŸŽ‰ Import completed successfully!" +echo "" +echo "๐Ÿ“‹ Import Summary:" +echo " Database: $DB_NAME" +echo " Host: $DB_HOST:$DB_PORT" +echo " Files location: $(pwd)/db-convert/db/files/" +echo "" +echo "๐Ÿ” Next steps:" +echo "1. Update your application configuration to use this database" +echo "2. Run verify-migration.js to validate the migration" +echo "3. Test your application thoroughly" +echo "4. Update DNS/load balancer to point to new server" +echo "" +echo "โš ๏ธ Important:" +echo "- Keep the original data as backup until migration is fully validated" +echo "- Monitor the application closely after switching" +echo "- Have a rollback plan ready" diff --git a/inventory-server/chat/migrate-to-new-server.md b/inventory-server/chat/migrate-to-new-server.md new file mode 100644 index 0000000..f3e75aa --- /dev/null +++ b/inventory-server/chat/migrate-to-new-server.md @@ -0,0 +1,86 @@ +# Chat Database Migration Guide + +This guide will help you migrate your chat database from the current server to a new PostgreSQL server. 
+ +## Overview +Your chat system uses: +- Database: `rocketchat_converted` (PostgreSQL) +- Main tables: users, message, room, uploads, avatars, subscription +- File storage: db-convert/db/files/ directory with uploads and avatars +- Environment configuration for database connection + +## Migration Steps + +### 1. Pre-Migration Setup + +On your **new server**, ensure PostgreSQL is installed and running: +```bash +# Install PostgreSQL (if not already done) +sudo apt update +sudo apt install postgresql postgresql-contrib + +# Start PostgreSQL service +sudo systemctl start postgresql +sudo systemctl enable postgresql +``` + +### 2. Create Database Schema on New Server + +Run the provided migration script: +```bash +# On new server +sudo -u postgres psql -f create-new-database.sql +``` + +### 3. Export Data from Current Server + +Run the export script: +```bash +# On current server +./export-chat-data.sh +``` + +This will create: +- `chat-schema.sql` - Database schema +- `chat-data.sql` - All table data +- `chat-files.tar.gz` - All uploaded files and avatars + +### 4. Transfer Data to New Server + +```bash +# Copy files to new server +scp chat-schema.sql chat-data.sql chat-files.tar.gz user@new-server:/tmp/ +``` + +### 5. Import Data on New Server + +```bash +# On new server +./import-chat-data.sh +``` + +### 6. Update Configuration + +Update your environment variables to point to the new database server. + +### 7. Verify Migration + +Run the verification script to ensure everything transferred correctly: +```bash +node verify-migration.js +``` + +## Files Provided + +1. `create-new-database.sql` - Creates database and user on new server +2. `export-chat-data.sh` - Exports data from current server +3. `import-chat-data.sh` - Imports data to new server +4. `verify-migration.js` - Verifies data integrity +5. 
`update-config-template.env` - Template for new configuration + +## Important Notes + +- **Backup first**: Always backup your current database before migration +- **Downtime**: Plan for application downtime during migration +- **File permissions**: Ensure file permissions are preserved during transfer +- **Network access**: Ensure new server can accept connections from your application diff --git a/inventory-server/chat/update-config-template.env b/inventory-server/chat/update-config-template.env new file mode 100644 index 0000000..0a34d84 --- /dev/null +++ b/inventory-server/chat/update-config-template.env @@ -0,0 +1,26 @@ +# Chat Server Database Configuration Template +# Copy this to your .env file and update the values for your new server + +# Database Configuration for New Server +CHAT_DB_HOST=your-new-server-ip-or-hostname +CHAT_DB_PORT=5432 +CHAT_DB_NAME=rocketchat_converted +CHAT_DB_USER=rocketchat_user +CHAT_DB_PASSWORD=your-secure-password + +# Chat Server Port +CHAT_PORT=3014 + +# Example configuration: +# CHAT_DB_HOST=192.168.1.100 +# CHAT_DB_PORT=5432 +# CHAT_DB_NAME=rocketchat_converted +# CHAT_DB_USER=rocketchat_user +# CHAT_DB_PASSWORD=MySecureP@ssw0rd123 + +# Notes: +# - Replace 'your-new-server-ip-or-hostname' with actual server address +# - Use a strong password for CHAT_DB_PASSWORD +# - Ensure the new server allows connections from your application server +# - Update any firewall rules to allow PostgreSQL connections (port 5432) +# - Test connectivity before updating production configuration diff --git a/inventory-server/chat/verify-migration.js b/inventory-server/chat/verify-migration.js new file mode 100755 index 0000000..80fb178 --- /dev/null +++ b/inventory-server/chat/verify-migration.js @@ -0,0 +1,231 @@ +#!/usr/bin/env node + +/** + * Chat Database Migration Verification Script + * + * This script verifies that the chat database migration was successful + * by comparing record counts and testing basic functionality. 
+ */ + +require('dotenv').config({ path: '../.env' }); +const { Pool } = require('pg'); + +// Database configuration +const pool = new Pool({ + host: process.env.CHAT_DB_HOST || 'localhost', + user: process.env.CHAT_DB_USER || 'rocketchat_user', + password: process.env.CHAT_DB_PASSWORD, + database: process.env.CHAT_DB_NAME || 'rocketchat_converted', + port: process.env.CHAT_DB_PORT || 5432, +}); + +const originalStats = process.argv[2] ? JSON.parse(process.argv[2]) : null; + +async function verifyMigration() { + console.log('๐Ÿ” Starting migration verification...\n'); + + try { + // Test basic connection + console.log('๐Ÿ”— Testing database connection...'); + const versionResult = await pool.query('SELECT version()'); + console.log('โœ… Database connection successful'); + console.log(` PostgreSQL version: ${versionResult.rows[0].version.split(' ')[1]}\n`); + + // Get table statistics + console.log('๐Ÿ“Š Checking table statistics...'); + const statsResult = await pool.query(` + SELECT + tablename, + n_live_tup as row_count, + n_dead_tup as dead_rows, + schemaname + FROM pg_stat_user_tables + WHERE schemaname = 'public' + ORDER BY n_live_tup DESC + `); + + if (statsResult.rows.length === 0) { + console.log('โŒ No tables found! 
Migration may have failed.'); + return false; + } + + console.log('๐Ÿ“‹ Table Statistics:'); + console.log(' Table Name | Row Count | Dead Rows'); + console.log(' -------------------|-----------|----------'); + + let totalRows = 0; + const tableStats = {}; + + for (const row of statsResult.rows) { + const rowCount = parseInt(row.row_count) || 0; + const deadRows = parseInt(row.dead_rows) || 0; + totalRows += rowCount; + tableStats[row.tablename] = rowCount; + + console.log(` ${row.tablename.padEnd(18)} | ${rowCount.toString().padStart(9)} | ${deadRows.toString().padStart(8)}`); + } + + console.log(`\n Total rows across all tables: ${totalRows}\n`); + + // Verify critical tables exist and have data + const criticalTables = ['users', 'message', 'room']; + console.log('๐Ÿ”‘ Checking critical tables...'); + + for (const table of criticalTables) { + if (tableStats[table] > 0) { + console.log(`โœ… ${table}: ${tableStats[table]} rows`); + } else if (tableStats[table] === 0) { + console.log(`โš ๏ธ ${table}: table exists but is empty`); + } else { + console.log(`โŒ ${table}: table not found`); + return false; + } + } + + // Test specific functionality + console.log('\n๐Ÿงช Testing specific functionality...'); + + // Test users table + const userTest = await pool.query(` + SELECT COUNT(*) as total_users, + COUNT(*) FILTER (WHERE active = true) as active_users, + COUNT(*) FILTER (WHERE type = 'user') as regular_users + FROM users + `); + + if (userTest.rows[0]) { + const { total_users, active_users, regular_users } = userTest.rows[0]; + console.log(`โœ… Users: ${total_users} total, ${active_users} active, ${regular_users} regular users`); + } + + // Test messages table + const messageTest = await pool.query(` + SELECT COUNT(*) as total_messages, + COUNT(DISTINCT rid) as unique_rooms, + MIN(ts) as oldest_message, + MAX(ts) as newest_message + FROM message + `); + + if (messageTest.rows[0]) { + const { total_messages, unique_rooms, oldest_message, newest_message } = 
messageTest.rows[0]; + console.log(`โœ… Messages: ${total_messages} total across ${unique_rooms} rooms`); + if (oldest_message && newest_message) { + console.log(` Date range: ${oldest_message.toISOString().split('T')[0]} to ${newest_message.toISOString().split('T')[0]}`); + } + } + + // Test rooms table + const roomTest = await pool.query(` + SELECT COUNT(*) as total_rooms, + COUNT(*) FILTER (WHERE t = 'c') as channels, + COUNT(*) FILTER (WHERE t = 'p') as private_groups, + COUNT(*) FILTER (WHERE t = 'd') as direct_messages + FROM room + `); + + if (roomTest.rows[0]) { + const { total_rooms, channels, private_groups, direct_messages } = roomTest.rows[0]; + console.log(`โœ… Rooms: ${total_rooms} total (${channels} channels, ${private_groups} private, ${direct_messages} DMs)`); + } + + // Test file uploads if table exists + if (tableStats.uploads > 0) { + const uploadTest = await pool.query(` + SELECT COUNT(*) as total_uploads, + COUNT(DISTINCT typegroup) as file_types, + pg_size_pretty(SUM(size)) as total_size + FROM uploads + WHERE size IS NOT NULL + `); + + if (uploadTest.rows[0]) { + const { total_uploads, file_types, total_size } = uploadTest.rows[0]; + console.log(`โœ… Uploads: ${total_uploads} files, ${file_types} types, ${total_size || 'unknown size'}`); + } + } + + // Test server health endpoint simulation + console.log('\n๐Ÿฅ Testing application endpoints simulation...'); + + try { + const healthTest = await pool.query(` + SELECT + (SELECT COUNT(*) FROM users WHERE active = true) as active_users, + (SELECT COUNT(*) FROM message) as total_messages, + (SELECT COUNT(*) FROM room) as total_rooms + `); + + if (healthTest.rows[0]) { + const stats = healthTest.rows[0]; + console.log('โœ… Health check simulation passed'); + console.log(` Active users: ${stats.active_users}`); + console.log(` Total messages: ${stats.total_messages}`); + console.log(` Total rooms: ${stats.total_rooms}`); + } + } catch (error) { + console.log(`โš ๏ธ Health check simulation failed: 
${error.message}`); + } + + // Check indexes + console.log('\n๐Ÿ“‡ Checking database indexes...'); + const indexResult = await pool.query(` + SELECT + schemaname, + tablename, + indexname, + indexdef + FROM pg_indexes + WHERE schemaname = 'public' + ORDER BY tablename, indexname + `); + + const indexesByTable = {}; + for (const idx of indexResult.rows) { + if (!indexesByTable[idx.tablename]) { + indexesByTable[idx.tablename] = []; + } + indexesByTable[idx.tablename].push(idx.indexname); + } + + for (const [table, indexes] of Object.entries(indexesByTable)) { + console.log(` ${table}: ${indexes.length} indexes`); + } + + console.log('\n๐ŸŽ‰ Migration verification completed successfully!'); + console.log('\nโœ… Summary:'); + console.log(` - Database connection: Working`); + console.log(` - Tables created: ${statsResult.rows.length}`); + console.log(` - Total data rows: ${totalRows}`); + console.log(` - Critical tables: All present`); + console.log(` - Indexes: ${indexResult.rows.length} total`); + + console.log('\n๐Ÿš€ Next steps:'); + console.log(' 1. Update your application configuration'); + console.log(' 2. Start your chat server'); + console.log(' 3. Test chat functionality in the browser'); + console.log(' 4. Monitor logs for any issues'); + + return true; + + } catch (error) { + console.error('โŒ Migration verification failed:', error.message); + console.error('\n๐Ÿ”ง Troubleshooting steps:'); + console.error(' 1. Check database connection settings'); + console.error(' 2. Verify database and user exist'); + console.error(' 3. Check PostgreSQL logs'); + console.error(' 4. Ensure import completed without errors'); + return false; + } finally { + await pool.end(); + } +} + +// Run verification +if (require.main === module) { + verifyMigration().then(success => { + process.exit(success ? 
0 : 1); + }); +} + +module.exports = { verifyMigration }; diff --git a/inventory-server/src/routes/import.js b/inventory-server/src/routes/import.js index 74ff34b..34b732c 100644 --- a/inventory-server/src/routes/import.js +++ b/inventory-server/src/routes/import.js @@ -376,8 +376,8 @@ router.post('/upload-image', upload.single('image'), (req, res) => { }); // Create URL for the uploaded file - using an absolute URL with domain - // This will generate a URL like: https://inventory.acot.site/uploads/products/filename.jpg - const baseUrl = 'https://inventory.acot.site'; + // This will generate a URL like: https://acot.site/uploads/products/filename.jpg + const baseUrl = 'https://acot.site'; const imageUrl = `${baseUrl}/uploads/products/${req.file.filename}`; // Schedule this image for deletion in 24 hours diff --git a/inventory-server/src/routes/reusable-images.js b/inventory-server/src/routes/reusable-images.js index e812ebb..7dd3f96 100644 --- a/inventory-server/src/routes/reusable-images.js +++ b/inventory-server/src/routes/reusable-images.js @@ -194,7 +194,7 @@ router.post('/upload', upload.single('image'), async (req, res) => { } // Create URL for the uploaded file - const baseUrl = 'https://inventory.acot.site'; + const baseUrl = 'https://acot.site'; const imageUrl = `${baseUrl}/uploads/reusable/${req.file.filename}`; const pool = req.app.locals.pool; diff --git a/inventory/src/components/product-import/steps/ImageUploadStep/components/ProductCard/SortableImage.tsx b/inventory/src/components/product-import/steps/ImageUploadStep/components/ProductCard/SortableImage.tsx index b56ea7d..b1ea1e8 100644 --- a/inventory/src/components/product-import/steps/ImageUploadStep/components/ProductCard/SortableImage.tsx +++ b/inventory/src/components/product-import/steps/ImageUploadStep/components/ProductCard/SortableImage.tsx @@ -41,7 +41,7 @@ const getFullImageUrl = (url: string): string => { } // Otherwise, it's a relative URL, prepend the domain - const baseUrl = 
'https://inventory.acot.site'; + const baseUrl = 'https://acot.site'; // Make sure url starts with / for path const path = url.startsWith('/') ? url : `/${url}`; return `${baseUrl}${path}`; diff --git a/inventory/src/components/product-import/steps/ImageUploadStep/hooks/useProductImagesInit.ts b/inventory/src/components/product-import/steps/ImageUploadStep/hooks/useProductImagesInit.ts index 5a3280b..79d80a0 100644 --- a/inventory/src/components/product-import/steps/ImageUploadStep/hooks/useProductImagesInit.ts +++ b/inventory/src/components/product-import/steps/ImageUploadStep/hooks/useProductImagesInit.ts @@ -74,7 +74,7 @@ export const useProductImagesInit = (data: Product[]) => { } // Otherwise, it's a relative URL, prepend the domain - const baseUrl = 'https://inventory.acot.site'; + const baseUrl = 'https://acot.site'; // Make sure url starts with / for path const path = url.startsWith('/') ? url : `/${url}`; return `${baseUrl}${path}`; diff --git a/inventory/src/components/product-import/steps/MatchColumnsStep/MatchColumnsStep.tsx b/inventory/src/components/product-import/steps/MatchColumnsStep/MatchColumnsStep.tsx index d332c29..f740a42 100644 --- a/inventory/src/components/product-import/steps/MatchColumnsStep/MatchColumnsStep.tsx +++ b/inventory/src/components/product-import/steps/MatchColumnsStep/MatchColumnsStep.tsx @@ -19,7 +19,7 @@ import { import { useQuery } from "@tanstack/react-query" import config from "@/config" import { Button } from "@/components/ui/button" -import { CheckCircle2, AlertCircle, EyeIcon, EyeOffIcon, ArrowRightIcon, XIcon, FileSpreadsheetIcon, LinkIcon, CheckIcon, ChevronsUpDown } from "lucide-react" +import { CheckCircle2, AlertCircle, EyeIcon, EyeOffIcon, ArrowRightIcon, XIcon, FileSpreadsheetIcon, LinkIcon, CheckIcon, ChevronsUpDown, Sparkles } from "lucide-react" import { Separator } from "@/components/ui/separator" import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table" import { 
Badge } from "@/components/ui/badge" @@ -59,6 +59,7 @@ export enum ColumnType { matchedSelectOptions, matchedMultiInput, matchedMultiSelect, + aiSupplemental, } export type MatchedOptions = { @@ -97,6 +98,7 @@ export type MatchedMultiSelectColumn = { value: T matchedOptions: MatchedOptions[] } +type AiSupplementalColumn = { type: ColumnType.aiSupplemental; index: number; header: string } export type Column = | EmptyColumn @@ -107,6 +109,7 @@ export type Column = | MatchedSelectOptionsColumn | MatchedMultiInputColumn | MatchedMultiSelectColumn + | AiSupplementalColumn export type Columns = Column[] @@ -114,15 +117,19 @@ export type Columns = Column[] const ColumnActions = memo(({ column, onIgnore, + onToggleAiSupplemental, toggleValueMapping, isExpanded, - canExpandValues + canExpandValues, + isAiSupplemental, }: { column: any, onIgnore: (index: number) => void, + onToggleAiSupplemental: (index: number) => void, toggleValueMapping: (index: number) => void, isExpanded: boolean, - canExpandValues: boolean + canExpandValues: boolean, + isAiSupplemental: boolean, }) => { // Create stable callback references to prevent unnecessary re-renders const handleIgnore = useCallback(() => { @@ -133,6 +140,10 @@ const ColumnActions = memo(({ toggleValueMapping(column.index); }, [toggleValueMapping, column.index]); + const handleToggleAiSupplemental = useCallback(() => { + onToggleAiSupplemental(column.index); + }, [onToggleAiSupplemental, column.index]); + return (
{canExpandValues && ( @@ -154,6 +165,15 @@ const ColumnActions = memo(({ } )} +