Add AI supplemental fields to product import, fix image upload URL, misc changes for Netcup server
inventory-server/chat/create-new-database.sql (new file, 45 lines)
@@ -0,0 +1,45 @@
-- PostgreSQL Database Creation Script for New Server
-- Run as: sudo -u postgres psql -f create-new-database.sql

-- Terminate all connections to the database (if it exists)
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = 'rocketchat_converted' AND pid <> pg_backend_pid();

-- Drop the database if it exists
DROP DATABASE IF EXISTS rocketchat_converted;

-- Create fresh database
CREATE DATABASE rocketchat_converted;

-- Create user (if not exists) - UPDATE PASSWORD BEFORE RUNNING!
DO $$
BEGIN
  IF NOT EXISTS (SELECT FROM pg_user WHERE usename = 'rocketchat_user') THEN
    CREATE USER rocketchat_user WITH PASSWORD 'HKjLgt23gWuPXzEAn3rW';
  END IF;
END $$;

-- Grant database privileges
GRANT CONNECT ON DATABASE rocketchat_converted TO rocketchat_user;
GRANT CREATE ON DATABASE rocketchat_converted TO rocketchat_user;

-- Connect to the new database (psql meta-commands take no trailing semicolon)
\c rocketchat_converted

-- Grant schema privileges
GRANT CREATE ON SCHEMA public TO rocketchat_user;
GRANT USAGE ON SCHEMA public TO rocketchat_user;

-- Grant privileges on all future tables and sequences
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO rocketchat_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE, SELECT ON SEQUENCES TO rocketchat_user;

-- Display success message
\echo 'Database created successfully!'
\echo 'IMPORTANT: Update the password for rocketchat_user before proceeding'
\echo 'Next steps:'
\echo '1. Update the password in this file'
\echo '2. Run export-chat-data.sh on your current server'
\echo '3. Transfer the exported files to this server'
\echo '4. Run import-chat-data.sh on this server'
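A quick sanity check after running the creation script (a minimal sketch; assumes the psql client is on the PATH and peer auth for the postgres user, as in the script's `sudo -u postgres` invocation):

```bash
# Confirm the database and role exist before moving on to the import
sudo -u postgres psql -c "\l rocketchat_converted"
sudo -u postgres psql -c "\du rocketchat_user"

# Test the application role's own credentials (prompts for the password)
psql -h localhost -U rocketchat_user -d rocketchat_converted -c "SELECT current_user, current_database();"
```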
inventory-server/chat/export-chat-data.sh (new executable file, 147 lines)
@@ -0,0 +1,147 @@
#!/bin/bash

# Chat Database Export Script
# This script exports the chat database schema and data for migration

set -e  # Exit on any error

echo "🚀 Starting chat database export..."

# Configuration - Update these values for your setup
DB_HOST="${CHAT_DB_HOST:-localhost}"
DB_PORT="${CHAT_DB_PORT:-5432}"
DB_NAME="${CHAT_DB_NAME:-rocketchat_converted}"
DB_USER="${CHAT_DB_USER:-rocketchat_user}"

# Check if database connection info is available
if [ -z "$CHAT_DB_PASSWORD" ]; then
  echo "⚠️ CHAT_DB_PASSWORD environment variable not set"
  echo "Please set it with: export CHAT_DB_PASSWORD='your_password'"
  exit 1
fi

echo "📊 Database: $DB_NAME on $DB_HOST:$DB_PORT"

# Create export directory
EXPORT_DIR="chat-migration-$(date +%Y%m%d-%H%M%S)"
mkdir -p "$EXPORT_DIR"

echo "📁 Export directory: $EXPORT_DIR"

# Export database schema
# (each pg_dump call sits in the `if` condition so the failure branch still
# runs under `set -e`; a bare `$?` check after the command would never see
# a non-zero status because `set -e` exits first)
echo "📋 Exporting database schema..."
if PGPASSWORD="$CHAT_DB_PASSWORD" pg_dump \
    -h "$DB_HOST" \
    -p "$DB_PORT" \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    --schema-only \
    --no-owner \
    --no-privileges \
    -f "$EXPORT_DIR/chat-schema.sql"; then
  echo "✅ Schema exported successfully"
else
  echo "❌ Schema export failed"
  exit 1
fi

# Export database data
echo "💾 Exporting database data..."
if PGPASSWORD="$CHAT_DB_PASSWORD" pg_dump \
    -h "$DB_HOST" \
    -p "$DB_PORT" \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    --data-only \
    --no-owner \
    --no-privileges \
    --disable-triggers \
    --column-inserts \
    -f "$EXPORT_DIR/chat-data.sql"; then
  echo "✅ Data exported successfully"
else
  echo "❌ Data export failed"
  exit 1
fi

# Export file uploads and avatars
echo "📎 Exporting chat files (uploads and avatars)..."
if [ -d "db-convert/db/files" ]; then
  cd db-convert/db
  tar -czf "../../$EXPORT_DIR/chat-files.tar.gz" files/
  cd ../..
  echo "✅ Files exported successfully"
else
  echo "⚠️ No files directory found at db-convert/db/files"
  echo "   This is normal if you have no file uploads"
  touch "$EXPORT_DIR/chat-files.tar.gz"
fi

# Get table statistics for verification
echo "📈 Generating export statistics..."
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
  -h "$DB_HOST" \
  -p "$DB_PORT" \
  -U "$DB_USER" \
  -d "$DB_NAME" \
  -c "
    SELECT
      schemaname,
      tablename,
      n_tup_ins as inserted_rows,
      n_tup_upd as updated_rows,
      n_tup_del as deleted_rows,
      n_live_tup as live_rows,
      n_dead_tup as dead_rows
    FROM pg_stat_user_tables
    ORDER BY n_live_tup DESC;
  " > "$EXPORT_DIR/table-stats.txt"

# Create export summary
cat > "$EXPORT_DIR/export-summary.txt" << EOF
Chat Database Export Summary
===========================

Export Date: $(date)
Database: $DB_NAME
Host: $DB_HOST:$DB_PORT
User: $DB_USER

Files Generated:
- chat-schema.sql: Database schema (tables, indexes, constraints)
- chat-data.sql: All table data
- chat-files.tar.gz: Uploaded files and avatars
- table-stats.txt: Database statistics
- export-summary.txt: This summary

Next Steps:
1. Transfer these files to your new server
2. Run create-new-database.sql on the new server first
3. Run import-chat-data.sh on the new server
4. Update your application configuration
5. Run verify-migration.js to validate the migration

Important Notes:
- Keep these files secure as they contain your chat data
- Ensure the new server has enough disk space
- Plan for application downtime during the migration
EOF

echo ""
echo "🎉 Export completed successfully!"
echo "📁 Files are in: $EXPORT_DIR/"
echo ""
echo "📋 Export Summary:"
ls -lh "$EXPORT_DIR/"
echo ""
echo "🚚 Next steps:"
echo "1. Transfer the $EXPORT_DIR/ directory to your new server"
echo "2. Run create-new-database.sql on the new server (update password first!)"
echo "3. Run import-chat-data.sh on the new server"
echo ""
echo "💡 To transfer files to new server:"
echo "   scp -r $EXPORT_DIR/ user@new-server:/tmp/"
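Typical invocation on the current server (a sketch; the password value is a placeholder, not a real credential):

```bash
# On the current server, from the directory that contains db-convert/db/files
export CHAT_DB_PASSWORD='your_password'
./export-chat-data.sh

# Then ship the generated directory to the new host (hostname is an example)
scp -r chat-migration-*/ user@new-server:/tmp/
```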
inventory-server/chat/import-chat-data.sh (new executable file, 167 lines)
@@ -0,0 +1,167 @@
#!/bin/bash

# Chat Database Import Script
# This script imports the chat database schema and data on the new server

set -e  # Exit on any error

echo "🚀 Starting chat database import..."

# Configuration - Update these values for your new server
DB_HOST="${CHAT_DB_HOST:-localhost}"
DB_PORT="${CHAT_DB_PORT:-5432}"
DB_NAME="${CHAT_DB_NAME:-rocketchat_converted}"
DB_USER="${CHAT_DB_USER:-rocketchat_user}"

# Check if database connection info is available
if [ -z "$CHAT_DB_PASSWORD" ]; then
  echo "⚠️ CHAT_DB_PASSWORD environment variable not set"
  echo "Please set it with: export CHAT_DB_PASSWORD='your_password'"
  exit 1
fi

# Find the migration directory
MIGRATION_DIR=""
if [ -d "/tmp" ]; then
  MIGRATION_DIR=$(find /tmp -maxdepth 1 -name "chat-migration-*" -type d | head -1)
fi

if [ -z "$MIGRATION_DIR" ]; then
  echo "❌ No migration directory found in /tmp/"
  echo "Please specify the migration directory:"
  read -p "Enter full path to migration directory: " MIGRATION_DIR
fi

if [ ! -d "$MIGRATION_DIR" ]; then
  echo "❌ Migration directory not found: $MIGRATION_DIR"
  exit 1
fi

echo "📁 Using migration directory: $MIGRATION_DIR"
echo "📊 Target database: $DB_NAME on $DB_HOST:$DB_PORT"

# Verify required files exist
REQUIRED_FILES=("chat-schema.sql" "chat-data.sql" "chat-files.tar.gz")
for file in "${REQUIRED_FILES[@]}"; do
  if [ ! -f "$MIGRATION_DIR/$file" ]; then
    echo "❌ Required file not found: $MIGRATION_DIR/$file"
    exit 1
  fi
done

echo "✅ All required files found"

# Test database connection
# (each psql call sits in the `if` condition so the failure branch still runs
# under `set -e`)
echo "🔗 Testing database connection..."
if PGPASSWORD="$CHAT_DB_PASSWORD" psql \
    -h "$DB_HOST" \
    -p "$DB_PORT" \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    -c "SELECT version();" > /dev/null; then
  echo "✅ Database connection successful"
else
  echo "❌ Database connection failed"
  echo "Please ensure:"
  echo "  1. PostgreSQL is running"
  echo "  2. Database '$DB_NAME' exists"
  echo "  3. User '$DB_USER' has access"
  echo "  4. Password is correct"
  exit 1
fi

# Import database schema
echo "📋 Importing database schema..."
if PGPASSWORD="$CHAT_DB_PASSWORD" psql \
    -h "$DB_HOST" \
    -p "$DB_PORT" \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    -f "$MIGRATION_DIR/chat-schema.sql"; then
  echo "✅ Schema imported successfully"
else
  echo "❌ Schema import failed"
  exit 1
fi

# Import database data
echo "💾 Importing database data..."
echo "   This may take a while depending on data size..."

if PGPASSWORD="$CHAT_DB_PASSWORD" psql \
    -h "$DB_HOST" \
    -p "$DB_PORT" \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    -f "$MIGRATION_DIR/chat-data.sql"; then
  echo "✅ Data imported successfully"
else
  echo "❌ Data import failed"
  echo "Check the error messages above for details"
  exit 1
fi

# Create files directory and import files
echo "📎 Setting up files directory..."
mkdir -p "db-convert/db"

if [ -s "$MIGRATION_DIR/chat-files.tar.gz" ]; then
  echo "📂 Extracting chat files..."
  cd db-convert/db
  tar -xzf "$MIGRATION_DIR/chat-files.tar.gz"
  cd ../..

  # Set proper permissions
  if [ -d "db-convert/db/files" ]; then
    chmod -R 755 db-convert/db/files
    echo "✅ Files imported and permissions set"
  else
    echo "⚠️ Files directory not created properly"
  fi
else
  echo "ℹ️ No files to import (empty archive)"
  mkdir -p "db-convert/db/files/uploads"
  mkdir -p "db-convert/db/files/avatars"
fi

# Get final table statistics
echo "📈 Generating import statistics..."
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
  -h "$DB_HOST" \
  -p "$DB_PORT" \
  -U "$DB_USER" \
  -d "$DB_NAME" \
  -c "
    SELECT
      tablename,
      n_live_tup as row_count
    FROM pg_stat_user_tables
    WHERE schemaname = 'public'
    ORDER BY n_live_tup DESC;
  "

# Create import summary
echo ""
echo "🎉 Import completed successfully!"
echo ""
echo "📋 Import Summary:"
echo "  Database: $DB_NAME"
echo "  Host: $DB_HOST:$DB_PORT"
echo "  Files location: $(pwd)/db-convert/db/files/"
echo ""
echo "🔍 Next steps:"
echo "1. Update your application configuration to use this database"
echo "2. Run verify-migration.js to validate the migration"
echo "3. Test your application thoroughly"
echo "4. Update DNS/load balancer to point to new server"
echo ""
echo "⚠️ Important:"
echo "- Keep the original data as backup until migration is fully validated"
echo "- Monitor the application closely after switching"
echo "- Have a rollback plan ready"
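On the new server the import auto-detects the transferred directory under /tmp (a sketch; placeholder password):

```bash
# On the new server
export CHAT_DB_PASSWORD='your_password'
./import-chat-data.sh   # picks up /tmp/chat-migration-* automatically
```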
inventory-server/chat/migrate-to-new-server.md (new file, 86 lines)
@@ -0,0 +1,86 @@
# Chat Database Migration Guide

This guide will help you migrate your chat database from the current server to a new PostgreSQL server.

## Overview

Your chat system uses:
- Database: `rocketchat_converted` (PostgreSQL)
- Main tables: users, message, room, uploads, avatars, subscription
- File storage: db-convert/db/files/ directory with uploads and avatars
- Environment configuration for database connection

## Migration Steps

### 1. Pre-Migration Setup

On your **new server**, ensure PostgreSQL is installed and running:

```bash
# Install PostgreSQL (if not already done)
sudo apt update
sudo apt install postgresql postgresql-contrib

# Start PostgreSQL service
sudo systemctl start postgresql
sudo systemctl enable postgresql
```

### 2. Create Database Schema on New Server

Run the provided migration script:

```bash
# On new server
sudo -u postgres psql -f create-new-database.sql
```

### 3. Export Data from Current Server

Run the export script:

```bash
# On current server
./export-chat-data.sh
```

This will create a `chat-migration-<timestamp>/` directory containing:
- `chat-schema.sql` - Database schema
- `chat-data.sql` - All table data
- `chat-files.tar.gz` - All uploaded files and avatars

### 4. Transfer Data to New Server

Transfer the whole export directory; `import-chat-data.sh` looks for a `chat-migration-*` directory under `/tmp`:

```bash
# Copy the export directory to the new server
scp -r chat-migration-*/ user@new-server:/tmp/
```

### 5. Import Data on New Server

```bash
# On new server
./import-chat-data.sh
```

### 6. Update Configuration

Update your environment variables to point to the new database server.
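A sketch based on `update-config-template.env` (all values below are placeholders):

```bash
# .env on the application server
CHAT_DB_HOST=new-server-ip-or-hostname
CHAT_DB_PORT=5432
CHAT_DB_NAME=rocketchat_converted
CHAT_DB_USER=rocketchat_user
CHAT_DB_PASSWORD=your-secure-password
```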
### 7. Verify Migration

Run the verification script to ensure everything transferred correctly:

```bash
node verify-migration.js
```

## Files Provided

1. `create-new-database.sql` - Creates database and user on new server
2. `export-chat-data.sh` - Exports data from current server
3. `import-chat-data.sh` - Imports data to new server
4. `verify-migration.js` - Verifies data integrity
5. `update-config-template.env` - Template for new configuration

## Important Notes

- **Backup first**: Always backup your current database before migration
- **Downtime**: Plan for application downtime during migration
- **File permissions**: Ensure file permissions are preserved during transfer
- **Network access**: Ensure the new server can accept connections from your application
inventory-server/chat/update-config-template.env (new file, 26 lines)
@@ -0,0 +1,26 @@
# Chat Server Database Configuration Template
# Copy this to your .env file and update the values for your new server

# Database Configuration for New Server
CHAT_DB_HOST=your-new-server-ip-or-hostname
CHAT_DB_PORT=5432
CHAT_DB_NAME=rocketchat_converted
CHAT_DB_USER=rocketchat_user
CHAT_DB_PASSWORD=your-secure-password

# Chat Server Port
CHAT_PORT=3014

# Example configuration:
# CHAT_DB_HOST=192.168.1.100
# CHAT_DB_PORT=5432
# CHAT_DB_NAME=rocketchat_converted
# CHAT_DB_USER=rocketchat_user
# CHAT_DB_PASSWORD=MySecureP@ssw0rd123

# Notes:
# - Replace 'your-new-server-ip-or-hostname' with actual server address
# - Use a strong password for CHAT_DB_PASSWORD
# - Ensure the new server allows connections from your application server
# - Update any firewall rules to allow PostgreSQL connections (port 5432)
# - Test connectivity before updating production configuration
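One way to test connectivity with these values before switching production over (a sketch; assumes the psql client is installed on the application server and the template contains simple KEY=VALUE lines, as above):

```bash
# Load the template values into the shell and attempt a connection
set -a; source update-config-template.env; set +a
PGPASSWORD="$CHAT_DB_PASSWORD" psql \
  -h "$CHAT_DB_HOST" -p "$CHAT_DB_PORT" \
  -U "$CHAT_DB_USER" -d "$CHAT_DB_NAME" \
  -c "SELECT 1;"
```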
inventory-server/chat/verify-migration.js (new executable file, 231 lines)
@@ -0,0 +1,231 @@
#!/usr/bin/env node

/**
 * Chat Database Migration Verification Script
 *
 * This script verifies that the chat database migration was successful
 * by comparing record counts and testing basic functionality.
 */

require('dotenv').config({ path: '../.env' });
const { Pool } = require('pg');

// Database configuration
const pool = new Pool({
  host: process.env.CHAT_DB_HOST || 'localhost',
  user: process.env.CHAT_DB_USER || 'rocketchat_user',
  password: process.env.CHAT_DB_PASSWORD,
  database: process.env.CHAT_DB_NAME || 'rocketchat_converted',
  port: process.env.CHAT_DB_PORT || 5432,
});

const originalStats = process.argv[2] ? JSON.parse(process.argv[2]) : null;

async function verifyMigration() {
  console.log('🔍 Starting migration verification...\n');

  try {
    // Test basic connection
    console.log('🔗 Testing database connection...');
    const versionResult = await pool.query('SELECT version()');
    console.log('✅ Database connection successful');
    console.log(`   PostgreSQL version: ${versionResult.rows[0].version.split(' ')[1]}\n`);

    // Get table statistics
    console.log('📊 Checking table statistics...');
    const statsResult = await pool.query(`
      SELECT
        tablename,
        n_live_tup as row_count,
        n_dead_tup as dead_rows,
        schemaname
      FROM pg_stat_user_tables
      WHERE schemaname = 'public'
      ORDER BY n_live_tup DESC
    `);

    if (statsResult.rows.length === 0) {
      console.log('❌ No tables found! Migration may have failed.');
      return false;
    }

    console.log('📋 Table Statistics:');
    console.log('   Table Name         | Row Count | Dead Rows');
    console.log('   -------------------|-----------|----------');

    let totalRows = 0;
    const tableStats = {};

    for (const row of statsResult.rows) {
      const rowCount = parseInt(row.row_count) || 0;
      const deadRows = parseInt(row.dead_rows) || 0;
      totalRows += rowCount;
      tableStats[row.tablename] = rowCount;

      console.log(`   ${row.tablename.padEnd(18)} | ${rowCount.toString().padStart(9)} | ${deadRows.toString().padStart(8)}`);
    }

    console.log(`\n   Total rows across all tables: ${totalRows}\n`);

    // Verify critical tables exist and have data
    const criticalTables = ['users', 'message', 'room'];
    console.log('🔑 Checking critical tables...');

    for (const table of criticalTables) {
      if (tableStats[table] > 0) {
        console.log(`✅ ${table}: ${tableStats[table]} rows`);
      } else if (tableStats[table] === 0) {
        console.log(`⚠️ ${table}: table exists but is empty`);
      } else {
        console.log(`❌ ${table}: table not found`);
        return false;
      }
    }

    // Test specific functionality
    console.log('\n🧪 Testing specific functionality...');

    // Test users table
    const userTest = await pool.query(`
      SELECT COUNT(*) as total_users,
             COUNT(*) FILTER (WHERE active = true) as active_users,
             COUNT(*) FILTER (WHERE type = 'user') as regular_users
      FROM users
    `);

    if (userTest.rows[0]) {
      const { total_users, active_users, regular_users } = userTest.rows[0];
      console.log(`✅ Users: ${total_users} total, ${active_users} active, ${regular_users} regular users`);
    }

    // Test messages table
    const messageTest = await pool.query(`
      SELECT COUNT(*) as total_messages,
             COUNT(DISTINCT rid) as unique_rooms,
             MIN(ts) as oldest_message,
             MAX(ts) as newest_message
      FROM message
    `);

    if (messageTest.rows[0]) {
      const { total_messages, unique_rooms, oldest_message, newest_message } = messageTest.rows[0];
      console.log(`✅ Messages: ${total_messages} total across ${unique_rooms} rooms`);
      if (oldest_message && newest_message) {
        console.log(`   Date range: ${oldest_message.toISOString().split('T')[0]} to ${newest_message.toISOString().split('T')[0]}`);
      }
    }

    // Test rooms table
    const roomTest = await pool.query(`
      SELECT COUNT(*) as total_rooms,
             COUNT(*) FILTER (WHERE t = 'c') as channels,
             COUNT(*) FILTER (WHERE t = 'p') as private_groups,
             COUNT(*) FILTER (WHERE t = 'd') as direct_messages
      FROM room
    `);

    if (roomTest.rows[0]) {
      const { total_rooms, channels, private_groups, direct_messages } = roomTest.rows[0];
      console.log(`✅ Rooms: ${total_rooms} total (${channels} channels, ${private_groups} private, ${direct_messages} DMs)`);
    }

    // Test file uploads if table exists
    if (tableStats.uploads > 0) {
      const uploadTest = await pool.query(`
        SELECT COUNT(*) as total_uploads,
               COUNT(DISTINCT typegroup) as file_types,
               pg_size_pretty(SUM(size)) as total_size
        FROM uploads
        WHERE size IS NOT NULL
      `);

      if (uploadTest.rows[0]) {
        const { total_uploads, file_types, total_size } = uploadTest.rows[0];
        console.log(`✅ Uploads: ${total_uploads} files, ${file_types} types, ${total_size || 'unknown size'}`);
      }
    }

    // Test server health endpoint simulation
    console.log('\n🏥 Testing application endpoints simulation...');

    try {
      const healthTest = await pool.query(`
        SELECT
          (SELECT COUNT(*) FROM users WHERE active = true) as active_users,
          (SELECT COUNT(*) FROM message) as total_messages,
          (SELECT COUNT(*) FROM room) as total_rooms
      `);

      if (healthTest.rows[0]) {
        const stats = healthTest.rows[0];
        console.log('✅ Health check simulation passed');
        console.log(`   Active users: ${stats.active_users}`);
        console.log(`   Total messages: ${stats.total_messages}`);
        console.log(`   Total rooms: ${stats.total_rooms}`);
      }
    } catch (error) {
      console.log(`⚠️ Health check simulation failed: ${error.message}`);
    }

    // Check indexes
    console.log('\n📇 Checking database indexes...');
    const indexResult = await pool.query(`
      SELECT
        schemaname,
        tablename,
        indexname,
        indexdef
      FROM pg_indexes
      WHERE schemaname = 'public'
      ORDER BY tablename, indexname
    `);

    const indexesByTable = {};
    for (const idx of indexResult.rows) {
      if (!indexesByTable[idx.tablename]) {
        indexesByTable[idx.tablename] = [];
      }
      indexesByTable[idx.tablename].push(idx.indexname);
    }

    for (const [table, indexes] of Object.entries(indexesByTable)) {
      console.log(`   ${table}: ${indexes.length} indexes`);
    }

    console.log('\n🎉 Migration verification completed successfully!');
    console.log('\n✅ Summary:');
    console.log(`   - Database connection: Working`);
    console.log(`   - Tables created: ${statsResult.rows.length}`);
    console.log(`   - Total data rows: ${totalRows}`);
    console.log(`   - Critical tables: All present`);
    console.log(`   - Indexes: ${indexResult.rows.length} total`);

    console.log('\n🚀 Next steps:');
    console.log('   1. Update your application configuration');
    console.log('   2. Start your chat server');
    console.log('   3. Test chat functionality in the browser');
    console.log('   4. Monitor logs for any issues');

    return true;

  } catch (error) {
    console.error('❌ Migration verification failed:', error.message);
    console.error('\n🔧 Troubleshooting steps:');
    console.error('   1. Check database connection settings');
    console.error('   2. Verify database and user exist');
    console.error('   3. Check PostgreSQL logs');
    console.error('   4. Ensure import completed without errors');
    return false;
  } finally {
    await pool.end();
  }
}

// Run verification
if (require.main === module) {
  verifyMigration().then(success => {
    process.exit(success ? 0 : 1);
  });
}

module.exports = { verifyMigration };
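Running the verifier (a sketch; the script reads the CHAT_DB_* variables from ../.env relative to itself, so run it from the chat directory):

```bash
cd inventory-server/chat
node verify-migration.js   # exits 0 on success, 1 on failure
```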
@@ -376,8 +376,8 @@ router.post('/upload-image', upload.single('image'), (req, res) => {
   });

   // Create URL for the uploaded file - using an absolute URL with domain
-  // This will generate a URL like: https://inventory.acot.site/uploads/products/filename.jpg
-  const baseUrl = 'https://inventory.acot.site';
+  // This will generate a URL like: https://acot.site/uploads/products/filename.jpg
+  const baseUrl = 'https://acot.site';
   const imageUrl = `${baseUrl}/uploads/products/${req.file.filename}`;

   // Schedule this image for deletion in 24 hours
@@ -194,7 +194,7 @@ router.post('/upload', upload.single('image'), async (req, res) => {
   }

   // Create URL for the uploaded file
-  const baseUrl = 'https://inventory.acot.site';
+  const baseUrl = 'https://acot.site';
   const imageUrl = `${baseUrl}/uploads/reusable/${req.file.filename}`;

   const pool = req.app.locals.pool;
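A quick way to confirm the corrected base URL actually serves uploads after deploy (a sketch; the filename is hypothetical, substitute one returned by the upload endpoint):

```bash
# Expect a 200 and an image content type
curl -sI https://acot.site/uploads/products/example.jpg | head -n 5
```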
@@ -41,7 +41,7 @@ const getFullImageUrl = (url: string): string => {
   }

   // Otherwise, it's a relative URL, prepend the domain
-  const baseUrl = 'https://inventory.acot.site';
+  const baseUrl = 'https://acot.site';
   // Make sure url starts with / for path
   const path = url.startsWith('/') ? url : `/${url}`;
   return `${baseUrl}${path}`;
@@ -74,7 +74,7 @@ export const useProductImagesInit = (data: Product[]) => {
   }

   // Otherwise, it's a relative URL, prepend the domain
-  const baseUrl = 'https://inventory.acot.site';
+  const baseUrl = 'https://acot.site';
   // Make sure url starts with / for path
   const path = url.startsWith('/') ? url : `/${url}`;
   return `${baseUrl}${path}`;
@@ -19,7 +19,7 @@ import {
 import { useQuery } from "@tanstack/react-query"
 import config from "@/config"
 import { Button } from "@/components/ui/button"
-import { CheckCircle2, AlertCircle, EyeIcon, EyeOffIcon, ArrowRightIcon, XIcon, FileSpreadsheetIcon, LinkIcon, CheckIcon, ChevronsUpDown } from "lucide-react"
+import { CheckCircle2, AlertCircle, EyeIcon, EyeOffIcon, ArrowRightIcon, XIcon, FileSpreadsheetIcon, LinkIcon, CheckIcon, ChevronsUpDown, Sparkles } from "lucide-react"
 import { Separator } from "@/components/ui/separator"
 import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table"
 import { Badge } from "@/components/ui/badge"
@@ -59,6 +59,7 @@ export enum ColumnType {
   matchedSelectOptions,
   matchedMultiInput,
   matchedMultiSelect,
+  aiSupplemental,
 }

 export type MatchedOptions<T> = {
@@ -97,6 +98,7 @@ export type MatchedMultiSelectColumn<T> = {
   value: T
   matchedOptions: MatchedOptions<T>[]
 }
+type AiSupplementalColumn = { type: ColumnType.aiSupplemental; index: number; header: string }

 export type Column<T extends string> =
   | EmptyColumn
@@ -107,6 +109,7 @@ export type Column<T extends string> =
   | MatchedSelectOptionsColumn<T>
   | MatchedMultiInputColumn<T>
   | MatchedMultiSelectColumn<T>
+  | AiSupplementalColumn

 export type Columns<T extends string> = Column<T>[]

@@ -114,15 +117,19 @@ export type Columns<T extends string> = Column<T>[]
 const ColumnActions = memo(({
   column,
   onIgnore,
+  onToggleAiSupplemental,
   toggleValueMapping,
   isExpanded,
-  canExpandValues
+  canExpandValues,
+  isAiSupplemental,
 }: {
   column: any,
   onIgnore: (index: number) => void,
+  onToggleAiSupplemental: (index: number) => void,
   toggleValueMapping: (index: number) => void,
   isExpanded: boolean,
-  canExpandValues: boolean
+  canExpandValues: boolean,
+  isAiSupplemental: boolean,
 }) => {
   // Create stable callback references to prevent unnecessary re-renders
   const handleIgnore = useCallback(() => {
@@ -133,6 +140,10 @@ const ColumnActions = memo(({
     toggleValueMapping(column.index);
   }, [toggleValueMapping, column.index]);

+  const handleToggleAiSupplemental = useCallback(() => {
+    onToggleAiSupplemental(column.index);
+  }, [onToggleAiSupplemental, column.index]);
+
   return (
     <div className="flex justify-end items-center gap-1">
       {canExpandValues && (
@@ -154,6 +165,15 @@ const ColumnActions = memo(({
           }
         </Button>
       )}
+      <Button
+        variant={isAiSupplemental ? "default" : "ghost"}
+        size="sm"
+        onClick={handleToggleAiSupplemental}
+        className="h-7 px-2"
+      >
+        <Sparkles className="h-3.5 w-3.5 mr-1" />
+        {isAiSupplemental ? "AI info" : "Use for AI"}
+      </Button>
       <Button
         variant="ghost"
         size="sm"
@@ -900,6 +920,7 @@ export const MatchColumnsStep = React.memo(<T extends string>({
     const withUnmappedValues = columns.filter(col =>
       col.type !== ColumnType.empty &&
       col.type !== ColumnType.ignored &&
+      col.type !== ColumnType.aiSupplemental &&
       columnsWithUnmappedValuesMap.get(col.index)
     );

@@ -907,6 +928,7 @@ export const MatchColumnsStep = React.memo(<T extends string>({
     const fullyMapped = columns.filter(col =>
       col.type !== ColumnType.empty &&
       col.type !== ColumnType.ignored &&
+      col.type !== ColumnType.aiSupplemental &&
       !columnsWithUnmappedValuesMap.get(col.index)
     );

@@ -925,6 +947,10 @@ export const MatchColumnsStep = React.memo(<T extends string>({
     return columns.filter(col => col.type === ColumnType.ignored);
   }, [columns]);

+  const aiSupplementalColumns = useMemo(() => {
+    return columns.filter(col => col.type === ColumnType.aiSupplemental);
+  }, [columns]);
+
   // Get mapping information for required fields
   const requiredFieldMappings = useMemo(() => {
     const fieldsArray = Array.isArray(fields) ? fields : [fields];
@@ -1132,6 +1158,24 @@ export const MatchColumnsStep = React.memo(<T extends string>({
     [columns, setColumns],
   )

+  const onToggleAiSupplemental = useCallback(
+    (columnIndex: number) => {
+      setColumns(columns.map((column, index) => {
+        if (columnIndex !== index) return column;
+
+        if (column.type === ColumnType.aiSupplemental) {
+          return { type: ColumnType.empty, index: column.index, header: column.header } as Column<T>;
+        }
+
+        return { type: ColumnType.aiSupplemental, index: column.index, header: column.header } as Column<T>;
+      }));
+
+      setExpandedValues(prev => prev.filter(idx => idx !== columnIndex));
+      setUserCollapsedColumns(prev => prev.filter(idx => idx !== columnIndex));
+    },
+    [columns, setColumns, setExpandedValues, setUserCollapsedColumns],
+  )
+
   const onRevertIgnore = useCallback(
     (columnIndex: number) => {
       setColumns(columns.map((column, index) => (columnIndex === index ? setColumn(column) : column)))
@@ -1210,11 +1254,46 @@ export const MatchColumnsStep = React.memo(<T extends string>({
   // Fix handleOnContinue - it should be useCallback, not useEffect
   const handleOnContinue = useCallback(async () => {
     setIsLoading(true)
-    // Normalize the data with global selections before continuing
-    const normalizedData = normalizeTableData(columns, data, fields)
-    await onContinue(normalizedData, data, columns, globalSelections)
-    setIsLoading(false)
-  }, [onContinue, columns, data, fields, globalSelections])
+    try {
+      // Normalize the data with global selections before continuing
+      const normalizedData = normalizeTableData(columns, data, fields)
+      const hasAiSupplementalColumns = aiSupplementalColumns.length > 0
+
+      const enhancedData = hasAiSupplementalColumns
+        ? normalizedData.map((row, rowIndex) => {
+            const supplementalEntries = aiSupplementalColumns.reduce<Record<string, string>>((acc, column) => {
+              const rawValue = data[rowIndex]?.[column.index]
+              if (rawValue === undefined || rawValue === null) {
+                return acc
+              }

+              const trimmedValue = String(rawValue).trim()
+              if (!trimmedValue) {
+                return acc
+              }
+
+              const headerLabel = column.header?.trim() || `Column ${column.index + 1}`
+              acc[headerLabel] = trimmedValue
+              return acc
+            }, {})
+
+            if (Object.keys(supplementalEntries).length === 0) {
+              return row
+            }
+
+            return {
+              ...row,
+              __aiSupplemental: supplementalEntries,
+            } as typeof row & { __aiSupplemental: Record<string, string> }
+          })
+        : normalizedData
+
+      await onContinue(enhancedData, data, columns, globalSelections)
+    } finally {
+      setIsLoading(false)
+    }
+  }, [onContinue, columns, data, fields, globalSelections, aiSupplementalColumns])

   useEffect(
     () => {
@@ -1289,6 +1368,7 @@ export const MatchColumnsStep = React.memo(<T extends string>({
     const matchedColumnForField = columns.find(col =>
       col.type !== ColumnType.empty &&
       col.type !== ColumnType.ignored &&
+      col.type !== ColumnType.aiSupplemental &&
       "value" in col &&
       col.value === fieldKey &&
       col.index !== currentColumnIndex
@@ -1309,6 +1389,14 @@ export const MatchColumnsStep = React.memo(<T extends string>({
       return <Badge variant="outline">Ignored</Badge>;
     }

+    if (column.type === ColumnType.aiSupplemental) {
+      return (
+        <Badge variant="outline" className="bg-sky-100 text-sky-800 border-sky-200">
+          AI supplemental
+        </Badge>
+      );
+    }
+
     // Get the pre-created onChange handler for this column
     const handleChange = columnChangeHandlers.get(column.index);

@@ -1373,9 +1461,11 @@ export const MatchColumnsStep = React.memo(<T extends string>({
                 <ColumnActions
                   column={column}
                   onIgnore={onIgnore}
+                  onToggleAiSupplemental={onToggleAiSupplemental}
                   toggleValueMapping={toggleValueMappingOptimized}
                   isExpanded={isExpanded}
                   canExpandValues={isExpandable(column)}
+                  isAiSupplemental={column.type === ColumnType.aiSupplemental}
                 />
               </TableCell>
             </TableRow>
@@ -1409,9 +1499,11 @@ export const MatchColumnsStep = React.memo(<T extends string>({
                 <ColumnActions
                   column={column}
                   onIgnore={onIgnore}
+                  onToggleAiSupplemental={onToggleAiSupplemental}
                   toggleValueMapping={toggleValueMappingOptimized}
                   isExpanded={expandedValues.includes(column.index)}
                   canExpandValues={isExpandable(column)}
+                  isAiSupplemental={column.type === ColumnType.aiSupplemental}
                 />
               </TableCell>
             </TableRow>
@@ -1439,9 +1531,11 @@ export const MatchColumnsStep = React.memo(<T extends string>({
                 <ColumnActions
                   column={column}
                   onIgnore={onIgnore}
+                  onToggleAiSupplemental={onToggleAiSupplemental}
                   toggleValueMapping={toggleValueMappingOptimized}
                   isExpanded={isExpanded}
                   canExpandValues={canExpandValues}
+                  isAiSupplemental={column.type === ColumnType.aiSupplemental}
                 />
               </TableCell>
             </TableRow>
@@ -1458,6 +1552,35 @@ export const MatchColumnsStep = React.memo(<T extends string>({
               );
             })}

+            {/* Show AI supplemental columns if showAllColumns is true */}
+            {showAllColumns && aiSupplementalColumns.map((column) => (
+              <TableRow key={`ai-${column.index}`} className="bg-sky-50 hover:bg-sky-100">
+                <TableCell className="font-medium">{column.header}</TableCell>
+                <TableCell className="text-center">
+                  {renderSamplePreview(column.index)}
+                </TableCell>
+                <TableCell>
+                  <ArrowRightIcon className="h-4 w-4 mx-auto text-sky-600" />
+                </TableCell>
+                <TableCell>
+                  <Badge variant="outline" className="bg-sky-100 text-sky-800 border-sky-200">
+                    AI supplemental
+                  </Badge>
+                </TableCell>
+                <TableCell className="flex justify-end items-center gap-1">
+                  <ColumnActions
+                    column={column}
+                    onIgnore={onIgnore}
+                    onToggleAiSupplemental={onToggleAiSupplemental}
+                    toggleValueMapping={toggleValueMappingOptimized}
+                    isExpanded={false}
+                    canExpandValues={false}
+                    isAiSupplemental
+                  />
+                </TableCell>
+              </TableRow>
+            ))}
+
             {/* Show ignored columns if showAllColumns is true */}
             {showAllColumns && ignoredColumns.map((column) => (
               <TableRow key={`ignored-${column.index}`} className="text-muted-foreground bg-red-50 hover:bg-red-100">
@@ -1509,12 +1632,14 @@ export const MatchColumnsStep = React.memo(<T extends string>({
     unmatchedColumns,
     matchedColumns,
     ignoredColumns,
+    aiSupplementalColumns,
     showAllColumns,
     expandedValues,
     renderSamplePreview,
     renderFieldSelector,
     renderValueMappings,
     onIgnore,
+    onToggleAiSupplemental,
     onRevertIgnore,
     toggleValueMappingOptimized,
     isExpandable

@@ -245,6 +245,7 @@ export const useValidationState = <T extends string>({
         __original,
         __corrected,
         __changes,
+        __aiSupplemental: _aiSupplemental,
         ...cleanRow
       } = row;
       return cleanRow as any;

@@ -17,7 +17,7 @@ export function prepareDataForAiValidation<T extends string>(
   fields: Fields<T>
 ): Record<string, any>[] {
   return data.map(item => {
-    const { __index, ...rest } = item;
+    const { __index, __aiSupplemental, ...rest } = item as Record<string, any>;
     const withAllKeys: Record<string, any> = {};

     fields.forEach((f) => {
@@ -31,6 +31,10 @@ export function prepareDataForAiValidation<T extends string>(
       }
     });

+    if (typeof __aiSupplemental === 'object' && __aiSupplemental !== null) {
+      withAllKeys.aiSupplementalInfo = __aiSupplemental;
+    }
+
     return withAllKeys;
   });
 }

@@ -44,31 +44,31 @@ export default defineConfig(({ mode }) => {
       port: 5175,
       proxy: {
         "/api/aircall": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api/klaviyo": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api/meta": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api/gorgias": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api/dashboard-analytics": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           cookieDomainRewrite: {
@@ -76,25 +76,25 @@ export default defineConfig(({ mode }) => {
           },
         },
         "/api/typeform": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api/acot": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api/clarity": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
         },
         "/api": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           ws: true,
@@ -115,14 +115,14 @@ export default defineConfig(({ mode }) => {
           },
         },
         "/dashboard-auth": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           ws: true,
           rewrite: (path) => path.replace("/dashboard-auth", "/auth"),
         },
         "/auth-inv": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           ws: true,
@@ -149,7 +149,7 @@ export default defineConfig(({ mode }) => {
           },
         },
         "/chat-api": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           ws: true,
@@ -170,7 +170,7 @@ export default defineConfig(({ mode }) => {
           },
         },
         "/uploads": {
-          target: "https://inventory.kent.pw",
+          target: "https://acot.site",
           changeOrigin: true,
           secure: false,
           rewrite: (path) => path,
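To confirm the dev proxy now forwards to the new origin (a sketch; assumes the Vite dev server is running on port 5175 and that the backend exposes these paths):

```bash
# Requests to the dev server should be proxied through to https://acot.site
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:5175/uploads/
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:5175/api/
```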