import React, { useState, useEffect } from 'react';
import {
  postgresApi,
  s3Api,
  postgresToS3Api,
  injectEnv,
  getCurrentEnv,
  healthCheck,
  formatEnvVars,
  copyToClipboard,
  formatMigrationDetails,
  formatFileSize,
  formatPostgresConfig,
  formatS3Config,
  extractPostgresInfo,
  extractS3Info,
  buildPostgresConnectionString,
  buildS3Url,
  groupTablesBySchema,
  groupObjectsByPrefix,
  // Progress helpers that were previously missing (translated from Arabic)
  streamProgress,
  streamProgressAsync,
  createProgressStream,
  formatProgressDisplay,
  createProgressBar,
  estimateRemainingTime,
  formatTimeEstimate
} from './unifiedApi';
import './App.css';

// Migration console component covering three flows visible below:
// PostgreSQL -> PostgreSQL, S3 -> S3, and PostgreSQL -> S3.
function App() {
  // ==================== PostgreSQL Source Configuration ====================
  const [postgresConfig, setPostgresConfig] = useState({ host: '', user: '', password: '', port: 5432, database: '', uri: '' });

  // ==================== PostgreSQL Destination Configuration ====================
  const [destPostgresConfig, setDestPostgresConfig] = useState({ host: '', user: '', password: '', port: 5432, database: '', uri: '' });

  // ==================== Progress Tracking States ====================
  const [progressData, setProgressData] = useState(null);      // latest progress payload for the active migration
  const [progressStream, setProgressStream] = useState(null);  // handle returned by createProgressStream
  const [showProgressModal, setShowProgressModal] = useState(false);
  const [progressHistory, setProgressHistory] = useState([]);  // completed/failed migration summaries

  // ==================== Source S3 Configuration ====================
  const [sourceS3Config, setSourceS3Config] = useState({ accessKeyId: '', secretAccessKey: '', region: 'us-east-1', endpointUrl: '', sessionToken: '', bucket: '', prefix: '' });

  // ==================== Destination S3 Configuration ====================
  const [destS3Config, setDestS3Config] = useState({ accessKeyId: '', secretAccessKey: '', region: 'us-east-1', endpointUrl: '', sessionToken: '', bucket: '', prefix: '' });

  // ==================== PostgreSQL to S3 Configuration ====================
  const [pgToS3Config, setPgToS3Config] = useState({ postgresUri: '', s3Bucket: '', s3Prefix: '', compress: true, format: 'csv' });

  // ==================== State Variables ====================
  const [postgresSchemas, setPostgresSchemas] = useState([]);
  const [postgresTables, setPostgresTables] = useState([]);
  const [sourceBuckets, setSourceBuckets] = useState([]);
  const [destBuckets, setDestBuckets] = useState([]);
  const [sourceObjects, setSourceObjects] = useState([]);
  const [destObjects, setDestObjects] = useState([]);
  const [selectedPostgresDb, setSelectedPostgresDb] = useState('');
  const [selectedSourceBucket, setSelectedSourceBucket] = useState('');
  const [selectedDestBucket, setSelectedDestBucket] = useState('');
  const [selectedPrefix, setSelectedPrefix] = useState('');
  const [selectedSchemas, setSelectedSchemas] = useState([]);
  const [selectedTables, setSelectedTables] = useState([]);
  const [selectedObjects, setSelectedObjects] = useState([]);
  const [migrations, setMigrations] = useState([]);
  const [activeMigration, setActiveMigration] = useState(null);
  const [migrationLogs, setMigrationLogs] = useState([]);
  const [parsedPostgresUri, setParsedPostgresUri] = useState(null);
  const [parsedS3Uri, setParsedS3Uri] = useState(null);
  const [currentEnv, setCurrentEnv] = useState({});
  const [loading, setLoading] = useState(false);
  const [activeTab, setActiveTab] = useState('postgres');
  const [envFormat, setEnvFormat] = useState('dotenv');
  const [notifications, setNotifications] = useState([]);
  const [postgresConnectionStatus, setPostgresConnectionStatus] = useState(null);
  const [destPostgresConnectionStatus, setDestPostgresConnectionStatus] = useState(null);
  const [sourceS3ConnectionStatus, setSourceS3ConnectionStatus] = useState(null);
  const [destS3ConnectionStatus, setDestS3ConnectionStatus] = useState(null);
  const [showSourceBuckets, setShowSourceBuckets] = useState(false);
  const [showDestBuckets, setShowDestBuckets] = useState(false);
  const [showPostgresSchemas, setShowPostgresSchemas] = useState(false);
  const [migrationOptions, setMigrationOptions] =
useState({ createDestBucket: true, maxConcurrent: 5, preserveMetadata: true, storageClass: 'STANDARD', includePatterns: '', excludePatterns: '' });
  const [objectGroups, setObjectGroups] = useState({});
  const [tableGroups, setTableGroups] = useState({});
  const [totalSize, setTotalSize] = useState(0);
  const [totalRows, setTotalRows] = useState(0);

  // ==================== Notification System ====================
  // Appends a toast-style notification; it is auto-removed after 5 seconds.
  // NOTE(review): Date.now() as the id can collide if two notifications
  // arrive within the same millisecond — confirm whether that matters here.
  const addNotification = (message, type = 'info') => {
    const id = Date.now();
    setNotifications(prev => [...prev, { id, message, type }]);
    setTimeout(() => { setNotifications(prev => prev.filter(n => n.id !== id)); }, 5000);
  };

  // ==================== Load Initial Data ====================
  useEffect(() => { loadCurrentEnv(); }, []);

  // Fetches the currently injected environment variables from the backend.
  const loadCurrentEnv = async () => {
    try {
      const result = await getCurrentEnv();
      if (result.success) { setCurrentEnv(result.environment_variables); }
    } catch (error) {
      console.error('Error loading current env:', error);
    }
  };

  // ============================================================================
  // PostgreSQL Functions
  // ============================================================================
  // Tests source/destination PostgreSQL connectivity. Builds a URI from the
  // individual fields when no explicit URI was entered.
  const testPostgresConnection = async (isSource = true) => {
    setLoading(true);
    try {
      const config = isSource ? postgresConfig : destPostgresConfig;
      let uri = config.uri;
      if (!uri && config.host && config.user && config.password && config.database) {
        uri = `postgresql://${config.user}:${config.password}@${config.host}:${config.port}/${config.database}`;
      }
      const result = await postgresApi.testConnection({ useEnvVars: false, uri });
      if (result.success) {
        addNotification(`✅ PostgreSQL ${isSource ? 'source' : 'destination'} connection successful!`, 'success');
        if (isSource) {
          setPostgresConnectionStatus({ success: true, host: config.host, port: config.port, version: result.version, database: result.database });
        } else {
          setDestPostgresConnectionStatus({ success: true, host: config.host, port: config.port, version: result.version, database: result.database });
        }
        loadCurrentEnv();
      } else {
        addNotification(`❌ PostgreSQL ${isSource ? 'source' : 'destination'} connection failed: ${result.error}`, 'error');
        if (isSource) {
          setPostgresConnectionStatus({ success: false, error: result.error, diagnostic: result.diagnostic });
        } else {
          setDestPostgresConnectionStatus({ success: false, error: result.error, diagnostic: result.diagnostic });
        }
      }
    } catch (error) {
      addNotification(`❌ Error testing PostgreSQL connection: ${error.message}`, 'error');
      if (isSource) {
        setPostgresConnectionStatus({ success: false, error: error.message });
      } else {
        setDestPostgresConnectionStatus({ success: false, error: error.message });
      }
    }
    setLoading(false);
  };

  // Lists schemas of the selected PostgreSQL server.
  // NOTE(review): buildPostgresConnectionString is called without the
  // password — confirm that matches its signature in unifiedApi.
  const getPostgresSchemas = async (isSource = true) => {
    const config = isSource ? postgresConfig : destPostgresConfig;
    let uri = config.uri || buildPostgresConnectionString(config.host, config.database, config.user, config.port);
    if (!uri) { addNotification('Please enter PostgreSQL connection details', 'warning'); return; }
    setLoading(true);
    try {
      const result = await postgresApi.getSchemas(uri);
      if (result.success) {
        setPostgresSchemas(result.schemas || []);
        setShowPostgresSchemas(true);
        addNotification(`✅ Found ${result.count} schema(s)`, 'success');
      } else {
        addNotification(`❌ Failed to get schemas: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error getting schemas: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Lists tables (optionally limited to one schema), accumulates the total
  // row count, and groups tables by schema for the UI.
  const getPostgresTables = async (schema = '', isSource = true) => {
    const config = isSource ? postgresConfig : destPostgresConfig;
    let uri = config.uri || buildPostgresConnectionString(config.host, config.database, config.user, config.port);
    if (!uri) { addNotification('Please enter PostgreSQL connection details', 'warning'); return; }
    setLoading(true);
    try {
      const result = await postgresApi.getTables(uri, schema);
      if (result.success) {
        setPostgresTables(result.tables || []);
        const total = (result.tables || []).reduce((sum, table) => sum + (table.rows || 0), 0);
        setTotalRows(total);
        const groups = groupTablesBySchema(result.tables || []);
        setTableGroups(groups);
        addNotification(`✅ Found ${result.count} table(s)`, 'success');
      } else {
        addNotification(`❌ Failed to get tables: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error getting tables: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Parses a PostgreSQL URI server-side and copies its parts into the
  // source or destination config form.
  const parsePostgresUri = async (uri, isSource = true) => {
    try {
      const result = await postgresApi.parseUri(uri);
      if (result.success) {
        setParsedPostgresUri(result);
        if (result.parsed) {
          if (isSource) {
            setPostgresConfig({ host: result.parsed.host || '', user: result.parsed.user || '', password: result.parsed.password || '', port: result.parsed.port || 5432, database: result.parsed.database || '', uri: uri });
          } else {
            setDestPostgresConfig({ host: result.parsed.host || '', user: result.parsed.user || '', password: result.parsed.password || '', port: result.parsed.port || 5432, database: result.parsed.database || '', uri: uri });
          }
        }
        addNotification('✅ PostgreSQL URI parsed successfully', 'success');
      } else {
        setParsedPostgresUri(null);
        addNotification(`❌ Failed to parse PostgreSQL URI: ${result.error}`, 'error');
      }
    } catch (error) {
      console.error('Error parsing PostgreSQL URI:', error);
      addNotification(`❌ Error parsing PostgreSQL URI: ${error.message}`, 'error');
    }
  };

  // ============================================================================
  // S3 Functions
  // ============================================================================
  const
testSourceS3Connection = async () => {
    // Validates the SOURCE S3 credentials/endpoint via the backend.
    setLoading(true);
    try {
      const result = await s3Api.testSourceConnection({ useEnvVars: false, accessKeyId: sourceS3Config.accessKeyId, secretAccessKey: sourceS3Config.secretAccessKey, region: sourceS3Config.region, endpointUrl: sourceS3Config.endpointUrl, sessionToken: sourceS3Config.sessionToken });
      if (result.success) {
        addNotification(`✅ Source S3 connection successful!`, 'success');
        setSourceS3ConnectionStatus({ success: true, endpoint: sourceS3Config.endpointUrl || 'AWS S3 (default)', region: sourceS3Config.region, bucketCount: result.bucket_count });
        loadCurrentEnv();
      } else {
        addNotification(`❌ Source S3 connection failed: ${result.error}`, 'error');
        setSourceS3ConnectionStatus({ success: false, error: result.error, diagnostic: result.diagnostic });
      }
    } catch (error) {
      addNotification(`❌ Error testing source S3 connection: ${error.message}`, 'error');
      setSourceS3ConnectionStatus({ success: false, error: error.message });
    }
    setLoading(false);
  };

  // Validates the DESTINATION S3 credentials/endpoint via the backend.
  const testDestinationS3Connection = async () => {
    setLoading(true);
    try {
      const result = await s3Api.testDestinationConnection({ useEnvVars: false, accessKeyId: destS3Config.accessKeyId, secretAccessKey: destS3Config.secretAccessKey, region: destS3Config.region, endpointUrl: destS3Config.endpointUrl, sessionToken: destS3Config.sessionToken });
      if (result.success) {
        addNotification(`✅ Destination S3 connection successful!`, 'success');
        setDestS3ConnectionStatus({ success: true, endpoint: destS3Config.endpointUrl || 'AWS S3 (default)', region: destS3Config.region, bucketCount: result.bucket_count });
        loadCurrentEnv();
      } else {
        addNotification(`❌ Destination S3 connection failed: ${result.error}`, 'error');
        setDestS3ConnectionStatus({ success: false, error: result.error, diagnostic: result.diagnostic });
      }
    } catch (error) {
      addNotification(`❌ Error testing destination S3 connection: ${error.message}`, 'error');
      setDestS3ConnectionStatus({ success: false, error: error.message });
    }
    setLoading(false);
  };

  // Fetches the bucket list for the source account and opens the picker.
  const listSourceBuckets = async () => {
    setLoading(true);
    try {
      const result = await s3Api.listSourceBuckets(sourceS3Config.accessKeyId, sourceS3Config.secretAccessKey, sourceS3Config.region, sourceS3Config.endpointUrl, sourceS3Config.sessionToken);
      if (result.success) {
        setSourceBuckets(result.buckets || []);
        setShowSourceBuckets(true);
        addNotification(`✅ Found ${result.count} source bucket(s)`, 'success');
      } else {
        addNotification(`❌ Failed to list source buckets: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error listing source buckets: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Fetches the bucket list for the destination account and opens the picker.
  const listDestinationBuckets = async () => {
    setLoading(true);
    try {
      const result = await s3Api.listDestinationBuckets(destS3Config.accessKeyId, destS3Config.secretAccessKey, destS3Config.region, destS3Config.endpointUrl, destS3Config.sessionToken);
      if (result.success) {
        setDestBuckets(result.buckets || []);
        setShowDestBuckets(true);
        addNotification(`✅ Found ${result.count} destination bucket(s)`, 'success');
      } else {
        addNotification(`❌ Failed to list destination buckets: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error listing destination buckets: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Picks a source bucket and mirrors it into sourceS3Config.bucket.
  const selectSourceBucket = (bucketName) => {
    setSelectedSourceBucket(bucketName);
    setSourceS3Config(prev => ({ ...prev, bucket: bucketName }));
    setShowSourceBuckets(false);
    addNotification(`✅ Selected source bucket: ${bucketName}`, 'success');
  };

  // Picks a destination bucket and mirrors it into destS3Config.bucket.
  const selectDestBucket = (bucketName) => {
    setSelectedDestBucket(bucketName);
    setDestS3Config(prev => ({ ...prev, bucket: bucketName }));
    setShowDestBuckets(false);
    addNotification(`✅ Selected destination bucket: ${bucketName}`, 'success');
  };

  // Lists objects under the selected prefix of the selected source bucket,
  // tracks the total size, and groups keys by prefix for the UI.
  const listSourceObjects = async () => {
    if (!selectedSourceBucket) { addNotification('Please select source bucket first', 'warning'); return; }
    setLoading(true);
    try {
      const result = await s3Api.listObjects(selectedSourceBucket,
selectedPrefix, true, { accessKeyId: sourceS3Config.accessKeyId, secretAccessKey: sourceS3Config.secretAccessKey, region: sourceS3Config.region, endpointUrl: sourceS3Config.endpointUrl, sessionToken: sourceS3Config.sessionToken, maxKeys: 1000 });
      if (result.success) {
        setSourceObjects(result.objects || []);
        setTotalSize(result.total_size || 0);
        const groups = groupObjectsByPrefix(result.objects || [], 2);
        setObjectGroups(groups);
        addNotification(`✅ Found ${result.count} object(s) in ${selectedSourceBucket}`, 'success');
      } else {
        addNotification(`❌ Failed to list objects: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error listing objects: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Creates the destination bucket (no-op notification when it already exists).
  const createDestinationBucket = async () => {
    if (!selectedDestBucket) { addNotification('Please enter destination bucket name', 'warning'); return; }
    setLoading(true);
    try {
      const result = await s3Api.createBucket(selectedDestBucket, destS3Config.region, { accessKeyId: destS3Config.accessKeyId, secretAccessKey: destS3Config.secretAccessKey, endpointUrl: destS3Config.endpointUrl, sessionToken: destS3Config.sessionToken });
      if (result.success) {
        if (result.created) {
          addNotification(`✅ Bucket created successfully: ${selectedDestBucket}`, 'success');
        } else {
          addNotification(`ℹ️ Bucket already exists: ${selectedDestBucket}`, 'info');
        }
      } else {
        addNotification(`❌ Failed to create bucket: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error creating bucket: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Copies a single object from the source to the destination bucket,
  // prepending the selected prefix to the destination key.
  const migrateSingleObject = async (sourceKey) => {
    if (!selectedSourceBucket || !selectedDestBucket) { addNotification('Please select source and destination buckets', 'warning'); return; }
    setLoading(true);
    try {
      const result = await s3Api.migrateObject(selectedSourceBucket, sourceKey, selectedDestBucket, `${selectedPrefix}${sourceKey}`, { sourceAccessKeyId: sourceS3Config.accessKeyId, sourceSecretAccessKey: sourceS3Config.secretAccessKey, sourceRegion: sourceS3Config.region, sourceEndpointUrl: sourceS3Config.endpointUrl, sourceSessionToken: sourceS3Config.sessionToken, destAccessKeyId: destS3Config.accessKeyId, destSecretAccessKey: destS3Config.secretAccessKey, destRegion: destS3Config.region, destEndpointUrl: destS3Config.endpointUrl, destSessionToken: destS3Config.sessionToken, preserveMetadata: migrationOptions.preserveMetadata, storageClass: migrationOptions.storageClass });
      if (result.success) {
        addNotification(`✅ Migrated: ${sourceKey}`, 'success');
        listSourceObjects();
      } else {
        addNotification(`❌ Migration failed: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error migrating object: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Batch-migrates all currently checked objects, then clears the selection
  // and refreshes the source object list.
  const migrateSelectedObjects = async () => {
    if (selectedObjects.length === 0) { addNotification('Please select objects to migrate', 'warning'); return; }
    setLoading(true);
    try {
      const result = await s3Api.migrateBatch(selectedObjects.map(key => ({ key })), selectedSourceBucket, selectedDestBucket, { sourceAccessKeyId: sourceS3Config.accessKeyId, sourceSecretAccessKey: sourceS3Config.secretAccessKey, sourceRegion: sourceS3Config.region, sourceEndpointUrl: sourceS3Config.endpointUrl, sourceSessionToken: sourceS3Config.sessionToken, destAccessKeyId: destS3Config.accessKeyId, destSecretAccessKey: destS3Config.secretAccessKey, destRegion: destS3Config.region, destEndpointUrl: destS3Config.endpointUrl, destSessionToken: destS3Config.sessionToken, preserveMetadata: migrationOptions.preserveMetadata, storageClass: migrationOptions.storageClass, maxConcurrent: migrationOptions.maxConcurrent });
      if (result.success) {
        addNotification(`✅ Migrated ${result.results?.successful?.length || 0} objects`, 'success');
        setSelectedObjects([]);
        listSourceObjects();
      } else {
        addNotification(`❌ Batch migration failed: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error in batch migration: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Parses an s3:// URI and copies bucket/key into the selection state.
  const parseS3Uri = async (uri) => {
    try {
      const result = await s3Api.parseUri(uri);
      if (result.success) {
        setParsedS3Uri(result);
        if (result.parsed) {
          setSelectedSourceBucket(result.parsed.bucket);
          setSelectedPrefix(result.parsed.key || '');
        }
        addNotification('✅ S3 URI parsed successfully', 'success');
      } else {
        setParsedS3Uri(null);
        addNotification(`❌ Failed to parse S3 URI: ${result.error}`, 'error');
      }
    } catch (error) {
      console.error('Error parsing S3 URI:', error);
      addNotification(`❌ Error parsing S3 URI: ${error.message}`, 'error');
    }
  };

  // ============================================================================
  // PostgreSQL to S3 Functions
  // ============================================================================
  // Connectivity check for the PostgreSQL side of the pg->S3 flow.
  const testPgToS3PostgresConnection = async () => {
    if (!pgToS3Config.postgresUri) { addNotification('Please enter PostgreSQL URI', 'warning'); return; }
    setLoading(true);
    try {
      const result = await postgresToS3Api.testPostgresConnection(pgToS3Config.postgresUri);
      if (result.success) {
        addNotification(`✅ PostgreSQL connection successful!`, 'success');
      } else {
        addNotification(`❌ PostgreSQL connection failed: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error testing PostgreSQL connection: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Connectivity check for the S3 side of the pg->S3 flow.
  // NOTE(review): this reuses the SOURCE S3 credentials — confirm that is intended.
  const testPgToS3S3Connection = async () => {
    setLoading(true);
    try {
      const result = await postgresToS3Api.testS3Connection(sourceS3Config.accessKeyId, sourceS3Config.secretAccessKey, sourceS3Config.region, sourceS3Config.endpointUrl);
      if (result.success) {
        addNotification(`✅ S3 connection successful!`, 'success');
      } else {
        addNotification(`❌ S3 connection failed: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error testing S3 connection: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // ============================================================================
  // Progress Tracking Functions
  //
// ============================================================================
  // Opens the progress modal and subscribes to the live progress stream for
  // the given migration. Logs milestone entries at 25/50/75% and records a
  // history entry on completion or failure. Returns the stream handle.
  const startProgressTracking = (migrationId, type = 's3') => {
    setShowProgressModal(true);
    setProgressData({ migrationId, type, status: 'connecting', percentage: 0, startTime: new Date().toISOString(), logs: [{ timestamp: new Date().toISOString(), message: `🔄 Connecting to migration stream for ${migrationId}...`, level: 'info' }] });
    const stream = createProgressStream(migrationId, type, {
      onProgress: (progress) => {
        setProgressData(prev => {
          const percentage = progress.percentage || progress.percentages?.size || 0;
          const newLogs = [...(prev?.logs || [])];
          // Milestone log entries, emitted once when a threshold is crossed.
          if (prev?.percentage < 25 && percentage >= 25) {
            newLogs.push({ timestamp: new Date().toISOString(), message: `✅ 25% complete - ${progress.processed_size_formatted || progress.processed?.size_formatted} transferred`, level: 'success' });
          } else if (prev?.percentage < 50 && percentage >= 50) {
            newLogs.push({ timestamp: new Date().toISOString(), message: `✅ 50% complete - halfway there!`, level: 'success' });
          } else if (prev?.percentage < 75 && percentage >= 75) {
            newLogs.push({ timestamp: new Date().toISOString(), message: `✅ 75% complete - almost done!`, level: 'success' });
          }
          return { ...prev, ...progress, percentage, status: 'running', logs: newLogs };
        });
        updateProgressInUI(migrationId, progress);
      },
      onComplete: (completion) => {
        setProgressData(prev => ({ ...prev, status: 'completed', percentage: 100, completedAt: new Date().toISOString(), logs: [...(prev?.logs || []), { timestamp: new Date().toISOString(), message: '✅ Migration completed successfully!', level: 'success' }] }));
        addNotification(`✅ Migration ${migrationId} completed!`, 'success');
        // NOTE(review): progressData here is captured from the render that
        // created this handler and may be stale (likely null) — totalTime can
        // therefore be 0; confirm and consider reading it via a ref.
        setProgressHistory(prev => [...prev, { migrationId, type, completedAt: new Date().toISOString(), status: 'completed', totalTime: progressData?.elapsed_time || 0 }]);
        loadMigrations(type);
        // Auto-close the modal a few seconds after completion, but only if
        // this stream is still the active one.
        setTimeout(() => { if (progressStream === stream) { setShowProgressModal(false); } }, 3000);
      },
      onError: (error) => {
        setProgressData(prev => ({ ...prev, status: 'error', error: error.error, logs: [...(prev?.logs || []), { timestamp: new Date().toISOString(), message: `❌ Error: ${error.error}`, level: 'error' }] }));
        addNotification(`❌ Migration error: ${error.error}`, 'error');
        setProgressHistory(prev => [...prev, { migrationId, type, completedAt: new Date().toISOString(), status: 'failed', error: error.error }]);
      },
      reconnectInterval: 3000,
      maxReconnectAttempts: 5
    });
    setProgressStream(stream);
    return stream;
  };

  // Tears down the active progress stream and closes the modal.
  const stopProgressTracking = () => {
    if (progressStream) { progressStream.stop(); setProgressStream(null); }
    setShowProgressModal(false);
    setProgressData(null);
  };

  // Imperatively updates the progress bar inside the matching migration card.
  // NOTE(review): direct DOM manipulation inside a React component — the
  // card markup is assumed to exist with these class names; verify.
  const updateProgressInUI = (migrationId, progress) => {
    const percentage = progress.percentage || progress.percentages?.size || 0;
    setTimeout(() => {
      const migrationCards = document.querySelectorAll('.migration-card');
      migrationCards.forEach(card => {
        const idElement = card.querySelector('.migration-id');
        if (idElement && idElement.textContent === migrationId) {
          let progressBar = card.querySelector('.progress-bar-fill');
          let progressText = card.querySelector('.progress-text');
          if (!progressBar) {
            const progressContainer = document.createElement('div');
            progressContainer.className = 'progress-bar-container';
            // NOTE(review): the original HTML markup of this template was lost
            // in extraction; only the text content below survives.
            progressContainer.innerHTML = `
${percentage.toFixed(1)}% `;
            card.querySelector('.migration-body')?.appendChild(progressContainer);
          } else {
            progressBar.style.width = `${percentage}%`;
            if (progressText) { progressText.textContent = `${percentage.toFixed(1)}%`; }
          }
        }
      });
    }, 100);
  };

  // ============================================================================
  // Migration Start Functions (using streaming)
  // ============================================================================
  // Starts a PostgreSQL -> PostgreSQL migration and begins progress tracking.
  const startPostgresMigration = async () => {
    let sourceUri = postgresConfig.uri || buildPostgresConnectionString(postgresConfig.host, postgresConfig.database, postgresConfig.user, postgresConfig.port);
    let destUri = destPostgresConfig.uri || buildPostgresConnectionString(destPostgresConfig.host, destPostgresConfig.database, destPostgresConfig.user, destPostgresConfig.port);
    if (!sourceUri || !destUri) { addNotification('Please enter source and destination URIs', 'warning'); return; }
    setLoading(true);
    try {
      // null means "all schemas/tables" to the backend.
      const result = await postgresApi.startMigration(sourceUri, destUri, selectedSchemas.length > 0 ? selectedSchemas : null, selectedTables.length > 0 ? selectedTables : null);
      if (result.success) {
        const migrationId = result.migration_id;
        setActiveMigration(migrationId);
        addNotification(`✅ PostgreSQL migration ${migrationId} started!`, 'success');
        startProgressTracking(migrationId, 'postgres');
      } else {
        addNotification(`❌ Failed to start migration: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error starting migration: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Starts an S3 -> S3 migration with the configured options and begins
  // progress tracking. Include/exclude patterns are comma-separated strings.
  const startS3Migration = async () => {
    if (!selectedSourceBucket) { addNotification('Please select source bucket', 'warning'); return; }
    if (!selectedDestBucket) { addNotification('Please select destination bucket', 'warning'); return; }
    setLoading(true);
    try {
      const includePatterns = migrationOptions.includePatterns ? migrationOptions.includePatterns.split(',').map(p => p.trim()) : null;
      const excludePatterns = migrationOptions.excludePatterns ? migrationOptions.excludePatterns.split(',').map(p => p.trim()) : null;
      const result = await s3Api.startMigration(selectedSourceBucket, selectedDestBucket, selectedPrefix, { sourceAccessKeyId: sourceS3Config.accessKeyId, sourceSecretAccessKey: sourceS3Config.secretAccessKey, sourceRegion: sourceS3Config.region, sourceEndpointUrl: sourceS3Config.endpointUrl, sourceSessionToken: sourceS3Config.sessionToken, destAccessKeyId: destS3Config.accessKeyId, destSecretAccessKey: destS3Config.secretAccessKey, destRegion: destS3Config.region, destEndpointUrl: destS3Config.endpointUrl, destSessionToken: destS3Config.sessionToken, includePatterns, excludePatterns, preserveMetadata: migrationOptions.preserveMetadata, storageClass: migrationOptions.storageClass, createDestBucket: migrationOptions.createDestBucket, maxConcurrent: migrationOptions.maxConcurrent });
      if (result.success) {
        const migrationId = result.migration_id;
        setActiveMigration(migrationId);
        addNotification(`✅ S3 to S3 migration ${migrationId} started!`, 'success');
        startProgressTracking(migrationId, 's3');
      } else {
        addNotification(`❌ Failed to start migration: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error starting migration: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Starts a PostgreSQL -> S3 export (uses the SOURCE S3 credentials) and
  // begins progress tracking.
  const startPgToS3Migration = async () => {
    if (!pgToS3Config.postgresUri) { addNotification('Please enter PostgreSQL URI', 'warning'); return; }
    if (!pgToS3Config.s3Bucket) { addNotification('Please enter S3 bucket name', 'warning'); return; }
    setLoading(true);
    try {
      const result = await postgresToS3Api.startMigration(pgToS3Config.postgresUri, pgToS3Config.s3Bucket, pgToS3Config.s3Prefix, { schemas: selectedSchemas.length > 0 ? selectedSchemas : null, tables: selectedTables.length > 0 ? selectedTables : null, compress: pgToS3Config.compress, format: pgToS3Config.format, accessKeyId: sourceS3Config.accessKeyId, secretAccessKey: sourceS3Config.secretAccessKey, region: sourceS3Config.region, endpointUrl: sourceS3Config.endpointUrl });
      if (result.success) {
        const migrationId = result.migration_id;
        setActiveMigration(migrationId);
        addNotification(`✅ PostgreSQL to S3 migration ${migrationId} started!`, 'success');
        startProgressTracking(migrationId, 'pg-to-s3');
      } else {
        addNotification(`❌ Failed to start migration: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error starting migration: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // ============================================================================
  // Common Functions
  // ============================================================================
  // Refreshes the migration list for the given flow type.
  const loadMigrations = async (type = 'postgres') => {
    try {
      let result;
      if (type === 'postgres') result = await postgresApi.listMigrations();
      else if (type === 's3') result = await s3Api.listMigrations();
      else if (type === 'pg-to-s3') result = await postgresToS3Api.listMigrations();
      if (result?.success) { setMigrations(result.migrations || []); }
    } catch (error) {
      console.error('Error loading migrations:', error);
    }
  };

  // Cancels a running migration.
  // NOTE(review): only the 's3' type is handled; other types fall through to
  // the error notification — confirm whether that is intentional.
  const cancelMigration = async (migrationId, type = 's3') => {
    try {
      let result;
      if (type === 's3') result = await s3Api.cancelMigration(migrationId);
      if (result?.success) {
        addNotification(`✅ Migration ${migrationId} cancelled`, 'success');
        loadMigrations(type);
      } else {
        addNotification(`❌ Failed to cancel migration: ${result?.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error cancelling migration: ${error.message}`, 'error');
    }
  };

  // Pushes all current form values to the backend as environment variables.
  const injectEnvironment = async () => {
    setLoading(true);
    try {
      const envVars = { PG_HOST: postgresConfig.host, PG_PORT: postgresConfig.port.toString(), PG_USER: postgresConfig.user, PG_PASSWORD: postgresConfig.password, PG_DATABASE: postgresConfig.database, DEST_PG_HOST: destPostgresConfig.host, DEST_PG_PORT: destPostgresConfig.port.toString(), DEST_PG_USER: destPostgresConfig.user, DEST_PG_PASSWORD: destPostgresConfig.password, DEST_PG_DATABASE: destPostgresConfig.database, SOURCE_AWS_ACCESS_KEY_ID: sourceS3Config.accessKeyId, SOURCE_AWS_SECRET_ACCESS_KEY: sourceS3Config.secretAccessKey, SOURCE_AWS_REGION: sourceS3Config.region, SOURCE_AWS_ENDPOINT_URL: sourceS3Config.endpointUrl, SOURCE_S3_BUCKET: selectedSourceBucket, SOURCE_S3_PREFIX: selectedPrefix, DEST_AWS_ACCESS_KEY_ID: destS3Config.accessKeyId, DEST_AWS_SECRET_ACCESS_KEY: destS3Config.secretAccessKey, DEST_AWS_REGION: destS3Config.region, DEST_AWS_ENDPOINT_URL: destS3Config.endpointUrl, DEST_S3_BUCKET: selectedDestBucket, DEST_S3_PREFIX: selectedPrefix };
      const result = await injectEnv(envVars);
      if (result.success) {
        addNotification(`✅ Injected ${result.injected_variables?.length || 0} environment variables`, 'success');
        loadCurrentEnv();
      } else {
        addNotification(`❌ Failed to inject environment: ${result.error}`, 'error');
      }
    } catch (error) {
      addNotification(`❌ Error injecting environment: ${error.message}`, 'error');
    }
    setLoading(false);
  };

  // Clears injected environment variables by injecting an empty set.
  const clearEnvironment = async () => {
    try {
      const result = await injectEnv({});
      if (result.success) {
        addNotification('✅ Environment cleared', 'success');
        loadCurrentEnv();
      }
    } catch (error) {
      addNotification(`❌ Error clearing environment: ${error.message}`, 'error');
    }
  };

  // Copies the current env vars to the clipboard in the selected format.
  const copyEnvToClipboard = async () => {
    if (!currentEnv || Object.keys(currentEnv).length === 0) { addNotification('No environment variables to copy', 'warning'); return; }
    const formatted = formatEnvVars(currentEnv, envFormat);
    const success = await copyToClipboard(formatted);
    if (success) {
      addNotification(`✅ Copied ${Object.keys(currentEnv).length} variables to clipboard as ${envFormat} format`, 'success');
    } else {
      addNotification(`❌ Failed to copy to clipboard`, 'error');
    }
  };

  // Toggles an object's membership in the checked-objects selection.
  const toggleObjectSelection = (key) => { setSelectedObjects(prev =>
prev.includes(key) ? prev.filter(k => k !== key) : [...prev, key] ); }; const toggleTableSelection = (tableName) => { setSelectedTables(prev => prev.includes(tableName) ? prev.filter(t => t !== tableName) : [...prev, tableName] ); }; const toggleSchemaSelection = (schemaName) => { setSelectedSchemas(prev => prev.includes(schemaName) ? prev.filter(s => s !== schemaName) : [...prev, schemaName] ); }; const selectAllObjects = () => { setSelectedObjects(sourceObjects.map(obj => obj.key)); }; const selectAllTables = () => { setSelectedTables(postgresTables.map(table => table.name)); }; const clearSelectedObjects = () => { setSelectedObjects([]); }; const clearSelectedTables = () => { setSelectedTables([]); }; // ============================================================================ // Render Functions // ============================================================================ const renderProgressModal = () => { if (!showProgressModal || !progressData) return null; const progress = progressData; const percentage = progress.percentage || 0; const bar = createProgressBar(percentage); return ({bar}
PostgreSQL, S3 to S3, and PostgreSQL to S3 migrations
Success: {postgresConnectionStatus.success ? '✅ Yes' : '❌ No'}
{postgresConnectionStatus.host &&Host: {postgresConnectionStatus.host}:{postgresConnectionStatus.port}
} {postgresConnectionStatus.version &&Version: {postgresConnectionStatus.version}
} {postgresConnectionStatus.database &&Database: {postgresConnectionStatus.database}
} {postgresConnectionStatus.error &&Error: {postgresConnectionStatus.error}
} {postgresConnectionStatus.diagnostic && (Diagnostic: {postgresConnectionStatus.diagnostic.message}
Reason: {postgresConnectionStatus.diagnostic.reason}
Success: {destPostgresConnectionStatus.success ? '✅ Yes' : '❌ No'}
{destPostgresConnectionStatus.host &&Host: {destPostgresConnectionStatus.host}:{destPostgresConnectionStatus.port}
} {destPostgresConnectionStatus.version &&Version: {destPostgresConnectionStatus.version}
} {destPostgresConnectionStatus.database &&Database: {destPostgresConnectionStatus.database}
} {destPostgresConnectionStatus.error &&Error: {destPostgresConnectionStatus.error}
} {destPostgresConnectionStatus.diagnostic && (Diagnostic: {destPostgresConnectionStatus.diagnostic.message}
Reason: {destPostgresConnectionStatus.diagnostic.reason}
| { const groupTables = group.tables.map(t => t.name); if (e.target.checked) { setSelectedTables([...new Set([...selectedTables, ...groupTables])]); } else { setSelectedTables(selectedTables.filter(name => !groupTables.includes(name))); } }} checked={group.tables.every(t => selectedTables.includes(t.name))} /> | Table Name | Type |
|---|---|---|
| toggleTableSelection(table.name)} /> | {table.name} | {table.type} |
Success: {sourceS3ConnectionStatus.success ? '✅ Yes' : '❌ No'}
{sourceS3ConnectionStatus.endpoint &&Endpoint: {sourceS3ConnectionStatus.endpoint}
} {sourceS3ConnectionStatus.region &&Region: {sourceS3ConnectionStatus.region}
} {sourceS3ConnectionStatus.bucketCount !== undefined &&Buckets Found: {sourceS3ConnectionStatus.bucketCount}
} {sourceS3ConnectionStatus.error &&Error: {sourceS3ConnectionStatus.error}
} {sourceS3ConnectionStatus.diagnostic && (Diagnostic: {sourceS3ConnectionStatus.diagnostic.message}
Reason: {sourceS3ConnectionStatus.diagnostic.reason}
Success: {destS3ConnectionStatus.success ? '✅ Yes' : '❌ No'}
{destS3ConnectionStatus.endpoint &&Endpoint: {destS3ConnectionStatus.endpoint}
} {destS3ConnectionStatus.region &&Region: {destS3ConnectionStatus.region}
} {destS3ConnectionStatus.bucketCount !== undefined &&Buckets Found: {destS3ConnectionStatus.bucketCount}
} {destS3ConnectionStatus.error &&Error: {destS3ConnectionStatus.error}
} {destS3ConnectionStatus.diagnostic && (Diagnostic: {destS3ConnectionStatus.diagnostic.message}
Reason: {destS3ConnectionStatus.diagnostic.reason}
| { const groupKeys = group.objects.map(obj => obj.key); if (e.target.checked) { setSelectedObjects([...new Set([...selectedObjects, ...groupKeys])]); } else { setSelectedObjects(selectedObjects.filter(key => !groupKeys.includes(key))); } }} checked={group.objects.every(obj => selectedObjects.includes(obj.key))} /> | Key | Size | Last Modified | ETag | Actions |
|---|---|---|---|---|---|
| toggleObjectSelection(obj.key)} /> | {obj.key} | {formatFileSize(obj.size)} | {new Date(obj.last_modified).toLocaleString()} | {obj.etag?.substring(0, 8)}... |
📭 No objects found in s3://{selectedSourceBucket}/{selectedPrefix}
Status: {migration.status}
Started: {migration.started_at ? new Date(migration.started_at * 1000).toLocaleString() : 'N/A'}
{formatPostgresConfig(postgresConfig)}
{formatPostgresConfig(destPostgresConfig)}
{formatS3Config(sourceS3Config, 'source')}
{formatS3Config(destS3Config, 'destination')}
{formatEnvVars(currentEnv, envFormat)}
) : (
No environment variables found. Test connections or inject environment first.
)}