شرح مختصر للتعديلات التي أجريتها
هذا الالتزام موجود في:
170
pages/api/backup-stream.ts
Normal file
170
pages/api/backup-stream.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||
import { spawn } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { Upload } from "@aws-sdk/lib-storage";
|
||||
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
||||
import { PassThrough } from 'stream';
|
||||
|
||||
// --- Helper Types and Functions for History Logging ---

// Lifecycle states a backup job can move through.
type BackupStatus = 'COMPLETED' | 'FAILED' | 'PROCESSING' | 'QUEUED' | 'CANCELLED';

// One entry in the backup-history JSON store.
type BackupRecord = {
  id: string;           // unique job id (generated with randomUUID)
  dbName: string;       // database the job backs up
  status: BackupStatus;
  createdAt: string;    // ISO-8601 timestamp of job creation (used for sorting elsewhere)
  fileName?: string;    // dump file name, set when the job starts
  error?: string;       // failure/cancellation reason, if any
  downloadUrl?: string; // pre-signed S3 URL, set on successful completion
};

// Path of the JSON "database" holding backup history (created at the project root).
const DB_PATH = path.resolve(process.cwd(), 'backup-history.json');
|
||||
|
||||
const readRecords = async (): Promise<BackupRecord[]> => {
|
||||
try {
|
||||
if (!fs.existsSync(DB_PATH)) {
|
||||
await fs.promises.writeFile(DB_PATH, JSON.stringify([]), 'utf-8');
|
||||
return [];
|
||||
}
|
||||
const fileContent = await fs.promises.readFile(DB_PATH, 'utf-8');
|
||||
return fileContent ? JSON.parse(fileContent) : [];
|
||||
} catch (error) {
|
||||
console.error("Error reading backup history:", error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
const writeRecords = async (records: BackupRecord[]): Promise<void> => {
|
||||
try {
|
||||
await fs.promises.writeFile(DB_PATH, JSON.stringify(records, null, 2), 'utf-8');
|
||||
} catch (error) {
|
||||
console.error("Error writing backup history:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const sendStreamMessage = (res: NextApiResponse, data: object) => {
|
||||
try {
|
||||
if (!res.writableEnded) {
|
||||
res.write(`data: ${JSON.stringify(data)}\n\n`);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Failed to write to stream:", e);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
res.setHeader('Content-Type', 'text/event-stream');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
res.setHeader('Connection', 'keep-alive');
|
||||
res.flushHeaders();
|
||||
|
||||
const {
|
||||
dbHost, dbPort, dbUser, dbPassword, dbName, dbRequireSsl,
|
||||
s3Endpoint, s3BucketName, s3AccessKey, s3SecretKey, s3Region
|
||||
} = req.query;
|
||||
|
||||
const recordId = randomUUID();
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const backupFileName = `${dbName as string}_${timestamp}.dump`;
|
||||
|
||||
const backupsDir = path.resolve(process.cwd(), 'backups');
|
||||
const backupFilePath = path.join(backupsDir, backupFileName);
|
||||
if (!fs.existsSync(backupsDir)) fs.mkdirSync(backupsDir);
|
||||
|
||||
const newRecord: BackupRecord = { id: recordId, dbName: dbName as string, status: 'PROCESSING', createdAt: new Date().toISOString(), fileName: backupFileName };
|
||||
const records = await readRecords();
|
||||
records.push(newRecord);
|
||||
await writeRecords(records);
|
||||
|
||||
const pgDumpCommand = 'pg_dump';
|
||||
|
||||
const args: string[] = [
|
||||
'--format=c',
|
||||
'--blobs',
|
||||
'--verbose',
|
||||
`--host=${dbHost}`,
|
||||
`--port=${dbPort}`,
|
||||
`--username=${dbUser}`,
|
||||
`--dbname=${dbName}`,
|
||||
];
|
||||
|
||||
const env: NodeJS.ProcessEnv = {
|
||||
...process.env,
|
||||
PGPASSWORD: dbPassword as string,
|
||||
PGSSLMODE: dbRequireSsl === 'true' ? 'require' : 'prefer',
|
||||
};
|
||||
|
||||
const backupProcess = spawn(pgDumpCommand, args, { env });
|
||||
|
||||
req.on('close', async () => {
|
||||
console.log("Client disconnected. Terminating backup process...");
|
||||
backupProcess.kill(); // Stop the pg_dump process
|
||||
const finalRecords = await readRecords();
|
||||
const recordIndex = finalRecords.findIndex(r => r.id === recordId);
|
||||
if (recordIndex > -1 && finalRecords[recordIndex].status === 'PROCESSING') {
|
||||
finalRecords[recordIndex].status = 'CANCELLED';
|
||||
finalRecords[recordIndex].error = 'Process cancelled by user.';
|
||||
await writeRecords(finalRecords);
|
||||
}
|
||||
res.end();
|
||||
});
|
||||
|
||||
const s3Client = new S3Client({
|
||||
endpoint: s3Endpoint as string,
|
||||
region: s3Region as string,
|
||||
credentials: { accessKeyId: s3AccessKey as string, secretAccessKey: s3SecretKey as string }
|
||||
});
|
||||
|
||||
const passThrough = new PassThrough();
|
||||
passThrough.pipe(fs.createWriteStream(backupFilePath));
|
||||
|
||||
const s3Upload = new Upload({
|
||||
client: s3Client,
|
||||
params: { Bucket: s3BucketName as string, Key: backupFileName, Body: passThrough, ContentType: 'application/octet-stream' },
|
||||
});
|
||||
|
||||
if (backupProcess.stdout) backupProcess.stdout.pipe(passThrough);
|
||||
|
||||
let errorOutput = '';
|
||||
if (backupProcess.stderr) {
|
||||
backupProcess.stderr.on('data', (data: Buffer | string) => {
|
||||
errorOutput += data.toString();
|
||||
sendStreamMessage(res, { message: data.toString().trim() });
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await s3Upload.done();
|
||||
sendStreamMessage(res, { message: "✅ S3 upload completed successfully." });
|
||||
|
||||
const command = new GetObjectCommand({ Bucket: s3BucketName as string, Key: backupFileName });
|
||||
const signedUrl = await getSignedUrl(s3Client, command, { expiresIn: 3600 });
|
||||
|
||||
const finalRecords = await readRecords();
|
||||
const recordIndex = finalRecords.findIndex(r => r.id === recordId);
|
||||
if (recordIndex > -1) {
|
||||
finalRecords[recordIndex].status = 'COMPLETED';
|
||||
finalRecords[recordIndex].downloadUrl = signedUrl;
|
||||
await writeRecords(finalRecords);
|
||||
}
|
||||
sendStreamMessage(res, { message: "All tasks finished.", status: 'completed' });
|
||||
|
||||
} catch (err: any) {
|
||||
sendStreamMessage(res, { message: `❌ S3 Upload Failed: ${err.message}`, status: 'failed' });
|
||||
const finalRecords = await readRecords();
|
||||
const recordIndex = finalRecords.findIndex(r => r.id === recordId);
|
||||
if (recordIndex > -1) {
|
||||
finalRecords[recordIndex].status = 'FAILED';
|
||||
finalRecords[recordIndex].error = `S3 Error: ${err.message}`;
|
||||
await writeRecords(finalRecords);
|
||||
}
|
||||
} finally {
|
||||
if (!res.writableEnded) {
|
||||
sendStreamMessage(res, { status: 'closed' });
|
||||
res.end();
|
||||
}
|
||||
}
|
||||
}
|
59
pages/api/backup.ts
Normal file
59
pages/api/backup.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
// Define a type for our backup records for type safety
|
||||
type BackupRecord = {
|
||||
id: string;
|
||||
dbName: string;
|
||||
status: 'COMPLETED' | 'FAILED' | 'PROCESSING' | 'QUEUED';
|
||||
createdAt: string;
|
||||
fileName?: string;
|
||||
error?: string;
|
||||
downloadUrl?: string;
|
||||
};
|
||||
|
||||
// Define the path to our simple JSON database file
|
||||
const DB_PATH = path.resolve(process.cwd(), 'backup-history.json');
|
||||
|
||||
/**
|
||||
* Reads backup records from the JSON file.
|
||||
* @returns {Promise<BackupRecord[]>} A promise that resolves to an array of backup records.
|
||||
*/
|
||||
const readRecords = async (): Promise<BackupRecord[]> => {
|
||||
try {
|
||||
// Check if the database file exists
|
||||
if (!fs.existsSync(DB_PATH)) {
|
||||
// If not, create it with an empty array
|
||||
await fs.promises.writeFile(DB_PATH, JSON.stringify([]), 'utf-8');
|
||||
return [];
|
||||
}
|
||||
// If it exists, read and parse it
|
||||
const fileContent = await fs.promises.readFile(DB_PATH, 'utf-8');
|
||||
return JSON.parse(fileContent);
|
||||
} catch (error) {
|
||||
console.error("Error reading backup history:", error);
|
||||
// Return an empty array in case of any error to prevent crashes
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* API handler for fetching backup history.
|
||||
*/
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
// This endpoint only supports GET requests
|
||||
if (req.method !== 'GET') {
|
||||
res.setHeader('Allow', ['GET']);
|
||||
return res.status(405).json({ message: `Method ${req.method} Not Allowed` });
|
||||
}
|
||||
|
||||
try {
|
||||
const records = await readRecords();
|
||||
// Sort records by date, newest first
|
||||
const sortedRecords = records.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
|
||||
return res.status(200).json(sortedRecords);
|
||||
} catch (error) {
|
||||
return res.status(500).json({ error: 'Failed to retrieve backup history.' });
|
||||
}
|
||||
}
|
63
pages/api/backups.ts
Normal file
63
pages/api/backups.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
// Define a type for our backup records for type safety
|
||||
type BackupRecord = {
|
||||
id: string;
|
||||
dbName: string;
|
||||
status: 'COMPLETED' | 'FAILED' | 'PROCESSING' | 'QUEUED';
|
||||
createdAt: string;
|
||||
fileName?: string;
|
||||
error?: string;
|
||||
downloadUrl?: string;
|
||||
};
|
||||
|
||||
// Define the path to our simple JSON database file
|
||||
// This file will be created in the root of your project
|
||||
const DB_PATH = path.resolve(process.cwd(), 'backup-history.json');
|
||||
|
||||
/**
|
||||
* Reads backup records from the JSON file.
|
||||
* Creates the file if it doesn't exist.
|
||||
* @returns {Promise<BackupRecord[]>} A promise that resolves to an array of backup records.
|
||||
*/
|
||||
const readRecords = async (): Promise<BackupRecord[]> => {
|
||||
try {
|
||||
// Check if the database file exists
|
||||
if (!fs.existsSync(DB_PATH)) {
|
||||
// If not, create it with an empty array
|
||||
await fs.promises.writeFile(DB_PATH, JSON.stringify([]), 'utf-8');
|
||||
return [];
|
||||
}
|
||||
// If it exists, read and parse it
|
||||
const fileContent = await fs.promises.readFile(DB_PATH, 'utf-8');
|
||||
// Handle case where file is empty
|
||||
return fileContent ? JSON.parse(fileContent) : [];
|
||||
} catch (error) {
|
||||
console.error("Error reading backup history:", error);
|
||||
// Return an empty array in case of any error to prevent crashes
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* API handler for fetching backup history.
|
||||
*/
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
// This endpoint only supports GET requests
|
||||
if (req.method !== 'GET') {
|
||||
res.setHeader('Allow', ['GET']);
|
||||
return res.status(405).json({ message: `Method ${req.method} Not Allowed` });
|
||||
}
|
||||
|
||||
try {
|
||||
const records = await readRecords();
|
||||
// Sort records by date, newest first, to display the latest jobs on top
|
||||
const sortedRecords = records.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
|
||||
return res.status(200).json(sortedRecords);
|
||||
} catch (error) {
|
||||
console.error("API Error fetching history:", error);
|
||||
return res.status(500).json({ error: 'Failed to retrieve backup history.' });
|
||||
}
|
||||
}
|
13
pages/api/hello.ts
Normal file
13
pages/api/hello.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
|
||||
import type { NextApiRequest, NextApiResponse } from "next";
|
||||
|
||||
type Data = {
|
||||
name: string;
|
||||
};
|
||||
|
||||
export default function handler(
|
||||
req: NextApiRequest,
|
||||
res: NextApiResponse<Data>,
|
||||
) {
|
||||
res.status(200).json({ name: "John Doe" });
|
||||
}
|
90
pages/api/test-connection.ts
Normal file
90
pages/api/test-connection.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||
import { Client as PgClient } from 'pg';
|
||||
import mysql from 'mysql2/promise';
|
||||
import { MongoClient } from 'mongodb';
|
||||
|
||||
/**
|
||||
* Handles testing the connection to a database.
|
||||
* This version uses native Node.js drivers for reliability and security.
|
||||
*/
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
if (req.method !== 'POST') {
|
||||
return res.status(405).json({ message: 'Method Not Allowed' });
|
||||
}
|
||||
|
||||
const {
|
||||
dbType,
|
||||
dbHost,
|
||||
dbPort,
|
||||
dbUser,
|
||||
dbPassword,
|
||||
dbName,
|
||||
dbRequireSsl
|
||||
} = req.body;
|
||||
|
||||
if (!dbType || !dbHost || !dbPort || !dbUser) {
|
||||
return res.status(400).json({ error: 'Missing required database credentials for testing.' });
|
||||
}
|
||||
|
||||
switch (dbType) {
|
||||
case 'postgresql':
|
||||
const pgClient = new PgClient({
|
||||
host: dbHost,
|
||||
port: Number(dbPort),
|
||||
user: dbUser,
|
||||
password: dbPassword,
|
||||
database: dbName,
|
||||
ssl: dbRequireSsl ? { rejectUnauthorized: false } : false,
|
||||
// ✅ **التعديل: زيادة مهلة الاتصال إلى 30 ثانية**
|
||||
connectionTimeoutMillis: 30000,
|
||||
});
|
||||
|
||||
try {
|
||||
await pgClient.connect();
|
||||
await pgClient.end();
|
||||
return res.status(200).json({ message: 'PostgreSQL connection successful!' });
|
||||
} catch (error: any) {
|
||||
return res.status(500).json({ error: error.message });
|
||||
}
|
||||
|
||||
case 'mysql':
|
||||
let mysqlConnection;
|
||||
try {
|
||||
mysqlConnection = await mysql.createConnection({
|
||||
host: dbHost,
|
||||
port: Number(dbPort),
|
||||
user: dbUser,
|
||||
password: dbPassword,
|
||||
database: dbName,
|
||||
ssl: dbRequireSsl ? { rejectUnauthorized: false } : undefined,
|
||||
connectTimeout: 30000, // 30 second timeout
|
||||
});
|
||||
await mysqlConnection.end();
|
||||
return res.status(200).json({ message: 'MySQL connection successful!' });
|
||||
} catch (error: any) {
|
||||
// Ensure connection is closed even if it fails
|
||||
if (mysqlConnection) await mysqlConnection.end();
|
||||
return res.status(500).json({ error: error.message });
|
||||
}
|
||||
|
||||
case 'mongodb':
|
||||
const mongoURI = `mongodb://${dbUser}:${encodeURIComponent(dbPassword || '')}@${dbHost}:${dbPort}/${dbName || ''}?authSource=admin`;
|
||||
const mongoClient = new MongoClient(mongoURI, {
|
||||
ssl: dbRequireSsl,
|
||||
serverSelectionTimeoutMS: 30000, // 30 second timeout
|
||||
});
|
||||
try {
|
||||
await mongoClient.connect();
|
||||
await mongoClient.db("admin").command({ ping: 1 });
|
||||
await mongoClient.close();
|
||||
return res.status(200).json({ message: 'MongoDB connection successful!' });
|
||||
} catch (error: any) {
|
||||
await mongoClient.close();
|
||||
return res.status(500).json({ error: error.message });
|
||||
}
|
||||
|
||||
default:
|
||||
return res.status(400).json({ error: 'Unsupported database type' });
|
||||
}
|
||||
}
|
||||
|
المرجع في مشكلة جديدة
حظر مستخدم