Files
universal_migrator/pg_s3_migrator.py


# -*- coding: utf-8 -*-
import os
import subprocess
import boto3
import json
from datetime import datetime

PROGRESS_FILE = "psql_progress.json"


def update_progress(status, message, percent=0):
    with open(PROGRESS_FILE, "w") as f:
        json.dump({"status": status, "message": message, "percent": percent}, f)
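
# Note: update_progress overwrites the JSON file on every call, so an external
# watcher (a CLI poller or a web UI, for instance) only ever sees the latest
# state and needs no IPC beyond the filesystem.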

def backup_pg_to_s3():
    # Read connection and destination settings from environment variables
    DB_HOST = os.getenv("DB_HOST", "localhost")
    DB_NAME = os.getenv("DB_NAME")
    DB_USER = os.getenv("DB_USER")
    DB_PASS = os.getenv("DB_PASS")
    S3_BUCKET = os.getenv("DEST_BUCKET")
    S3_ENDPOINT = os.getenv("DEST_ENDPOINT")
    S3_ACCESS = os.getenv("DEST_ACCESS")
    S3_SECRET = os.getenv("DEST_SECRET")

    filename = f"backup_{DB_NAME}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.sql.gz"
    update_progress("running", f"Starting backup of database {DB_NAME}...", 10)

    # Set up the S3 client; endpoint_url allows S3-compatible services
    s3 = boto3.client(
        "s3",
        aws_access_key_id=S3_ACCESS,
        aws_secret_access_key=S3_SECRET,
        endpoint_url=S3_ENDPOINT,
    )

    # pg_dump command with compression; pg_dump reads its password from
    # PGPASSWORD, so guard against an unset DB_PASS (os.environ rejects None)
    if DB_PASS:
        os.environ["PGPASSWORD"] = DB_PASS
    dump_cmd = f"pg_dump -h {DB_HOST} -U {DB_USER} {DB_NAME} | gzip"
    try:
        # pipefail makes the pipeline fail when pg_dump fails, not just gzip
        process = subprocess.Popen(
            f"set -o pipefail; {dump_cmd}",
            shell=True, executable="/bin/bash",
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        )
        update_progress("running", "Uploading the file directly to S3...", 50)
        # upload_fileobj reads the pipe as it fills: no local temp file needed
        s3.upload_fileobj(process.stdout, S3_BUCKET, filename)
        process.stdout.close()
        stderr_output = process.stderr.read()  # drain before wait() to avoid a deadlock
        process.wait()
        if process.returncode == 0:
            update_progress("completed", f"Backup {filename} uploaded to S3 successfully ✅", 100)
        else:
            update_progress("error", f"pg_dump error: {stderr_output.decode()}", 0)
    except Exception as e:
        update_progress("error", str(e), 0)

if __name__ == "__main__":
    backup_pg_to_s3()
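
For reference, here is a minimal sketch of driving the script from another Python process and following its progress file. The environment values below are placeholders (hypothetical credentials and endpoint), and it assumes the script is launched from the repository root so psql_progress.json lands in the watcher's working directory.

# watcher.py - hypothetical companion script, not part of the migrator itself
import json
import os
import subprocess
import sys
import time

env = {
    **os.environ,
    "DB_NAME": "appdb",                         # placeholder
    "DB_USER": "backup_user",                   # placeholder
    "DB_PASS": "secret",                        # placeholder
    "DEST_BUCKET": "my-backups",                # placeholder
    "DEST_ENDPOINT": "https://s3.example.com",  # placeholder
    "DEST_ACCESS": "ACCESSKEY",                 # placeholder
    "DEST_SECRET": "SECRETKEY",                 # placeholder
}
proc = subprocess.Popen(
    [sys.executable, "universal_migrator/pg_s3_migrator.py"], env=env
)

# Poll the progress file until the backup process exits
while proc.poll() is None:
    try:
        with open("psql_progress.json") as f:
            p = json.load(f)
        print(f"{p['percent']}% - {p['message']}")
    except (FileNotFoundError, json.JSONDecodeError):
        pass  # file not written yet, or caught mid-write
    time.sleep(2)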