رفع الملفات إلى "/"
هذا الالتزام موجود في:
45
mysql_migrator.py
Normal file
45
mysql_migrator.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os, subprocess, json
|
||||
|
||||
def update(status, msg, percent):
    """Write the current migration state to mysql_progress.json.

    The snapshot ({status, message, percent}) is a small JSON document
    intended for an external poller.
    """
    snapshot = {"status": status, "message": msg, "percent": percent}
    with open("mysql_progress.json", "w") as progress_file:
        json.dump(snapshot, progress_file)
|
||||
|
||||
def migrate():
    """Stream a mysqldump of the selected databases/tables from the source
    MySQL server directly into the destination server.

    Connection details come from environment variables (SRC_* / DEST_*);
    DATABASES / TABLES are comma-separated selections, with TABLES taking
    precedence when both are set.  Progress and errors are reported through
    update(); nothing is raised to the caller.
    """
    S_HOST = os.getenv("SRC_HOST")
    S_USER = os.getenv("SRC_USER")
    S_PASS = os.getenv("SRC_PASS")

    T_HOST = os.getenv("DEST_HOST")
    T_USER = os.getenv("DEST_USER")
    T_PASS = os.getenv("DEST_PASS")

    dbs = os.getenv("DATABASES", "")
    tables = os.getenv("TABLES", "")

    update("running", "بدء التهيئة...", 20)

    # Build argument lists instead of a shell string: env-supplied values can
    # no longer be interpreted as shell syntax, and passwords travel through
    # MYSQL_PWD (read by both client tools) instead of the command line,
    # where `-p<pass>` was visible in `ps` output.
    dump_args = ["mysqldump", "-h", S_HOST, "-u", S_USER]
    # NOTE(review): the original appended bare comma-separated names here;
    # mysqldump treats `name1 name2 ...` as one database followed by its
    # tables.  The selection format is preserved as-is — TODO confirm the
    # expected contents of DATABASES/TABLES against the callers.
    selection = tables if tables else dbs
    dump_args += [item.strip() for item in selection.split(",") if item.strip()]

    load_args = ["mysql", "-h", T_HOST, "-u", T_USER]

    update("running", "جاري ضخ البيانات...", 60)

    try:
        src_env = dict(os.environ, MYSQL_PWD=S_PASS or "")
        dst_env = dict(os.environ, MYSQL_PWD=T_PASS or "")
        dump = subprocess.Popen(dump_args, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, env=src_env)
        load = subprocess.Popen(load_args, stdin=dump.stdout,
                                stderr=subprocess.PIPE, env=dst_env)
        dump.stdout.close()  # let mysqldump get SIGPIPE if mysql exits early
        _, load_err = load.communicate()
        _, dump_err = dump.communicate()

        # The old shell pipeline only surfaced the LAST command's exit code,
        # so a failed mysqldump was reported as success.  Check both ends.
        if dump.returncode == 0 and load.returncode == 0:
            update("completed", "تم نقل MySQL بنجاح ✅", 100)
        else:
            update("error", (dump_err + load_err).decode(), 0)
    except Exception as e:
        update("error", str(e), 0)
|
||||
|
||||
# Entry point: run the MySQL-to-MySQL migration when executed as a script.
if __name__ == "__main__":
    migrate()
|
||||
59
pg_s3_migrator.py
Normal file
59
pg_s3_migrator.py
Normal file
@@ -0,0 +1,59 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import subprocess
|
||||
import boto3
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
# Progress snapshots are written here for an external poller to read.
PROGRESS_FILE = "psql_progress.json"


def update_progress(status, message, percent=0):
    """Persist the current backup status/message/percent as a JSON snapshot."""
    snapshot = {"status": status, "message": message, "percent": percent}
    with open(PROGRESS_FILE, "w") as handle:
        json.dump(snapshot, handle)
|
||||
|
||||
def backup_pg_to_s3():
    """Dump a PostgreSQL database, gzip the stream, and upload it to S3.

    All settings come from environment variables: DB_* for the source
    database, DEST_* for the S3 target.  Progress and failures are recorded
    via update_progress(); nothing is raised to the caller.
    """
    # Fetch environment configuration.
    DB_HOST = os.getenv("DB_HOST", "localhost")
    DB_NAME = os.getenv("DB_NAME")
    DB_USER = os.getenv("DB_USER")
    DB_PASS = os.getenv("DB_PASS")

    S3_BUCKET = os.getenv("DEST_BUCKET")
    S3_ENDPOINT = os.getenv("DEST_ENDPOINT")
    S3_ACCESS = os.getenv("DEST_ACCESS")
    S3_SECRET = os.getenv("DEST_SECRET")

    filename = f"backup_{DB_NAME}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.sql.gz"

    update_progress("running", f"جاري بدء النسخ الاحتياطي لقاعدة {DB_NAME}...", 10)

    # S3 client for the destination endpoint.
    s3 = boto3.client(
        "s3",
        aws_access_key_id=S3_ACCESS,
        aws_secret_access_key=S3_SECRET,
        endpoint_url=S3_ENDPOINT,
    )

    # pg_dump reads the password from PGPASSWORD; guard against an unset
    # DB_PASS (os.environ rejects None with a TypeError).
    if DB_PASS is not None:
        os.environ['PGPASSWORD'] = DB_PASS

    try:
        # Build the dump|gzip pipeline with argument lists (no shell): env
        # values can no longer inject shell syntax.
        dump = subprocess.Popen(
            ["pg_dump", "-h", DB_HOST, "-U", DB_USER, DB_NAME],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        compress = subprocess.Popen(
            ["gzip"],
            stdin=dump.stdout,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        dump.stdout.close()  # allow pg_dump to receive SIGPIPE if gzip dies

        update_progress("running", "جاري رفع الملف إلى S3 مباشرة...", 50)

        # Stream the compressed dump straight into S3 without a temp file.
        s3.upload_fileobj(compress.stdout, S3_BUCKET, filename)

        compress.stdout.close()
        dump_err = dump.stderr.read()
        dump.wait()
        gzip_err = compress.stderr.read()
        compress.wait()

        if dump.returncode == 0 and compress.returncode == 0:
            # The original success message contained a lost placeholder
            # ("(unknown)"); report the uploaded object name instead.
            update_progress("completed", f"تم رفع النسخة الاحتياطية ({filename}) إلى S3 بنجاح ✅", 100)
        else:
            update_progress("error", f"خطأ في pg_dump: {(dump_err + gzip_err).decode()}", 0)
    except Exception as e:
        update_progress("error", str(e), 0)
|
||||
|
||||
# Entry point: run the PostgreSQL-to-S3 backup when executed as a script.
if __name__ == "__main__":
    backup_pg_to_s3()
|
||||
47
psql_psql_migrator.py
Normal file
47
psql_psql_migrator.py
Normal file
@@ -0,0 +1,47 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os, subprocess, json
|
||||
|
||||
def update_progress(status, message, percent=0):
    """Write the current migration status/message/percent to pg_progress.json."""
    snapshot = {"status": status, "message": message, "percent": percent}
    with open("pg_progress.json", "w") as handle:
        json.dump(snapshot, handle)
|
||||
|
||||
def migrate():
    """Copy selected schemas/tables from a source PostgreSQL database into a
    destination one by piping pg_dump straight into psql.

    Connection settings come from DB_* (source) and DEST_* (destination)
    environment variables.  ONLY_TABLES / ONLY_SCHEMAS are comma-separated
    filters; specific tables take precedence over whole schemas.  Progress
    and errors go to update_progress(); nothing is raised.
    """
    # Read connection settings.
    S_HOST, S_NAME, S_USER, S_PASS = (os.getenv("DB_HOST"), os.getenv("DB_NAME"),
                                      os.getenv("DB_USER"), os.getenv("DB_PASS"))
    T_HOST, T_NAME, T_USER, T_PASS = (os.getenv("DEST_HOST"), os.getenv("DEST_NAME"),
                                      os.getenv("DEST_USER"), os.getenv("DEST_PASS"))

    schemas = os.getenv("ONLY_SCHEMAS", "").strip()
    tables = os.getenv("ONLY_TABLES", "").strip()

    # Build pg_dump filters as an argument list (not a shell string) so that
    # names containing spaces or shell metacharacters cannot break or inject
    # into the command.
    filter_args = []
    # If specific tables were chosen, restrict the dump to them.
    if tables:
        for t in tables.split(','):
            filter_args += ["-t", t.strip()]
    # Otherwise, if schemas were chosen, move those schemas wholesale.
    elif schemas:
        for s in schemas.split(','):
            filter_args += ["-n", s.strip()]

    update_progress("running", "بدء ضخ البيانات المختارة...", 30)

    dump_cmd = (["pg_dump", "-h", S_HOST, "-U", S_USER] + filter_args
                + ["--clean", "--if-exists", "--no-owner", S_NAME])
    restore_cmd = ["psql", "-h", T_HOST, "-U", T_USER, "-d", T_NAME]

    try:
        # Passwords travel via PGPASSWORD in each child's environment — the
        # original inlined T_PASS into the shell string, exposing it in `ps`
        # and breaking on special characters.
        dump = subprocess.Popen(dump_cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=dict(os.environ, PGPASSWORD=S_PASS or ""))
        restore = subprocess.Popen(restore_cmd, stdin=dump.stdout,
                                   stderr=subprocess.PIPE,
                                   env=dict(os.environ, PGPASSWORD=T_PASS or ""))
        dump.stdout.close()  # let pg_dump get SIGPIPE if psql exits early
        update_progress("running", "جاري نقل الهياكل والبيانات...", 70)
        _, restore_err = restore.communicate()
        _, dump_err = dump.communicate()

        # The old shell pipeline only surfaced psql's exit status; a failed
        # pg_dump was reported as success.  Check both ends.
        if dump.returncode == 0 and restore.returncode == 0:
            update_progress("completed", "نجحت عملية النقل المخصصة! ✅", 100)
        else:
            update_progress("error", f"خطأ: {(dump_err + restore_err).decode()}", 0)
    except Exception as e:
        update_progress("error", str(e), 0)
|
||||
|
||||
# Entry point: run the PostgreSQL-to-PostgreSQL migration as a script.
if __name__ == "__main__":
    migrate()
|
||||
152
s3_s3_migrator.py
Normal file
152
s3_s3_migrator.py
Normal file
@@ -0,0 +1,152 @@
|
||||
import os
|
||||
import sys
|
||||
import boto3
|
||||
import json
|
||||
from botocore.client import Config
|
||||
from botocore.exceptions import ClientError
|
||||
from boto3.s3.transfer import TransferConfig # ✅ الإضافة الأولى
|
||||
|
||||
# ===========================
# ENV VARIABLES
# ===========================
# Source side: plain AWS credentials, addressed by region.
AWS_SRC_ACCESS_KEY = os.getenv("AWS_SRC_ACCESS_KEY")
AWS_SRC_SECRET_KEY = os.getenv("AWS_SRC_SECRET_KEY")
AWS_SRC_REGION = os.getenv("AWS_SRC_REGION", "us-east-1")
AWS_SRC_BUCKET = os.getenv("AWS_SRC_BUCKET")

# Destination side: S3-compatible service addressed by explicit endpoint.
CUMIN_DEST_ACCESS_KEY = os.getenv("CUMIN_DEST_ACCESS_KEY")
CUMIN_DEST_SECRET_KEY = os.getenv("CUMIN_DEST_SECRET_KEY")
CUMIN_DEST_ENDPOINT = os.getenv("CUMIN_DEST_ENDPOINT")
CUMIN_DEST_BUCKET = os.getenv("CUMIN_DEST_BUCKET")

# ===========================
# VALIDATION
# ===========================
# Abort at import time unless every required setting is present and
# non-empty (the region is the only one with a default).
required = (
    AWS_SRC_ACCESS_KEY,
    AWS_SRC_SECRET_KEY,
    AWS_SRC_BUCKET,
    CUMIN_DEST_ACCESS_KEY,
    CUMIN_DEST_SECRET_KEY,
    CUMIN_DEST_ENDPOINT,
    CUMIN_DEST_BUCKET,
)

if not all(required):
    print("❌ Missing environment variables")
    sys.exit(1)
|
||||
|
||||
# ===========================
# TEST CONNECTIONS AND BUCKETS
# ===========================
# The clients must exist BEFORE the connectivity check below runs.  The
# original file created them after calling test_connections(), which raised
# a NameError on src_s3 at import time.
src_s3 = boto3.client(
    "s3",
    aws_access_key_id=AWS_SRC_ACCESS_KEY,
    aws_secret_access_key=AWS_SRC_SECRET_KEY,
    region_name=AWS_SRC_REGION
)

dest_s3 = boto3.client(
    "s3",
    aws_access_key_id=CUMIN_DEST_ACCESS_KEY,
    aws_secret_access_key=CUMIN_DEST_SECRET_KEY,
    endpoint_url=CUMIN_DEST_ENDPOINT
)


def test_connections():
    """Check that the source account is reachable and contains the source
    bucket; announce the destination endpoint.

    Returns True when the checks pass, False otherwise (details are printed).
    """
    try:
        # Verify the source bucket exists among the account's buckets.
        src_buckets = src_s3.list_buckets()
        names = [b['Name'] for b in src_buckets['Buckets']]
        print("✅ Source S3 buckets:", names)
        if AWS_SRC_BUCKET not in names:
            print(f"❌ Source bucket '{AWS_SRC_BUCKET}' not found in available buckets")
            return False
    except ClientError as e:
        print(f"❌ Source S3 error: {e}")
        return False

    # list_buckets may be unsupported on the custom destination endpoint, so
    # only announce the configured endpoint.  The original print was missing
    # its f-prefix and emitted the placeholder text literally.
    print(f"✅ Destination S3 connected (endpoint: {CUMIN_DEST_ENDPOINT})")

    return True


if not test_connections():
    sys.exit(1)
|
||||
|
||||
# ===========================
# TRANSFER CONFIG (⭐ important)
# ===========================
# Multipart settings for boto3 managed transfers: anything over 5 MB is
# split into 5 MB parts, moved by at most 2 worker threads.
transfer_config = TransferConfig(
    multipart_threshold=5 * 1024 * 1024,  # 5MB
    multipart_chunksize=5 * 1024 * 1024,
    max_concurrency=2,
    use_threads=True
)
|
||||
|
||||
# ===========================
# CLIENTS
# ===========================
# One client per side: the source is plain AWS (region-addressed), the
# destination is an S3-compatible service reached via an explicit endpoint.
_src_client_kwargs = {
    "aws_access_key_id": AWS_SRC_ACCESS_KEY,
    "aws_secret_access_key": AWS_SRC_SECRET_KEY,
    "region_name": AWS_SRC_REGION,
}
src_s3 = boto3.client("s3", **_src_client_kwargs)

_dest_client_kwargs = {
    "aws_access_key_id": CUMIN_DEST_ACCESS_KEY,
    "aws_secret_access_key": CUMIN_DEST_SECRET_KEY,
    "endpoint_url": CUMIN_DEST_ENDPOINT,
}
dest_s3 = boto3.client("s3", **_dest_client_kwargs)
|
||||
|
||||
# ===========================
# PROGRESS
# ===========================
def update_progress(current, total, percent, last_file, status="running"):
    """Write a migration progress snapshot to migration_progress.json."""
    snapshot = {
        "current": current,
        "total": total,
        "percent": percent,
        "last_file": last_file,
        "status": status,
    }
    with open("migration_progress.json", "w") as handle:
        json.dump(snapshot, handle)
|
||||
|
||||
# ===========================
# MIGRATE
# ===========================
def migrate_s3_to_s3():
    """Copy every object in the source bucket to the destination bucket.

    The two sides use different credentials AND different endpoints, so a
    server-side copy cannot work: the destination service has no access to
    the source bucket, and `copy_object` does not accept a `Config=` keyword
    anyway (the original call raised a parameter-validation error).  Each
    object is therefore streamed — downloaded via the source client and
    re-uploaded through the destination client with the shared
    TransferConfig.  Progress goes to migration_progress.json; exits with
    status 1 on S3 errors.
    """
    update_progress(0, 0, 0, "Initializing...", "running")

    try:
        # Enumerate every key.  A single list_objects_v2 call is capped at
        # 1000 objects, so paginate instead of the original one-shot listing.
        paginator = src_s3.get_paginator("list_objects_v2")
        files = []
        for page in paginator.paginate(Bucket=AWS_SRC_BUCKET):
            files.extend(page.get("Contents", []))

        if not files:
            print(f"ℹ️ No files found in bucket '{AWS_SRC_BUCKET}'")
            update_progress(0, 0, 100, "No files to migrate", "completed")
            return

        total = len(files)
        print(f"📁 Found {total} files in bucket '{AWS_SRC_BUCKET}'")

        for current, obj in enumerate(files):
            key = obj['Key']
            update_progress(current, total, int((current / total) * 100), key)

            # Stream source → destination without buffering whole objects
            # in memory; upload_fileobj honors the multipart TransferConfig.
            body = src_s3.get_object(Bucket=AWS_SRC_BUCKET, Key=key)['Body']
            dest_s3.upload_fileobj(body, CUMIN_DEST_BUCKET, key,
                                   Config=transfer_config)

        update_progress(total, total, 100, "Migration completed", "completed")
        print("✅ Migration completed successfully")

    except ClientError as e:
        error_msg = f"Migration failed: {e}"
        print(f"❌ {error_msg}")
        update_progress(0, 0, 0, error_msg, "failed")
        sys.exit(1)
|
||||
|
||||
# Entry point: run the full bucket-to-bucket migration as a script.
if __name__ == "__main__":
    migrate_s3_to_s3()
|
||||
المرجع في مشكلة جديدة
حظر مستخدم