رفع الملفات إلى "/"

هذا الالتزام موجود في:
2026-01-16 22:18:44 +00:00
الأصل 3624f81d80
التزام 77f77805f3
4 ملفات معدلة مع 303 إضافات و0 حذوفات

152
s3_s3_migrator.py Normal file
عرض الملف

@@ -0,0 +1,152 @@
import os
import sys
import boto3
import json
from botocore.client import Config
from botocore.exceptions import ClientError
from boto3.s3.transfer import TransferConfig # ✅ الإضافة الأولى
# ===========================
# ENV VARIABLES
# ===========================
AWS_SRC_ACCESS_KEY = os.getenv("AWS_SRC_ACCESS_KEY")
AWS_SRC_SECRET_KEY = os.getenv("AWS_SRC_SECRET_KEY")
AWS_SRC_REGION = os.getenv("AWS_SRC_REGION", "us-east-1")  # region has a safe default
AWS_SRC_BUCKET = os.getenv("AWS_SRC_BUCKET")
CUMIN_DEST_ACCESS_KEY = os.getenv("CUMIN_DEST_ACCESS_KEY")
CUMIN_DEST_SECRET_KEY = os.getenv("CUMIN_DEST_SECRET_KEY")
CUMIN_DEST_ENDPOINT = os.getenv("CUMIN_DEST_ENDPOINT")
CUMIN_DEST_BUCKET = os.getenv("CUMIN_DEST_BUCKET")
# ===========================
# VALIDATION
# ===========================
# Name each missing variable so the operator knows exactly what to set,
# instead of an anonymous "Missing environment variables" message.
_required = {
    "AWS_SRC_ACCESS_KEY": AWS_SRC_ACCESS_KEY,
    "AWS_SRC_SECRET_KEY": AWS_SRC_SECRET_KEY,
    "AWS_SRC_BUCKET": AWS_SRC_BUCKET,
    "CUMIN_DEST_ACCESS_KEY": CUMIN_DEST_ACCESS_KEY,
    "CUMIN_DEST_SECRET_KEY": CUMIN_DEST_SECRET_KEY,
    "CUMIN_DEST_ENDPOINT": CUMIN_DEST_ENDPOINT,
    "CUMIN_DEST_BUCKET": CUMIN_DEST_BUCKET,
}
_missing = [name for name, value in _required.items() if not value]
if _missing:
    print(f"❌ Missing environment variables: {', '.join(_missing)}")
    sys.exit(1)
# ===========================
# CLIENTS
# ===========================
# NOTE: clients must be created BEFORE test_connections() runs below —
# the original file called test_connections() first and crashed with a
# NameError on src_s3.
src_s3 = boto3.client(
    "s3",
    aws_access_key_id=AWS_SRC_ACCESS_KEY,
    aws_secret_access_key=AWS_SRC_SECRET_KEY,
    region_name=AWS_SRC_REGION
)
dest_s3 = boto3.client(
    "s3",
    aws_access_key_id=CUMIN_DEST_ACCESS_KEY,
    aws_secret_access_key=CUMIN_DEST_SECRET_KEY,
    endpoint_url=CUMIN_DEST_ENDPOINT
)
# ===========================
# TRANSFER CONFIG
# ===========================
# Multipart settings used when uploading large objects to the destination.
transfer_config = TransferConfig(
    multipart_threshold=5 * 1024 * 1024,  # 5MB
    multipart_chunksize=5 * 1024 * 1024,
    max_concurrency=2,
    use_threads=True
)
# ===========================
# TEST CONNECTIONS AND BUCKETS
# ===========================
def test_connections():
    """Verify both S3 endpoints are reachable and the buckets exist.

    Returns:
        bool: True when the source bucket appears in list_buckets() and the
        destination bucket answers a HEAD request; False otherwise.
    """
    try:
        src_buckets = src_s3.list_buckets()
        names = [b['Name'] for b in src_buckets['Buckets']]
        print("✅ Source S3 buckets:", names)
        if AWS_SRC_BUCKET not in names:
            print(f"❌ Source bucket '{AWS_SRC_BUCKET}' not found in available buckets")
            return False
    except ClientError as e:
        print(f"❌ Source S3 error: {e}")
        return False
    try:
        # list_buckets may be unsupported on a custom endpoint; HEAD the
        # target bucket instead so the try block actually exercises the
        # connection (the original tried nothing and merely printed).
        dest_s3.head_bucket(Bucket=CUMIN_DEST_BUCKET)
        # Fixed: original string lacked the f prefix and printed the
        # literal "{CUMIN_DEST_ENDPOINT}".
        print(f"✅ Destination S3 connected (endpoint: {CUMIN_DEST_ENDPOINT})")
    except ClientError as e:
        print(f"❌ Destination S3 error: {e}")
        return False
    return True
if not test_connections():
    sys.exit(1)
# ===========================
# PROGRESS
# ===========================
def update_progress(current, total, percent, last_file, status="running"):
    """Persist the current migration state to migration_progress.json.

    The file is overwritten on every call so external watchers always see
    the latest snapshot.
    """
    snapshot = {
        "current": current,
        "total": total,
        "percent": percent,
        "last_file": last_file,
        "status": status,
    }
    with open("migration_progress.json", "w") as fh:
        fh.write(json.dumps(snapshot))
# ===========================
# MIGRATE
# ===========================
def migrate_s3_to_s3():
    """Copy every object from the source bucket to the destination bucket.

    Each object is streamed through this host (get_object -> upload_fileobj)
    because a server-side copy_object cannot cross two different S3
    endpoints/providers — the destination server has no access to the
    source bucket. upload_fileobj also accepts Config=transfer_config,
    which copy_object (a plain API call) does not. Progress is mirrored to
    migration_progress.json; exits with status 1 on any S3 error.
    """
    update_progress(0, 0, 0, "Initializing...", "running")
    try:
        # Paginate so buckets with more than 1000 objects are fully
        # listed (a single list_objects_v2 call caps at 1000 keys).
        paginator = src_s3.get_paginator("list_objects_v2")
        keys = [
            obj["Key"]
            for page in paginator.paginate(Bucket=AWS_SRC_BUCKET)
            for obj in page.get("Contents", [])
        ]
        if not keys:
            print(f" No files found in bucket '{AWS_SRC_BUCKET}'")
            update_progress(0, 0, 100, "No files to migrate", "completed")
            return
        total = len(keys)
        print(f"📁 Found {total} files in bucket '{AWS_SRC_BUCKET}'")
        for current, key in enumerate(keys):
            update_progress(current, total, int((current / total) * 100), key)
            # Stream the object body; TransferConfig enables multipart
            # upload for objects above the 5MB threshold.
            body = src_s3.get_object(Bucket=AWS_SRC_BUCKET, Key=key)["Body"]
            dest_s3.upload_fileobj(
                body,
                CUMIN_DEST_BUCKET,
                key,
                Config=transfer_config,
            )
        update_progress(total, total, 100, "Migration completed", "completed")
        print("✅ Migration completed successfully")
    except ClientError as e:
        error_msg = f"Migration failed: {e}"
        print(error_msg)
        update_progress(0, 0, 0, error_msg, "failed")
        sys.exit(1)


if __name__ == "__main__":
    migrate_s3_to_s3()