# Big-Link-Man/scripts/test_s3_real.py
"""
Real S3 integration test - actually uploads to S3 bucket
Requires AWS credentials in environment:
- AWS_ACCESS_KEY_ID
- AWS_SECRET_ACCESS_KEY
- AWS_REGION (optional, can be set per-site)
Usage:
Set environment variables and run:
python scripts/test_s3_real.py
"""
import os
import sys
import time
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from src.deployment.s3_storage import S3StorageClient, S3StorageError, S3StorageAuthError
from src.database.models import SiteDeployment
from unittest.mock import Mock
def test_real_s3_upload():
    """Run an end-to-end upload test against a real S3 bucket.

    Configuration is read from the environment:
      - AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY (required)
      - AWS_REGION (optional, defaults to us-east-1)
      - TEST_S3_BUCKET (required: name of the target test bucket)
      - TEST_S3_CUSTOM_DOMAIN (optional: also exercises custom-domain URLs)

    Returns:
        True when every exercised check passed; False when the test was
        skipped (missing credentials / bucket) or any check failed.
    """
    print("Testing Real S3 Upload\n")

    # --- Configuration from environment ---------------------------------
    access_key = os.environ.get('AWS_ACCESS_KEY_ID')
    secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    region = os.environ.get('AWS_REGION', 'us-east-1')
    if not access_key or not secret_key:
        print("[SKIP] AWS credentials not found in environment")
        print("Set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY to run this test")
        return False

    bucket_name = os.environ.get('TEST_S3_BUCKET')
    if not bucket_name:
        print("[ERROR] TEST_S3_BUCKET environment variable not set")
        print("Set TEST_S3_BUCKET to your test bucket name")
        return False

    print(f"Using bucket: {bucket_name}")
    print(f"Region: {region}\n")

    # Mock site record carrying only the S3 fields the client reads.
    site = Mock(spec=SiteDeployment)
    site.s3_bucket_name = bucket_name
    site.s3_bucket_region = region
    site.s3_custom_domain = None
    site.s3_endpoint_url = None

    client = S3StorageClient(max_retries=3)
    all_passed = True  # track failures so the SUCCESS banner is honest
    try:
        # Test 1: upload a simple HTML file.
        print("1. Uploading test file to S3...")
        timestamp = int(time.time())  # unique key per run, avoids collisions
        file_path = f"test-{timestamp}.html"
        content = f"<html><body>Test upload at {timestamp}</body></html>"
        result = client.upload_file(
            site=site,
            file_path=file_path,
            content=content
        )
        if not result.success:
            print(f"   [FAIL] Upload failed: {result.message}")
            return False
        print("   [OK] Upload successful!")
        print(f"   [OK] File: {result.file_path}")
        print(f"   [OK] URL: {result.message}")

        # Best-effort verification that the object really landed;
        # a verification failure only warns, it does not fail the test.
        import boto3  # local import: only needed on this path
        s3_client = boto3.client('s3', region_name=region)
        try:
            response = s3_client.head_object(Bucket=bucket_name, Key=file_path)
            print(f"   [OK] File verified in S3 (size: {response['ContentLength']} bytes)")
        except Exception as e:
            print(f"   [WARN] Could not verify file in S3: {e}")

        # Test 2: custom-domain URL generation (only when configured).
        custom_domain = os.environ.get('TEST_S3_CUSTOM_DOMAIN')
        if custom_domain:
            print("\n2. Testing custom domain URL generation...")
            site.s3_custom_domain = custom_domain
            file_path2 = f"test-custom-{timestamp}.html"
            result2 = client.upload_file(
                site=site,
                file_path=file_path2,
                content="<html><body>Custom domain test</body></html>"
            )
            if result2.success:
                print("   [OK] Upload with custom domain successful")
                print(f"   [OK] URL: {result2.message}")
            else:
                print(f"   [FAIL] Upload failed: {result2.message}")
                all_passed = False  # previously fell through to SUCCESS

        # Test 3: a site with no bucket name must raise ValueError.
        print("\n3. Testing error handling...")
        site_no_bucket = Mock(spec=SiteDeployment)
        site_no_bucket.s3_bucket_name = None
        try:
            client.upload_file(
                site=site_no_bucket,
                file_path="test.html",
                content="<html>Test</html>"
            )
            print("   [FAIL] Should have raised ValueError for missing bucket")
            all_passed = False  # previously fell through to SUCCESS
        except ValueError as e:
            print(f"   [OK] Correctly raised ValueError: {e}")

        if all_passed:
            print("\n" + "=" * 50)
            print("[SUCCESS] Real S3 tests passed!")
            print("=" * 50)
        print(f"\nTest file uploaded: {file_path}")
        print(f"Clean up: aws s3 rm s3://{bucket_name}/{file_path}")
        return all_passed
    except S3StorageAuthError as e:
        print(f"\n[ERROR] Authentication failed: {e}")
        print("Check your AWS credentials")
    except S3StorageError as e:
        print(f"\n[ERROR] S3 error: {e}")
    except Exception as e:
        print(f"\n[ERROR] Unexpected error: {e}")
        import traceback
        traceback.print_exc()
    return False


if __name__ == "__main__":
    # Exit non-zero on failure so CI can detect a broken deployment path.
    sys.exit(0 if test_real_s3_upload() else 1)