"""
Unit tests for S3StorageClient

Story 6.2: AWS S3 Client Implementation
"""
import json
import os
from unittest.mock import MagicMock, Mock, patch

import pytest
from botocore.exceptions import BotoCoreError, ClientError

from src.database.models import SiteDeployment
from src.deployment.bunny_storage import UploadResult
from src.deployment.s3_storage import (
    S3StorageAuthError,
    S3StorageClient,
    S3StorageError,
)


class TestS3StorageClient:
    """Tests for S3StorageClient: uploads, error handling, retries,
    content-type detection, public URL generation, and bucket setup."""

    # Fake AWS credentials patched into os.environ for most tests so the
    # client's credential check passes without touching real AWS config.
    AWS_ENV = {
        'AWS_ACCESS_KEY_ID': 'test-key',
        'AWS_SECRET_ACCESS_KEY': 'test-secret',
        'AWS_REGION': 'us-east-1',
    }

    @pytest.fixture
    def mock_site(self):
        """Fixture for a mock site deployment with S3 config"""
        site = Mock(spec=SiteDeployment)
        site.s3_bucket_name = "test-bucket"
        site.s3_bucket_region = "us-east-1"
        site.s3_custom_domain = None
        site.s3_endpoint_url = None
        return site

    @pytest.fixture
    def mock_s3_client(self):
        """Fixture for a mocked boto3 S3 client"""
        mock_client = Mock()
        mock_resource = Mock()
        mock_bucket = Mock()
        mock_resource.Bucket.return_value = mock_bucket

        with patch('src.deployment.s3_storage.boto3.client', return_value=mock_client), \
             patch('src.deployment.s3_storage.boto3.resource', return_value=mock_resource):
            yield {
                'client': mock_client,
                'resource': mock_resource,
                'bucket': mock_bucket,
            }

    @staticmethod
    def _configure_success(mock_s3_client):
        """Make every mocked S3 call succeed (happy-path boilerplate
        shared by most tests; individual tests override side effects)."""
        mock_s3_client['client'].put_object.return_value = {}
        mock_s3_client['client'].head_bucket.return_value = {}
        mock_s3_client['client'].put_public_access_block.return_value = {}
        mock_s3_client['client'].put_bucket_policy.return_value = {}

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_success(self, mock_site, mock_s3_client):
        """Test successful file upload"""
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="test.html",
            content="<html>Test</html>"
        )

        assert result.success is True
        assert result.file_path == "test.html"
        assert "Public URL" in result.message

        # Verify put_object was called with correct parameters
        mock_s3_client['client'].put_object.assert_called_once()
        call_kwargs = mock_s3_client['client'].put_object.call_args[1]
        assert call_kwargs['Bucket'] == "test-bucket"
        assert call_kwargs['Key'] == "test.html"
        assert call_kwargs['ContentType'] == "text/html"
        assert call_kwargs['ACL'] == "public-read"

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_with_custom_domain(self, mock_site, mock_s3_client):
        """Test file upload with custom domain"""
        mock_site.s3_custom_domain = "cdn.example.com"
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="article.html",
            content="<html>Content</html>"
        )

        assert result.success is True
        assert "cdn.example.com" in result.message

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_missing_bucket_name(self, mock_s3_client):
        """Test upload fails when bucket name is missing"""
        site = Mock(spec=SiteDeployment)
        site.s3_bucket_name = None

        client = S3StorageClient(max_retries=3)

        with pytest.raises(ValueError) as exc_info:
            client.upload_file(
                site=site,
                file_path="test.html",
                content="<html>Test</html>"
            )

        assert "s3_bucket_name not configured" in str(exc_info.value)

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_no_such_bucket(self, mock_site, mock_s3_client):
        """Test upload fails when bucket doesn't exist"""
        error_response = {
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist'
            }
        }
        mock_s3_client['client'].put_object.side_effect = ClientError(
            error_response, 'PutObject'
        )
        mock_s3_client['client'].head_bucket.side_effect = ClientError(
            error_response, 'HeadBucket'
        )

        client = S3StorageClient(max_retries=3)

        with pytest.raises(S3StorageError) as exc_info:
            client.upload_file(
                site=mock_site,
                file_path="test.html",
                content="<html>Test</html>"
            )

        assert "does not exist" in str(exc_info.value)

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_access_denied(self, mock_site, mock_s3_client):
        """Test upload fails with access denied error"""
        self._configure_success(mock_s3_client)
        error_response = {
            'Error': {
                'Code': 'AccessDenied',
                'Message': 'Access Denied'
            }
        }
        # Only the object PUT is denied; bucket setup calls succeed.
        mock_s3_client['client'].put_object.side_effect = ClientError(
            error_response, 'PutObject'
        )

        client = S3StorageClient(max_retries=3)

        with pytest.raises(S3StorageAuthError) as exc_info:
            client.upload_file(
                site=mock_site,
                file_path="test.html",
                content="<html>Test</html>"
            )

        assert "Access denied" in str(exc_info.value)

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_retry_on_transient_error(self, mock_site, mock_s3_client):
        """Test upload retries on transient errors"""
        self._configure_success(mock_s3_client)
        error_response = {
            'Error': {
                'Code': 'ServiceUnavailable',
                'Message': 'Service temporarily unavailable'
            }
        }
        # First call fails, second succeeds
        mock_s3_client['client'].put_object.side_effect = [
            ClientError(error_response, 'PutObject'),
            {},
        ]

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="test.html",
            content="<html>Test</html>"
        )

        assert result.success is True
        assert mock_s3_client['client'].put_object.call_count == 2

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_content_type_detection(self, mock_site, mock_s3_client):
        """Test content type is correctly detected from file extension"""
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        # Test HTML file
        client.upload_file(site=mock_site, file_path="page.html", content="<html></html>")
        call_kwargs = mock_s3_client['client'].put_object.call_args[1]
        assert call_kwargs['ContentType'] == "text/html"

        # Test CSS file
        mock_s3_client['client'].put_object.reset_mock()
        client.upload_file(site=mock_site, file_path="style.css", content="body {}")
        call_kwargs = mock_s3_client['client'].put_object.call_args[1]
        assert call_kwargs['ContentType'] == "text/css"

        # Test JS file
        mock_s3_client['client'].put_object.reset_mock()
        client.upload_file(site=mock_site, file_path="script.js", content="console.log()")
        call_kwargs = mock_s3_client['client'].put_object.call_args[1]
        assert call_kwargs['ContentType'] == "application/javascript"

    @patch.dict(os.environ, {**AWS_ENV, 'AWS_REGION': 'us-west-2'})
    def test_upload_file_uses_site_region(self, mock_site, mock_s3_client):
        """Test upload uses region from site config"""
        mock_site.s3_bucket_region = "us-west-2"
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="test.html",
            content="<html>Test</html>"
        )

        assert result.success is True
        # The public website URL embeds the bucket's region, so the
        # site-level region must actually appear in the message.
        # (The old `or "test-bucket" in result.message` made this
        # assertion trivially true and never checked the region.)
        assert "us-west-2" in result.message

    @patch.dict(os.environ, AWS_ENV)
    def test_upload_file_s3_compatible_endpoint(self, mock_site, mock_s3_client):
        """Test upload with S3-compatible endpoint URL"""
        mock_site.s3_endpoint_url = "https://nyc3.digitaloceanspaces.com"
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="test.html",
            content="<html>Test</html>"
        )

        assert result.success is True
        # Endpoint handling is verified implicitly: put_object succeeded
        # through the client constructed with the custom endpoint.

    def test_missing_aws_credentials(self, mock_site):
        """Test error when AWS credentials are missing"""
        with patch.dict(os.environ, {}, clear=True):
            client = S3StorageClient(max_retries=3)

            with pytest.raises(S3StorageAuthError) as exc_info:
                client.upload_file(
                    site=mock_site,
                    file_path="test.html",
                    content="<html>Test</html>"
                )

            assert "AWS credentials not found" in str(exc_info.value)

    @patch.dict(os.environ, AWS_ENV)
    def test_configure_bucket_public_read(self, mock_site, mock_s3_client):
        """Test bucket configuration for public read access"""
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        # Bucket configuration happens internally during upload_file
        client.upload_file(
            site=mock_site,
            file_path="test.html",
            content="<html>Test</html>"
        )

        # Verify bucket configuration methods were called
        mock_s3_client['client'].put_public_access_block.assert_called_once()
        mock_s3_client['client'].put_bucket_policy.assert_called_once()

        # Verify bucket policy allows public read only
        policy_call = mock_s3_client['client'].put_bucket_policy.call_args
        policy = json.loads(policy_call[1]['Policy'])
        assert policy['Statement'][0]['Action'] == 's3:GetObject'
        assert policy['Statement'][0]['Effect'] == 'Allow'
        assert policy['Statement'][0]['Principal'] == '*'

    @patch.dict(os.environ, AWS_ENV)
    def test_generate_public_url_default(self, mock_site, mock_s3_client):
        """Test public URL generation with default S3 format"""
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="article.html",
            content="<html>Content</html>"
        )

        assert "test-bucket.s3-website-us-east-1.amazonaws.com/article.html" in result.message

    @patch.dict(os.environ, AWS_ENV)
    def test_generate_public_url_custom_domain(self, mock_site, mock_s3_client):
        """Test public URL generation with custom domain"""
        mock_site.s3_custom_domain = "cdn.example.com"
        self._configure_success(mock_s3_client)

        client = S3StorageClient(max_retries=3)

        result = client.upload_file(
            site=mock_site,
            file_path="article.html",
            content="<html>Content</html>"
        )

        assert "cdn.example.com/article.html" in result.message