# Big-Link-Man/tests/integration/test_story_6_3_migration.py
"""
Integration tests for Story 6.3: Database Schema Updates for Multi-Cloud
Tests the migration script and verifies the new fields work correctly with a real database.
"""
import pytest
from sqlalchemy import text, inspect
from src.database.session import db_manager
from src.database.models import SiteDeployment
from src.database.repositories import SiteDeploymentRepository
@pytest.fixture
def db_connection():
    """Yield a live SQLAlchemy connection from the shared db_manager.

    Teardown (closing the connection and shutting down the manager) is
    wrapped in try/finally so it runs even if the fixture generator is
    closed early (e.g. an error in a dependent fixture or an interrupt);
    without it, an early close would leak the connection and leave
    db_manager initialized for subsequent tests.
    """
    db_manager.initialize()
    connection = db_manager.get_engine().connect()
    try:
        yield connection
    finally:
        connection.close()
        db_manager.close()
def test_migration_adds_storage_provider_column(db_connection):
    """The migration must add a NOT NULL VARCHAR(20) storage_provider column."""
    inspector = inspect(db_connection)
    cols = {c['name']: c for c in inspector.get_columns('site_deployments')}
    assert 'storage_provider' in cols
    provider_col = cols['storage_provider']
    assert provider_col['nullable'] is False
    assert provider_col['type'].length == 20
def test_migration_adds_s3_columns(db_connection):
    """Every S3-specific column must exist and be nullable (unused for bunny rows)."""
    inspector = inspect(db_connection)
    cols = {c['name']: c for c in inspector.get_columns('site_deployments')}
    s3_columns = (
        's3_bucket_name',
        's3_bucket_region',
        's3_custom_domain',
        's3_endpoint_url',
    )
    for name in s3_columns:
        assert name in cols
        assert cols[name]['nullable'] is True
def test_migration_creates_storage_provider_index(db_connection):
    """An index whose name mentions storage_provider must exist on the table."""
    index_names = [
        ix['name'] for ix in inspect(db_connection).get_indexes('site_deployments')
    ]
    assert any('storage_provider' in name for name in index_names)
def test_existing_records_have_bunny_default(db_connection):
    """Rows that predate the migration should all read storage_provider='bunny'.

    NULL is tolerated alongside 'bunny' so the check also passes mid-backfill.
    """
    bunny_count = db_connection.execute(text("""
        SELECT COUNT(*) as count
        FROM site_deployments
        WHERE storage_provider = 'bunny' OR storage_provider IS NULL
    """)).fetchone()[0]
    total_count = db_connection.execute(text("""
        SELECT COUNT(*) as count FROM site_deployments
    """)).fetchone()[0]
    # Vacuously passes on an empty table; only meaningful when legacy rows exist.
    if total_count > 0:
        assert bunny_count == total_count
def test_create_bunny_deployment_with_repository(db_connection):
    """Create a deployment without any multi-cloud kwargs (backward compatibility).

    Expects storage_provider to default to 'bunny' and all S3 fields to be NULL.

    Cleanup note: this test commits, and rollback() cannot undo a commit, so
    the finally block explicitly deletes the committed row to keep the shared
    database free of test residue across runs.
    """
    from sqlalchemy.orm import sessionmaker
    Session = sessionmaker(bind=db_connection)
    session = Session()
    deployment = None
    try:
        repo = SiteDeploymentRepository(session)
        deployment = repo.create(
            site_name="Test Bunny Site",
            storage_zone_id=999,
            storage_zone_name="test-zone",
            storage_zone_password="test-password",
            storage_zone_region="DE",
            pull_zone_id=999,
            pull_zone_bcdn_hostname="test-bunny.b-cdn.net"
        )
        assert deployment.storage_provider == "bunny"
        assert deployment.s3_bucket_name is None
        assert deployment.s3_bucket_region is None
        assert deployment.id is not None
        session.commit()
        retrieved = repo.get_by_id(deployment.id)
        assert retrieved is not None
        assert retrieved.storage_provider == "bunny"
    finally:
        session.rollback()
        # Remove the committed test row; rollback above only discards
        # any uncommitted state, not the commit performed mid-test.
        if deployment is not None and deployment.id is not None:
            session.delete(deployment)
            session.commit()
        session.close()
def test_create_s3_deployment_with_repository(db_connection):
    """Create an S3 deployment exercising the new multi-cloud kwargs.

    Expects the S3 fields to round-trip through create/commit/get_by_id,
    with s3_endpoint_url left NULL (plain AWS S3 needs no custom endpoint).

    Cleanup note: this test commits, and rollback() cannot undo a commit, so
    the finally block explicitly deletes the committed row to keep the shared
    database free of test residue across runs.
    """
    from sqlalchemy.orm import sessionmaker
    Session = sessionmaker(bind=db_connection)
    session = Session()
    deployment = None
    try:
        repo = SiteDeploymentRepository(session)
        deployment = repo.create(
            site_name="Test S3 Site",
            storage_zone_id=888,
            storage_zone_name="s3-zone",
            storage_zone_password="s3-password",
            storage_zone_region="NY",
            pull_zone_id=888,
            pull_zone_bcdn_hostname="test-s3.b-cdn.net",
            storage_provider="s3",
            s3_bucket_name="my-test-bucket",
            s3_bucket_region="us-east-1",
            s3_custom_domain="cdn.example.com"
        )
        assert deployment.storage_provider == "s3"
        assert deployment.s3_bucket_name == "my-test-bucket"
        assert deployment.s3_bucket_region == "us-east-1"
        assert deployment.s3_custom_domain == "cdn.example.com"
        assert deployment.s3_endpoint_url is None
        session.commit()
        retrieved = repo.get_by_id(deployment.id)
        assert retrieved is not None
        assert retrieved.storage_provider == "s3"
        assert retrieved.s3_bucket_name == "my-test-bucket"
    finally:
        session.rollback()
        # Remove the committed test row; rollback above only discards
        # any uncommitted state, not the commit performed mid-test.
        if deployment is not None and deployment.id is not None:
            session.delete(deployment)
            session.commit()
        session.close()
def test_create_s3_compatible_deployment_with_repository(db_connection):
    """Create an s3_compatible deployment (DigitalOcean Spaces style endpoint).

    Expects storage_provider='s3_compatible' and the custom endpoint URL to
    round-trip through create/commit/get_by_id.

    Cleanup note: this test commits, and rollback() cannot undo a commit, so
    the finally block explicitly deletes the committed row to keep the shared
    database free of test residue across runs.
    """
    from sqlalchemy.orm import sessionmaker
    Session = sessionmaker(bind=db_connection)
    session = Session()
    deployment = None
    try:
        repo = SiteDeploymentRepository(session)
        deployment = repo.create(
            site_name="Test DO Spaces Site",
            storage_zone_id=777,
            storage_zone_name="do-zone",
            storage_zone_password="do-password",
            storage_zone_region="LA",
            pull_zone_id=777,
            pull_zone_bcdn_hostname="test-do.b-cdn.net",
            storage_provider="s3_compatible",
            s3_bucket_name="my-spaces-bucket",
            s3_bucket_region="nyc3",
            s3_endpoint_url="https://nyc3.digitaloceanspaces.com"
        )
        assert deployment.storage_provider == "s3_compatible"
        assert deployment.s3_bucket_name == "my-spaces-bucket"
        assert deployment.s3_bucket_region == "nyc3"
        assert deployment.s3_endpoint_url == "https://nyc3.digitaloceanspaces.com"
        session.commit()
        retrieved = repo.get_by_id(deployment.id)
        assert retrieved is not None
        assert retrieved.storage_provider == "s3_compatible"
        assert retrieved.s3_endpoint_url == "https://nyc3.digitaloceanspaces.com"
    finally:
        session.rollback()
        # Remove the committed test row; rollback above only discards
        # any uncommitted state, not the commit performed mid-test.
        if deployment is not None and deployment.id is not None:
            session.delete(deployment)
            session.commit()
        session.close()
def test_model_fields_accessible(db_connection):
    """All new multi-cloud model fields are present and hold the values passed in.

    Nothing is committed here, so the rollback in the finally block fully
    discards the created row.
    """
    from sqlalchemy.orm import sessionmaker
    Session = sessionmaker(bind=db_connection)
    session = Session()
    try:
        repo = SiteDeploymentRepository(session)
        deployment = repo.create(
            site_name="Model Test Site",
            storage_zone_id=666,
            storage_zone_name="model-zone",
            storage_zone_password="model-password",
            storage_zone_region="DE",
            pull_zone_id=666,
            pull_zone_bcdn_hostname="model-test.b-cdn.net",
            storage_provider="s3",
            s3_bucket_name="model-bucket",
            s3_bucket_region="us-west-2",
            s3_custom_domain="model.example.com",
            s3_endpoint_url="https://s3.us-west-2.amazonaws.com"
        )
        # Table-driven check: each new field must exist and echo its input.
        expected = {
            'storage_provider': "s3",
            's3_bucket_name': "model-bucket",
            's3_bucket_region': "us-west-2",
            's3_custom_domain': "model.example.com",
            's3_endpoint_url': "https://s3.us-west-2.amazonaws.com",
        }
        for attr, value in expected.items():
            assert hasattr(deployment, attr)
            assert getattr(deployment, attr) == value
    finally:
        session.rollback()
        session.close()