# Big-Link-Man/scripts/migrate_add_multi_cloud_storage_fields.py
# 143 lines, 5.3 KiB, Python
"""
Database migration script to add multi-cloud storage fields to site_deployments table
Story 6.3: Database Schema Updates for Multi-Cloud
Adds:
- storage_provider (String(20), Not Null, Default: 'bunny', Indexed)
- s3_bucket_name (String(255), Nullable)
- s3_bucket_region (String(50), Nullable)
- s3_custom_domain (String(255), Nullable)
- s3_endpoint_url (String(500), Nullable)
Usage:
python scripts/migrate_add_multi_cloud_storage_fields.py
"""
import sys
from pathlib import Path
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))
from sqlalchemy import text
from src.database.session import db_manager
from src.core.config import get_config
def migrate():
    """Add multi-cloud storage fields to the site_deployments table.

    Idempotent: each column is added only when it is not already present
    (checked via ``PRAGMA table_info``), and the storage_provider index is
    created with ``CREATE INDEX IF NOT EXISTS`` on every run, so a partially
    migrated database is repaired rather than skipped.

    Exits with status 1 if configuration loading or the migration fails.
    """
    print("Starting migration: add multi-cloud storage fields to site_deployments...")
    try:
        config = get_config()
        print(f"Database URL: {config.database.url}")
    except Exception as e:
        print(f"Error loading configuration: {e}")
        sys.exit(1)

    # (column name, column DDL) for every field this migration introduces.
    # storage_provider carries NOT NULL DEFAULT 'bunny', so SQLite backfills
    # existing rows automatically on ALTER TABLE ... ADD COLUMN.
    new_columns = [
        ("storage_provider", "VARCHAR(20) NOT NULL DEFAULT 'bunny'"),
        ("s3_bucket_name", "VARCHAR(255)"),
        ("s3_bucket_region", "VARCHAR(50)"),
        ("s3_custom_domain", "VARCHAR(255)"),
        ("s3_endpoint_url", "VARCHAR(500)"),
    ]

    try:
        db_manager.initialize()
        engine = db_manager.get_engine()
        with engine.connect() as conn:
            print("Checking for existing columns...")
            # PRAGMA table_info is SQLite-specific; row[1] is the column name.
            result = conn.execute(text("PRAGMA table_info(site_deployments)"))
            existing_columns = [row[1] for row in result]
            print(f"Existing columns: {', '.join(existing_columns)}")

            migrations_applied = []
            for column_name, column_ddl in new_columns:
                if column_name in existing_columns:
                    print(f"{column_name} column already exists, skipping")
                    continue
                print(f"Adding {column_name} column...")
                conn.execute(text(
                    f"ALTER TABLE site_deployments "
                    f"ADD COLUMN {column_name} {column_ddl}"
                ))
                conn.commit()
                if column_name == "storage_provider":
                    # Explicit backfill kept for parity with the original
                    # migration, although the column DEFAULT already covers
                    # pre-existing rows in SQLite.
                    print("Setting storage_provider='bunny' for all existing records...")
                    conn.execute(text(
                        "UPDATE site_deployments SET storage_provider = 'bunny'"
                    ))
                    conn.commit()
                migrations_applied.append(column_name)

            # Fix: the original only created this index when the column was
            # newly added, so a column-present/index-missing database stayed
            # unindexed forever. IF NOT EXISTS makes this safe to always run.
            print("Ensuring index on storage_provider...")
            conn.execute(text(
                "CREATE INDEX IF NOT EXISTS idx_site_deployments_storage_provider "
                "ON site_deployments(storage_provider)"
            ))
            conn.commit()

            if migrations_applied:
                print(f"\nMigration complete! Added columns: {', '.join(migrations_applied)}")
                print("\nNew fields added:")
                print(" - storage_provider (VARCHAR(20), NOT NULL, DEFAULT 'bunny', indexed)")
                print(" - s3_bucket_name (VARCHAR(255), nullable)")
                print(" - s3_bucket_region (VARCHAR(50), nullable)")
                print(" - s3_custom_domain (VARCHAR(255), nullable)")
                print(" - s3_endpoint_url (VARCHAR(500), nullable)")
            else:
                print("\nNo migrations needed - all columns already exist")
    except Exception as e:
        print(f"Error during migration: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)
    finally:
        # Always release the engine/connection pool, success or failure.
        db_manager.close()
# Script entry point: run the migration only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    migrate()