# Big-Link-Man/scripts/list_s3_fqdn_sites.py
# (listing metadata from export: 49 lines, 1.5 KiB, Python)
"""
List all S3 sites that have custom domains (FQDNs) - these should be excluded from auto-import
"""
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from src.database.session import db_manager
from src.database.models import SiteDeployment
def list_fqdn_s3_sites():
    """Print all S3-backed site deployments that use a custom domain (FQDN).

    Queries ``SiteDeployment`` rows whose ``storage_provider`` is ``'s3'`` or
    ``'s3_compatible'`` and whose ``s3_custom_domain`` is set, then writes to
    stdout:

    1. A formatted table of (id, bucket name, custom domain).
    2. A plain list of bucket names suitable for pasting into
       ``s3_bucket_exclusions.txt`` so these sites are not re-imported.

    Returns:
        None. Errors are printed with a traceback rather than raised
        (this is a top-level script boundary), and the session is always
        closed.
    """
    db_manager.initialize()
    session = db_manager.get_session()
    try:
        sites = (
            session.query(SiteDeployment)
            .filter(
                SiteDeployment.storage_provider.in_(['s3', 's3_compatible']),
                SiteDeployment.s3_custom_domain.isnot(None),
            )
            .order_by(SiteDeployment.id)
            .all()
        )

        print(f"\nFound {len(sites)} S3 sites with custom domains (FQDNs):\n")
        print(f"{'ID':<5} {'Bucket Name':<40} {'Custom Domain':<40}")
        print("-" * 90)
        for site in sites:
            # Columns may be NULL in the DB; show a placeholder instead.
            bucket = site.s3_bucket_name or 'N/A'
            domain = site.s3_custom_domain or 'N/A'
            print(f"{site.id:<5} {bucket:<40} {domain:<40}")

        # Plain bucket list for direct copy into the exclusions file.
        print("\nThese buckets should be added to s3_bucket_exclusions.txt to prevent re-import")
        print("\nBucket names to exclude:")
        for site in sites:
            if site.s3_bucket_name:
                print(site.s3_bucket_name)
    except Exception as e:  # script boundary: report, then fall through to cleanup
        print(f"Error: {e}")
        import traceback  # stdlib; imported lazily, only needed on the error path
        traceback.print_exc()
    finally:
        session.close()
# Script entry point: run the report only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    list_fqdn_s3_sites()