# cheddahbot/__main__.py
"""Entry point: python -m cheddahbot"""
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path
from .agent import Agent
from .agent_registry import AgentRegistry
from .config import load_config
from .db import Database
from .llm import LLMAdapter
from .ui import create_ui
# Console: INFO+ with a compact time-only format.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
    datefmt="%H:%M:%S",
)

# File: everything (DEBUG+) to a rotating log next to the package
# (<repo>/logs/cheddahbot.log, 5 MiB x 5 backups).
_log_dir = Path(__file__).resolve().parent.parent / "logs"
_log_dir.mkdir(exist_ok=True)
_file_handler = RotatingFileHandler(
    _log_dir / "cheddahbot.log",
    maxBytes=5 * 1024 * 1024,
    backupCount=5,
    # Explicit UTF-8 so non-ASCII log messages don't depend on the
    # platform's default locale encoding.
    encoding="utf-8",
)
_file_handler.setLevel(logging.DEBUG)
_file_handler.setFormatter(
    logging.Formatter("%(asctime)s [%(name)s] %(levelname)s: %(message)s")
)
logging.getLogger().addHandler(_file_handler)

# Quiet the HTTP client libraries' chatty INFO/DEBUG output.
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)

log = logging.getLogger("cheddahbot")
def _make_llm(model, config):
    """Build an LLMAdapter for *model* using the shared provider settings.

    Factored out because the identical construction was duplicated for the
    default LLM and for per-agent model overrides.
    """
    return LLMAdapter(
        default_model=model,
        openrouter_key=config.openrouter_api_key,
        ollama_url=config.ollama_url,
        lmstudio_url=config.lmstudio_url,
    )


def _init_skills(config):
    """Load the markdown skill registry from skills/; return it or None (best-effort)."""
    try:
        from .skills import SkillRegistry
        log.info("Initializing skill registry...")
        skills_registry = SkillRegistry(config.skills_dir)
        log.info("Loaded %d skills", len(skills_registry.list_skills()))
        return skills_registry
    except Exception as e:
        log.warning("Skill registry not available: %s", e)
        return None


def _init_tools(config, db, default_llm):
    """Build the shared ToolRegistry (tools are singletons); return it or None.

    Uses a throwaway bootstrap agent for tool discovery; main() re-points the
    registry at the real default agent once agents are configured.
    """
    try:
        from .tools import ToolRegistry
        # Create a temporary default agent for tool discovery; replaced later.
        _bootstrap_agent = Agent(config, db, default_llm)
        log.info("Initializing tool system...")
        return ToolRegistry(config, db, _bootstrap_agent)
    except Exception as e:
        log.warning("Tool system not available: %s", e)
        return None


def _build_agents(config, db, default_llm, tools, skills_registry):
    """Construct every configured agent and register it; first agent is default."""
    registry = AgentRegistry()
    log.info("Configuring %d agent(s)...", len(config.agents))
    for i, agent_cfg in enumerate(config.agents):
        # Per-agent LLM only when a model override is set; otherwise share.
        agent_llm = _make_llm(agent_cfg.model, config) if agent_cfg.model else default_llm
        agent = Agent(config, db, agent_llm, agent_config=agent_cfg)

        # Memory system (with optional per-agent scoping) — best-effort.
        try:
            from .memory import MemorySystem
            memory = MemorySystem(config, db, scope=agent_cfg.memory_scope)
            agent.set_memory(memory)
        except Exception as e:
            log.warning("Memory system not available for agent '%s': %s", agent_cfg.name, e)

        # Wire the shared tool registry and skills.
        if tools:
            agent.set_tools(tools)
        if skills_registry:
            agent.set_skills_registry(skills_registry)

        registry.register(agent_cfg.name, agent, is_default=(i == 0))
        log.info(
            " Agent '%s' (%s) — tools: %s, scope: %s",
            agent_cfg.name,
            agent_cfg.display_name,
            "all" if agent_cfg.tools is None else str(len(agent_cfg.tools)),
            agent_cfg.memory_scope or "shared",
        )
    return registry


def _init_notification_bus(db):
    """Create the UI-agnostic notification bus; return it or None (best-effort)."""
    try:
        from .notifications import NotificationBus
        log.info("Initializing notification bus...")
        return NotificationBus(db)
    except Exception as e:
        log.warning("Notification bus not available: %s", e)
        return None


def _subscribe_ntfy(config, notification_bus):
    """Wire ntfy.sh push channels onto the notification bus (best-effort).

    Channels whose topic env var is unset are skipped with a warning so a
    missing secret never aborts startup.
    """
    try:
        import os
        from .ntfy import NtfyChannel, NtfyNotifier
        ntfy_channels = []
        for ch_cfg in config.ntfy.channels:
            topic = os.getenv(ch_cfg.topic_env_var, "")
            if topic:
                ntfy_channels.append(
                    NtfyChannel(
                        name=ch_cfg.name,
                        server=ch_cfg.server,
                        topic=topic,
                        categories=ch_cfg.categories,
                        include_patterns=ch_cfg.include_patterns,
                        exclude_patterns=ch_cfg.exclude_patterns,
                        priority=ch_cfg.priority,
                        tags=ch_cfg.tags,
                    )
                )
            else:
                log.warning(
                    "ntfy channel '%s' skipped — env var %s not set",
                    ch_cfg.name, ch_cfg.topic_env_var,
                )
        notifier = NtfyNotifier(ntfy_channels)
        if notifier.enabled:
            notification_bus.subscribe("ntfy", notifier.notify)
            log.info("ntfy notifier subscribed to notification bus")
    except Exception as e:
        log.warning("ntfy notifier not available: %s", e)


def _start_scheduler(config, db, default_agent, notification_bus, tools):
    """Start the task scheduler against the default agent; return it or None."""
    scheduler = None
    try:
        from .scheduler import Scheduler
        log.info("Starting scheduler...")
        scheduler = Scheduler(config, db, default_agent, notification_bus=notification_bus)
        scheduler.start()
        # Inject scheduler into tool context so get_active_tasks can read it.
        if tools:
            tools.scheduler = scheduler
    except Exception as e:
        log.warning("Scheduler not available: %s", e)
    return scheduler


def _serve(config, db, registry, default_llm, notification_bus, scheduler):
    """Assemble the FastAPI app (API + web UI + legacy Gradio) and run uvicorn.

    Blocks until the server exits.
    """
    import uvicorn
    from fastapi import FastAPI

    fastapi_app = FastAPI()

    # Mount API endpoints.
    from .api import create_api_router
    api_router = create_api_router(config, db, registry, scheduler=scheduler)
    fastapi_app.include_router(api_router)
    log.info("API router mounted at /api/")

    # Mount new HTMX web UI (chat at /, dashboard at /dashboard).
    from .web import mount_web_app
    mount_web_app(
        fastapi_app,
        registry,
        config,
        default_llm,
        notification_bus=notification_bus,
        scheduler=scheduler,
        db=db,
    )

    # Mount Gradio at /old for the transition period — best-effort.
    try:
        import gradio as gr
        log.info("Mounting Gradio UI at /old...")
        blocks = create_ui(
            registry, config, default_llm, notification_bus=notification_bus, scheduler=scheduler
        )
        gr.mount_gradio_app(fastapi_app, blocks, path="/old", pwa=False, show_error=True)
        log.info("Gradio UI available at /old")
    except Exception as e:
        log.warning("Gradio UI not available: %s", e)

    log.info("Launching web UI on %s:%s...", config.host, config.port)
    uvicorn.run(fastapi_app, host=config.host, port=config.port)


def main():
    """Wire up CheddahBot (config, DB, LLMs, agents, tools, scheduler) and serve the UI."""
    log.info("Loading configuration...")
    config = load_config()

    log.info("Initializing database...")
    db = Database(config.db_path)

    log.info("Chat brain model: %s", config.chat_model)
    log.info("Execution brain model: %s (Claude Code CLI)", config.default_model)
    default_llm = _make_llm(config.chat_model, config)
    if default_llm.is_execution_brain_available():
        log.info("Execution brain: Claude Code CLI found in PATH")
    else:
        log.warning(
            "Execution brain: Claude Code CLI NOT found — heartbeat/scheduler tasks will fail"
        )

    # Optional subsystems — each initializes best-effort and may be None.
    skills_registry = _init_skills(config)
    tools = _init_tools(config, db, default_llm)

    # Multi-agent setup.
    registry = _build_agents(config, db, default_llm, tools, skills_registry)

    # Re-point the shared tool registry from the bootstrap agent to the real
    # default agent, and give it the agent registry.
    default_agent = registry.default
    if tools and default_agent:
        tools.agent = default_agent
        tools.agent_registry = registry

    notification_bus = _init_notification_bus(db)
    if notification_bus and config.ntfy.enabled:
        _subscribe_ntfy(config, notification_bus)

    scheduler = _start_scheduler(config, db, default_agent, notification_bus, tools)

    _serve(config, db, registry, default_llm, notification_bus, scheduler)


if __name__ == "__main__":
    main()