# CheddahBot/cheddahbot/__main__.py
"""Entry point: python -m cheddahbot"""
import logging
from .agent import Agent
from .agent_registry import AgentRegistry
from .config import load_config
from .db import Database
from .llm import LLMAdapter
from .ui import create_ui

# Process-wide console logging; all package loggers inherit this handler/format.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
    datefmt="%H:%M:%S",
)
# Top-level logger used throughout this module.
log = logging.getLogger("cheddahbot")
def main():
    """Bootstrap CheddahBot and launch the Gradio UI.

    Order of operations:
      1. Load config and open the database.
      2. Build the default LLM adapter (chat brain) and probe for the
         Claude Code CLI (execution brain).
      3. Best-effort init of optional subsystems (skills, tools, memory,
         notifications, scheduler) — each failure is logged as a warning
         and the bot continues without that subsystem.
      4. Register one Agent per configured agent (first one is default).
      5. Launch the UI (blocks until shutdown).
    """
    log.info("Loading configuration...")
    config = load_config()
    log.info("Initializing database...")
    db = Database(config.db_path)
    log.info("Chat brain model: %s", config.chat_model)
    log.info("Execution brain model: %s (Claude Code CLI)", config.default_model)
    default_llm = LLMAdapter(
        default_model=config.chat_model,
        openrouter_key=config.openrouter_api_key,
        ollama_url=config.ollama_url,
        lmstudio_url=config.lmstudio_url,
    )
    if default_llm.is_execution_brain_available():
        log.info("Execution brain: Claude Code CLI found in PATH")
    else:
        log.warning(
            "Execution brain: Claude Code CLI NOT found — heartbeat/scheduler tasks will fail"
        )
    # Skill registry (markdown skills from skills/ directory) — optional.
    skills_registry = None
    try:
        from .skills import SkillRegistry
        log.info("Initializing skill registry...")
        skills_registry = SkillRegistry(config.skills_dir)
        log.info("Loaded %d skills", len(skills_registry.list_skills()))
    except Exception as e:
        log.warning("Skill registry not available: %s", e)
    # Tool system (shared across all agents — tools are singletons) — optional.
    tools = None
    try:
        from .tools import ToolRegistry
        # Create a temporary default agent for tool discovery; the registry's
        # agent reference is re-pointed at the real default agent below.
        _bootstrap_agent = Agent(config, db, default_llm)
        log.info("Initializing tool system...")
        tools = ToolRegistry(config, db, _bootstrap_agent)
    except Exception as e:
        log.warning("Tool system not available: %s", e)
    # Multi-agent setup: one Agent per config entry, first entry is default.
    registry = AgentRegistry()
    log.info("Configuring %d agent(s)...", len(config.agents))
    for i, agent_cfg in enumerate(config.agents):
        # Per-agent LLM only when the agent overrides the model; otherwise
        # share the default adapter.
        if agent_cfg.model:
            agent_llm = LLMAdapter(
                default_model=agent_cfg.model,
                openrouter_key=config.openrouter_api_key,
                ollama_url=config.ollama_url,
                lmstudio_url=config.lmstudio_url,
            )
        else:
            agent_llm = default_llm
        agent = Agent(config, db, agent_llm, agent_config=agent_cfg)
        # Memory system (with optional per-agent scoping) — optional.
        try:
            from .memory import MemorySystem
            memory = MemorySystem(config, db, scope=agent_cfg.memory_scope)
            agent.set_memory(memory)
        except Exception as e:
            log.warning("Memory system not available for agent '%s': %s", agent_cfg.name, e)
        # Wire shared tool registry and skills into this agent.
        if tools:
            agent.set_tools(tools)
        if skills_registry:
            agent.set_skills_registry(skills_registry)
        registry.register(agent_cfg.name, agent, is_default=(i == 0))
        log.info(
            " Agent '%s' (%s) — tools: %s, scope: %s",
            agent_cfg.name,
            agent_cfg.display_name,
            "all" if agent_cfg.tools is None else str(len(agent_cfg.tools)),
            agent_cfg.memory_scope or "shared",
        )
    # Re-point the tool registry from the bootstrap agent to the real
    # default agent, and give it access to the full agent registry.
    default_agent = registry.default
    if tools and default_agent:
        tools.agent = default_agent
        tools.agent_registry = registry
    # Notification bus (UI-agnostic) — optional.
    notification_bus = None
    try:
        from .notifications import NotificationBus
        log.info("Initializing notification bus...")
        notification_bus = NotificationBus(db)
    except Exception as e:
        log.warning("Notification bus not available: %s", e)
    # Scheduler (runs tasks against the default agent) — optional.
    try:
        from .scheduler import Scheduler
        log.info("Starting scheduler...")
        scheduler = Scheduler(config, db, default_agent, notification_bus=notification_bus)
        scheduler.start()
    except Exception as e:
        log.warning("Scheduler not available: %s", e)
    log.info("Launching Gradio UI on %s:%s...", config.host, config.port)
    app = create_ui(default_agent, config, default_llm, notification_bus=notification_bus)
    # Blocks until the server is shut down.
    app.launch(
        server_name=config.host,
        server_port=config.port,
        pwa=True,
        show_error=True,
    )
# Script entry point: run only when executed as `python -m cheddahbot`.
if __name__ == "__main__":
    main()