3.5: Wire multi-agent startup in __main__.py

Loop over config.agents to create per-agent LLM (when model override set),
Agent, MemorySystem (with scope), and register in AgentRegistry. Shared
ToolRegistry and SkillRegistry are wired to all agents. Scheduler and UI
use the default agent.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
cora-start
PeninsulaInd 2026-02-17 10:12:08 -06:00
parent 883fee36a3
commit 9d4d12e232
1 changed file with 59 additions and 22 deletions

View File

@ -3,6 +3,7 @@
import logging import logging
from .agent import Agent from .agent import Agent
from .agent_registry import AgentRegistry
from .config import load_config from .config import load_config
from .db import Database from .db import Database
from .llm import LLMAdapter from .llm import LLMAdapter
@ -25,53 +26,89 @@ def main():
log.info("Chat brain model: %s", config.chat_model) log.info("Chat brain model: %s", config.chat_model)
log.info("Execution brain model: %s (Claude Code CLI)", config.default_model) log.info("Execution brain model: %s (Claude Code CLI)", config.default_model)
llm = LLMAdapter( default_llm = LLMAdapter(
default_model=config.chat_model, default_model=config.chat_model,
openrouter_key=config.openrouter_api_key, openrouter_key=config.openrouter_api_key,
ollama_url=config.ollama_url, ollama_url=config.ollama_url,
lmstudio_url=config.lmstudio_url, lmstudio_url=config.lmstudio_url,
) )
if llm.is_execution_brain_available(): if default_llm.is_execution_brain_available():
log.info("Execution brain: Claude Code CLI found in PATH") log.info("Execution brain: Claude Code CLI found in PATH")
else: else:
log.warning( log.warning(
"Execution brain: Claude Code CLI NOT found — heartbeat/scheduler tasks will fail" "Execution brain: Claude Code CLI NOT found — heartbeat/scheduler tasks will fail"
) )
log.info("Creating agent...")
agent = Agent(config, db, llm)
# Phase 2+: Memory system
try:
from .memory import MemorySystem
log.info("Initializing memory system...")
memory = MemorySystem(config, db)
agent.set_memory(memory)
except Exception as e:
log.warning("Memory system not available: %s", e)
# Skill registry (markdown skills from skills/ directory) # Skill registry (markdown skills from skills/ directory)
skills_registry = None
try: try:
from .skills import SkillRegistry from .skills import SkillRegistry
log.info("Initializing skill registry...") log.info("Initializing skill registry...")
skills_registry = SkillRegistry(config.skills_dir) skills_registry = SkillRegistry(config.skills_dir)
agent.set_skills_registry(skills_registry)
log.info("Loaded %d skills", len(skills_registry.list_skills())) log.info("Loaded %d skills", len(skills_registry.list_skills()))
except Exception as e: except Exception as e:
log.warning("Skill registry not available: %s", e) log.warning("Skill registry not available: %s", e)
# Phase 3+: Tool system # Tool system (shared across all agents — tools are singletons)
tools = None
try: try:
from .tools import ToolRegistry from .tools import ToolRegistry
# Create a temporary default agent for tool discovery; will be replaced below
_bootstrap_agent = Agent(config, db, default_llm)
log.info("Initializing tool system...") log.info("Initializing tool system...")
tools = ToolRegistry(config, db, agent) tools = ToolRegistry(config, db, _bootstrap_agent)
agent.set_tools(tools)
except Exception as e: except Exception as e:
log.warning("Tool system not available: %s", e) log.warning("Tool system not available: %s", e)
# Multi-agent setup
registry = AgentRegistry()
log.info("Configuring %d agent(s)...", len(config.agents))
for i, agent_cfg in enumerate(config.agents):
# Per-agent LLM (if model override set)
if agent_cfg.model:
agent_llm = LLMAdapter(
default_model=agent_cfg.model,
openrouter_key=config.openrouter_api_key,
ollama_url=config.ollama_url,
lmstudio_url=config.lmstudio_url,
)
else:
agent_llm = default_llm
agent = Agent(config, db, agent_llm, agent_config=agent_cfg)
# Memory system (with optional scoping)
try:
from .memory import MemorySystem
memory = MemorySystem(config, db, scope=agent_cfg.memory_scope)
agent.set_memory(memory)
except Exception as e:
log.warning("Memory system not available for agent '%s': %s", agent_cfg.name, e)
# Wire shared tool registry and skills
if tools:
agent.set_tools(tools)
if skills_registry:
agent.set_skills_registry(skills_registry)
registry.register(agent_cfg.name, agent, is_default=(i == 0))
log.info(
" Agent '%s' (%s) — tools: %s, scope: %s",
agent_cfg.name,
agent_cfg.display_name,
"all" if agent_cfg.tools is None else str(len(agent_cfg.tools)),
agent_cfg.memory_scope or "shared",
)
# Update tool registry to reference the default agent (for ctx injection)
default_agent = registry.default
if tools and default_agent:
tools.agent = default_agent
# Notification bus (UI-agnostic) # Notification bus (UI-agnostic)
notification_bus = None notification_bus = None
try: try:
@ -82,18 +119,18 @@ def main():
except Exception as e: except Exception as e:
log.warning("Notification bus not available: %s", e) log.warning("Notification bus not available: %s", e)
# Phase 3+: Scheduler # Scheduler (uses default agent)
try: try:
from .scheduler import Scheduler from .scheduler import Scheduler
log.info("Starting scheduler...") log.info("Starting scheduler...")
scheduler = Scheduler(config, db, agent, notification_bus=notification_bus) scheduler = Scheduler(config, db, default_agent, notification_bus=notification_bus)
scheduler.start() scheduler.start()
except Exception as e: except Exception as e:
log.warning("Scheduler not available: %s", e) log.warning("Scheduler not available: %s", e)
log.info("Launching Gradio UI on %s:%s...", config.host, config.port) log.info("Launching Gradio UI on %s:%s...", config.host, config.port)
app = create_ui(agent, config, llm, notification_bus=notification_bus) app = create_ui(default_agent, config, default_llm, notification_bus=notification_bus)
app.launch( app.launch(
server_name=config.host, server_name=config.host,
server_port=config.port, server_port=config.port,