"""Entry point: python -m cheddahbot"""
|
|
|
|
import json
|
|
import logging
|
|
from pathlib import Path
|
|
|
|
from .agent import Agent
|
|
from .agent_registry import AgentRegistry
|
|
from .config import load_config
|
|
from .db import Database
|
|
from .llm import LLMAdapter
|
|
from .ui import create_ui
|
|
|
|
# Root logging setup for the whole process: timestamped, named, leveled
# lines on stderr (basicConfig attaches a StreamHandler to the root logger).
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
    datefmt="%H:%M:%S",  # time-of-day only — keeps console output compact
)
# Package-level logger used by main() below.
log = logging.getLogger("cheddahbot")
|
|
|
|
|
|
def _make_llm(config, model):
    """Build an LLMAdapter for *model* using the shared provider settings.

    Centralizes the provider wiring (OpenRouter key, Ollama/LM Studio URLs)
    that was previously duplicated for the default and per-agent adapters.
    """
    return LLMAdapter(
        default_model=model,
        openrouter_key=config.openrouter_api_key,
        ollama_url=config.ollama_url,
        lmstudio_url=config.lmstudio_url,
    )


def _init_skills(config):
    """Load the markdown skill registry from config.skills_dir, or None if unavailable."""
    try:
        from .skills import SkillRegistry

        log.info("Initializing skill registry...")
        skills_registry = SkillRegistry(config.skills_dir)
        log.info("Loaded %d skills", len(skills_registry.list_skills()))
        return skills_registry
    except Exception as e:
        # Best-effort subsystem: the bot runs without skills.
        log.warning("Skill registry not available: %s", e)
        return None


def _init_tools(config, db, default_llm):
    """Create the shared ToolRegistry (tools are singletons), or None on failure.

    A throwaway bootstrap agent is passed for tool discovery; main() re-points
    the registry at the real default agent once all agents are registered.
    """
    try:
        from .tools import ToolRegistry

        bootstrap_agent = Agent(config, db, default_llm)
        log.info("Initializing tool system...")
        return ToolRegistry(config, db, bootstrap_agent)
    except Exception as e:
        # Best-effort subsystem: agents simply get no tools.
        log.warning("Tool system not available: %s", e)
        return None


def _build_agents(config, db, default_llm, tools, skills_registry):
    """Instantiate every configured agent and return the populated AgentRegistry.

    The first entry in config.agents becomes the registry default. Each agent
    gets its own LLM when agent_cfg.model overrides the default, a (possibly
    scoped) memory system, and the shared tool/skill registries when available.
    """
    registry = AgentRegistry()
    log.info("Configuring %d agent(s)...", len(config.agents))

    for i, agent_cfg in enumerate(config.agents):
        # Per-agent LLM (if model override set); otherwise share the default.
        agent_llm = _make_llm(config, agent_cfg.model) if agent_cfg.model else default_llm

        agent = Agent(config, db, agent_llm, agent_config=agent_cfg)

        # Memory system (with optional scoping); best-effort per agent.
        try:
            from .memory import MemorySystem

            memory = MemorySystem(config, db, scope=agent_cfg.memory_scope)
            agent.set_memory(memory)
        except Exception as e:
            log.warning("Memory system not available for agent '%s': %s", agent_cfg.name, e)

        # Wire shared tool registry and skills.
        if tools:
            agent.set_tools(tools)
        if skills_registry:
            agent.set_skills_registry(skills_registry)

        registry.register(agent_cfg.name, agent, is_default=(i == 0))
        log.info(
            " Agent '%s' (%s) — tools: %s, scope: %s",
            agent_cfg.name,
            agent_cfg.display_name,
            "all" if agent_cfg.tools is None else str(len(agent_cfg.tools)),
            agent_cfg.memory_scope or "shared",
        )

    return registry


def _init_notification_bus(db):
    """Create the UI-agnostic NotificationBus, or None on failure."""
    try:
        from .notifications import NotificationBus

        log.info("Initializing notification bus...")
        return NotificationBus(db)
    except Exception as e:
        log.warning("Notification bus not available: %s", e)
        return None


def _start_scheduler(config, db, default_agent, notification_bus):
    """Start the background scheduler against the default agent (best-effort)."""
    try:
        from .scheduler import Scheduler

        log.info("Starting scheduler...")
        scheduler = Scheduler(config, db, default_agent, notification_bus=notification_bus)
        scheduler.start()
    except Exception as e:
        log.warning("Scheduler not available: %s", e)


def _build_web_app(config, db, blocks):
    """Assemble the parent FastAPI app and return it (dashboard, status API, Gradio).

    A parent FastAPI app is required so the dashboard can be mounted alongside
    Gradio: inserting routes into blocks.app before launch() doesn't work
    because launch()/mount_gradio_app() replaces the internal App instance.
    """
    import gradio as gr
    from fastapi import FastAPI
    from fastapi.responses import RedirectResponse
    from starlette.staticfiles import StaticFiles

    fastapi_app = FastAPI()

    # Mount the dashboard as static files (must come before Gradio's catch-all).
    dashboard_dir = Path(__file__).resolve().parent.parent / "dashboard"
    if dashboard_dir.is_dir():
        # Redirect /dashboard (no trailing slash) → /dashboard/
        @fastapi_app.get("/dashboard")
        async def _dashboard_redirect():
            return RedirectResponse(url="/dashboard/")

        fastapi_app.mount(
            "/dashboard",
            StaticFiles(directory=str(dashboard_dir), html=True),
            name="dashboard",
        )
        log.info("Dashboard mounted at /dashboard/ (serving %s)", dashboard_dir)

    # Link building status API endpoint (for dashboard consumption).
    @fastapi_app.get("/api/linkbuilding/status")
    async def linkbuilding_status():
        """Return link building pipeline status for dashboard consumption."""
        result = {
            "pending_cora_runs": [],
            "in_progress": [],
            "completed": [],
            "failed": [],
        }

        # Query KV store for tracked link building states.
        try:
            for key, value in db.kv_scan("linkbuilding:watched:"):
                try:
                    state = json.loads(value)
                    entry = {
                        "filename": state.get("filename", key.split(":")[-1]),
                        "status": state.get("status", "unknown"),
                        "task_id": state.get("task_id", ""),
                    }
                    if state.get("status") == "completed":
                        entry["completed_at"] = state.get("completed_at", "")
                        result["completed"].append(entry)
                    elif state.get("status") == "failed":
                        entry["error"] = state.get("error", "")
                        entry["failed_at"] = state.get("failed_at", "")
                        result["failed"].append(entry)
                    elif state.get("status") == "processing":
                        entry["started_at"] = state.get("started_at", "")
                        result["in_progress"].append(entry)
                    # Entries with any other status are intentionally omitted.
                except json.JSONDecodeError:
                    pass  # skip malformed KV entries
        except Exception as e:
            log.warning("Error reading linkbuilding KV state: %s", e)

        # Query ClickUp for pending tasks (to do + Link Building + Cora Backlinks).
        if config.clickup.enabled:
            try:
                from .clickup import ClickUpClient

                cu = ClickUpClient(
                    api_token=config.clickup.api_token,
                    workspace_id=config.clickup.workspace_id,
                    task_type_field_name=config.clickup.task_type_field_name,
                )
                try:
                    tasks = cu.get_tasks_from_space(
                        config.clickup.space_id, statuses=["to do"]
                    )
                    for task in tasks:
                        if task.task_type != "Link Building":
                            continue
                        # Empty LB Method is treated as Cora Backlinks by default.
                        lb_method = task.custom_fields.get("LB Method", "")
                        if lb_method and lb_method != "Cora Backlinks":
                            continue
                        result["pending_cora_runs"].append({
                            "keyword": task.custom_fields.get("Keyword", ""),
                            "url": task.custom_fields.get("IMSURL", ""),
                            "client": task.custom_fields.get("Client", ""),
                            "task_id": task.id,
                            "task_name": task.name,
                        })
                finally:
                    cu.close()
            except Exception as e:
                log.warning("Error querying ClickUp for pending link building: %s", e)

        return result

    # Mount Gradio at the root (after all other routes, since it catches all).
    gr.mount_gradio_app(fastapi_app, blocks, path="/", pwa=True, show_error=True)
    return fastapi_app


def main():
    """Wire up and launch the bot: config, DB, LLMs, agents, scheduler, web UI.

    Blocks forever in uvicorn.run(). Optional subsystems (skills, tools,
    memory, notifications, scheduler) are initialized best-effort and logged
    as warnings when unavailable.
    """
    log.info("Loading configuration...")
    config = load_config()

    log.info("Initializing database...")
    db = Database(config.db_path)

    log.info("Chat brain model: %s", config.chat_model)
    log.info("Execution brain model: %s (Claude Code CLI)", config.default_model)
    default_llm = _make_llm(config, config.chat_model)
    if default_llm.is_execution_brain_available():
        log.info("Execution brain: Claude Code CLI found in PATH")
    else:
        log.warning(
            "Execution brain: Claude Code CLI NOT found — heartbeat/scheduler tasks will fail"
        )

    # Skill registry (markdown skills from skills/ directory)
    skills_registry = _init_skills(config)

    # Tool system (shared across all agents — tools are singletons)
    tools = _init_tools(config, db, default_llm)

    # Multi-agent setup
    registry = _build_agents(config, db, default_llm, tools, skills_registry)

    # Update tool registry to reference the default agent and agent registry,
    # replacing the bootstrap agent used for tool discovery.
    default_agent = registry.default
    if tools and default_agent:
        tools.agent = default_agent
        tools.agent_registry = registry

    # Notification bus (UI-agnostic)
    notification_bus = _init_notification_bus(db)

    # Scheduler (uses default agent)
    _start_scheduler(config, db, default_agent, notification_bus)

    log.info("Launching Gradio UI on %s:%s...", config.host, config.port)
    blocks = create_ui(registry, config, default_llm, notification_bus=notification_bus)

    fastapi_app = _build_web_app(config, db, blocks)

    import uvicorn

    uvicorn.run(fastapi_app, host=config.host, port=config.port)
|
|
|
|
|
|
# Entry-point guard: supports `python -m cheddahbot` and direct execution.
if __name__ == "__main__":
    main()
|