"""Dashboard API endpoints.

Provides JSON data for the standalone HTML dashboard.

All ClickUp data is cached for 5 minutes to avoid hammering the API.
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import calendar
import json
import logging
import shutil
import threading
import time
from collections import Counter
from datetime import UTC, datetime
from typing import TYPE_CHECKING

from fastapi import APIRouter
|
|
|
|
if TYPE_CHECKING:
|
|
from .agent_registry import AgentRegistry
|
|
from .config import Config
|
|
from .db import Database
|
|
from .scheduler import Scheduler
|
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
router = APIRouter(prefix="/api")
|
|
|
|
# Module-level refs wired at startup by create_api_router()
|
|
_config: Config | None = None
|
|
_db: Database | None = None
|
|
_registry: AgentRegistry | None = None
|
|
_scheduler: Scheduler | None = None
|
|
|
|
# Simple in-memory cache for ClickUp data
|
|
_cache: dict[str, dict] = {}
|
|
_cache_lock = threading.Lock()
|
|
CACHE_TTL = 300 # 5 minutes
|
|
|
|
|
|
def create_api_router(
    config: Config,
    db: Database,
    registry: AgentRegistry,
    scheduler: Scheduler | None = None,
) -> APIRouter:
    """Stash dependency references at module level and hand back the router.

    Called once at startup; the endpoint handlers read these module globals.
    """
    global _config, _db, _registry, _scheduler
    _config, _db, _registry, _scheduler = config, db, registry, scheduler
    return router
|
|
|
|
|
|
def _get_cached(key: str) -> dict | None:
    """Return cached data for *key*, or None when absent or older than CACHE_TTL."""
    with _cache_lock:
        hit = _cache.get(key)
        if hit is None:
            return None
        if time.time() - hit["ts"] >= CACHE_TTL:
            # Stale entry; caller will refetch and overwrite it.
            return None
        return hit["data"]
|
|
|
|
|
|
def _set_cached(key: str, data):
    """Store *data* under *key*, stamped with the current time for TTL checks."""
    with _cache_lock:
        _cache[key] = {"ts": time.time(), "data": data}
|
|
|
|
|
|
def _get_clickup_client():
    """Build a ClickUpClient from the wired-in dashboard config.

    Imported lazily so the module loads even when ClickUp is unconfigured.
    """
    from .clickup import ClickUpClient

    cu = _config.clickup
    return ClickUpClient(
        api_token=cu.api_token,
        workspace_id=cu.workspace_id,
        task_type_field_name=cu.task_type_field_name,
    )
|
|
|
|
|
|
# ── Tasks ──
|
|
|
|
|
|
@router.get("/tasks")
|
|
async def get_tasks():
|
|
"""All ClickUp tasks grouped by work category."""
|
|
cached = _get_cached("tasks")
|
|
if cached is not None:
|
|
return cached
|
|
|
|
if not _config or not _config.clickup.enabled:
|
|
return {"tasks": [], "error": "ClickUp not configured"}
|
|
|
|
client = _get_clickup_client()
|
|
try:
|
|
raw_tasks = client.get_tasks_from_overall_lists(_config.clickup.space_id)
|
|
tasks = []
|
|
for t in raw_tasks:
|
|
tasks.append(
|
|
{
|
|
"id": t.id,
|
|
"name": t.name,
|
|
"status": t.status,
|
|
"task_type": t.task_type,
|
|
"url": t.url,
|
|
"due_date": t.due_date,
|
|
"date_done": t.date_done,
|
|
"list_name": t.list_name,
|
|
"tags": t.tags,
|
|
"custom_fields": t.custom_fields,
|
|
}
|
|
)
|
|
result = {"tasks": tasks, "count": len(tasks)}
|
|
_set_cached("tasks", result)
|
|
return result
|
|
except Exception as e:
|
|
log.error("Failed to fetch tasks for dashboard: %s", e)
|
|
return {"tasks": [], "error": str(e)}
|
|
finally:
|
|
client.close()
|
|
|
|
|
|
@router.get("/tasks/by-company")
|
|
async def get_tasks_by_company():
|
|
"""Tasks grouped by Client custom field."""
|
|
data = await get_tasks()
|
|
by_company: dict[str, list] = {}
|
|
for task in data.get("tasks", []):
|
|
company = task["custom_fields"].get("Client") or "Unassigned"
|
|
by_company.setdefault(company, []).append(task)
|
|
|
|
# Sort companies by task count descending
|
|
sorted_companies = sorted(by_company.items(), key=lambda x: -len(x[1]))
|
|
return {
|
|
"companies": [
|
|
{"name": name, "tasks": tasks, "count": len(tasks)}
|
|
for name, tasks in sorted_companies
|
|
]
|
|
}
|
|
|
|
|
|
@router.get("/tasks/link-building")
|
|
async def get_link_building_tasks():
|
|
"""Link building tasks with KV state merged in."""
|
|
cached = _get_cached("lb_tasks")
|
|
if cached is not None:
|
|
return cached
|
|
|
|
if not _config or not _config.clickup.enabled:
|
|
return {"total": 0, "companies": [], "status_counts": {}}
|
|
|
|
client = _get_clickup_client()
|
|
try:
|
|
raw_tasks = client.get_tasks_from_overall_lists(
|
|
_config.clickup.space_id, include_closed=True
|
|
)
|
|
except Exception as e:
|
|
log.error("Failed to fetch LB tasks: %s", e)
|
|
return {"total": 0, "companies": [], "status_counts": {}, "error": str(e)}
|
|
finally:
|
|
client.close()
|
|
|
|
lb_tasks = []
|
|
for t in raw_tasks:
|
|
if t.task_type != "Link Building":
|
|
continue
|
|
task_dict = {
|
|
"id": t.id,
|
|
"name": t.name,
|
|
"status": t.status,
|
|
"task_type": t.task_type,
|
|
"url": t.url,
|
|
"due_date": t.due_date,
|
|
"date_done": t.date_done,
|
|
"list_name": t.list_name,
|
|
"tags": t.tags,
|
|
"custom_fields": t.custom_fields,
|
|
}
|
|
lb_tasks.append(task_dict)
|
|
|
|
# Merge KV state
|
|
if _db:
|
|
for task in lb_tasks:
|
|
kv_key = f"clickup:task:{task['id']}:state"
|
|
raw = _db.kv_get(kv_key)
|
|
if raw:
|
|
try:
|
|
task["kv_state"] = json.loads(raw)
|
|
except json.JSONDecodeError:
|
|
task["kv_state"] = None
|
|
else:
|
|
task["kv_state"] = None
|
|
|
|
# -- Build focused groups --
|
|
|
|
# Split active vs closed
|
|
closed_statuses = {"complete", "closed", "done"}
|
|
active_lb = [
|
|
t for t in lb_tasks
|
|
if not any(kw in t["status"] for kw in closed_statuses)
|
|
]
|
|
|
|
# need_cora: open LB tasks where LB Method = "Cora Backlinks"
|
|
# Exclude tasks that automation already touched
|
|
automation_touched = {
|
|
"error", "automation underway", "complete",
|
|
"closed", "done", "internal review",
|
|
}
|
|
need_cora = [
|
|
t for t in active_lb
|
|
if t["custom_fields"].get("LB Method") == "Cora Backlinks"
|
|
and t["status"] not in automation_touched
|
|
]
|
|
need_cora.sort(key=lambda t: int(t.get("due_date") or "9999999999999"))
|
|
|
|
# recently_completed: closed/complete tasks with date_done in last 7 days
|
|
seven_days_ago_ms = (time.time() - 7 * 86400) * 1000
|
|
recently_completed = []
|
|
for t in lb_tasks:
|
|
s = t["status"]
|
|
if not (s.endswith("complete") or "closed" in s or "done" in s):
|
|
continue
|
|
if t["date_done"]:
|
|
try:
|
|
if int(t["date_done"]) >= seven_days_ago_ms:
|
|
recently_completed.append(t)
|
|
except (ValueError, TypeError):
|
|
pass
|
|
recently_completed.sort(key=lambda t: int(t.get("date_done") or "0"), reverse=True)
|
|
|
|
# in_progress_not_started: status "in progress" but no meaningful KV state
|
|
early_states = {"", "approved", "awaiting_approval"}
|
|
in_progress_not_started = []
|
|
for t in lb_tasks:
|
|
if t["status"] != "in progress":
|
|
continue
|
|
kv = t.get("kv_state")
|
|
if kv is None or kv.get("state", "") in early_states:
|
|
in_progress_not_started.append(t)
|
|
by_company: dict[str, list] = {}
|
|
for task in active_lb:
|
|
company = task["custom_fields"].get("Client") or "Unassigned"
|
|
by_company.setdefault(company, []).append(task)
|
|
|
|
result = {
|
|
"total": len(active_lb),
|
|
"need_cora": need_cora,
|
|
"recently_completed": recently_completed,
|
|
"in_progress_not_started": in_progress_not_started,
|
|
"companies": [
|
|
{"name": name, "tasks": tasks, "count": len(tasks)}
|
|
for name, tasks in sorted(by_company.items(), key=lambda x: -len(x[1]))
|
|
],
|
|
"status_counts": _count_lb_statuses(active_lb),
|
|
"next_month_count": _count_next_month(active_lb),
|
|
}
|
|
_set_cached("lb_tasks", result)
|
|
return result
|
|
|
|
|
|
@router.get("/tasks/need-cora")
|
|
async def get_need_cora_tasks():
|
|
"""Tasks that need a Cora report, deduplicated by keyword.
|
|
|
|
Includes work categories: Link Building (Cora Backlinks), On Page
|
|
Optimization, and Content Creation. Tasks sharing the same Keyword
|
|
field are grouped into a single entry.
|
|
"""
|
|
cached = _get_cached("need_cora")
|
|
if cached is not None:
|
|
return cached
|
|
|
|
if not _config or not _config.clickup.enabled:
|
|
return {"count": 0, "keywords": []}
|
|
|
|
# Reuse the cached /tasks data if available, otherwise fetch fresh
|
|
tasks_data = await get_tasks()
|
|
all_tasks = tasks_data.get("tasks", [])
|
|
|
|
automation_touched = {
|
|
"error", "automation underway", "complete",
|
|
"closed", "done", "internal review",
|
|
}
|
|
|
|
# Work categories that need Cora reports
|
|
cora_categories = {"Link Building", "On Page Optimization", "Content Creation"}
|
|
|
|
# Only include tasks due within 30 days behind / 30 days ahead
|
|
now_ms = time.time() * 1000
|
|
window_ms = 30 * 86400 * 1000
|
|
due_min = now_ms - window_ms
|
|
due_max = now_ms + window_ms
|
|
|
|
qualifying = []
|
|
for t in all_tasks:
|
|
if t["task_type"] not in cora_categories:
|
|
continue
|
|
if t["status"] in automation_touched:
|
|
continue
|
|
# Filter by due date window (skip tasks with no due date)
|
|
due = t.get("due_date")
|
|
if not due:
|
|
continue
|
|
try:
|
|
due_int = int(due)
|
|
except (ValueError, TypeError):
|
|
continue
|
|
if due_int < due_min or due_int > due_max:
|
|
continue
|
|
# Link Building additionally requires LB Method = "Cora Backlinks"
|
|
if t["task_type"] == "Link Building":
|
|
if t["custom_fields"].get("LB Method") != "Cora Backlinks":
|
|
continue
|
|
qualifying.append(t)
|
|
|
|
# Group by normalised Keyword
|
|
by_keyword: dict[str, dict] = {}
|
|
for t in qualifying:
|
|
kw = (t["custom_fields"].get("Keyword") or t["name"]).strip()
|
|
kw_lower = kw.lower()
|
|
if kw_lower not in by_keyword:
|
|
by_keyword[kw_lower] = {
|
|
"keyword": kw,
|
|
"company": t["custom_fields"].get("Client") or "Unassigned",
|
|
"due_date": t.get("due_date"),
|
|
"tasks": [],
|
|
}
|
|
by_keyword[kw_lower]["tasks"].append({
|
|
"id": t["id"],
|
|
"name": t["name"],
|
|
"task_type": t["task_type"],
|
|
"status": t["status"],
|
|
"url": t.get("url", ""),
|
|
})
|
|
# Keep the earliest due date across grouped tasks
|
|
existing_due = by_keyword[kw_lower]["due_date"]
|
|
task_due = t.get("due_date")
|
|
if task_due and (not existing_due or int(task_due) < int(existing_due)):
|
|
by_keyword[kw_lower]["due_date"] = task_due
|
|
|
|
keywords = sorted(
|
|
by_keyword.values(),
|
|
key=lambda k: int(k.get("due_date") or "9999999999999"),
|
|
)
|
|
|
|
result = {"count": len(keywords), "keywords": keywords}
|
|
_set_cached("need_cora", result)
|
|
return result
|
|
|
|
|
|
@router.get("/tasks/press-releases")
|
|
async def get_press_release_tasks():
|
|
"""Press release tasks with KV state merged in."""
|
|
data = await get_tasks()
|
|
pr_tasks = [t for t in data.get("tasks", []) if t["task_type"] == "Press Release"]
|
|
|
|
if _db:
|
|
for task in pr_tasks:
|
|
kv_key = f"clickup:task:{task['id']}:state"
|
|
raw = _db.kv_get(kv_key)
|
|
if raw:
|
|
try:
|
|
task["kv_state"] = json.loads(raw)
|
|
except json.JSONDecodeError:
|
|
task["kv_state"] = None
|
|
else:
|
|
task["kv_state"] = None
|
|
|
|
by_company: dict[str, list] = {}
|
|
for task in pr_tasks:
|
|
company = task["custom_fields"].get("Client") or "Unassigned"
|
|
by_company.setdefault(company, []).append(task)
|
|
|
|
return {
|
|
"total": len(pr_tasks),
|
|
"companies": [
|
|
{"name": name, "tasks": tasks, "count": len(tasks)}
|
|
for name, tasks in sorted(by_company.items(), key=lambda x: -len(x[1]))
|
|
],
|
|
"status_counts": _count_statuses(pr_tasks),
|
|
}
|
|
|
|
|
|
def _count_statuses(tasks: list[dict]) -> dict[str, int]:
|
|
counts: dict[str, int] = {}
|
|
for t in tasks:
|
|
s = t.get("status", "unknown")
|
|
counts[s] = counts.get(s, 0) + 1
|
|
return counts
|
|
|
|
|
|
def _count_lb_statuses(tasks: list[dict]) -> dict[str, int]:
|
|
"""Count only relevant statuses for tasks due in the current or previous month."""
|
|
now = datetime.now(UTC)
|
|
# First day of current month
|
|
cur_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
|
# First day of previous month
|
|
if now.month == 1:
|
|
prev_start = cur_start.replace(year=now.year - 1, month=12)
|
|
else:
|
|
prev_start = cur_start.replace(month=now.month - 1)
|
|
|
|
prev_start_ms = int(prev_start.timestamp() * 1000)
|
|
|
|
allowed = {"to do", "in progress", "error", "automation underway"}
|
|
counts: dict[str, int] = {}
|
|
for t in tasks:
|
|
s = t.get("status", "unknown")
|
|
if s not in allowed:
|
|
continue
|
|
due = t.get("due_date")
|
|
if not due:
|
|
continue
|
|
try:
|
|
if int(due) >= prev_start_ms:
|
|
counts[s] = counts.get(s, 0) + 1
|
|
except (ValueError, TypeError):
|
|
pass
|
|
return counts
|
|
|
|
|
|
def _count_next_month(tasks: list[dict]) -> int:
|
|
"""Count tasks due next month."""
|
|
now = datetime.now(UTC)
|
|
if now.month == 12:
|
|
next_start = now.replace(
|
|
year=now.year + 1, month=1, day=1,
|
|
hour=0, minute=0, second=0, microsecond=0,
|
|
)
|
|
next_end_month = 1
|
|
next_end_year = now.year + 1
|
|
else:
|
|
next_start = now.replace(
|
|
month=now.month + 1, day=1,
|
|
hour=0, minute=0, second=0, microsecond=0,
|
|
)
|
|
next_end_month = now.month + 1
|
|
next_end_year = now.year
|
|
|
|
last_day = calendar.monthrange(next_end_year, next_end_month)[1]
|
|
next_end = next_start.replace(day=last_day, hour=23, minute=59, second=59)
|
|
|
|
start_ms = int(next_start.timestamp() * 1000)
|
|
end_ms = int(next_end.timestamp() * 1000)
|
|
|
|
count = 0
|
|
for t in tasks:
|
|
due = t.get("due_date")
|
|
if not due:
|
|
continue
|
|
try:
|
|
d = int(due)
|
|
if start_ms <= d <= end_ms:
|
|
count += 1
|
|
except (ValueError, TypeError):
|
|
pass
|
|
return count
|
|
|
|
|
|
# ── Agents ──
|
|
|
|
|
|
@router.get("/agents")
|
|
async def get_agents():
|
|
"""Agent configurations."""
|
|
if not _config:
|
|
return {"agents": []}
|
|
|
|
agents = []
|
|
for ac in _config.agents:
|
|
agents.append(
|
|
{
|
|
"name": ac.name,
|
|
"display_name": ac.display_name,
|
|
"personality_file": ac.personality_file,
|
|
"model": ac.model or _config.chat_model,
|
|
"tools": ac.tools,
|
|
"skills": ac.skills,
|
|
"memory_scope": ac.memory_scope or "shared",
|
|
}
|
|
)
|
|
return {"agents": agents}
|
|
|
|
|
|
# ── System ──
|
|
|
|
|
|
@router.get("/system/health")
|
|
async def get_system_health():
|
|
"""System health: disk space, brains, integrations."""
|
|
health = {
|
|
"execution_brain": shutil.which("claude") is not None,
|
|
"clickup_enabled": _config.clickup.enabled if _config else False,
|
|
"chat_model": _config.chat_model if _config else "unknown",
|
|
"execution_model": _config.default_model if _config else "unknown",
|
|
"disks": [],
|
|
}
|
|
|
|
# Disk space (Windows drives)
|
|
for drive in ["C:", "D:", "E:", "Z:"]:
|
|
try:
|
|
usage = shutil.disk_usage(drive + "/")
|
|
health["disks"].append(
|
|
{
|
|
"drive": drive,
|
|
"total_gb": round(usage.total / (1024**3), 1),
|
|
"free_gb": round(usage.free / (1024**3), 1),
|
|
"percent_free": round(usage.free / usage.total * 100, 1),
|
|
}
|
|
)
|
|
except (FileNotFoundError, OSError):
|
|
pass
|
|
|
|
return health
|
|
|
|
|
|
@router.get("/system/notifications")
|
|
async def get_notifications():
|
|
"""Recent notifications from the DB."""
|
|
if not _db:
|
|
return {"notifications": []}
|
|
|
|
rows = _db._conn.execute(
|
|
"SELECT id, message, category, created_at FROM notifications"
|
|
" ORDER BY id DESC LIMIT 50"
|
|
).fetchall()
|
|
return {"notifications": [dict(r) for r in rows]}
|
|
|
|
|
|
@router.get("/system/kv-states")
|
|
async def get_kv_states():
|
|
"""All ClickUp task states from KV store."""
|
|
if not _db:
|
|
return {"states": []}
|
|
|
|
pairs = _db.kv_scan("clickup:task:")
|
|
states = []
|
|
for _key, val in pairs:
|
|
try:
|
|
state = json.loads(val)
|
|
states.append(state)
|
|
except json.JSONDecodeError:
|
|
pass
|
|
|
|
return {"states": states}
|
|
|
|
|
|
@router.get("/system/loops")
|
|
async def get_loop_timestamps():
|
|
"""Last-run timestamps for scheduler loops."""
|
|
if not _scheduler:
|
|
return {"loops": {}, "error": "Scheduler not available"}
|
|
return {"loops": _scheduler.get_loop_timestamps()}
|
|
|
|
|
|
@router.post("/system/loops/force")
|
|
async def force_loop_run():
|
|
"""Force the heartbeat and poll loops to run immediately."""
|
|
if not _scheduler:
|
|
return {"status": "error", "message": "Scheduler not available"}
|
|
_scheduler.force_heartbeat()
|
|
_scheduler.force_poll()
|
|
return {"status": "ok", "message": "Force pulse sent to heartbeat and poll loops"}
|
|
|
|
|
|
@router.post("/system/briefing/force")
|
|
async def force_briefing():
|
|
"""Force the morning briefing to send now (won't block tomorrow's)."""
|
|
if not _scheduler:
|
|
return {"status": "error", "message": "Scheduler not available"}
|
|
_scheduler.force_briefing()
|
|
return {"status": "ok", "message": "Briefing force-triggered"}
|
|
|
|
|
|
@router.post("/cache/clear")
|
|
async def clear_cache():
|
|
"""Clear the ClickUp data cache."""
|
|
with _cache_lock:
|
|
_cache.clear()
|
|
return {"status": "cleared"}
|