Fix PR task flow: add ClickUp status updates, comments, and attachments

The press release tool now handles its own ClickUp sync lifecycle when
a clickup_task_id is provided — sets status to "in progress" with a
starting comment, uploads docx attachments after creation, then sets
status to "internal review" with a completion comment. The scheduler
now passes clickup_task_id to tools and defers to tool-level sync when
detected, falling back to scheduler-level sync for other tools.
ToolRegistry.execute() now filters args to accepted params to prevent
TypeError when extra keys (like clickup_task_id) are passed to tools
that don't accept them.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
cora-start
PeninsulaInd 2026-02-17 16:36:45 -06:00
parent 0f2274e6f1
commit 082ca6ba44
4 changed files with 121 additions and 52 deletions

View File

@ -318,14 +318,16 @@ class Scheduler:
state["started_at"] = now state["started_at"] = now
self.db.kv_set(kv_key, json.dumps(state)) self.db.kv_set(kv_key, json.dumps(state))
# Set ClickUp status to "in progress"
client = self._get_clickup_client() client = self._get_clickup_client()
client.update_task_status(task_id, self.config.clickup.in_progress_status)
try: try:
# Build tool arguments from field mapping # Build tool arguments from field mapping
args = self._build_tool_args(state) args = self._build_tool_args(state)
# Pass clickup_task_id so the tool can handle its own ClickUp sync
# (status updates, comments, attachments) if it supports it.
args["clickup_task_id"] = task_id
# Execute the skill via the tool registry # Execute the skill via the tool registry
if hasattr(self.agent, "_tools") and self.agent._tools: if hasattr(self.agent, "_tools") and self.agent._tools:
result = self.agent._tools.execute(skill_name, args) result = self.agent._tools.execute(skill_name, args)
@ -335,30 +337,46 @@ class Scheduler:
f"Task description: {state.get('custom_fields', {})}" f"Task description: {state.get('custom_fields', {})}"
) )
# Extract and upload any docx deliverables # Check if the tool already handled ClickUp sync internally
docx_paths = _extract_docx_paths(result) tool_handled_sync = "## ClickUp Sync" in result
state["deliverable_paths"] = docx_paths
uploaded_count = 0
for path in docx_paths:
if client.upload_attachment(task_id, path):
uploaded_count += 1
else:
log.warning("Failed to upload %s for task %s", path, task_id)
# Success if tool_handled_sync:
state["state"] = "completed" # Tool did its own status updates, comments, and attachments.
state["completed_at"] = datetime.now(UTC).isoformat() # Just update the kv_store state.
self.db.kv_set(kv_key, json.dumps(state)) state["state"] = "completed"
state["completed_at"] = datetime.now(UTC).isoformat()
self.db.kv_set(kv_key, json.dumps(state))
else:
# Tool doesn't handle sync — scheduler does it (fallback path).
# Set status to "in progress" (tool didn't do it)
client.update_task_status(task_id, self.config.clickup.in_progress_status)
# Update ClickUp # Extract and upload any docx deliverables
client.update_task_status(task_id, self.config.clickup.review_status) docx_paths = _extract_docx_paths(result)
attach_note = f"\n📎 {uploaded_count} file(s) attached." if uploaded_count else "" state["deliverable_paths"] = docx_paths
comment = ( uploaded_count = 0
f"✅ CheddahBot completed this task.\n\n" for path in docx_paths:
f"Skill: {skill_name}\n" if client.upload_attachment(task_id, path):
f"Result:\n{result[:3000]}{attach_note}" uploaded_count += 1
) else:
client.add_comment(task_id, comment) log.warning("Failed to upload %s for task %s", path, task_id)
# Success
state["state"] = "completed"
state["completed_at"] = datetime.now(UTC).isoformat()
self.db.kv_set(kv_key, json.dumps(state))
# Update ClickUp
client.update_task_status(task_id, self.config.clickup.review_status)
attach_note = (
f"\n📎 {uploaded_count} file(s) attached." if uploaded_count else ""
)
comment = (
f"✅ CheddahBot completed this task.\n\n"
f"Skill: {skill_name}\n"
f"Result:\n{result[:3000]}{attach_note}"
)
client.add_comment(task_id, comment)
self._notify( self._notify(
f"ClickUp task completed: **{task_name}**\n" f"ClickUp task completed: **{task_name}**\n"

View File

@ -159,6 +159,15 @@ class ToolRegistry:
"memory": self.agent._memory, "memory": self.agent._memory,
"agent_registry": self.agent_registry, "agent_registry": self.agent_registry,
} }
# Filter args to only params the function accepts (plus **kwargs)
has_var_keyword = any(
p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()
)
if not has_var_keyword:
accepted = set(sig.parameters.keys())
args = {k: v for k, v in args.items() if k in accepted}
result = tool_def.func(**args) result = tool_def.func(**args)
return str(result) if result is not None else "Done." return str(result) if result is not None else "Done."
except Exception as e: except Exception as e:

View File

@ -43,6 +43,24 @@ def _set_status(ctx: dict | None, message: str) -> None:
ctx["db"].kv_set("pipeline:status", message) ctx["db"].kv_set("pipeline:status", message)
def _get_clickup_client(ctx: dict | None):
"""Create a ClickUpClient from tool context, or None if unavailable."""
if not ctx or not ctx.get("config") or not ctx["config"].clickup.enabled:
return None
try:
from ..clickup import ClickUpClient
config = ctx["config"]
return ClickUpClient(
api_token=config.clickup.api_token,
workspace_id=config.clickup.workspace_id,
task_type_field_name=config.clickup.task_type_field_name,
)
except Exception as e:
log.warning("Could not create ClickUp client: %s", e)
return None
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Helpers # Helpers
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@ -394,6 +412,25 @@ def write_press_releases(
agent = ctx["agent"] agent = ctx["agent"]
# ── ClickUp: set "in progress" and post starting comment ────────────
cu_client = None
if clickup_task_id:
cu_client = _get_clickup_client(ctx)
if cu_client:
try:
config = ctx["config"]
cu_client.update_task_status(
clickup_task_id, config.clickup.in_progress_status
)
cu_client.add_comment(
clickup_task_id,
f"🔄 CheddahBot starting press release creation.\n\n"
f"Topic: {topic}\nCompany: {company_name}",
)
log.info("ClickUp task %s set to in-progress", clickup_task_id)
except Exception as e:
log.warning("ClickUp start-sync failed for %s: %s", clickup_task_id, e)
# Load skill prompts # Load skill prompts
try: try:
pr_skill = _load_skill("press_release_prompt.md") pr_skill = _load_skill("press_release_prompt.md")
@ -550,6 +587,25 @@ def write_press_releases(
text_to_docx(clean_result, docx_path) text_to_docx(clean_result, docx_path)
docx_files.append(str(docx_path)) docx_files.append(str(docx_path))
# ── ClickUp: upload docx attachments + comment ─────────────────────
uploaded_count = 0
if clickup_task_id and cu_client:
try:
for path in docx_files:
if cu_client.upload_attachment(clickup_task_id, path):
uploaded_count += 1
else:
log.warning("ClickUp: failed to upload %s for task %s", path, clickup_task_id)
cu_client.add_comment(
clickup_task_id,
f"📎 Saved {len(docx_files)} press release(s). "
f"{uploaded_count} file(s) attached.\n"
f"Generating JSON-LD schemas next...",
)
log.info("ClickUp: uploaded %d attachments for task %s", uploaded_count, clickup_task_id)
except Exception as e:
log.warning("ClickUp attachment upload failed for %s: %s", clickup_task_id, e)
# ── Step 4: Generate 2 JSON-LD schemas (Sonnet + WebSearch) ─────────── # ── Step 4: Generate 2 JSON-LD schemas (Sonnet + WebSearch) ───────────
log.info("[PR Pipeline] Step 4/4: Generating 2 JSON-LD schemas...") log.info("[PR Pipeline] Step 4/4: Generating 2 JSON-LD schemas...")
schema_texts: list[str] = [] schema_texts: list[str] = []
@ -629,54 +685,35 @@ def write_press_releases(
output_parts.append(f"| {c['step']} | {c['model']} | {c['elapsed_s']} |") output_parts.append(f"| {c['step']} | {c['model']} | {c['elapsed_s']} |")
output_parts.append(f"| **Total** | | **{round(total_elapsed, 1)}** |") output_parts.append(f"| **Total** | | **{round(total_elapsed, 1)}** |")
# ── ClickUp sync (when triggered from chat with a task ID) ─────────── # ── ClickUp: completion — status to review + final comment ──────────
if clickup_task_id and ctx and ctx.get("config") and ctx["config"].clickup.enabled: if clickup_task_id and cu_client:
try: try:
from ..clickup import ClickUpClient
config = ctx["config"] config = ctx["config"]
client = ClickUpClient(
api_token=config.clickup.api_token,
workspace_id=config.clickup.workspace_id,
task_type_field_name=config.clickup.task_type_field_name,
)
# Upload each .docx as an attachment # Post completion comment
uploaded_count = 0
for path in docx_files:
if client.upload_attachment(clickup_task_id, path):
uploaded_count += 1
else:
log.warning("ClickUp: failed to upload %s for task %s", path, clickup_task_id)
# Post a result comment
attach_note = f"\n📎 {uploaded_count} file(s) attached." if uploaded_count else "" attach_note = f"\n📎 {uploaded_count} file(s) attached." if uploaded_count else ""
result_text = "\n".join(output_parts)[:3000] result_text = "\n".join(output_parts)[:3000]
comment = ( comment = (
f"✅ CheddahBot completed this task (via chat).\n\n" f"✅ CheddahBot completed this task.\n\n"
f"Skill: write_press_releases\n" f"Skill: write_press_releases\n"
f"Result:\n{result_text}{attach_note}" f"Result:\n{result_text}{attach_note}"
) )
client.add_comment(clickup_task_id, comment) cu_client.add_comment(clickup_task_id, comment)
# Update task status to review # Set status to internal review
client.update_task_status(clickup_task_id, config.clickup.review_status) cu_client.update_task_status(clickup_task_id, config.clickup.review_status)
# Update kv_store state if one exists # Update kv_store state if one exists
db = ctx.get("db") db = ctx.get("db")
if db: if db:
import json as _json
kv_key = f"clickup:task:{clickup_task_id}:state" kv_key = f"clickup:task:{clickup_task_id}:state"
existing = db.kv_get(kv_key) existing = db.kv_get(kv_key)
if existing: if existing:
state = _json.loads(existing) state = json.loads(existing)
state["state"] = "completed" state["state"] = "completed"
state["completed_at"] = datetime.now(UTC).isoformat() state["completed_at"] = datetime.now(UTC).isoformat()
state["deliverable_paths"] = docx_files state["deliverable_paths"] = docx_files
db.kv_set(kv_key, _json.dumps(state)) db.kv_set(kv_key, json.dumps(state))
client.close()
output_parts.append("\n## ClickUp Sync\n") output_parts.append("\n## ClickUp Sync\n")
output_parts.append(f"- Task `{clickup_task_id}` updated") output_parts.append(f"- Task `{clickup_task_id}` updated")
@ -689,6 +726,8 @@ def write_press_releases(
output_parts.append("\n## ClickUp Sync\n") output_parts.append("\n## ClickUp Sync\n")
output_parts.append(f"- **Sync failed:** {e}") output_parts.append(f"- **Sync failed:** {e}")
output_parts.append("- Press release results are still valid above") output_parts.append("- Press release results are still valid above")
finally:
cu_client.close()
return "\n".join(output_parts) return "\n".join(output_parts)

View File

@ -17,3 +17,6 @@
- If he says he is going to bed or going away for a while, you can ask if we can run in unattended mode - he'll usually say yes. - If he says he is going to bed or going away for a while, you can ask if we can run in unattended mode - he'll usually say yes.
- Simple code is better - he's the only guy in the shop so don't write code as if it's for an enterprise. Simple and maintainable is important. - Simple code is better - he's the only guy in the shop so don't write code as if it's for an enterprise. Simple and maintainable is important.
- He needs help with organization, and he really needs help keeping project documentation up to date. Do that for him. - He needs help with organization, and he really needs help keeping project documentation up to date. Do that for him.
## Strict Protocols
- STRICT METADATA ADHERENCE: You must never alias, rename, truncate, or 'clean up' task titles, client names, or IDs retrieved from the ClickUp API. All references to tasks must use the verbatim strings returned by the API. Proactive 'organization' of task data that alters the original text is strictly forbidden.