Compare commits
No commits in common. "clickup-runner" and "master" have entirely different histories.
clickup-ru
...
master
|
|
@ -65,7 +65,7 @@ OPTIMIZATION_RULES = {
|
|||
# Measures correlation to ranking position (1=top, 100=bottom), so negative = better ranking.
|
||||
# Only include entities with Best of Both <= this value.
|
||||
# Set to None to disable filtering.
|
||||
"entity_correlation_threshold": -0.199,
|
||||
"entity_correlation_threshold": -0.19,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -492,7 +492,7 @@ These override any data from the Cora report:
|
|||
| Competitor names | NEVER use competitor company names as entities or LSI keywords. Do not mention competitors by name in content. |
|
||||
| Measurement entities | Ignore measurements (dimensions, tolerances, etc.) as entities — skip these in entity optimization |
|
||||
| Organization entities | Organizations like ISO, ANSI, ASTM are fine — keep these as entities |
|
||||
| Entity correlation filter | Only entities with Best of Both <= -0.199 are included. Best of Both is the lower of Spearman's or Pearson's correlation to ranking position (1=top, 100=bottom), so more negative = stronger ranking signal. This filter is applied in `cora_parser.py` and affects all downstream consumers. To disable, set `entity_correlation_threshold` to `None` in `OPTIMIZATION_RULES`. Added 2026-03-20 — revert if entity coverage feels too thin. |
|
||||
| Entity correlation filter | Only entities with Best of Both <= -0.19 are included. Best of Both is the lower of Spearman's or Pearson's correlation to ranking position (1=top, 100=bottom), so more negative = stronger ranking signal. This filter is applied in `cora_parser.py` and affects all downstream consumers. To disable, set `entity_correlation_threshold` to `None` in `OPTIMIZATION_RULES`. Added 2026-03-20 — revert if entity coverage feels too thin. |
|
||||
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -8,28 +8,3 @@ memory/*.md
|
|||
dist/
|
||||
build/
|
||||
.venv/
|
||||
credentials/
|
||||
|
||||
# local runtime artifacts
|
||||
.coverage
|
||||
*.db
|
||||
*.sqlite
|
||||
logs/
|
||||
working/
|
||||
projects/
|
||||
*.stackdump
|
||||
nul
|
||||
screencapture-*.png
|
||||
|
||||
# editor / tool local state
|
||||
.claude/settings.local.json
|
||||
.claude/skills/content-researcher/scripts/working/
|
||||
|
||||
# scratch drafts
|
||||
*- Cora Data.md
|
||||
*- Draft.md
|
||||
*- Outline.md
|
||||
|
||||
# external tool sources pulled in for reference
|
||||
amazon-sdk/
|
||||
|
||||
|
|
|
|||
|
|
@ -500,32 +500,6 @@ class ClickUpClient:
|
|||
)
|
||||
return False
|
||||
|
||||
def add_dependency(self, task_id: str, depends_on: str) -> bool:
|
||||
"""Add a 'blocked by' dependency: *task_id* is blocked by *depends_on*.
|
||||
|
||||
Uses POST /task/{task_id}/dependency with {"depends_on": ...}.
|
||||
"""
|
||||
try:
|
||||
|
||||
def _call():
|
||||
resp = self._client.post(
|
||||
f"/task/{task_id}/dependency",
|
||||
json={"depends_on": depends_on},
|
||||
)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
self._retry(_call)
|
||||
log.info(
|
||||
"Added dependency: task %s blocked by %s", task_id, depends_on
|
||||
)
|
||||
return True
|
||||
except (httpx.TransportError, httpx.HTTPStatusError) as e:
|
||||
log.error(
|
||||
"Failed to add dependency on task %s: %s", task_id, e
|
||||
)
|
||||
return False
|
||||
|
||||
def get_custom_field_by_name(self, task_id: str, field_name: str) -> Any:
|
||||
"""Read a custom field value from a task by field name.
|
||||
|
||||
|
|
|
|||
|
|
@ -131,17 +131,6 @@ class NtfyConfig:
|
|||
channels: list[NtfyChannelConfig] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class GoogleDriveConfig:
    """Settings for the Google Drive upload integration."""

    # ID of the Drive folder that per-client subfolders live under.
    root_folder_id: str = ""
    # Master switch; uploads are skipped entirely when False.
    enabled: bool = False
|
||||
|
||||
|
||||
@dataclass
class GmailApiConfig:
    """Settings for the Gmail draft-creation integration."""

    # Master switch; draft creation is skipped entirely when False.
    enabled: bool = False
|
||||
|
||||
|
||||
@dataclass
|
||||
class AgentConfig:
|
||||
"""Per-agent configuration for multi-agent support."""
|
||||
|
|
@ -176,8 +165,6 @@ class Config:
|
|||
content: ContentConfig = field(default_factory=ContentConfig)
|
||||
timeouts: TimeoutConfig = field(default_factory=TimeoutConfig)
|
||||
ntfy: NtfyConfig = field(default_factory=NtfyConfig)
|
||||
google_drive: GoogleDriveConfig = field(default_factory=GoogleDriveConfig)
|
||||
gmail_api: GmailApiConfig = field(default_factory=GmailApiConfig)
|
||||
agents: list[AgentConfig] = field(default_factory=lambda: [AgentConfig()])
|
||||
|
||||
# Derived paths
|
||||
|
|
@ -260,18 +247,6 @@ def load_config() -> Config:
|
|||
setattr(ch, k, v)
|
||||
cfg.ntfy.channels.append(ch)
|
||||
|
||||
# Google Drive config
|
||||
if "google_drive" in data and isinstance(data["google_drive"], dict):
|
||||
for k, v in data["google_drive"].items():
|
||||
if hasattr(cfg.google_drive, k):
|
||||
setattr(cfg.google_drive, k, v)
|
||||
|
||||
# Gmail API config
|
||||
if "gmail_api" in data and isinstance(data["gmail_api"], dict):
|
||||
for k, v in data["gmail_api"].items():
|
||||
if hasattr(cfg.gmail_api, k):
|
||||
setattr(cfg.gmail_api, k, v)
|
||||
|
||||
# Multi-agent configs
|
||||
if "agents" in data and isinstance(data["agents"], list):
|
||||
cfg.agents = []
|
||||
|
|
|
|||
|
|
@ -1,133 +0,0 @@
|
|||
"""Client contact directory parser for skills/companies.md."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_COMPANIES_FILE = Path(__file__).resolve().parent.parent / "skills" / "companies.md"
|
||||
|
||||
|
||||
@dataclass
class CompanyContact:
    """Structured contact record for a single client company."""

    name: str
    aliases: list[str] = field(default_factory=list)
    executive: str = ""
    email: str = ""
    opening: str = ""
    cc: str = ""
    pa_org_id: int = 0
    website: str = ""
    gbp: str = ""

    @property
    def executive_first_name(self) -> str:
        """First name of the executive ('Gary Hermsen, CEO' -> 'Gary')."""
        if not self.executive:
            return ""
        before_comma = self.executive.split(",")[0]
        return before_comma.split()[0].strip()

    @property
    def cc_list(self) -> list[str]:
        """The CC field split on commas into clean, non-empty addresses."""
        if not self.cc:
            return []
        candidates = (addr.strip() for addr in self.cc.split(","))
        return [addr for addr in candidates if addr]
|
||||
|
||||
|
||||
def parse_company_directory(path: Path | str = _COMPANIES_FILE) -> dict[str, CompanyContact]:
    """Parse companies.md into contact records keyed by lowercase name.

    Each '## Heading' line starts a new company record; subsequent bullet
    lines are handed to _parse_field(). Mirrors the parsing pattern of
    press_release.py:_parse_company_data(). A missing file yields an empty
    dict (with a warning) rather than an exception.
    """
    path = Path(path)
    if not path.exists():
        log.warning("Company directory not found: %s", path)
        return {}

    contacts: dict[str, CompanyContact] = {}
    current: CompanyContact | None = None

    def _commit(record: CompanyContact | None) -> None:
        # File a finished record under its lowercased canonical name.
        if record is not None:
            contacts[record.name.lower()] = record

    for raw_line in path.read_text(encoding="utf-8").splitlines():
        stripped = raw_line.strip()
        if stripped.startswith("## "):
            _commit(current)
            current = CompanyContact(name=stripped[3:].strip())
        elif current is not None:
            _parse_field(current, stripped)

    # The final company has no trailing heading to trigger a commit.
    _commit(current)

    log.info("Loaded %d company contacts from %s", len(contacts), path.name)
    return contacts
|
||||
|
||||
|
||||
def _parse_field(contact: CompanyContact, line: str) -> None:
|
||||
"""Parse a single field line into the contact record."""
|
||||
field_map = {
|
||||
"- **Executive:**": "executive",
|
||||
"- **Email:**": "email",
|
||||
"- **Opening:**": "opening",
|
||||
"- **CC:**": "cc",
|
||||
"- **PA Org ID:**": "pa_org_id",
|
||||
"- **Website:**": "website",
|
||||
"- **GBP:**": "gbp",
|
||||
}
|
||||
# Handle Aliases specially (comma-separated list)
|
||||
if line.startswith("- **Aliases:**"):
|
||||
value = line[len("- **Aliases:**"):].strip()
|
||||
contact.aliases = [a.strip() for a in value.split(",") if a.strip()]
|
||||
return
|
||||
|
||||
for prefix, attr in field_map.items():
|
||||
if line.startswith(prefix):
|
||||
value = line[len(prefix):].strip()
|
||||
if attr == "pa_org_id":
|
||||
try:
|
||||
setattr(contact, attr, int(value))
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
else:
|
||||
setattr(contact, attr, value)
|
||||
return
|
||||
|
||||
|
||||
def lookup_contact(company_name: str, contacts: dict[str, CompanyContact] | None = None) -> CompanyContact | None:
    """Resolve *company_name* to a contact record, or None.

    Match order: exact canonical name, then exact alias, then substring
    containment (either direction) against names and aliases. When
    *contacts* is omitted the directory is loaded from disk.
    """
    if contacts is None:
        contacts = parse_company_directory()

    needle = company_name.lower().strip()

    # 1. Exact canonical-name hit.
    direct = contacts.get(needle)
    if direct is not None:
        return direct

    # 2. Exact alias hit.
    for record in contacts.values():
        if any(alias.lower() == needle for alias in record.aliases):
            return record

    # 3. Loose containment on each name, then that record's aliases.
    for key, record in contacts.items():
        if key in needle or needle in key:
            return record
        for alias in record.aliases:
            candidate = alias.lower()
            if candidate in needle or needle in candidate:
                return record

    return None
|
||||
|
|
@ -1,130 +0,0 @@
|
|||
"""Client delivery pipeline: Google Drive upload + Gmail draft creation."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
from .contacts import CompanyContact, lookup_contact, parse_company_directory
|
||||
from .email_templates import load_template, render_template
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class DeliveryResult:
    """Outcome of one client delivery attempt."""

    # Google Doc links (or local file paths when Drive is disabled).
    doc_links: list[str] = field(default_factory=list)
    # Gmail draft ID; empty string while no draft exists.
    draft_id: str = ""
    # Human-readable problems accumulated along the way.
    errors: list[str] = field(default_factory=list)

    @property
    def success(self) -> bool:
        """True only when a draft was created and nothing went wrong."""
        return bool(self.draft_id) and not self.errors
|
||||
|
||||
|
||||
def deliver_to_client(
    files: list[Path],
    company_name: str,
    task_id: str,
    task_type: str,
    ctx: dict | None = None,
) -> DeliveryResult:
    """Upload deliverables to Google Drive and draft the client email.

    Pipeline:
      1. Resolve the client contact (email, opening, cc) from companies.md.
      2. Upload each .docx to Drive as a Google Doc, or fall back to the
         local file paths when Drive is disabled.
      3. Render the email template matching *task_type*.
      4. Create a Gmail draft addressed to the contact.

    Errors are collected on the returned DeliveryResult rather than raised,
    so a failed delivery never breaks the calling pipeline.
    """
    result = DeliveryResult()
    config = ctx.get("config") if ctx else None

    # --- 1. Contact lookup -------------------------------------------------
    contact = lookup_contact(company_name, parse_company_directory())
    if not contact:
        result.errors.append("No contact found for company '%s'" % company_name)
        log.warning("Delivery aborted: %s", result.errors[-1])
        return result
    if not contact.email:
        result.errors.append(
            "No email address for '%s' -- add Email field to skills/companies.md" % contact.name
        )
        log.warning("Delivery aborted: %s", result.errors[-1])
        return result

    # --- 2. Drive upload ---------------------------------------------------
    drive_enabled = config.google_drive.enabled if config else False
    if drive_enabled:
        try:
            from .google_drive import DriveClient

            drive = DriveClient(root_folder_id=config.google_drive.root_folder_id)
            folder_id = drive.ensure_client_folder(contact.name)
            for file_path in files:
                if not file_path.exists():
                    result.errors.append("File not found: %s" % file_path)
                    continue
                file_id, web_link = drive.upload_as_google_doc(file_path, folder_id)
                drive.set_link_sharing(file_id, role="commenter")
                result.doc_links.append(web_link)
        except Exception as e:
            result.errors.append("Drive upload failed: %s" % e)
            log.error("Drive upload failed: %s", e)
    else:
        log.info("Google Drive disabled, skipping upload")
        # Fall back to listing local file paths in the email.
        result.doc_links.extend(str(file_path) for file_path in files)

    # --- 3. Email rendering ------------------------------------------------
    template = load_template(task_type)
    if not template:
        result.errors.append("No email template for task type '%s'" % task_type)
        log.warning("Delivery aborted: %s", result.errors[-1])
        return result

    # Bullet list of doc links, plus the contact's custom opening (or a
    # generic "Hi {first_name}," when none is configured).
    links_text = "\n".join("- %s" % link for link in result.doc_links)
    opening = contact.opening or ("Hi %s," % contact.executive_first_name)
    subject, body = render_template(
        template,
        {
            "company_name": contact.name,
            "opening": opening,
            "executive_first_name": contact.executive_first_name,
            "google_doc_links": links_text,
            "task_id": task_id,
        },
    )

    # --- 4. Gmail draft ----------------------------------------------------
    gmail_enabled = config.gmail_api.enabled if config else False
    if gmail_enabled:
        try:
            from .gmail_draft import GmailDraftClient

            gmail = GmailDraftClient()
            result.draft_id = gmail.create_draft(
                to=contact.email,
                subject=subject,
                body=body,
                cc=contact.cc_list or None,
            )
        except Exception as e:
            result.errors.append("Gmail draft creation failed: %s" % e)
            log.error("Gmail draft creation failed: %s", e)
    else:
        log.info("Gmail API disabled, skipping draft creation")
        result.errors.append("Gmail API disabled -- draft not created")

    return result
|
||||
|
|
@ -1,91 +0,0 @@
|
|||
"""Email template loader and renderer for client delivery emails."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "skills" / "email_templates"
|
||||
|
||||
|
||||
@dataclass
class EmailTemplate:
    """Email template parsed from a skills/email_templates/*.md file."""

    # Task type this template applies to (matched case-insensitively).
    task_type: str
    # Subject line containing {placeholder} slots.
    subject_template: str
    # Markdown body containing {placeholder} slots.
    body_template: str
|
||||
|
||||
|
||||
def load_template(task_type: str, templates_dir: Path | str = _TEMPLATES_DIR) -> EmailTemplate | None:
    """Return the template whose frontmatter task_type matches, else None.

    Scans every .md file directly under *templates_dir*; matching is
    case-insensitive. A missing directory or unmatched task type is
    logged and yields None rather than an exception.
    """
    templates_dir = Path(templates_dir)
    if not templates_dir.exists():
        log.warning("Email templates directory not found: %s", templates_dir)
        return None

    wanted = task_type.lower()
    for candidate in templates_dir.glob("*.md"):
        parsed = _parse_template_file(candidate)
        if parsed is None:
            continue
        if parsed.task_type.lower() == wanted:
            log.info("Loaded email template '%s' for task type '%s'", candidate.name, task_type)
            return parsed

    log.warning("No email template found for task type '%s'", task_type)
    return None
|
||||
|
||||
|
||||
def _parse_template_file(path: Path) -> EmailTemplate | None:
|
||||
"""Parse a template .md file with YAML frontmatter."""
|
||||
text = path.read_text(encoding="utf-8")
|
||||
|
||||
# Split frontmatter from body
|
||||
if not text.startswith("---"):
|
||||
return None
|
||||
|
||||
parts = text.split("---", 2)
|
||||
if len(parts) < 3:
|
||||
return None
|
||||
|
||||
try:
|
||||
meta = yaml.safe_load(parts[1]) or {}
|
||||
except yaml.YAMLError:
|
||||
log.warning("Invalid YAML frontmatter in %s", path.name)
|
||||
return None
|
||||
|
||||
task_type = meta.get("task_type", "")
|
||||
subject = meta.get("subject", "")
|
||||
body = parts[2].strip()
|
||||
|
||||
if not task_type or not body:
|
||||
return None
|
||||
|
||||
return EmailTemplate(task_type=task_type, subject_template=subject, body_template=body)
|
||||
|
||||
|
||||
def render_template(template: EmailTemplate, context: dict) -> tuple[str, str]:
    """Render *template* against *context*, returning (subject, body).

    Placeholders use str.format_map(); any {name} absent from *context*
    is echoed back verbatim so partial contexts never raise KeyError.
    """
    class _Permissive(dict):
        # format_map consults __missing__ for absent keys.
        def __missing__(self, key: str) -> str:
            return "{" + key + "}"

    mapping = _Permissive(context)
    return (
        template.subject_template.format_map(mapping),
        template.body_template.format_map(mapping),
    )
|
||||
|
||||
|
||||
def _safe_format(template: str, context: dict) -> str:
|
||||
"""Format a string template, leaving unknown placeholders intact."""
|
||||
|
||||
class SafeDict(dict):
|
||||
def __missing__(self, key: str) -> str:
|
||||
return "{" + key + "}"
|
||||
|
||||
return template.format_map(SafeDict(context))
|
||||
|
|
@ -1,56 +0,0 @@
|
|||
"""Gmail draft creation via Gmail API (OAuth2)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
from googleapiclient.discovery import build
|
||||
|
||||
from .google_auth import get_credentials
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GmailDraftClient:
    """Thin wrapper around the Gmail API for creating email drafts."""

    def __init__(self):
        # Shared OAuth credentials (same flow the Drive client uses).
        self._service = build("gmail", "v1", credentials=get_credentials())

    def create_draft(
        self,
        to: str,
        subject: str,
        body: str,
        cc: list[str] | None = None,
    ) -> str:
        """Create a plain-text draft in the authenticated user's mailbox.

        Args:
            to: Recipient email address.
            subject: Email subject line.
            body: Plain-text email body.
            cc: Optional CC addresses (joined with ', ' into one header).

        Returns:
            The Gmail draft ID.
        """
        mime = MIMEText(body)
        mime["to"] = to
        mime["subject"] = subject
        if cc:
            mime["cc"] = ", ".join(cc)

        # Gmail wants the RFC 2822 message base64url-encoded.
        encoded = base64.urlsafe_b64encode(mime.as_bytes()).decode("ascii")
        created = (
            self._service.users()
            .drafts()
            .create(userId="me", body={"message": {"raw": encoded}})
            .execute()
        )

        draft_id = created["id"]
        log.info("Created Gmail draft %s (to=%s, subject=%s)", draft_id, to, subject)
        return draft_id
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
"""Google OAuth2 credential management for Gmail API and Google Drive API."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from google.auth.transport.requests import Request
|
||||
from google.oauth2.credentials import Credentials
|
||||
from google_auth_oauthlib.flow import InstalledAppFlow
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# All scopes requested upfront to avoid re-consent.
|
||||
SCOPES = [
|
||||
"https://www.googleapis.com/auth/gmail.compose", # Create drafts
|
||||
"https://www.googleapis.com/auth/gmail.readonly", # Read replies
|
||||
"https://www.googleapis.com/auth/drive.file", # Manage app-created files
|
||||
]
|
||||
|
||||
_DEFAULT_CREDENTIALS_FILE = Path("credentials/gmail_credentials.json")
|
||||
_DEFAULT_TOKEN_FILE = Path("data/google_token.json")
|
||||
|
||||
|
||||
def get_credentials(
    credentials_file: Path | str = _DEFAULT_CREDENTIALS_FILE,
    token_file: Path | str = _DEFAULT_TOKEN_FILE,
    scopes: list[str] | None = None,
) -> Credentials:
    """Return valid Google OAuth2 credentials for the requested scopes.

    Order of preference: cached token on disk -> silent refresh of an
    expired token -> full browser consent flow. The resulting token is
    persisted to *token_file* so later runs skip the browser.

    Raises:
        FileNotFoundError: when consent is required but *credentials_file*
            (the client-secrets JSON from Google Cloud Console) is absent.
    """
    credentials_file = Path(credentials_file)
    token_file = Path(token_file)
    scopes = scopes or SCOPES

    creds: Credentials | None = None
    if token_file.exists():
        # Reuse the token saved by a previous run.
        creds = Credentials.from_authorized_user_file(str(token_file), scopes)

    if creds and creds.expired and creds.refresh_token:
        log.info("Refreshing expired Google token")
        creds.refresh(Request())
    elif not creds or not creds.valid:
        if not credentials_file.exists():
            raise FileNotFoundError(
                "OAuth credentials file not found at %s. "
                "Download it from Google Cloud Console -> APIs & Services -> Credentials."
                % credentials_file
            )
        log.info("Starting Google OAuth consent flow (browser will open)")
        flow = InstalledAppFlow.from_client_secrets_file(str(credentials_file), scopes)
        creds = flow.run_local_server(port=0)

    # Persist the (possibly refreshed) token for the next invocation.
    token_file.parent.mkdir(parents=True, exist_ok=True)
    token_file.write_text(creds.to_json())

    return creds
|
||||
|
|
@ -1,122 +0,0 @@
|
|||
"""Google Drive client for uploading .docx files as Google Docs."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from googleapiclient.discovery import build
|
||||
from googleapiclient.http import MediaFileUpload
|
||||
|
||||
from .google_auth import get_credentials
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_FOLDER_CACHE_FILE = Path("data/drive_folder_cache.json")
|
||||
_GOOGLE_DOC_MIME = "application/vnd.google-apps.document"
|
||||
_FOLDER_MIME = "application/vnd.google-apps.folder"
|
||||
|
||||
|
||||
class DriveClient:
    """Uploads .docx deliverables to Drive as shareable Google Docs."""

    def __init__(self, root_folder_id: str, cache_file: Path | str = _FOLDER_CACHE_FILE):
        self._root_folder_id = root_folder_id
        self._cache_file = Path(cache_file)
        # company-name -> folder-ID map, persisted to spare repeat lookups.
        self._folder_cache = self._load_cache()
        self._service = build("drive", "v3", credentials=get_credentials())

    def _load_cache(self) -> dict[str, str]:
        """Read the folder-ID cache; an unreadable cache is treated as empty."""
        if not self._cache_file.exists():
            return {}
        try:
            return json.loads(self._cache_file.read_text())
        except (json.JSONDecodeError, OSError):
            log.warning("Corrupt drive folder cache, starting fresh")
            return {}

    def _save_cache(self) -> None:
        """Write the folder-ID cache back to disk, creating parent dirs."""
        self._cache_file.parent.mkdir(parents=True, exist_ok=True)
        self._cache_file.write_text(json.dumps(self._folder_cache, indent=2))

    def ensure_client_folder(self, company_name: str) -> str:
        """Return the Drive folder ID for *company_name*, creating it if needed.

        Checks the on-disk cache first, then Drive itself, and only then
        creates a new subfolder under the configured root folder.
        """
        cache_key = company_name.strip().lower()
        cached = self._folder_cache.get(cache_key)
        if cached is not None:
            return cached

        # Look for a pre-existing, non-trashed folder under the root.
        query = (
            f"name = '{company_name}' and "
            f"'{self._root_folder_id}' in parents and "
            f"mimeType = '{_FOLDER_MIME}' and trashed = false"
        )
        listing = self._service.files().list(q=query, fields="files(id, name)").execute()
        matches = listing.get("files", [])

        if matches:
            folder_id = matches[0]["id"]
            log.info("Found existing Drive folder for '%s': %s", company_name, folder_id)
        else:
            created = (
                self._service.files()
                .create(
                    body={
                        "name": company_name,
                        "mimeType": _FOLDER_MIME,
                        "parents": [self._root_folder_id],
                    },
                    fields="id",
                )
                .execute()
            )
            folder_id = created["id"]
            log.info("Created Drive folder for '%s': %s", company_name, folder_id)

        self._folder_cache[cache_key] = folder_id
        self._save_cache()
        return folder_id

    def upload_as_google_doc(
        self, local_path: Path | str, folder_id: str
    ) -> tuple[str, str]:
        """Upload a .docx file into *folder_id*, converted to a Google Doc.

        Returns:
            (file_id, web_view_link) of the created document.

        Raises:
            FileNotFoundError: when *local_path* does not exist.
        """
        local_path = Path(local_path)
        if not local_path.exists():
            raise FileNotFoundError("File not found: %s" % local_path)

        media = MediaFileUpload(
            str(local_path),
            mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
        )
        metadata = {
            "name": local_path.stem,
            "parents": [folder_id],
            # Setting the Google Doc MIME type asks Drive to convert on upload.
            "mimeType": _GOOGLE_DOC_MIME,
        }
        created = (
            self._service.files()
            .create(body=metadata, media_body=media, fields="id, webViewLink")
            .execute()
        )

        log.info("Uploaded '%s' as Google Doc: %s", local_path.name, created["webViewLink"])
        return created["id"], created["webViewLink"]

    def set_link_sharing(self, file_id: str, role: str = "commenter") -> None:
        """Grant anyone-with-the-link access to *file_id*.

        Args:
            file_id: Google Drive file ID.
            role: Permission role - 'reader', 'commenter', or 'writer'.
        """
        self._service.permissions().create(
            fileId=file_id,
            body={"type": "anyone", "role": role},
            fields="id",
        ).execute()
        log.info("Set link sharing (role=%s) on file %s", role, file_id)
|
||||
|
|
@ -11,7 +11,7 @@ import json
|
|||
import logging
|
||||
import re
|
||||
import time
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
from . import tool
|
||||
|
|
@ -117,8 +117,11 @@ def _find_qualifying_tasks_sweep(client, config, categories: list[str]):
|
|||
|
||||
# Current and last month tags (e.g. "feb26", "jan26")
|
||||
current_month_tag = now.strftime("%b%y").lower()
|
||||
# Go back one month (use 1st-of-current-month minus 1 day to avoid day-out-of-range)
|
||||
last_month = now.replace(day=1) - timedelta(days=1)
|
||||
# Go back one month
|
||||
if now.month == 1:
|
||||
last_month = now.replace(year=now.year - 1, month=12)
|
||||
else:
|
||||
last_month = now.replace(month=now.month - 1)
|
||||
last_month_tag = last_month.strftime("%b%y").lower()
|
||||
|
||||
# Fetch all "to do" tasks with due dates up to lookahead
|
||||
|
|
|
|||
|
|
@ -255,39 +255,6 @@ def clickup_create_task(
|
|||
client_obj.close()
|
||||
|
||||
|
||||
@tool(
    "clickup_add_dependency",
    "Add a 'blocked by' dependency between two ClickUp tasks. "
    "The blocked_task_id will be blocked by blocker_task_id "
    "(i.e. blocker must complete before blocked can start).",
    category="clickup",
)
def clickup_add_dependency(
    blocked_task_id: str,
    blocker_task_id: str,
    ctx: dict | None = None,
) -> str:
    """Set blocked_task_id as blocked by blocker_task_id.

    Returns a human-readable status string for the agent; never raises.
    The ClickUp client is always closed, even on error.
    """
    client = _get_clickup_client(ctx)
    if not client:
        return "Error: ClickUp API token not configured."

    try:
        ok = client.add_dependency(blocked_task_id, depends_on=blocker_task_id)
        if ok:
            return (
                f"Dependency added: task {blocked_task_id} "
                f"is now blocked by {blocker_task_id}."
            )
        # add_dependency() already logged the underlying HTTP error.
        # (Fixed: the original marked this placeholder-free string as an
        # f-string, ruff F541.)
        return "Failed to add dependency. Check that both task IDs are valid."
    except Exception as e:
        return f"Error adding dependency: {e}"
    finally:
        client.close()
|
||||
|
||||
|
||||
@tool(
|
||||
"clickup_reset_task",
|
||||
"Reset a ClickUp task to 'to do' status so it can be retried on the next poll. "
|
||||
|
|
|
|||
|
|
@ -285,15 +285,6 @@ def _fuzzy_keyword_match(a: str, b: str, llm_check: Callable[[str, str], bool] |
|
|||
return True
|
||||
if llm_check is None:
|
||||
return False
|
||||
|
||||
# Only call LLM when keywords share most words (possible plural difference).
|
||||
words_a = set(a.split())
|
||||
words_b = set(b.split())
|
||||
shared = words_a & words_b
|
||||
total = max(len(words_a), len(words_b))
|
||||
if total > 1 and len(shared) < total - 1:
|
||||
return False
|
||||
|
||||
return llm_check(a, b)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,12 +1,11 @@
|
|||
"""Press-release pipeline tool.
|
||||
|
||||
Autonomous workflow:
|
||||
1. Generate 7 compliant headlines (chat brain)
|
||||
2. AI judge picks the 2 best (chat brain)
|
||||
3. Write 2 full press releases (execution brain x 2)
|
||||
3.5. Adversarial fact-check (Sonnet + WebSearch, graceful failure)
|
||||
4. Generate 2 JSON-LD schemas (execution brain x 2, Sonnet + WebSearch)
|
||||
5. Save files, return cost summary
|
||||
1. Generate 7 compliant headlines (chat brain)
|
||||
2. AI judge picks the 2 best (chat brain)
|
||||
3. Write 2 full press releases (execution brain x 2)
|
||||
4. Generate 2 JSON-LD schemas (execution brain x 2, Sonnet + WebSearch)
|
||||
5. Save 4 files, return cost summary
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
@ -36,7 +35,6 @@ _COMPANIES_FILE = _SKILLS_DIR / "companies.md"
|
|||
_HEADLINES_FILE = _SKILLS_DIR / "headlines.md"
|
||||
|
||||
SONNET_CLI_MODEL = "sonnet"
|
||||
FACT_CHECK_MODEL = "sonnet"
|
||||
|
||||
|
||||
def _set_status(ctx: dict | None, message: str) -> None:
|
||||
|
|
@ -526,103 +524,6 @@ def _build_schema_prompt(pr_text: str, company_name: str, url: str, skill_text:
|
|||
return prompt
|
||||
|
||||
|
||||
def _build_fact_check_prompt(
|
||||
pr_text: str,
|
||||
company_name: str,
|
||||
url: str,
|
||||
topic: str,
|
||||
keyword: str,
|
||||
) -> str:
|
||||
"""Build the prompt for the adversarial fact-checker step."""
|
||||
return (
|
||||
"You are a factual accuracy reviewer for press releases. Your ONLY job is to "
|
||||
"find and correct statements that are factually wrong. You are NOT an editor.\n\n"
|
||||
"GROUND TRUTH -- the following data was provided by the client and is correct "
|
||||
"by definition. Do NOT change, question, or 'correct' any of it, even if your "
|
||||
"web search suggests something different:\n"
|
||||
f" - Company name: {company_name}\n"
|
||||
f" - Target URL: {url}\n"
|
||||
f" - Topic: {topic}\n"
|
||||
f" - Keyword: {keyword}\n"
|
||||
" - Any person names, titles, quotes, or contact details in the PR\n"
|
||||
" - Any product names, service names, or brand names\n"
|
||||
" - The overall framing, angle, and tone of the PR\n\n"
|
||||
"WHAT TO CHECK (use WebSearch/WebFetch to verify):\n"
|
||||
" - Industry statistics or market size claims\n"
|
||||
" - Historical dates or facts\n"
|
||||
" - Technical specifications not sourced from the client data\n"
|
||||
" - General knowledge claims (e.g. 'X is the leading cause of Y')\n"
|
||||
" - Geographic or regulatory facts\n\n"
|
||||
"RULES:\n"
|
||||
" - ONLY fix actual factual errors -- wrong numbers, wrong dates, wrong facts\n"
|
||||
" - Do NOT add content, remove content, restructure, or 'improve' anything\n"
|
||||
" - Do NOT change tone, style, word choice, or sentence structure\n"
|
||||
" - Do NOT suggest additions or enhancements\n"
|
||||
" - Make the MINIMUM change needed to fix each error\n"
|
||||
" - Preserve the exact formatting, paragraph breaks, and headline\n\n"
|
||||
"OUTPUT FORMAT:\n"
|
||||
" - If you find NO factual errors: output exactly [NO_ERRORS] and nothing else\n"
|
||||
" - If you find errors: output [CORRECTED] on the first line, then the full "
|
||||
"corrected PR text (preserving all formatting), then a blank line, then "
|
||||
"CHANGES: followed by a numbered list of what you changed and why\n\n"
|
||||
"Press release to review:\n"
|
||||
"---\n"
|
||||
f"{pr_text}\n"
|
||||
"---"
|
||||
)
|
||||
|
||||
|
||||
def _apply_fact_check(
|
||||
raw_output: str, original_text: str
|
||||
) -> tuple[str, str, str]:
|
||||
"""Parse fact-checker output. Returns (text, status, changes).
|
||||
|
||||
status is one of: "clean", "corrected", "skipped"
|
||||
On any parse failure or suspect rewrite, returns original text unchanged.
|
||||
"""
|
||||
if not raw_output or not raw_output.strip():
|
||||
return original_text, "skipped", ""
|
||||
|
||||
stripped = raw_output.strip()
|
||||
|
||||
# No errors found
|
||||
if stripped.startswith("[NO_ERRORS]"):
|
||||
return original_text, "clean", ""
|
||||
|
||||
# Corrections found
|
||||
if stripped.startswith("[CORRECTED]"):
|
||||
# Split off the [CORRECTED] prefix
|
||||
body = stripped[len("[CORRECTED]"):].strip()
|
||||
|
||||
# Split into corrected text and change log
|
||||
changes = ""
|
||||
if "\nCHANGES:" in body:
|
||||
text_part, changes = body.split("\nCHANGES:", 1)
|
||||
corrected = text_part.strip()
|
||||
changes = changes.strip()
|
||||
else:
|
||||
corrected = body
|
||||
|
||||
if not corrected:
|
||||
return original_text, "skipped", ""
|
||||
|
||||
# Safety: reject if word count differs by more than 15%
|
||||
orig_wc = _word_count(original_text)
|
||||
new_wc = _word_count(corrected)
|
||||
if orig_wc > 0 and abs(new_wc - orig_wc) / orig_wc > 0.15:
|
||||
log.warning(
|
||||
"Fact-check rejected: word count changed too much "
|
||||
"(%d -> %d, %.0f%% delta)",
|
||||
orig_wc, new_wc, abs(new_wc - orig_wc) / orig_wc * 100,
|
||||
)
|
||||
return original_text, "skipped", "rejected -- word count delta too large"
|
||||
|
||||
return corrected, "corrected", changes
|
||||
|
||||
# Unparseable output
|
||||
return original_text, "skipped", ""
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Main tool
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
@ -700,8 +601,8 @@ def write_press_releases(
|
|||
cost_log: list[dict] = []
|
||||
|
||||
# ── Step 1: Generate 7 headlines (chat brain) ─────────────────────────
|
||||
log.info("[PR Pipeline] Step 1/5: Generating 7 headlines for %s...", company_name)
|
||||
_set_status(ctx, f"Step 1/5: Generating 7 headlines for {company_name}...")
|
||||
log.info("[PR Pipeline] Step 1/4: Generating 7 headlines for %s...", company_name)
|
||||
_set_status(ctx, f"Step 1/4: Generating 7 headlines for {company_name}...")
|
||||
step_start = time.time()
|
||||
headline_prompt = _build_headline_prompt(topic, company_name, url, lsi_terms, headlines_ref)
|
||||
messages = [
|
||||
|
|
@ -726,8 +627,8 @@ def write_press_releases(
|
|||
headlines_file.write_text(headlines_raw.strip(), encoding="utf-8")
|
||||
|
||||
# ── Step 2: AI judge picks best 2 (chat brain) ───────────────────────
|
||||
log.info("[PR Pipeline] Step 2/5: AI judge selecting best 2 headlines...")
|
||||
_set_status(ctx, "Step 2/5: AI judge selecting best 2 headlines...")
|
||||
log.info("[PR Pipeline] Step 2/4: AI judge selecting best 2 headlines...")
|
||||
_set_status(ctx, "Step 2/4: AI judge selecting best 2 headlines...")
|
||||
step_start = time.time()
|
||||
judge_prompt = _build_judge_prompt(headlines_raw, headlines_ref, topic)
|
||||
messages = [
|
||||
|
|
@ -765,7 +666,7 @@ def write_press_releases(
|
|||
winners = winners[:2]
|
||||
|
||||
# ── Step 3: Write 2 press releases (execution brain x 2) ─────────────
|
||||
log.info("[PR Pipeline] Step 3/5: Writing 2 press releases...")
|
||||
log.info("[PR Pipeline] Step 3/4: Writing 2 press releases...")
|
||||
anchor_phrase = _derive_anchor_phrase(company_name, keyword) if keyword else ""
|
||||
pr_texts: list[str] = []
|
||||
pr_files: list[str] = []
|
||||
|
|
@ -773,7 +674,7 @@ def write_press_releases(
|
|||
anchor_warnings: list[str] = []
|
||||
for i, headline in enumerate(winners):
|
||||
log.info("[PR Pipeline] Writing PR %d/2: %s", i + 1, headline[:60])
|
||||
_set_status(ctx, f"Step 3/5: Writing press release {i + 1}/2 — {headline[:60]}...")
|
||||
_set_status(ctx, f"Step 3/4: Writing press release {i + 1}/2 — {headline[:60]}...")
|
||||
step_start = time.time()
|
||||
pr_prompt = _build_pr_prompt(
|
||||
headline,
|
||||
|
|
@ -836,65 +737,6 @@ def write_press_releases(
|
|||
text_to_docx(clean_result, docx_path)
|
||||
docx_files.append(str(docx_path))
|
||||
|
||||
# ── Step 3.5: Adversarial fact-check (Sonnet + WebSearch) ───────────
|
||||
log.info("[PR Pipeline] Step 3.5/5: Running adversarial fact-check...")
|
||||
fact_check_statuses: list[str] = [] # per-PR: "clean", "corrected", "skipped"
|
||||
fact_check_changes: list[str] = [] # per-PR change log (empty if clean/skipped)
|
||||
fact_check_failed = False
|
||||
for i, pr_text in enumerate(pr_texts):
|
||||
log.info("[PR Pipeline] Fact-checking PR %d/2...", i + 1)
|
||||
_set_status(ctx, f"Step 3.5/5: Fact-checking PR {i + 1}/2...")
|
||||
step_start = time.time()
|
||||
try:
|
||||
fc_prompt = _build_fact_check_prompt(
|
||||
pr_text, company_name, url, topic, keyword
|
||||
)
|
||||
fc_result = agent.execute_task(
|
||||
fc_prompt, tools="WebSearch,WebFetch", model=FACT_CHECK_MODEL
|
||||
)
|
||||
corrected, status, changes = _apply_fact_check(fc_result, pr_text)
|
||||
fact_check_statuses.append(status)
|
||||
fact_check_changes.append(changes)
|
||||
|
||||
if status == "corrected":
|
||||
pr_texts[i] = corrected
|
||||
# Re-write files with corrected text
|
||||
Path(pr_files[i]).write_text(corrected, encoding="utf-8")
|
||||
text_to_docx(corrected, Path(docx_files[i]))
|
||||
log.info(
|
||||
"[PR Pipeline] PR %d: %d correction(s) applied",
|
||||
i + 1, changes.count("\n") + 1 if changes else 1,
|
||||
)
|
||||
elif status == "clean":
|
||||
log.info("[PR Pipeline] PR %d: no factual errors found", i + 1)
|
||||
else:
|
||||
log.warning("[PR Pipeline] PR %d: fact-check skipped (unparseable output)", i + 1)
|
||||
|
||||
elapsed = round(time.time() - step_start, 1)
|
||||
cost_log.append(
|
||||
{
|
||||
"step": f"3.5{chr(97 + i)}. Fact-check PR {i + 1}",
|
||||
"model": FACT_CHECK_MODEL,
|
||||
"elapsed_s": elapsed,
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
fact_check_failed = True
|
||||
fact_check_statuses.append("skipped")
|
||||
fact_check_changes.append("")
|
||||
log.warning("[PR Pipeline] PR %d fact-check failed: %s", i + 1, e)
|
||||
|
||||
# Notify ClickUp if fact-check could not run at all
|
||||
if fact_check_failed and clickup_task_id and cu_client:
|
||||
try:
|
||||
cu_client.add_comment(
|
||||
clickup_task_id,
|
||||
"Note: factual accuracy check could not be run on this PR. "
|
||||
"Manual review recommended.",
|
||||
)
|
||||
except Exception as e:
|
||||
log.warning("ClickUp fact-check warning failed for %s: %s", clickup_task_id, e)
|
||||
|
||||
# ── ClickUp: upload docx attachments + comment ─────────────────────
|
||||
uploaded_count = 0
|
||||
failed_uploads: list[str] = []
|
||||
|
|
@ -913,27 +755,11 @@ def write_press_releases(
|
|||
f"\n[WARNING]Warning: {len(failed_uploads)} attachment(s) failed to upload. "
|
||||
f"Files saved locally at:\n{paths_list}"
|
||||
)
|
||||
# Build fact-check summary for comment
|
||||
fc_summary = ""
|
||||
for fi, fc_status in enumerate(fact_check_statuses):
|
||||
label = f"PR {chr(65 + fi)}"
|
||||
if fc_status == "corrected":
|
||||
fc_summary += f"\nFact-check {label}: corrections applied"
|
||||
if fact_check_changes[fi]:
|
||||
fc_summary += f"\n {fact_check_changes[fi]}"
|
||||
elif fc_status == "clean":
|
||||
fc_summary += f"\nFact-check {label}: no errors found"
|
||||
else:
|
||||
fc_summary += (
|
||||
f"\nFact-check {label}: could not run -- manual review recommended"
|
||||
)
|
||||
|
||||
cu_client.add_comment(
|
||||
clickup_task_id,
|
||||
f"Saved {len(docx_files)} press release(s). "
|
||||
f"📎 Saved {len(docx_files)} press release(s). "
|
||||
f"{uploaded_count} file(s) attached.\n"
|
||||
f"Generating JSON-LD schemas next...{upload_warning}"
|
||||
f"{fc_summary}",
|
||||
f"Generating JSON-LD schemas next...{upload_warning}",
|
||||
)
|
||||
log.info(
|
||||
"ClickUp: uploaded %d attachments for task %s", uploaded_count, clickup_task_id
|
||||
|
|
@ -942,12 +768,12 @@ def write_press_releases(
|
|||
log.warning("ClickUp attachment upload failed for %s: %s", clickup_task_id, e)
|
||||
|
||||
# ── Step 4: Generate 2 JSON-LD schemas (Sonnet + WebSearch) ───────────
|
||||
log.info("[PR Pipeline] Step 4/5: Generating 2 JSON-LD schemas...")
|
||||
log.info("[PR Pipeline] Step 4/4: Generating 2 JSON-LD schemas...")
|
||||
schema_texts: list[str] = []
|
||||
schema_files: list[str] = []
|
||||
for i, pr_text in enumerate(pr_texts):
|
||||
log.info("[PR Pipeline] Schema %d/2 for: %s", i + 1, winners[i][:60])
|
||||
_set_status(ctx, f"Step 4/5: Generating schema {i + 1}/2...")
|
||||
_set_status(ctx, f"Step 4/4: Generating schema {i + 1}/2...")
|
||||
step_start = time.time()
|
||||
schema_prompt = _build_schema_prompt(pr_text, company_name, url, schema_skill)
|
||||
exec_tools = "WebSearch,WebFetch"
|
||||
|
|
@ -1052,34 +878,6 @@ def write_press_releases(
|
|||
finally:
|
||||
cu_client.close()
|
||||
|
||||
# ── Client delivery: Drive upload + Gmail draft ──────────────────
|
||||
if clickup_task_id and docx_files:
|
||||
try:
|
||||
from ..delivery import deliver_to_client
|
||||
|
||||
delivery_result = deliver_to_client(
|
||||
files=[Path(f) for f in docx_files],
|
||||
company_name=company_name,
|
||||
task_id=clickup_task_id,
|
||||
task_type="Press Release",
|
||||
ctx=ctx,
|
||||
)
|
||||
output_parts.append("\n## Client Delivery\n")
|
||||
if delivery_result.doc_links:
|
||||
output_parts.append(
|
||||
"- Google Docs: " + ", ".join(delivery_result.doc_links)
|
||||
)
|
||||
if delivery_result.draft_id:
|
||||
output_parts.append(
|
||||
"- Gmail draft created (ID: %s)" % delivery_result.draft_id
|
||||
)
|
||||
if delivery_result.errors:
|
||||
for err in delivery_result.errors:
|
||||
output_parts.append("- Warning: %s" % err)
|
||||
except Exception as e:
|
||||
log.warning("Client delivery failed: %s", e)
|
||||
output_parts.append("\n## Client Delivery\n- Failed: %s" % e)
|
||||
|
||||
return "\n".join(output_parts)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,312 +0,0 @@
|
|||
# ClickUp Runner
|
||||
|
||||
Headless background service that polls ClickUp for tasks with the
|
||||
"Delegate to Claude" checkbox checked, routes them through a skill map
|
||||
based on task type + stage, runs Claude Code headless, and posts results
|
||||
back to ClickUp.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Set required env vars (or put them in .env at repo root)
|
||||
export CLICKUP_API_TOKEN="pk_..."
|
||||
export CLICKUP_SPACE_ID="..."
|
||||
|
||||
# Run the runner
|
||||
uv run python -m clickup_runner
|
||||
```
|
||||
|
||||
## Quick Reference: Running a Task
|
||||
|
||||
To delegate a task to the runner, it needs **all three fields** set or it will be skipped:
|
||||
|
||||
1. **Work Category** -- which task type (dropdown)
|
||||
2. **Stage** -- which pipeline step to run (dropdown)
|
||||
3. **Delegate to Claude** -- checked (checkbox)
|
||||
|
||||
Plus the task must have a **due date <= today** and be in an **"Overall" list**.
|
||||
|
||||
### Initial Stage by Task Type
|
||||
|
||||
| Task Type | First Stage | What it does | Full pipeline |
|-----------|-------------|--------------|---------------|
| Content Creation | `run_cora` | Submits Cora analysis | `run_cora` -> `outline` -> `draft` -> `final` |
| On Page Optimization | `run_cora` | Submits Cora analysis | `run_cora` -> `outline` -> `draft` -> `hidden div` -> `final` |
| Press Release | `draft` | Writes full PR + schemas | `draft` -> `final` |
| Link Building | `run_cora` | Submits Cora analysis | `run_cora` -> `build` -> `final` |
|
||||
|
||||
### After Each Stage
|
||||
|
||||
The runner **unchecks** "Delegate to Claude" and sets status to **Review** so you can check the output. To continue to the next stage, review the output, then re-check "Delegate to Claude". The Stage field is advanced automatically.
|
||||
|
||||
### Required Custom Fields by Task Type
|
||||
|
||||
| Task Type | Required Fields |
|
||||
|-----------|----------------|
|
||||
| Content Creation | Keyword, IMSURL (for run_cora) |
|
||||
| On Page Optimization | Keyword, IMSURL (for run_cora) |
|
||||
| Press Release | Keyword, IMSURL |
|
||||
| Link Building | Keyword, IMSURL, CLIFlags (`--tier1-count N`) |
|
||||
|
||||
### If Something Goes Wrong
|
||||
|
||||
The runner sets the **Error** checkbox, posts a comment explaining what failed and how to fix it, unchecks "Delegate to Claude", and sets status to **Review**. Fix the issue, then re-check "Delegate to Claude" to retry.
|
||||
|
||||
---
|
||||
|
||||
## How It Works
|
||||
|
||||
1. Every 720 seconds, polls all "Overall" lists in the ClickUp space
|
||||
2. Checks for completed AutoCora jobs (result polling)
|
||||
3. Finds tasks where:
|
||||
- "Delegate to Claude" checkbox is checked
|
||||
- Due date is today or earlier
|
||||
4. Reads the task's Work Category and Stage fields
|
||||
5. Looks up the skill route in `skill_map.py`
|
||||
6. Dispatches to either:
|
||||
- **AutoCora handler** (for `run_cora` stage): submits a Cora job to the NAS queue
|
||||
- **Claude Code handler**: runs `claude -p` with the skill file + task context as prompt
|
||||
7. On success: uploads output files as ClickUp attachments, copies to NAS (best-effort),
|
||||
advances Stage, sets next status, posts summary comment
|
||||
8. On error: sets Error checkbox, posts structured error comment (what failed, how to fix)
|
||||
9. Always unchecks "Delegate to Claude" after processing
|
||||
|
||||
## Configuration
|
||||
|
||||
Config is loaded from `clickup_runner.yaml` at the repo root (optional),
|
||||
with env var overrides.
|
||||
|
||||
### clickup_runner.yaml
|
||||
|
||||
```yaml
|
||||
clickup:
|
||||
space_id: "your_space_id"
|
||||
task_type_field_name: "Work Category"
|
||||
delegate_field_name: "Delegate to Claude"
|
||||
stage_field_name: "Stage"
|
||||
error_field_name: "Error"
|
||||
ai_working_status: "ai working"
|
||||
review_status: "review"
|
||||
|
||||
autocora:
|
||||
jobs_dir: "//PennQnap1/SHARE1/AutoCora/jobs"
|
||||
results_dir: "//PennQnap1/SHARE1/AutoCora/results"
|
||||
xlsx_dir: "//PennQnap1/SHARE1/Cora-For-Human"
|
||||
poll_interval_seconds: 120
|
||||
|
||||
blm:
|
||||
blm_dir: "E:/dev/Big-Link-Man"
|
||||
timeout_seconds: 1800
|
||||
|
||||
nas:
|
||||
generated_dir: "//PennQnap1/SHARE1/generated"
|
||||
|
||||
runner:
|
||||
poll_interval_seconds: 720
|
||||
claude_timeout_seconds: 2700
|
||||
max_turns_default: 10
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Required | Description |
|
||||
|----------|----------|-------------|
|
||||
| `CLICKUP_API_TOKEN` | Yes | ClickUp API token |
|
||||
| `CLICKUP_SPACE_ID` | Yes | ClickUp space to poll |
|
||||
| `NTFY_ERROR_TOPIC` | No | ntfy.sh topic for error notifications |
|
||||
| `NTFY_SUCCESS_TOPIC` | No | ntfy.sh topic for success notifications |
|
||||
|
||||
## ClickUp Custom Fields Required
|
||||
|
||||
These must exist in your ClickUp space:
|
||||
|
||||
| Field | Type | Purpose |
|
||||
|-------|------|---------|
|
||||
| Delegate to Claude | Checkbox | Trigger -- checked = process this task |
|
||||
| Stage | Dropdown | Pipeline position (run_cora, outline, draft, etc.) |
|
||||
| Error | Checkbox | Flagged when processing fails |
|
||||
| Work Category | Dropdown | Task type (Content Creation, Press Release, etc.) |
|
||||
| Keyword | Text | SEO keyword for Cora analysis (required for run_cora stage) |
|
||||
| IMSURL | URL | Target money-site URL (used in prompts and Cora jobs) |
|
||||
| Client | Dropdown | Client name (used for NAS file organization and prompts) |
|
||||
|
||||
## Skill Map
|
||||
|
||||
The routing table lives in `skill_map.py`. Each task type has a sequence
|
||||
of stages, and each stage maps to either an AutoCora job or a Claude Code
|
||||
skill file.
|
||||
|
||||
### Content Creation
|
||||
```
|
||||
run_cora -> outline -> draft -> final
|
||||
```
|
||||
|
||||
### On Page Optimization
|
||||
```
|
||||
run_cora -> outline -> draft -> hidden div -> final
|
||||
```
|
||||
|
||||
### Press Release
|
||||
```
|
||||
draft -> final
|
||||
```
|
||||
|
||||
### Link Building
|
||||
```
|
||||
run_cora -> build -> final
|
||||
```
|
||||
|
||||
## Adding a New Task Type
|
||||
|
||||
1. Add an entry to `SKILL_MAP` in `skill_map.py`
|
||||
2. Write the skill `.md` file(s) in `skills/`
|
||||
3. Add the Work Category value in ClickUp
|
||||
4. Add the Stage dropdown values in ClickUp
|
||||
|
||||
## Statuses
|
||||
|
||||
| Status | Owner | Meaning |
|
||||
|--------|-------|---------|
|
||||
| To Do | Nobody | Not started |
|
||||
| In Progress | Human | Human is working on it |
|
||||
| Needs Input | Human | Blocked, needs info |
|
||||
| AI Working | Claude | Runner is processing |
|
||||
| Review | Human | Output ready for human review |
|
||||
| Client Review | Client | Sent to client |
|
||||
| Complete | Nobody | Done |
|
||||
|
||||
## Claude Code Handler
|
||||
|
||||
When a task routes to a Claude handler, the runner:
|
||||
|
||||
1. Sets status to "AI Working"
|
||||
2. Reads the skill `.md` file from `skills/`
|
||||
3. Builds a prompt with skill instructions + task context:
|
||||
- Task name, description, customer, target URL
|
||||
- ClickUp task link
|
||||
- Attached `.xlsx` Cora report URLs (if any)
|
||||
- Instructions to write output files to the working directory
|
||||
4. Runs `claude -p "<prompt>" --allowedTools "..." --max-turns N --permission-mode bypassPermissions --bare`
|
||||
5. Collects all files Claude created in the temp working directory
|
||||
6. Uploads files to ClickUp as attachments
|
||||
7. Copies files to NAS at `//PennQnap1/SHARE1/generated/{customer}/` (best-effort)
|
||||
8. Advances Stage, updates status, posts comment, unchecks Delegate to Claude
|
||||
9. Sends ntfy.sh notification (if configured)
|
||||
|
||||
On failure, it posts a structured error comment:
|
||||
```
|
||||
[ERROR] Claude processing failed
|
||||
--
|
||||
What failed: <error details>
|
||||
|
||||
How to fix: <instructions>
|
||||
```
|
||||
|
||||
## AutoCora Handler
|
||||
|
||||
AutoCora jobs are asynchronous -- submission and result polling happen on
|
||||
separate poll cycles.
|
||||
|
||||
### Submission (when a `run_cora` task is found)
|
||||
|
||||
1. Reads the `Keyword` and `IMSURL` custom fields from the task
|
||||
2. Sets status to "AI Working"
|
||||
3. Writes a job JSON file to `//PennQnap1/SHARE1/AutoCora/jobs/`:
|
||||
```json
|
||||
{
|
||||
"keyword": "CNC Machining",
|
||||
"url": "https://acme.com/cnc-machining",
|
||||
"task_ids": ["task_id"]
|
||||
}
|
||||
```
|
||||
4. Stores job metadata in the state DB for result polling
|
||||
5. Posts comment "Cora job submitted for keyword: ..."
|
||||
6. Unchecks "Delegate to Claude"
|
||||
|
||||
### Result Polling (every poll cycle)
|
||||
|
||||
At the start of each cycle, the runner scans the results directory:
|
||||
|
||||
1. Looks for `.result` files in `//PennQnap1/SHARE1/AutoCora/results/`
|
||||
2. Matches results to pending jobs via the state DB
|
||||
3. On **success**:
|
||||
- Advances Stage to the next stage (e.g. run_cora -> outline)
|
||||
- Sets status to "review"
|
||||
- Posts comment with keyword and .xlsx location
|
||||
- Clears Error checkbox
|
||||
- **Does NOT re-check Delegate to Claude** (human reviews first)
|
||||
4. On **failure**:
|
||||
- Sets Error checkbox
|
||||
- Posts structured error comment with failure reason
|
||||
5. Archives processed `.result` files to `results/processed/`
|
||||
|
||||
### .xlsx Skip
|
||||
|
||||
If a task at `run_cora` stage already has an `.xlsx` attachment, the runner
|
||||
skips Cora submission and advances directly to the next stage.
|
||||
|
||||
## BLM Handler (Link Building build stage)
|
||||
|
||||
The Link Building `build` stage runs Big-Link-Man directly -- no Claude needed.
|
||||
|
||||
1. Sets status to "AI Working"
|
||||
2. Looks for a Cora `.xlsx` in `//PennQnap1/SHARE1/Cora-For-Human/` matching the task keyword
|
||||
3. Runs `ingest-cora` via BLM's own venv Python (`E:/dev/Big-Link-Man/.venv/Scripts/python.exe`)
|
||||
4. Runs `generate-batch` with `--continue-on-error`
|
||||
5. On success: advances Stage to `final`, posts summary comment, unchecks Delegate to Claude
|
||||
6. On failure: sets Error checkbox, posts structured error comment
|
||||
|
||||
BLM credentials are injected from `BLM_USERNAME` and `BLM_PASSWORD` env vars.
|
||||
|
||||
## Cora .xlsx Files
|
||||
|
||||
All Cora reports live in **one folder**: `//PennQnap1/SHARE1/Cora-For-Human/`
|
||||
|
||||
- AutoCora drops finished reports here
|
||||
- All handlers (BLM, content, OPT) read from this folder by keyword match
|
||||
- Files are **not moved** after processing -- they stay in place so multiple task types can use the same report
|
||||
- A separate cleanup script can move old files to `processed/` when no open tasks match the keyword
|
||||
|
||||
## Logs
|
||||
|
||||
- Console output: INFO level
|
||||
- File log: `logs/clickup_runner.log` (DEBUG level)
|
||||
- Run history: `data/clickup_runner.db` (run_log table + kv_store for AutoCora jobs)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Task not being picked up
|
||||
- Check that "Delegate to Claude" is checked
|
||||
- Check that the due date is today or earlier
|
||||
- Check that Work Category and Stage are set and valid
|
||||
- Check that the task is in an "Overall" list
|
||||
|
||||
### Claude errors
|
||||
- Check `logs/clickup_runner.log` for the full error
|
||||
- Verify the skill `.md` file exists in `skills/`
|
||||
- Verify `claude` CLI is on PATH
|
||||
- Check the Error comment on the ClickUp task for fix instructions
|
||||
|
||||
### AutoCora not producing results
|
||||
- Verify the NAS is mounted and accessible
|
||||
- Check that job files appear in `//PennQnap1/SHARE1/AutoCora/jobs/`
|
||||
- Check the AutoCora worker logs on the NAS
|
||||
- Look for `.result` files in `//PennQnap1/SHARE1/AutoCora/results/`
|
||||
|
||||
### NAS copy failures
|
||||
- NAS copy is best-effort and won't block the pipeline
|
||||
- Check that `//PennQnap1/SHARE1/generated/` is accessible
|
||||
- Check `logs/clickup_runner.log` for copy warnings
|
||||
|
||||
## Tests
|
||||
|
||||
```bash
|
||||
# Unit tests only (no credentials needed)
|
||||
uv run pytest tests/test_clickup_runner/ -m "not integration"
|
||||
|
||||
# Full suite (needs CLICKUP_API_TOKEN)
|
||||
uv run pytest tests/test_clickup_runner/
|
||||
|
||||
# Specific test file
|
||||
uv run pytest tests/test_clickup_runner/test_autocora.py -v
|
||||
```
|
||||
|
|
@ -1 +0,0 @@
|
|||
"""ClickUp + Claude Code automation runner."""
|
||||
|
|
@ -1,808 +0,0 @@
|
|||
"""ClickUp + Claude Code automation runner -- entry point.
|
||||
|
||||
Usage:
|
||||
uv run python -m clickup_runner
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from .autocora import archive_result, scan_results, submit_job
|
||||
from .blm import find_cora_xlsx, run_generate, run_ingest
|
||||
from .fact_check import fact_check_pr_files
|
||||
from .claude_runner import (
|
||||
RunResult,
|
||||
build_prompt,
|
||||
copy_to_nas,
|
||||
notify,
|
||||
read_skill_file,
|
||||
run_claude,
|
||||
)
|
||||
from .clickup_client import ClickUpClient, ClickUpTask
|
||||
from .config import Config, load_config
|
||||
from .skill_map import SkillRoute, get_route, get_supported_task_types, get_valid_stages
|
||||
from .state import StateDB
|
||||
|
||||
log = logging.getLogger("clickup_runner")
|
||||
|
||||
# Flag for graceful shutdown
|
||||
_shutdown = False
|
||||
|
||||
|
||||
def _handle_signal(signum, frame):
    """Signal handler: request a graceful stop after the current poll cycle."""
    global _shutdown
    _shutdown = True
    log.info("Received signal %d -- shutting down after current cycle", signum)
|
||||
|
||||
|
||||
def _setup_logging():
    """Configure logging: INFO to the console, DEBUG to logs/clickup_runner.log.

    File logging is best-effort -- if the log directory or file cannot be
    created (e.g. read-only filesystem), the runner continues with
    console-only logging and emits a warning.
    """
    fmt = logging.Formatter(
        "[%(asctime)s] %(levelname)-7s %(name)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(fmt)
    console.setLevel(logging.INFO)

    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(console)

    # File handler for persistent logs (best-effort).
    # NOTE: the redundant function-local `from pathlib import Path` was
    # removed -- Path is already imported at module top.
    try:
        log_dir = Path(__file__).resolve().parent.parent / "logs"
        log_dir.mkdir(exist_ok=True)
        file_handler = logging.FileHandler(
            log_dir / "clickup_runner.log", encoding="utf-8"
        )
        file_handler.setFormatter(fmt)
        file_handler.setLevel(logging.DEBUG)
        root.addHandler(file_handler)
    except Exception as e:
        log.warning("Could not set up file logging: %s", e)
|
||||
|
||||
|
||||
def _due_date_cutoff_ms() -> int:
|
||||
"""Return end-of-today as Unix milliseconds for due_date_lt filter."""
|
||||
now = datetime.now(timezone.utc)
|
||||
end_of_day = now.replace(hour=23, minute=59, second=59, microsecond=999999)
|
||||
return int(end_of_day.timestamp() * 1000)
|
||||
|
||||
|
||||
def _is_due_today_or_earlier(task: ClickUpTask) -> bool:
    """Check if a task's due date is today or earlier."""
    raw_due = task.due_date
    # No due date at all means the task is never considered due.
    if not raw_due:
        return False
    try:
        # ClickUp due dates are Unix-millisecond strings.
        return int(raw_due) <= _due_date_cutoff_ms()
    except (ValueError, TypeError):
        # Malformed due date -- treat as not due rather than crash the cycle.
        return False
|
||||
|
||||
|
||||
def poll_cycle(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
) -> int:
    """Run one poll cycle. Returns the number of tasks dispatched.

    One cycle:
      1. Processes any completed AutoCora results first.
      2. Fetches tasks from the space's "Overall" lists due today or earlier.
      3. For each task with "Delegate to Claude" checked, resolves its route
         (Work Category + Stage) and dispatches to the matching handler
         (autocora / blm / claude).

    Tasks that cannot be routed are flagged via _handle_no_mapping and do not
    count toward the returned total.
    """
    space_id = cfg.clickup.space_id
    if not space_id:
        log.error("No space_id configured -- skipping poll cycle")
        return 0

    # Check for completed AutoCora jobs before dispatching new tasks
    _check_autocora_results(client, cfg, db)

    # Fetch all tasks from Overall lists with due date <= today
    cutoff_ms = _due_date_cutoff_ms()
    tasks = client.get_tasks_from_overall_lists(space_id, due_date_lt=cutoff_ms)

    dispatched = 0

    for task in tasks:
        # 1. Check "Delegate to Claude" checkbox -- unchecked tasks are skipped
        if not client.is_checkbox_checked(task, cfg.clickup.delegate_field_name):
            continue

        # 2. Verify due date <= today (server-side filter is re-checked locally)
        if not _is_due_today_or_earlier(task):
            continue

        # 3. Read task type and stage
        task_type = task.task_type
        stage = client.get_stage(task, cfg.clickup.stage_field_name)

        log.info(
            "Found delegated task: %s (id=%s, type=%s, stage=%s)",
            task.name,
            task.id,
            task_type,
            stage,
        )

        # 4. Look up skill route -- both fields must be present to route
        if not task_type:
            _handle_no_mapping(
                client, cfg, task,
                "Task has no Work Category set. "
                "Set the Work Category field, then re-check Delegate to Claude.",
            )
            continue

        if not stage:
            _handle_no_mapping(
                client, cfg, task,
                "Task has no Stage set. "
                "Valid stages for %s: %s. "
                "Set the Stage field, then re-check Delegate to Claude."
                % (task_type, ", ".join(get_valid_stages(task_type)) or "none"),
            )
            continue

        # 5. Check for .xlsx attachment on run_cora stage
        route = get_route(task_type, stage)
        if route and route.handler == "autocora":
            # If .xlsx is already attached, skip Cora and advance
            attachments = client.get_task_attachments(task.id)
            task.attachments = attachments
            if task.has_xlsx_attachment():
                log.info(
                    "Task %s has .xlsx attached -- skipping run_cora, advancing to %s",
                    task.id,
                    route.next_stage,
                )
                client.set_stage(
                    task.id,
                    task.list_id,
                    route.next_stage,
                    cfg.clickup.stage_field_name,
                )
                # Re-read stage and re-route (the advanced stage may map to a
                # different handler, or to no route at all -- handled below)
                stage = route.next_stage
                route = get_route(task_type, stage)

        if route is None:
            # Distinguish "unknown task type" from "bad stage for a known type"
            # so the error comment tells the human exactly what to fix.
            valid = get_valid_stages(task_type)
            if not valid:
                msg = (
                    "Task type '%s' is not supported. "
                    "Supported types: %s. "
                    "Fix the Work Category field, then re-check Delegate to Claude."
                    % (task_type, ", ".join(get_supported_task_types()))
                )
            else:
                msg = (
                    "Stage '%s' is not valid for task type '%s'. "
                    "Valid stages: %s. "
                    "Fix the Stage field, then re-check Delegate to Claude."
                    % (stage, task_type, ", ".join(valid))
                )
            _handle_no_mapping(client, cfg, task, msg)
            continue

        # 6. Dispatch to the handler named in the route
        log.info(
            "Dispatching task %s: type=%s, stage=%s, handler=%s",
            task.id,
            task_type,
            stage,
            route.handler,
        )

        run_id = db.log_run_start(task.id, task.name, task_type, stage)

        if route.handler == "autocora":
            _dispatch_autocora(client, cfg, db, task, route, run_id)
        elif route.handler == "blm":
            _dispatch_blm(client, cfg, db, task, route, run_id)
        else:
            # Any other handler value falls through to the Claude Code runner
            _dispatch_claude(client, cfg, db, task, route, run_id)

        dispatched += 1

    return dispatched
|
||||
|
||||
|
||||
def _handle_no_mapping(
    client: ClickUpClient,
    cfg: Config,
    task: ClickUpTask,
    message: str,
):
    """Flag an unroutable task: explain via comment, set Error, clear Delegate.

    Leaves the task for a human: the Error checkbox makes the problem visible,
    and unchecking "Delegate to Claude" stops the runner from retrying until
    the task is fixed and re-delegated.
    """
    client.add_comment(task.id, "[ERROR] Cannot process task\n--\n%s" % message)
    fields = cfg.clickup
    client.set_checkbox(task.id, task.list_id, fields.error_field_name, True)
    client.set_checkbox(task.id, task.list_id, fields.delegate_field_name, False)
    log.warning("Task %s: %s", task.id, message)
|
||||
|
||||
|
||||
def _check_autocora_results(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
):
    """Poll for completed AutoCora jobs and update ClickUp accordingly."""
    pending = scan_results(cfg.autocora.results_dir)
    if not pending:
        return

    log.info("Found %d AutoCora result(s) to process", len(pending))

    for res in pending:
        kv_key = "autocora:job:%s" % res.job_id
        job_data = db.kv_get_json(kv_key)

        if not job_data:
            # Orphaned result from a previous run -- archive and skip.
            # Without the KV entry we don't have keyword or run context.
            log.warning(
                "Result %s has no matching state DB entry -- archiving as orphan",
                res.job_id,
            )
            archive_result(res)
            continue

        # Route by outcome; both handlers take the same arguments.
        handler = (
            _handle_autocora_success
            if res.status == "SUCCESS"
            else _handle_autocora_failure
        )
        handler(client, cfg, db, job_data["task_id"], res, job_data)

        # Clean up state DB entry, then archive the processed result file.
        db.kv_delete(kv_key)
        archive_result(res)
|
||||
|
||||
|
||||
def _handle_autocora_success(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
    task_id: str,
    result,
    job_data: dict | None,
):
    """Handle a successful AutoCora result for one task.

    Advances the task's stage (when a route is found), sets the review
    status, posts a comment pointing at the generated .xlsx, clears the
    error checkbox, closes the run log, and sends a notification.

    Args:
        task_id: ClickUp task id from the stored job metadata.
        result: Parsed AutoCora result (presumably a CoraResult -- confirm).
        job_data: Stored job metadata, may be None for partial entries.
    """
    keyword = result.keyword or (job_data or {}).get("keyword", "unknown")

    # Advance stage -- need list_id from task or job_data
    try:
        task = client.get_task(task_id)
    except Exception as e:
        # Without the task we cannot advance anything; leave the result
        # to be archived by the caller.
        log.error("Failed to fetch task %s for AutoCora result: %s", task_id, e)
        return

    # Look up the route to get next_stage
    task_type = task.task_type
    stage = client.get_stage(task, cfg.clickup.stage_field_name)
    route = get_route(task_type, stage)

    if route:
        client.set_stage(
            task_id, task.list_id, route.next_stage, cfg.clickup.stage_field_name
        )
        next_stage = route.next_stage
    else:
        # Fallback -- just note it in the comment
        next_stage = "(unknown)"

    client.update_task_status(task_id, cfg.clickup.review_status)
    client.add_comment(
        task_id,
        "Cora report generated for \"%s\". Stage advanced to %s.\n"
        "Review the .xlsx in %s, then re-check Delegate to Claude for the next stage."
        % (keyword, next_stage, cfg.autocora.xlsx_dir),
    )
    client.set_checkbox(
        task_id, task.list_id, cfg.clickup.error_field_name, False
    )

    # Finish the run log if we have a run_id
    run_id = (job_data or {}).get("run_id")
    if run_id:
        db.log_run_finish(run_id, "completed", result="Cora report ready")

    notify(cfg, "Cora done: %s" % keyword, "Task %s ready for review" % task_id)
    log.info("AutoCora SUCCESS for task %s (keyword=%s)", task_id, keyword)
|
||||
|
||||
|
||||
def _handle_autocora_failure(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
    task_id: str,
    result,
    job_data: dict | None,
):
    """Record a failed AutoCora result: error comment + checkbox, review
    status, run-log failure entry, and an error notification."""
    meta = job_data or {}
    keyword = result.keyword or meta.get("keyword", "unknown")
    reason = result.reason or "Unknown error"

    try:
        task = client.get_task(task_id)
    except Exception as exc:
        log.error("Failed to fetch task %s for AutoCora result: %s", task_id, exc)
        return

    client.add_comment(
        task_id,
        (
            "[ERROR] Cora report failed for keyword: \"%s\"\n"
            "--\n"
            "What failed: %s\n"
            "\n"
            "How to fix: Check the AutoCora worker logs, fix the issue, "
            "then re-check Delegate to Claude."
        ) % (keyword, reason),
    )
    client.set_checkbox(task_id, task.list_id, cfg.clickup.error_field_name, True)
    client.update_task_status(task_id, cfg.clickup.review_status)

    run_id = meta.get("run_id")
    if run_id:
        db.log_run_finish(run_id, "failed", error="Cora failed: %s" % reason)

    notify(
        cfg,
        "Cora FAILED: %s" % keyword,
        "Task %s -- %s" % (task_id, reason),
        is_error=True,
    )
    log.error("AutoCora FAILURE for task %s: %s", task_id, reason)
|
||||
|
||||
|
||||
def _dispatch_autocora(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
    task: ClickUpTask,
    route: SkillRoute,
    run_id: int,
):
    """Submit an AutoCora job for a task.

    Writes a job file to the NAS queue and records job metadata in the
    state DB so the result poller can match the worker's output back to
    this task. The run is logged as "submitted" -- completion is recorded
    later when the result arrives.
    """
    keyword = task.get_field_value("Keyword") or ""
    url = task.get_field_value("IMSURL") or ""

    if not keyword:
        # Cannot build a job without a keyword -- park the task for a human.
        _handle_no_mapping(
            client, cfg, task,
            "Task has no Keyword field set. "
            "Set the Keyword custom field, then re-check Delegate to Claude.",
        )
        db.log_run_finish(run_id, "failed", error="Missing Keyword field")
        return

    # 1. Set status to "ai working"
    client.update_task_status(task.id, cfg.clickup.ai_working_status)

    # 2. Submit the job to the NAS queue
    job_id = submit_job(keyword, url, task.id, cfg.autocora.jobs_dir)

    if not job_id:
        # submit_job returns None when the job file could not be written
        # (typically an unmounted/unreachable NAS).
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error="Failed to write AutoCora job file to %s" % cfg.autocora.jobs_dir,
            fix="Check that the NAS is mounted and accessible, "
            "then re-check Delegate to Claude.",
        )
        return

    # 3. Store job metadata in state DB for result polling
    db.kv_set_json("autocora:job:%s" % job_id, {
        "task_id": task.id,
        "task_name": task.name,
        "keyword": keyword,
        "url": url,
        "run_id": run_id,
    })

    # 4. Post comment + uncheck delegate
    client.add_comment(
        task.id,
        "Cora job submitted for keyword: \"%s\" (job: %s).\n"
        "The runner will check for results automatically."
        % (keyword, job_id),
    )
    client.set_checkbox(
        task.id, task.list_id, cfg.clickup.delegate_field_name, False
    )

    # 5. Log as submitted (not completed -- that happens when results arrive)
    db.log_run_finish(run_id, "submitted", result="Job: %s" % job_id)

    notify(cfg, "Cora submitted: %s" % keyword, "Task: %s" % task.name)

    log.info(
        "AutoCora job submitted: %s (task=%s, keyword=%s)",
        job_id, task.id, keyword,
    )
|
||||
|
||||
|
||||
def _dispatch_blm(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
    task: ClickUpTask,
    route: SkillRoute,
    run_id: int,
):
    """Run BLM ingest-cora + generate-batch directly (no Claude needed).

    Synchronous pipeline: locate the Cora .xlsx for the task's keyword,
    ingest it into a BLM project, then run batch generation. On any step
    failure the task is parked via _handle_dispatch_error; on success the
    stage/status are advanced and a summary comment is posted.
    """
    keyword = task.get_field_value("Keyword") or ""
    url = task.get_field_value("IMSURL") or ""
    cli_flags = task.get_field_value("CLIFlags") or ""
    bp_ratio = task.get_field_value("BrandedPlusRatio") or ""
    custom_anchors = task.get_field_value("CustomAnchors") or ""

    if not keyword:
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error="Missing Keyword field",
            fix="Set the Keyword custom field, then re-check Delegate to Claude.",
        )
        return

    # 1. Set status to AI Working
    client.update_task_status(task.id, cfg.clickup.ai_working_status)

    # 2. Find the Cora xlsx
    xlsx_path = find_cora_xlsx(keyword, cfg.autocora.xlsx_dir)
    if not xlsx_path:
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error="No Cora xlsx found for keyword '%s' in %s"
            % (keyword, cfg.autocora.xlsx_dir),
            fix="Check that the Cora report exists in %s, then re-check Delegate to Claude."
            % cfg.autocora.xlsx_dir,
        )
        return

    log.info("Found Cora xlsx: %s", xlsx_path)
    client.add_comment(task.id, "Starting BLM pipeline for '%s'.\nCora file: %s" % (keyword, xlsx_path))

    # 3. Run ingest-cora
    log.info("Running ingest-cora for task %s (keyword=%s)", task.id, keyword)
    ingest = run_ingest(
        xlsx_path=xlsx_path,
        keyword=keyword,
        money_site_url=url,
        blm_dir=cfg.blm.blm_dir,
        timeout=cfg.blm.timeout_seconds,
        branded_plus_ratio=bp_ratio,
        custom_anchors=custom_anchors,
        cli_flags=cli_flags,
    )

    if not ingest.success:
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error="ingest-cora failed: %s" % ingest.error,
            fix="Check BLM logs, fix the issue, then re-check Delegate to Claude.",
        )
        return

    log.info(
        "ingest-cora OK: project=%s (ID=%s), job_file=%s",
        ingest.project_name, ingest.project_id, ingest.job_file,
    )

    # 4. Run generate-batch
    log.info("Running generate-batch for task %s (job=%s)", task.id, ingest.job_file)
    gen = run_generate(
        job_file=ingest.job_file,
        blm_dir=cfg.blm.blm_dir,
        timeout=cfg.blm.timeout_seconds,
    )

    if not gen.success:
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error="generate-batch failed: %s" % gen.error,
            fix="Check BLM logs, fix the issue, then re-check Delegate to Claude.",
        )
        return

    log.info("generate-batch OK: job moved to %s", gen.job_moved_to)

    # 5. Advance stage + post summary
    client.set_stage(
        task.id, task.list_id, route.next_stage, cfg.clickup.stage_field_name
    )
    client.update_task_status(task.id, route.next_status)

    summary = (
        "BLM pipeline completed for '%s'.\n\n"
        "- Project: %s (ID: %s)\n"
        "- Keyword: %s\n"
        "- Job file: %s"
    ) % (keyword, ingest.project_name, ingest.project_id,
         ingest.main_keyword, gen.job_moved_to or ingest.job_file)

    client.add_comment(task.id, summary)

    # 6. Clear error + uncheck delegate
    client.set_checkbox(
        task.id, task.list_id, cfg.clickup.error_field_name, False
    )
    client.set_checkbox(
        task.id, task.list_id, cfg.clickup.delegate_field_name, False
    )

    db.log_run_finish(run_id, "completed", result="BLM pipeline done")
    notify(cfg, "BLM done: %s" % keyword, "Task %s completed" % task.id)
    log.info("BLM pipeline completed for task %s (keyword=%s)", task.id, keyword)
|
||||
|
||||
|
||||
def _download_attachments(
    client: ClickUpClient,
    task: ClickUpTask,
    work_dir: Path,
) -> list[str]:
    """Fetch every attachment on *task* into *work_dir*.

    Attachments missing a title or URL are ignored; failed downloads are
    logged and skipped rather than aborting the batch.

    Returns:
        The local filenames (not full paths) that were saved.
    """
    saved: list[str] = []
    for attachment in task.attachments:
        name = attachment.get("title", "")
        link = attachment.get("url", "")
        if not (name and link):
            continue
        if client.download_attachment(link, work_dir / name):
            saved.append(name)
        else:
            log.warning("Skipping attachment %s -- download failed", name)

    if saved:
        log.info(
            "Downloaded %d attachment(s) to %s: %s",
            len(saved), work_dir, ", ".join(saved),
        )
    return saved
|
||||
|
||||
|
||||
def _dispatch_claude(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
    task: ClickUpTask,
    route: SkillRoute,
    run_id: int,
):
    """Run Claude Code headless for a task.

    Pipeline: mark the task as in progress, build a prompt from the
    route's skill file plus downloaded attachments, run Claude, upload
    the output files back to the task, advance stage/status, and clean
    up. Failures at any step park the task via _handle_dispatch_error.
    """
    # 1. Set status to "ai working"
    client.update_task_status(task.id, cfg.clickup.ai_working_status)

    # 2. Read skill file
    try:
        skill_content = read_skill_file(route, cfg.skills_dir)
    except FileNotFoundError as e:
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error=str(e),
            fix="Create the skill file at skills/%s, then re-check Delegate to Claude."
            % route.skill_file,
        )
        return

    # 3. Create work dir and download all attachments into it
    work_dir = Path(tempfile.mkdtemp(prefix="clickup_runner_"))
    downloaded_files = _download_attachments(client, task, work_dir)

    # 4. Build prompt (reference local filenames, not URLs)
    prompt = build_prompt(task, route, skill_content, downloaded_files)

    # 5. Run Claude
    log.info("Starting Claude for task %s (%s)", task.id, task.name)
    result = run_claude(
        prompt, route, cfg,
        work_dir=work_dir,
        exclude_files=set(downloaded_files),
    )

    if not result.success:
        _handle_dispatch_error(
            client, cfg, db, task, run_id,
            error=result.error,
            fix="Check logs/clickup_runner.log for details. "
            "Fix the issue, then re-check Delegate to Claude.",
        )
        # Clean up temp dir
        _cleanup_work_dir(result.work_dir)
        return

    # 5b. Fact-check PR files (Press Release only, graceful failure)
    # NOTE(review): a fact-check failure only logs a warning -- the task
    # still advances below. Confirm this leniency is intended.
    fc_status_lines: list[str] = []
    if task.task_type == "Press Release":
        log.info("Running adversarial fact-check for task %s", task.id)
        company = task.get_field_value("Client") or ""
        pr_topic = task.get_field_value("PR Topic") or ""
        pr_keyword = task.get_field_value("Keyword") or ""
        pr_url = task.get_field_value("IMSURL") or ""
        fc_status_lines, fc_failed = fact_check_pr_files(
            result.output_files,
            company_name=company,
            url=pr_url,
            topic=pr_topic,
            keyword=pr_keyword,
        )
        if fc_failed:
            log.warning("Fact-check had failures for task %s", task.id)

    # 6. Upload output files to ClickUp
    uploaded = 0
    for f in result.output_files:
        if client.upload_attachment(task.id, f):
            uploaded += 1

    # 7. Copy to NAS (best-effort)
    customer = task.get_field_value("Client") or ""
    if customer and cfg.nas.generated_dir:
        copy_to_nas(result.output_files, customer, cfg.nas.generated_dir)

    # 8. Advance stage + status
    client.set_stage(
        task.id, task.list_id, route.next_stage, cfg.clickup.stage_field_name
    )
    client.update_task_status(task.id, route.next_status)

    # 9. Post success comment
    summary = "Stage complete. %d file(s) attached." % uploaded
    if fc_status_lines:
        summary += "\n" + "\n".join(fc_status_lines)
    if result.output:
        # Include first 500 chars of Claude's output as context
        truncated = result.output[:500]
        if len(result.output) > 500:
            truncated += "..."
        summary += "\n\n---\nClaude output:\n%s" % truncated
    client.add_comment(task.id, summary)

    # 10. Uncheck delegate + clear error
    client.set_checkbox(
        task.id, task.list_id, cfg.clickup.delegate_field_name, False
    )
    client.set_checkbox(
        task.id, task.list_id, cfg.clickup.error_field_name, False
    )

    # 11. Log success
    db.log_run_finish(
        run_id, "completed",
        result="%d files uploaded" % uploaded,
    )

    # 12. Notify
    notify(cfg, "Task complete: %s" % task.name, summary)

    log.info(
        "Task %s completed: stage -> %s, %d file(s) uploaded",
        task.id, route.next_stage, uploaded,
    )

    # 13. Clean up temp dir
    _cleanup_work_dir(result.work_dir)
|
||||
|
||||
|
||||
def _handle_dispatch_error(
    client: ClickUpClient,
    cfg: Config,
    db: StateDB,
    task: ClickUpTask,
    run_id: int,
    error: str,
    fix: str,
):
    """Record a failed Claude dispatch on the task and notify.

    Posts an error comment with the failure and remediation steps, sets
    the error checkbox, clears the delegate checkbox, puts the task back
    into review, finishes the run log as failed, and sends an alert.
    """
    body = (
        "[ERROR] Claude processing failed\n"
        "--\n"
        "What failed: %s\n"
        "\n"
        "How to fix: %s"
    ) % (error, fix)
    client.add_comment(task.id, body)

    # Flag the error box and release the delegate box so a human can retry.
    client.set_checkbox(task.id, task.list_id, cfg.clickup.error_field_name, True)
    client.set_checkbox(task.id, task.list_id, cfg.clickup.delegate_field_name, False)
    client.update_task_status(task.id, cfg.clickup.review_status)

    db.log_run_finish(run_id, "failed", error=error)

    notify(
        cfg,
        "FAILED: %s" % task.name,
        "Error: %s\nFix: %s" % (error, fix),
        is_error=True,
    )
    log.error("Task %s failed: %s", task.id, error)
|
||||
|
||||
|
||||
def _cleanup_work_dir(work_dir):
    """Remove a temporary work directory; a None argument is a no-op.

    Every failure is swallowed -- cleanup must never take the runner down.
    """
    if work_dir is not None:
        try:
            import shutil

            shutil.rmtree(str(work_dir), ignore_errors=True)
        except Exception:
            pass  # best-effort cleanup -- never propagate
|
||||
|
||||
|
||||
def main():
    """Run the ClickUp Runner daemon.

    Loads config, validates required credentials, wires signal handlers
    for graceful shutdown, then polls ClickUp in a loop until the
    _shutdown flag is set by SIGINT/SIGTERM.
    """
    _setup_logging()
    log.info("ClickUp Runner starting up")

    cfg = load_config()

    # Fail fast if required credentials/IDs are missing.
    if not cfg.clickup.api_token:
        log.error("CLICKUP_API_TOKEN not set -- exiting")
        sys.exit(1)
    if not cfg.clickup.space_id:
        log.error("CLICKUP_SPACE_ID not set -- exiting")
        sys.exit(1)

    client = ClickUpClient(
        api_token=cfg.clickup.api_token,
        task_type_field_name=cfg.clickup.task_type_field_name,
    )
    db = StateDB(cfg.db_path)

    # Graceful shutdown on SIGINT/SIGTERM
    signal.signal(signal.SIGINT, _handle_signal)
    signal.signal(signal.SIGTERM, _handle_signal)

    log.info(
        "Runner ready. Polling every %ds. Space: %s",
        cfg.runner.poll_interval_seconds,
        cfg.clickup.space_id,
    )

    try:
        while not _shutdown:
            try:
                count = poll_cycle(client, cfg, db)
                if count:
                    log.info("Dispatched %d task(s) this cycle", count)
            except Exception:
                # A single bad cycle must not kill the daemon -- log and
                # keep polling.
                log.exception("Error in poll cycle")

            # Sleep in small increments so we can catch shutdown signal
            for _ in range(cfg.runner.poll_interval_seconds):
                if _shutdown:
                    break
                time.sleep(1)
    finally:
        client.close()
        log.info("ClickUp Runner shut down")
|
||||
|
||||
|
||||
# Script entry point -- only runs when executed directly, not on import.
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,179 +0,0 @@
|
|||
"""AutoCora job submission and result polling.
|
||||
|
||||
Submits Cora SEO analysis jobs to the NAS queue and polls for results.
|
||||
Jobs are JSON files written to the jobs directory; an external worker
|
||||
picks them up, runs Cora, and writes .result files to the results directory.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import shutil
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class CoraResult:
    """Parsed result from a .result file."""

    job_id: str           # result filename minus the .result extension
    status: str           # "SUCCESS" or "FAILURE"
    keyword: str          # keyword the job was run for (empty in legacy format)
    task_ids: list[str]   # ClickUp task ids attached to the job (empty in legacy format)
    reason: str           # failure reason, empty on success
    result_path: Path     # path to the .result file on disk
|
||||
|
||||
|
||||
def slugify(text: str, max_len: int = 80) -> str:
    """Convert text to a filesystem-safe slug.

    Lowercases the input, collapses every run of non-alphanumeric
    characters to a single hyphen, trims edge hyphens, and caps the
    length at *max_len* (re-trimming a trailing hyphen after the cut).

    Returns:
        The slug, or "unknown" when nothing usable remains.
    """
    slug = re.sub(r"[^a-z0-9]+", "-", text.lower().strip()).strip("-")
    if len(slug) > max_len:
        slug = slug[:max_len].rstrip("-")
    return slug or "unknown"
|
||||
|
||||
|
||||
def make_job_id(keyword: str) -> str:
    """Generate a unique job ID from the keyword and a millisecond timestamp."""
    millis = int(time.time() * 1000)
    return "job-%d-%s" % (millis, slugify(keyword))
|
||||
|
||||
|
||||
def submit_job(
    keyword: str,
    url: str,
    task_id: str,
    jobs_dir: str,
) -> str | None:
    """Write a job JSON file to the NAS jobs directory.

    Creates the directory if needed. The job payload carries the keyword,
    the target URL (falling back to a blank placeholder page), and the
    originating ClickUp task id.

    Returns:
        The job_id on success, None when the directory or file cannot
        be written (errors are logged, not raised).
    """
    queue_dir = Path(jobs_dir)
    try:
        queue_dir.mkdir(parents=True, exist_ok=True)
    except OSError as exc:
        log.error("Cannot access jobs directory %s: %s", jobs_dir, exc)
        return None

    job_id = make_job_id(keyword)
    payload = {
        "keyword": keyword,
        "url": url or "https://seotoollab.com/blank.html",
        "task_ids": [task_id],
    }
    destination = queue_dir / ("%s.json" % job_id)

    try:
        destination.write_text(
            json.dumps(payload, indent=2),
            encoding="utf-8",
        )
    except OSError as exc:
        log.error("Failed to write job file %s: %s", destination, exc)
        return None

    log.info("Submitted AutoCora job: %s (keyword=%s)", job_id, keyword)
    return job_id
|
||||
|
||||
|
||||
def parse_result_file(result_path: Path) -> CoraResult | None:
    """Parse a .result file written by the AutoCora worker.

    Accepts either a JSON object or the legacy plain-text
    "SUCCESS" / "FAILURE[: reason]" format.

    Returns:
        A CoraResult, or None when the file is unreadable, empty, or in
        an unrecognized format.
    """
    try:
        text = result_path.read_text(encoding="utf-8").strip()
    except OSError as exc:
        log.warning("Cannot read result file %s: %s", result_path, exc)
        return None

    if not text:
        log.warning("Empty result file: %s", result_path)
        return None

    # The job id is the filename minus the .result extension.
    job_id = result_path.stem

    # Preferred format: a JSON object. AttributeError covers JSON that
    # parses to a non-dict (no .get).
    try:
        payload = json.loads(text)
        return CoraResult(
            job_id=job_id,
            status=payload.get("status", "FAILURE"),
            keyword=payload.get("keyword", ""),
            task_ids=payload.get("task_ids", []),
            reason=payload.get("reason", ""),
            result_path=result_path,
        )
    except (json.JSONDecodeError, AttributeError):
        pass  # fall through to the legacy plain-text format

    # Legacy: file body starts with SUCCESS or FAILURE[: reason].
    if text.startswith("SUCCESS"):
        return CoraResult(
            job_id=job_id,
            status="SUCCESS",
            keyword="",
            task_ids=[],
            reason="",
            result_path=result_path,
        )
    if text.startswith("FAILURE"):
        detail = text.split(":", 1)[1].strip() if ":" in text else "Unknown"
        return CoraResult(
            job_id=job_id,
            status="FAILURE",
            keyword="",
            task_ids=[],
            reason=detail,
            result_path=result_path,
        )

    log.warning("Unrecognized result format in %s", result_path)
    return None
|
||||
|
||||
|
||||
def scan_results(results_dir: str) -> list[CoraResult]:
    """Scan the results directory for .result files and parse them.

    Files that fail to parse are skipped (parse_result_file logs them).

    Returns:
        Parsed results in sorted filename order; empty list when the
        directory does not exist.
    """
    root = Path(results_dir)
    if not root.exists():
        return []

    parsed = (parse_result_file(f) for f in sorted(root.glob("*.result")))
    return [r for r in parsed if r]
|
||||
|
||||
|
||||
def archive_result(result: CoraResult) -> bool:
    """Move a processed .result file into its processed/ sibling directory.

    Returns:
        True when the move succeeded, False otherwise (failure is logged).
    """
    src = result.result_path
    target_dir = src.parent / "processed"
    try:
        target_dir.mkdir(exist_ok=True)
        shutil.move(str(src), str(target_dir / src.name))
    except OSError as exc:
        log.warning("Failed to archive result %s: %s", src, exc)
        return False

    log.info("Archived result file: %s", src.name)
    return True
|
||||
|
|
@ -1,262 +0,0 @@
|
|||
"""Big-Link-Man CLI runner.
|
||||
|
||||
Runs ingest-cora and generate-batch via BLM's own venv Python.
|
||||
Ported from cheddahbot/tools/linkbuilding.py for headless use.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class IngestResult:
    """Parsed output from ingest-cora."""

    success: bool             # True when the project-created line was seen and exit code was 0
    project_id: str = ""      # numeric BLM project id (as a string)
    project_name: str = ""    # BLM project name as echoed by the CLI
    main_keyword: str = ""    # "Main Keyword:" value from stdout
    job_file: str = ""        # "Job file created:" path from stdout
    error: str = ""           # explanation when success is False
|
||||
|
||||
|
||||
@dataclass
class GenerateResult:
    """Parsed output from generate-batch."""

    success: bool          # True when the job-moved line was seen and exit code was 0
    job_moved_to: str = "" # "Job file moved to:" path from stdout
    error: str = ""        # explanation when success is False
|
||||
|
||||
|
||||
def _resolve_venv_python(blm_dir: str) -> Path:
    """Find BLM's venv Python executable (Windows layout first, then POSIX).

    Raises:
        FileNotFoundError: when neither .venv/Scripts/python.exe nor
            .venv/bin/python exists under *blm_dir*.
    """
    venv_root = Path(blm_dir) / ".venv"
    for candidate in (
        venv_root / "Scripts" / "python.exe",
        venv_root / "bin" / "python",
    ):
        if candidate.exists():
            return candidate
    raise FileNotFoundError(
        "No .venv found in %s. BLM must have its own venv." % blm_dir
    )
|
||||
|
||||
|
||||
def _run_blm(
    args: list[str], blm_dir: str, timeout: int = 1800
) -> subprocess.CompletedProcess:
    """Execute a BLM CLI command inside BLM's own venv.

    BLM_USERNAME / BLM_PASSWORD env vars are appended as -u / -p flags
    unless the caller already supplied them in *args*.
    """
    interpreter = _resolve_venv_python(blm_dir)
    cmd = [str(interpreter), "main.py"] + args

    user = os.getenv("BLM_USERNAME", "")
    secret = os.getenv("BLM_PASSWORD", "")
    if user and "-u" not in args and "--username" not in args:
        cmd.extend(["-u", user])
    if secret and "-p" not in args and "--password" not in args:
        cmd.extend(["-p", secret])

    # NOTE(review): the logged command line includes the -p password --
    # consider redacting before logging.
    log.info("BLM command: %s (cwd=%s)", " ".join(cmd), blm_dir)

    proc = subprocess.run(
        cmd,
        cwd=blm_dir,
        capture_output=True,
        text=True,
        timeout=timeout,
    )

    log.info("BLM exit code: %d", proc.returncode)
    if proc.stdout:
        log.debug("BLM stdout: %s", proc.stdout[:2000])
    if proc.stderr:
        log.debug("BLM stderr: %s", proc.stderr[:2000])
    return proc
|
||||
|
||||
|
||||
def find_cora_xlsx(keyword: str, cora_inbox: str) -> str | None:
|
||||
"""Find the Cora xlsx in the inbox directory by keyword match.
|
||||
|
||||
Looks for files whose name (slugified) matches the keyword.
|
||||
Returns the full path or None.
|
||||
"""
|
||||
inbox = Path(cora_inbox)
|
||||
if not inbox.exists():
|
||||
log.warning("Cora inbox not found: %s", cora_inbox)
|
||||
return None
|
||||
|
||||
# Slugify keyword for matching: lowercase, spaces -> underscores
|
||||
slug = keyword.lower().strip().replace(" ", "_")
|
||||
slug = re.sub(r"[^a-z0-9_]", "", slug)
|
||||
|
||||
# Look for exact match first, then prefix match
|
||||
for xlsx in sorted(inbox.glob("*.xlsx"), key=lambda p: p.stat().st_mtime, reverse=True):
|
||||
name_lower = xlsx.stem.lower()
|
||||
if name_lower == slug:
|
||||
return str(xlsx)
|
||||
|
||||
# Prefix match (keyword slug is prefix of filename)
|
||||
for xlsx in sorted(inbox.glob("*.xlsx"), key=lambda p: p.stat().st_mtime, reverse=True):
|
||||
name_lower = xlsx.stem.lower()
|
||||
if name_lower.startswith(slug):
|
||||
return str(xlsx)
|
||||
|
||||
log.warning("No xlsx matching '%s' in %s", keyword, cora_inbox)
|
||||
return None
|
||||
|
||||
|
||||
def build_ingest_args(
    xlsx_path: str,
    project_name: str,
    money_site_url: str = "",
    branded_plus_ratio: str = "",
    custom_anchors: str = "",
    cli_flags: str = "",
) -> list[str]:
    """Build the ingest-cora CLI argument list.

    Only non-empty options are emitted. A branded-plus ratio equal to
    the 0.7 default (or unparseable) is omitted; *cli_flags* is
    whitespace-split and appended verbatim.
    """
    args = ["ingest-cora", "-f", xlsx_path, "-n", project_name]

    if money_site_url:
        args += ["-m", money_site_url]

    if branded_plus_ratio:
        try:
            ratio = float(branded_plus_ratio)
        except (ValueError, TypeError):
            ratio = None
        if ratio is not None and ratio != 0.7:
            args += ["-bp", str(ratio)]

    if custom_anchors:
        args += ["-a", custom_anchors]

    if cli_flags:
        args += cli_flags.strip().split()

    return args
|
||||
|
||||
|
||||
def parse_ingest_output(stdout: str) -> IngestResult:
    """Extract project name/ID, job file, and main keyword from ingest-cora stdout.

    success is only set when the "Success: Project ... created" line is seen.
    """
    parsed = IngestResult(success=False)

    for raw_line in stdout.splitlines():
        text = raw_line.strip()

        created = re.match(r"^Success: Project '(.+)' created \(ID: (\d+)\)$", text)
        if created:
            parsed.project_name = created.group(1)
            parsed.project_id = created.group(2)
            parsed.success = True
            continue

        job = re.match(r"^Job file created: (.+)$", text)
        if job:
            parsed.job_file = job.group(1).strip()
            continue

        kw = re.match(r"^Main Keyword: (.+)$", text)
        if kw:
            parsed.main_keyword = kw.group(1).strip()
            continue

    return parsed
|
||||
|
||||
|
||||
def parse_generate_output(stdout: str) -> GenerateResult:
    """Extract the moved job-file path from generate-batch stdout.

    success is only set when the "Job file moved to:" line is seen.
    """
    parsed = GenerateResult(success=False)

    for raw_line in stdout.splitlines():
        moved = re.match(r"^Job file moved to: (.+)$", raw_line.strip())
        if moved:
            parsed.job_moved_to = moved.group(1).strip()
            parsed.success = True

    return parsed
|
||||
|
||||
|
||||
def run_ingest(
    xlsx_path: str,
    keyword: str,
    money_site_url: str,
    blm_dir: str,
    timeout: int = 1800,
    branded_plus_ratio: str = "",
    custom_anchors: str = "",
    cli_flags: str = "",
) -> IngestResult:
    """Run ingest-cora and return the parsed outcome.

    Timeouts, a missing venv, a non-zero exit code, and a missing job
    file are all reported as IngestResult(success=False) with an
    explanatory error string -- nothing propagates to the caller.
    """
    cli_args = build_ingest_args(
        xlsx_path=xlsx_path,
        project_name=keyword,
        money_site_url=money_site_url,
        branded_plus_ratio=branded_plus_ratio,
        custom_anchors=custom_anchors,
        cli_flags=cli_flags,
    )

    try:
        proc = _run_blm(cli_args, blm_dir, timeout=timeout)
    except subprocess.TimeoutExpired:
        return IngestResult(
            success=False,
            error="ingest-cora timed out after %d seconds" % timeout,
        )
    except FileNotFoundError as exc:
        # Raised by _resolve_venv_python when BLM has no venv.
        return IngestResult(success=False, error=str(exc))

    if proc.returncode != 0:
        return IngestResult(
            success=False,
            error="ingest-cora failed (exit code %d).\nstdout: %s\nstderr: %s"
            % (proc.returncode, proc.stdout[-500:], proc.stderr[-500:]),
        )

    parsed = parse_ingest_output(proc.stdout)
    if not parsed.job_file:
        # generate-batch needs the job file; treat its absence as failure.
        return IngestResult(
            success=False,
            error="ingest-cora produced no job file.\nstdout: %s" % proc.stdout[-500:],
        )
    return parsed
|
||||
|
||||
|
||||
def run_generate(
    job_file: str,
    blm_dir: str,
    timeout: int = 1800,
) -> GenerateResult:
    """Run generate-batch and return the parsed outcome.

    A relative *job_file* is resolved against *blm_dir*. Timeouts, a
    missing venv, and non-zero exits are reported as
    GenerateResult(success=False) rather than raised.
    """
    job = Path(job_file)
    if not job.is_absolute():
        job = Path(blm_dir) / job_file
    cli_args = ["generate-batch", "-j", str(job), "--continue-on-error"]

    try:
        proc = _run_blm(cli_args, blm_dir, timeout=timeout)
    except subprocess.TimeoutExpired:
        return GenerateResult(
            success=False,
            error="generate-batch timed out after %d seconds" % timeout,
        )
    except FileNotFoundError as exc:
        return GenerateResult(success=False, error=str(exc))

    if proc.returncode != 0:
        return GenerateResult(
            success=False,
            error="generate-batch failed (exit code %d).\nstdout: %s\nstderr: %s"
            % (proc.returncode, proc.stdout[-500:], proc.stderr[-500:]),
        )

    return parse_generate_output(proc.stdout)
|
||||
|
|
@ -1,334 +0,0 @@
|
|||
"""Claude Code subprocess runner.
|
||||
|
||||
Builds prompts from skill files + task context, runs `claude -p`,
|
||||
collects output files, and returns structured results.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from .clickup_client import ClickUpTask
|
||||
from .config import Config
|
||||
from .skill_map import SkillRoute
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class RunResult:
    """Outcome of a Claude Code run."""

    success: bool                                      # True when the run completed without error
    output: str = ""                                   # captured Claude output text
    error: str = ""                                    # explanation when success is False
    output_files: list[Path] = field(default_factory=list)  # files Claude produced in the work dir
    work_dir: Path | None = None                       # temp dir used for the run (caller cleans up)
|
||||
|
||||
|
||||
def build_prompt(
    task: ClickUpTask,
    route: SkillRoute,
    skill_content: str,
    attachment_filenames: list[str] | None = None,
) -> str:
    """Assemble the prompt sent to `claude -p`.

    Structure:
    1. Skill file content (system-level instructions)
    2. Task context block (name, description, customer, URL, attachments)

    Args:
        attachment_filenames: Local filenames (not paths) of attachments
            already downloaded to the working directory.
    """
    # -- Task context block --
    ctx: list[str] = [
        "",
        "---",
        "## Task Context",
        "",
        "Task: %s" % task.name,
    ]

    client_name = task.get_field_value("Client")
    if client_name:
        ctx.append("Client: %s" % client_name)

    ims_url = task.get_field_value("IMSURL")
    if ims_url:
        ctx.append("Target URL: %s" % ims_url)

    if task.url:
        ctx.append("ClickUp Task: %s" % task.url)

    # Task-type-specific fields; the label matches the field name, and
    # blank/missing values are skipped.
    for field_name in (
        "Keyword",
        "CLIFlags",
        "BrandedPlusRatio",
        "CustomAnchors",
        "PR Topic",
    ):
        value = task.get_field_value(field_name)
        if value:
            ctx.append("%s: %s" % (field_name, value))

    if task.description:
        ctx.append("")
        ctx.append("### Description")
        ctx.append(task.description.strip())

    if attachment_filenames:
        ctx.append("")
        ctx.append("### Attached Files (in working directory)")
        ctx.extend("- %s" % fname for fname in attachment_filenames)

    # Tell Claude where to write output.
    ctx.append("")
    ctx.append(
        "### Output Instructions"
    )
    ctx.append(
        "Write all output files to the current working directory. "
        "Do NOT create subdirectories."
    )

    # Skill instructions first, then the context block.
    return "\n\n".join([skill_content.strip(), "\n".join(ctx)])
|
||||
|
||||
|
||||
def _collect_output_files(
    work_dir: Path,
    exclude: set[str] | None = None,
) -> list[Path]:
    """Return all files Claude created in the working directory.

    Args:
        exclude: Set of filenames to skip (e.g. downloaded attachments
            that were in the dir before Claude ran).
    """
    if not work_dir.exists():
        return []
    skip = exclude or set()
    # Sorted by filename so ordering is deterministic across filesystems.
    return sorted(
        (entry for entry in work_dir.iterdir()
         if entry.is_file() and entry.name not in skip),
        key=lambda entry: entry.name,
    )
|
||||
|
||||
|
||||
def run_claude(
    prompt: str,
    route: SkillRoute,
    cfg: Config,
    work_dir: Path | None = None,
    exclude_files: set[str] | None = None,
) -> RunResult:
    """Run `claude -p` as a subprocess and return the result.

    Args:
        prompt: The assembled prompt string.
        route: SkillRoute with tools and max_turns.
        cfg: Runner config (timeout, etc.).
        work_dir: Directory for Claude to write files into.
            If None, a temp directory is created.
        exclude_files: Filenames to exclude from output_files
            (e.g. pre-existing attachments).

    Returns:
        RunResult carrying the success flag, captured stdout, an error
        description on failure, and any files left in the working directory.
    """
    if work_dir is None:
        # NOTE(review): the temp dir is handed back via RunResult.work_dir and
        # is never removed here -- the caller owns cleanup. (The previous code
        # tracked this in an unused `created_tmp` flag; removed as dead code.)
        work_dir = Path(tempfile.mkdtemp(prefix="clickup_runner_"))

    cmd = [
        "claude",
        "-p",
        prompt,
        "--output-format", "text",
        "--permission-mode", "bypassPermissions",
    ]

    if route.tools:
        cmd.extend(["--allowedTools", route.tools])

    if route.max_turns:
        cmd.extend(["--max-turns", str(route.max_turns)])

    log.info(
        "Running claude: tools=%s, max_turns=%d, timeout=%ds, work_dir=%s",
        route.tools or "(all)",
        route.max_turns,
        cfg.runner.claude_timeout_seconds,
        work_dir,
    )

    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=cfg.runner.claude_timeout_seconds,
            cwd=str(work_dir),
        )

        stdout = result.stdout or ""
        stderr = result.stderr or ""

        if result.returncode != 0:
            log.error(
                "Claude exited with code %d.\nstderr: %s",
                result.returncode,
                stderr[:2000],
            )
            # Still collect whatever files were produced before the failure.
            return RunResult(
                success=False,
                output=stdout,
                error="Claude exited with code %d: %s"
                % (result.returncode, stderr[:2000]),
                output_files=_collect_output_files(work_dir, exclude_files),
                work_dir=work_dir,
            )

        output_files = _collect_output_files(work_dir, exclude_files)
        log.info(
            "Claude completed successfully. %d output file(s).", len(output_files)
        )

        return RunResult(
            success=True,
            output=stdout,
            error="",
            output_files=output_files,
            work_dir=work_dir,
        )

    except subprocess.TimeoutExpired:
        log.error(
            "Claude timed out after %ds", cfg.runner.claude_timeout_seconds
        )
        # Partial outputs may exist even on timeout; report them.
        return RunResult(
            success=False,
            output="",
            error="Claude timed out after %d seconds"
            % cfg.runner.claude_timeout_seconds,
            output_files=_collect_output_files(work_dir, exclude_files),
            work_dir=work_dir,
        )
    except FileNotFoundError:
        log.error("claude CLI not found on PATH")
        return RunResult(
            success=False,
            output="",
            error="claude CLI not found on PATH. Is Claude Code installed?",
            work_dir=work_dir,
        )
|
||||
|
||||
|
||||
def copy_to_nas(
    files: list[Path],
    customer: str,
    nas_dir: str,
) -> list[Path]:
    """Best-effort copy of output files to NAS.

    Returns list of successfully copied paths.
    """
    # Both pieces of the destination path are required.
    if not nas_dir or not customer:
        return []

    target_dir = Path(nas_dir) / customer
    try:
        target_dir.mkdir(parents=True, exist_ok=True)
    except OSError as e:
        log.warning("Cannot create NAS directory %s: %s", target_dir, e)
        return []

    copied: list[Path] = []
    for src in files:
        destination = target_dir / src.name
        try:
            shutil.copy2(str(src), str(destination))
        except OSError as e:
            # Best-effort: a single failed copy doesn't abort the rest.
            log.warning("Failed to copy %s to NAS: %s", src.name, e)
            continue
        copied.append(destination)
        log.info("Copied %s to NAS: %s", src.name, destination)

    return copied
|
||||
|
||||
|
||||
def notify(
    cfg: Config,
    title: str,
    message: str,
    is_error: bool = False,
) -> None:
    """Send a notification via ntfy.sh (best-effort).

    Errors and successes are routed to separate topics; if the relevant
    topic is not configured, the call is a no-op.
    """
    topic = cfg.ntfy.error_topic if is_error else cfg.ntfy.success_topic
    if not topic:
        return

    url = "%s/%s" % (cfg.ntfy.server.rstrip("/"), topic)
    try:
        # Local import so the module works even if httpx is unavailable;
        # any ImportError is swallowed by the broad except below.
        import httpx as _httpx

        _httpx.post(
            url,
            # ascii-with-replacement -- presumably to keep the payload and
            # header values transport-safe for ntfy. TODO confirm.
            content=message.encode("ascii", errors="replace"),
            headers={
                "Title": title.encode("ascii", errors="replace").decode("ascii"),
                "Priority": "high" if is_error else "default",
            },
            timeout=10.0,
        )
        log.info("Sent ntfy notification to %s", topic)
    except Exception as e:
        # Best-effort by design: notification failure must never break the run.
        log.warning("Failed to send ntfy notification: %s", e)
|
||||
|
||||
|
||||
def _strip_frontmatter(text: str) -> str:
    """Remove YAML frontmatter (--- ... ---) from a markdown file.

    Returns *text* unchanged when it does not open with ``---`` or when no
    closing delimiter is found.
    """
    if not text.startswith("---"):
        return text
    # BUGFIX: the closing "---" must start a line. The previous
    # text.find("---", 3) matched a "---" embedded inside a frontmatter
    # value (e.g. "v: a---b"), truncating the file at the wrong place.
    end = text.find("\n---", 3)
    if end == -1:
        return text
    # Skip the newline plus the closing "---", then any delimiter-line EOL.
    return text[end + 4:].lstrip("\r\n")
|
||||
|
||||
|
||||
def read_skill_file(route: SkillRoute, skills_dir: Path) -> str:
    """Read a skill .md file and return its content.

    Strips YAML frontmatter (skill registry metadata) since it's not
    useful as prompt content and the leading --- breaks `claude -p`.

    Raises FileNotFoundError if the skill file doesn't exist.
    """
    path = skills_dir / route.skill_file
    if not path.exists():
        raise FileNotFoundError("Skill file not found: %s" % path)
    return _strip_frontmatter(path.read_text(encoding="utf-8"))
|
||||
|
|
@ -1,508 +0,0 @@
|
|||
"""ClickUp REST API client for the runner.
|
||||
|
||||
Adapted from cheddahbot/clickup.py -- stripped to what the runner needs,
|
||||
with additions for checkbox, stage dropdown, and attachment operations.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
BASE_URL = "https://api.clickup.com/api/v2"
|
||||
|
||||
|
||||
@dataclass
class ClickUpTask:
    """Lightweight representation of a ClickUp task."""

    id: str
    name: str
    status: str  # lower-cased status label, e.g. "review"
    description: str = ""
    task_type: str = ""  # Work Category value
    url: str = ""
    due_date: str = ""  # Unix-ms timestamp string, or ""
    # Field name -> resolved value (dropdowns resolved to option labels).
    custom_fields: dict[str, Any] = field(default_factory=dict)
    # Untouched custom_fields payload from the API, kept for callers that
    # need field ids / type_config.
    custom_fields_raw: list[dict] = field(default_factory=list)
    list_id: str = ""
    list_name: str = ""
    folder_name: str = ""
    tags: list[str] = field(default_factory=list)
    # NOTE: not populated by from_api -- filled in separately (presumably
    # via the attachments endpoint); verify against callers.
    attachments: list[dict] = field(default_factory=list)

    @classmethod
    def from_api(
        cls, data: dict, task_type_field_name: str = "Work Category"
    ) -> ClickUpTask:
        """Parse a task from the ClickUp API response.

        Args:
            data: One task object from the ClickUp v2 API.
            task_type_field_name: Custom field whose value becomes task_type.
        """
        custom_fields: dict[str, Any] = {}
        custom_fields_raw = data.get("custom_fields", [])
        task_type = ""

        for cf in custom_fields_raw:
            cf_name = cf.get("name", "")
            cf_value = cf.get("value")

            # Resolve dropdown type_config to label: the raw value may be an
            # integer orderindex or an option id -- match either.
            if cf.get("type") == "drop_down" and cf_value is not None:
                options = cf.get("type_config", {}).get("options", [])
                order_index = cf_value if isinstance(cf_value, int) else None
                for opt in options:
                    if (
                        order_index is not None
                        and opt.get("orderindex") == order_index
                    ) or opt.get("id") == cf_value:
                        cf_value = opt.get("name", cf_value)
                        break

            custom_fields[cf_name] = cf_value
            if cf_name == task_type_field_name:
                task_type = str(cf_value) if cf_value else ""

        status_name = data.get("status", {}).get("status", "unknown")
        raw_due = data.get("due_date")
        due_date = str(raw_due) if raw_due else ""
        tags = [tag["name"] for tag in data.get("tags", [])]

        # Folder name from list -> folder if available ("folder" may be
        # present but null/empty in the payload).
        folder_data = data.get("folder", {})
        folder_name = folder_data.get("name", "") if folder_data else ""

        return cls(
            id=data["id"],
            name=data.get("name", ""),
            status=status_name.lower(),
            description=data.get("description", "") or "",
            task_type=task_type,
            url=data.get("url", ""),
            due_date=due_date,
            custom_fields=custom_fields,
            custom_fields_raw=custom_fields_raw,
            list_id=data.get("list", {}).get("id", ""),
            list_name=data.get("list", {}).get("name", ""),
            folder_name=folder_name,
            tags=tags,
        )

    def get_field_value(self, field_name: str) -> Any:
        """Get a custom field value by name (None when absent)."""
        return self.custom_fields.get(field_name)

    def has_xlsx_attachment(self) -> bool:
        """Check if this task has an .xlsx attachment.

        Checks both the attachment title and its URL, case-insensitively.
        """
        return any(
            a.get("title", "").lower().endswith(".xlsx")
            or a.get("url", "").lower().endswith(".xlsx")
            for a in self.attachments
        )
|
||||
|
||||
|
||||
class ClickUpClient:
    """ClickUp REST API v2 client for the runner.

    Thin wrapper around a shared httpx.Client with retry-on-5xx helpers,
    read operations (tasks, folders, attachments, custom fields) and write
    operations (status, comments, attachments, custom field values).
    """

    def __init__(self, api_token: str, task_type_field_name: str = "Work Category"):
        self._token = api_token
        self._task_type_field_name = task_type_field_name
        self._client = httpx.Client(
            base_url=BASE_URL,
            headers={
                "Authorization": api_token,
                "Content-Type": "application/json",
            },
            timeout=30.0,
        )
        # Cache: field_name -> {field_id, options} per list_id
        self._field_cache: dict[str, dict[str, Any]] = {}

    def close(self):
        # Release the underlying HTTP connection pool.
        self._client.close()

    # ── Retry helper ──

    @staticmethod
    def _retry(fn, max_attempts: int = 3, backoff: float = 2.0):
        """Retry on 5xx / transport errors with exponential backoff.

        4xx responses are re-raised immediately (retrying won't help).
        After max_attempts failures the last exception is re-raised.
        """
        last_exc: Exception | None = None
        for attempt in range(1, max_attempts + 1):
            try:
                return fn()
            except (httpx.TransportError, httpx.HTTPStatusError) as e:
                # Client errors (<500) are not transient -- fail fast.
                if (
                    isinstance(e, httpx.HTTPStatusError)
                    and e.response.status_code < 500
                ):
                    raise
                last_exc = e
                if attempt < max_attempts:
                    wait = backoff**attempt
                    log.warning(
                        "Retry %d/%d after %.1fs: %s",
                        attempt,
                        max_attempts,
                        wait,
                        e,
                    )
                    time.sleep(wait)
        raise last_exc  # type: ignore[misc]

    # ── Read ──

    def get_tasks(
        self,
        list_id: str,
        statuses: list[str] | None = None,
        due_date_lt: int | None = None,
        due_date_gt: int | None = None,
        include_closed: bool = False,
    ) -> list[ClickUpTask]:
        """Fetch tasks from a list with optional filters.

        Args:
            statuses: Restrict to these status names (repeated query param).
            due_date_lt / due_date_gt: Unix-ms bounds on the due date.
        """
        params: dict[str, Any] = {
            "include_closed": "true" if include_closed else "false",
            "subtasks": "true",
        }
        if statuses:
            for s in statuses:
                params.setdefault("statuses[]", [])
                if isinstance(params["statuses[]"], list):
                    params["statuses[]"].append(s)
        if due_date_lt is not None:
            params["due_date_lt"] = str(due_date_lt)
        if due_date_gt is not None:
            params["due_date_gt"] = str(due_date_gt)

        # httpx needs repeated params as list of tuples
        param_list = []
        for k, v in params.items():
            if isinstance(v, list):
                for item in v:
                    param_list.append((k, item))
            else:
                param_list.append((k, v))

        resp = self._client.get(f"/list/{list_id}/task", params=param_list)
        resp.raise_for_status()
        tasks_data = resp.json().get("tasks", [])
        return [
            ClickUpTask.from_api(t, self._task_type_field_name) for t in tasks_data
        ]

    def get_task(self, task_id: str) -> ClickUpTask:
        """Fetch a single task by ID."""
        resp = self._client.get(f"/task/{task_id}")
        resp.raise_for_status()
        return ClickUpTask.from_api(resp.json(), self._task_type_field_name)

    def get_folders(self, space_id: str) -> list[dict]:
        """Return folders in a space with their lists.

        Each entry: {"id", "name", "lists": [{"id", "name"}, ...]}.
        """
        resp = self._client.get(f"/space/{space_id}/folder")
        resp.raise_for_status()
        folders = []
        for f in resp.json().get("folders", []):
            lists = [
                {"id": lst["id"], "name": lst["name"]} for lst in f.get("lists", [])
            ]
            folders.append({"id": f["id"], "name": f["name"], "lists": lists})
        return folders

    def get_tasks_from_overall_lists(
        self,
        space_id: str,
        due_date_lt: int | None = None,
    ) -> list[ClickUpTask]:
        """Fetch tasks from all 'Overall' lists in each folder.

        Does NOT filter by status -- we need all tasks so we can check
        the Delegate to Claude checkbox ourselves.
        """
        all_tasks: list[ClickUpTask] = []
        overall_ids: list[str] = []

        try:
            folders = self.get_folders(space_id)
            for folder in folders:
                for lst in folder["lists"]:
                    # Case-insensitive match on the list name "Overall".
                    if lst["name"].lower() == "overall":
                        overall_ids.append(lst["id"])
        except httpx.HTTPStatusError as e:
            log.warning("Failed to fetch folders for space %s: %s", space_id, e)
            return []

        for list_id in overall_ids:
            try:
                tasks = self.get_tasks(
                    list_id, due_date_lt=due_date_lt
                )
                all_tasks.extend(tasks)
            except httpx.HTTPStatusError as e:
                # One bad list doesn't abort the sweep.
                log.warning("Failed to fetch tasks from list %s: %s", list_id, e)

        log.info(
            "Found %d tasks across %d Overall lists",
            len(all_tasks),
            len(overall_ids),
        )
        return all_tasks

    def get_task_attachments(self, task_id: str) -> list[dict]:
        """Get attachments for a task.

        Returns list of dicts with keys: id, title, url, date, etc.
        NOTE: Requires ClickUp Business plan or higher.
        Returns [] on any HTTP error.
        """
        try:
            resp = self._client.get(f"/task/{task_id}/attachment")
            resp.raise_for_status()
            return resp.json().get("attachments", [])
        except httpx.HTTPStatusError as e:
            if e.response.status_code == 401:
                log.warning(
                    "Attachment listing not available (may require Business plan)"
                )
            else:
                log.warning(
                    "Failed to get attachments for task %s: %s", task_id, e
                )
            return []

    def download_attachment(self, url: str, dest: Path) -> bool:
        """Download a ClickUp attachment to a local file.

        ClickUp attachment URLs are pre-signed S3 URLs that don't need
        auth headers, so we use a plain httpx request (not the API client).

        Returns True on success, False on failure.
        """
        try:
            # Stream to disk in 8 KiB chunks to avoid buffering large files.
            with httpx.stream("GET", url, follow_redirects=True, timeout=60.0) as resp:
                resp.raise_for_status()
                with open(dest, "wb") as f:
                    for chunk in resp.iter_bytes(chunk_size=8192):
                        f.write(chunk)
            log.info("Downloaded attachment to %s", dest)
            return True
        except Exception as e:
            log.warning("Failed to download attachment from %s: %s", url, e)
            return False

    def get_custom_fields(self, list_id: str) -> list[dict]:
        """Get custom field definitions for a list ([] on error)."""
        try:
            resp = self._client.get(f"/list/{list_id}/field")
            resp.raise_for_status()
            return resp.json().get("fields", [])
        except httpx.HTTPStatusError as e:
            log.error("Failed to get custom fields for list %s: %s", list_id, e)
            return []

    # ── Write ──

    def update_task_status(self, task_id: str, status: str) -> bool:
        """Update a task's status. Returns True on success."""
        try:

            def _call():
                resp = self._client.put(
                    f"/task/{task_id}", json={"status": status}
                )
                resp.raise_for_status()
                return resp

            self._retry(_call)
            log.info("Updated task %s status to '%s'", task_id, status)
            return True
        except (httpx.TransportError, httpx.HTTPStatusError) as e:
            log.error("Failed to update task %s status: %s", task_id, e)
            return False

    def add_comment(self, task_id: str, text: str) -> bool:
        """Add a comment to a task. Returns True on success."""
        try:

            def _call():
                resp = self._client.post(
                    f"/task/{task_id}/comment",
                    json={"comment_text": text},
                )
                resp.raise_for_status()
                return resp

            self._retry(_call)
            log.info("Added comment to task %s", task_id)
            return True
        except (httpx.TransportError, httpx.HTTPStatusError) as e:
            log.error("Failed to add comment to task %s: %s", task_id, e)
            return False

    def upload_attachment(self, task_id: str, file_path: str | Path) -> bool:
        """Upload a file attachment to a task.

        Uses module-level httpx.post() because the shared client sets
        Content-Type: application/json which conflicts with multipart.

        Returns True on success, False if the file is missing or the
        upload fails.
        """
        fp = Path(file_path)
        if not fp.exists():
            log.warning("Attachment file not found: %s", fp)
            return False
        try:

            def _call():
                # Re-open per attempt so retries re-read from the start.
                with open(fp, "rb") as f:
                    resp = httpx.post(
                        f"{BASE_URL}/task/{task_id}/attachment",
                        headers={"Authorization": self._token},
                        files={
                            "attachment": (
                                fp.name,
                                f,
                                "application/octet-stream",
                            )
                        },
                        timeout=60.0,
                    )
                resp.raise_for_status()
                return resp

            self._retry(_call)
            log.info("Uploaded attachment %s to task %s", fp.name, task_id)
            return True
        except (httpx.TransportError, httpx.HTTPStatusError) as e:
            log.warning("Failed to upload attachment to task %s: %s", task_id, e)
            return False

    def set_custom_field_value(
        self, task_id: str, field_id: str, value: Any
    ) -> bool:
        """Set a custom field value by field ID. Returns True on success."""
        try:

            def _call():
                resp = self._client.post(
                    f"/task/{task_id}/field/{field_id}",
                    json={"value": value},
                )
                resp.raise_for_status()
                return resp

            self._retry(_call)
            log.info("Set field %s on task %s", field_id, task_id)
            return True
        except (httpx.TransportError, httpx.HTTPStatusError) as e:
            log.error(
                "Failed to set field %s on task %s: %s", field_id, task_id, e
            )
            return False

    def _resolve_field(
        self, list_id: str, field_name: str
    ) -> dict[str, Any] | None:
        """Look up a custom field's ID and options by name.

        Returns {"field_id": "...", "type": "...", "options": {...}} or None.
        Caches per list_id:field_name.
        """
        cache_key = f"{list_id}:{field_name}"
        if cache_key in self._field_cache:
            return self._field_cache[cache_key]

        fields = self.get_custom_fields(list_id)
        for f in fields:
            if f.get("name") == field_name:
                # For dropdowns, map option name -> option id for later lookup.
                options: dict[str, str] = {}
                if f.get("type") == "drop_down":
                    for opt in f.get("type_config", {}).get("options", []):
                        opt_name = opt.get("name", "")
                        opt_id = opt.get("id", "")
                        if opt_name and opt_id:
                            options[opt_name] = opt_id
                result = {
                    "field_id": f["id"],
                    "type": f.get("type", ""),
                    "options": options,
                }
                self._field_cache[cache_key] = result
                return result

        # NOTE: negative lookups are not cached -- each miss re-fetches.
        log.warning("Field '%s' not found in list %s", field_name, list_id)
        return None

    def set_field_by_name(
        self, task_id: str, list_id: str, field_name: str, value: Any
    ) -> bool:
        """Set a custom field by name, auto-resolving dropdown UUIDs.

        For dropdowns, value is matched against option names (case-insensitive).
        For checkboxes, value should be True/False (sent as "true"/"false").
        """
        info = self._resolve_field(list_id, field_name)
        if not info:
            return False

        resolved = value

        if info["type"] == "drop_down" and isinstance(value, str):
            # for/else: the else branch runs only when no option matched.
            for opt_name, opt_id in info["options"].items():
                if opt_name.lower() == value.lower():
                    resolved = opt_id
                    break
            else:
                log.warning(
                    "Dropdown option '%s' not found for field '%s'",
                    value,
                    field_name,
                )
                return False

        return self.set_custom_field_value(task_id, info["field_id"], resolved)

    # ── Convenience: checkbox operations ──

    def set_checkbox(
        self, task_id: str, list_id: str, field_name: str, checked: bool
    ) -> bool:
        """Set a checkbox custom field to checked (True) or unchecked (False)."""
        info = self._resolve_field(list_id, field_name)
        if not info:
            return False
        # ClickUp checkbox API expects "true" or "false" string
        return self.set_custom_field_value(
            task_id, info["field_id"], "true" if checked else "false"
        )

    def is_checkbox_checked(self, task: ClickUpTask, field_name: str) -> bool:
        """Check if a checkbox field is checked on a task.

        ClickUp checkbox values come back as True/False or "true"/"false".
        """
        val = task.custom_fields.get(field_name)
        if val is None:
            return False
        if isinstance(val, bool):
            return val
        if isinstance(val, str):
            return val.lower() == "true"
        return bool(val)

    # ── Convenience: stage operations ──

    def get_stage(self, task: ClickUpTask, field_name: str = "Stage") -> str:
        """Get the current stage value from a task (lower-cased, "" if unset)."""
        val = task.custom_fields.get(field_name)
        return str(val).lower().strip() if val else ""

    def set_stage(
        self,
        task_id: str,
        list_id: str,
        stage_value: str,
        field_name: str = "Stage",
    ) -> bool:
        """Set the Stage dropdown on a task."""
        return self.set_field_by_name(task_id, list_id, field_name, stage_value)
|
||||
|
|
@ -1,128 +0,0 @@
|
|||
"""Configuration loader: env vars -> config.yaml -> defaults."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
from dotenv import load_dotenv
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
ROOT_DIR = Path(__file__).resolve().parent.parent
|
||||
load_dotenv(ROOT_DIR / ".env")
|
||||
|
||||
|
||||
@dataclass
class ClickUpConfig:
    """ClickUp credentials, custom-field names, and status strings."""

    api_token: str = ""
    space_id: str = ""
    task_type_field_name: str = "Work Category"
    # Custom field names (must match ClickUp exactly)
    delegate_field_name: str = "Delegate to Claude"
    stage_field_name: str = "Stage"
    error_field_name: str = "Error"
    # Statuses
    ai_working_status: str = "ai working"
    review_status: str = "review"
    client_review_status: str = "client review"
    complete_status: str = "complete"
|
||||
|
||||
|
||||
@dataclass
class AutoCoraConfig:
    """AutoCora job/result directories (UNC paths on the NAS) and polling."""

    jobs_dir: str = "//PennQnap1/SHARE1/AutoCora/jobs"
    results_dir: str = "//PennQnap1/SHARE1/AutoCora/results"
    xlsx_dir: str = "//PennQnap1/SHARE1/Cora-For-Human"
    poll_interval_seconds: int = 120
|
||||
|
||||
|
||||
@dataclass
class BLMConfig:
    """Big-Link-Man checkout location and subprocess timeout."""

    blm_dir: str = "E:/dev/Big-Link-Man"
    timeout_seconds: int = 1800  # 30 minutes
|
||||
|
||||
|
||||
@dataclass
class NASConfig:
    """NAS destination for generated output files."""

    generated_dir: str = "//PennQnap1/SHARE1/generated"
|
||||
|
||||
|
||||
@dataclass
class RunnerConfig:
    """Polling cadence and Claude subprocess limits."""

    poll_interval_seconds: int = 720
    claude_timeout_seconds: int = 2700  # 45 minutes
    max_turns_default: int = 10
    temp_dir: str = ""  # empty = system temp
|
||||
|
||||
|
||||
@dataclass
class NtfyConfig:
    """ntfy.sh notification settings; topics empty = notifications disabled."""

    enabled: bool = False
    server: str = "https://ntfy.sh"
    error_topic: str = ""
    success_topic: str = ""
|
||||
|
||||
|
||||
@dataclass
class Config:
    """Top-level runner configuration aggregating all sections."""

    clickup: ClickUpConfig = field(default_factory=ClickUpConfig)
    autocora: AutoCoraConfig = field(default_factory=AutoCoraConfig)
    blm: BLMConfig = field(default_factory=BLMConfig)
    nas: NASConfig = field(default_factory=NASConfig)
    runner: RunnerConfig = field(default_factory=RunnerConfig)
    ntfy: NtfyConfig = field(default_factory=NtfyConfig)

    # Derived paths (anchored at the repository root, not the CWD)
    root_dir: Path = field(default_factory=lambda: ROOT_DIR)
    skills_dir: Path = field(default_factory=lambda: ROOT_DIR / "skills")
    db_path: Path = field(default_factory=lambda: ROOT_DIR / "data" / "clickup_runner.db")
|
||||
|
||||
|
||||
def _apply_section(cfg_obj, data: dict):
    """Apply a dict of values to a dataclass instance, skipping unknown keys."""
    for key, value in data.items():
        # Silently ignore keys that don't correspond to a known setting.
        if not hasattr(cfg_obj, key):
            continue
        setattr(cfg_obj, key, value)
|
||||
|
||||
|
||||
def load_config(yaml_path: Path | None = None) -> Config:
    """Load config from env vars -> config.yaml -> defaults.

    Precedence (highest first): environment variables, then values from
    the YAML file, then dataclass defaults.

    Args:
        yaml_path: Optional explicit config file; defaults to
            clickup_runner.yaml next to the package root.
    """
    cfg = Config()

    # Load YAML if exists
    if yaml_path is None:
        yaml_path = ROOT_DIR / "clickup_runner.yaml"
    if yaml_path.exists():
        with open(yaml_path) as f:
            data = yaml.safe_load(f) or {}

        # Each top-level YAML section maps onto the matching Config attribute.
        for section_name in ("clickup", "autocora", "blm", "nas", "runner", "ntfy"):
            if section_name in data and isinstance(data[section_name], dict):
                _apply_section(getattr(cfg, section_name), data[section_name])

    # Env var overrides
    if token := os.getenv("CLICKUP_API_TOKEN"):
        cfg.clickup.api_token = token
    if space := os.getenv("CLICKUP_SPACE_ID"):
        cfg.clickup.space_id = space

    # ntfy topics from env vars
    if topic := os.getenv("NTFY_ERROR_TOPIC"):
        cfg.ntfy.error_topic = topic
    if topic := os.getenv("NTFY_SUCCESS_TOPIC"):
        cfg.ntfy.success_topic = topic

    # Validate required fields -- warn rather than raise, so offline tools
    # that don't poll ClickUp can still load the config.
    if not cfg.clickup.api_token:
        log.warning("CLICKUP_API_TOKEN not set -- runner will not be able to poll")
    if not cfg.clickup.space_id:
        log.warning("CLICKUP_SPACE_ID not set -- runner will not be able to poll")

    # Ensure data dir exists
    cfg.db_path.parent.mkdir(parents=True, exist_ok=True)

    return cfg
|
||||
|
|
@ -1,220 +0,0 @@
|
|||
"""Adversarial fact-checker for press release outputs.
|
||||
|
||||
Runs a second Claude Code pass on generated PR text files to catch
|
||||
factual errors. Treats all client-provided data (company name, titles,
|
||||
URLs, topic) as ground truth and only corrects claims the PR inferred
|
||||
or fabricated beyond what was given.
|
||||
|
||||
Graceful failure: any error returns the original text untouched.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
FACT_CHECK_MODEL = "sonnet"
|
||||
FACT_CHECK_TIMEOUT = 300 # 5 minutes per PR
|
||||
|
||||
|
||||
def build_fact_check_prompt(
    pr_text: str,
    company_name: str,
    url: str,
    topic: str,
    keyword: str,
) -> str:
    """Assemble the adversarial fact-check prompt for one press release.

    The client-supplied fields (company, URL, topic, keyword) are embedded
    as ground truth the reviewer must never "correct"; only externally
    verifiable claims are fair game. The output contract ([NO_ERRORS] /
    [CORRECTED] + CHANGES:) is what apply_fact_check() parses.
    """
    ground_truth = (
        f" - Company name: {company_name}\n"
        f" - Target URL: {url}\n"
        f" - Topic: {topic}\n"
        f" - Keyword: {keyword}\n"
        " - Any person names, titles, quotes, or contact details in the PR\n"
        " - Any product names, service names, or brand names\n"
        " - The overall framing, angle, and tone of the PR\n\n"
    )
    checks = (
        "WHAT TO CHECK (use WebSearch/WebFetch to verify):\n"
        " - Industry statistics or market size claims\n"
        " - Historical dates or facts\n"
        " - Technical specifications not sourced from the client data\n"
        " - General knowledge claims (e.g. 'X is the leading cause of Y')\n"
        " - Geographic or regulatory facts\n\n"
    )
    rules = (
        "RULES:\n"
        " - ONLY fix actual factual errors -- wrong numbers, wrong dates, wrong facts\n"
        " - Do NOT add content, remove content, restructure, or 'improve' anything\n"
        " - Do NOT change tone, style, word choice, or sentence structure\n"
        " - Do NOT suggest additions or enhancements\n"
        " - Make the MINIMUM change needed to fix each error\n"
        " - Preserve the exact formatting, paragraph breaks, and headline\n\n"
    )
    output_format = (
        "OUTPUT FORMAT:\n"
        " - If you find NO factual errors: output exactly [NO_ERRORS] and nothing else\n"
        " - If you find errors: output [CORRECTED] on the first line, then the full "
        "corrected PR text (preserving all formatting), then a blank line, then "
        "CHANGES: followed by a numbered list of what you changed and why\n\n"
    )
    return (
        "You are a factual accuracy reviewer for press releases. Your ONLY job is to "
        "find and correct statements that are factually wrong. You are NOT an editor.\n\n"
        "GROUND TRUTH -- the following data was provided by the client and is correct "
        "by definition. Do NOT change, question, or 'correct' any of it, even if your "
        "web search suggests something different:\n"
        + ground_truth
        + checks
        + rules
        + output_format
        + "Press release to review:\n"
        + "---\n"
        + f"{pr_text}\n"
        + "---"
    )
|
||||
|
||||
|
||||
def apply_fact_check(raw_output: str, original_text: str) -> tuple[str, str, str]:
    """Interpret the fact-checker's raw stdout.

    Returns (text, status, changes) where status is one of "clean",
    "corrected", or "skipped". Any empty, unparseable, or suspicious
    output falls back to the original text untouched.
    """
    trimmed = (raw_output or "").strip()
    if not trimmed:
        return original_text, "skipped", ""

    if trimmed.startswith("[NO_ERRORS]"):
        # Reviewer found nothing to fix -- keep the text as-is.
        return original_text, "clean", ""

    if not trimmed.startswith("[CORRECTED]"):
        # Neither sentinel present -> unparseable; leave the PR untouched.
        return original_text, "skipped", ""

    payload = trimmed[len("[CORRECTED]"):].strip()

    # Separate corrected text from the trailing change log, if any.
    text_part, marker, change_log = payload.partition("\nCHANGES:")
    corrected = text_part.strip()
    change_log = change_log.strip() if marker else ""

    if not corrected:
        return original_text, "skipped", ""

    # Safety valve: the checker must spot-fix, not rewrite. Reject any
    # result whose word count drifts from the original by more than 15%.
    baseline_wc = len(original_text.split())
    revised_wc = len(corrected.split())
    if baseline_wc > 0 and abs(revised_wc - baseline_wc) / baseline_wc > 0.15:
        log.warning(
            "Fact-check rejected: word count changed too much "
            "(%d -> %d, %.0f%% delta)",
            baseline_wc, revised_wc, abs(revised_wc - baseline_wc) / baseline_wc * 100,
        )
        return original_text, "skipped", "rejected -- word count delta too large"

    return corrected, "corrected", change_log
|
||||
|
||||
|
||||
def fact_check_pr_files(
    output_files: list[Path],
    company_name: str,
    url: str,
    topic: str,
    keyword: str,
    timeout: int = FACT_CHECK_TIMEOUT,
) -> tuple[list[str], bool]:
    """Run fact-check on .txt PR files in the output list.

    Each .txt file is fed to the ``claude`` CLI with the adversarial
    review prompt; when corrections come back, the file is rewritten
    in place. Every failure path is non-fatal: the original file is
    left untouched and the result line asks for manual review.

    Args:
        output_files: Candidate deliverables; only ``.txt`` files are checked.
        company_name: Client company name (ground truth in the prompt).
        url: Target URL (ground truth in the prompt).
        topic: PR topic (ground truth in the prompt).
        keyword: Target keyword (ground truth in the prompt).
        timeout: Per-PR subprocess timeout in seconds.

    Returns:
        (status_lines, any_failed) where status_lines is a list of
        human-readable results per PR, and any_failed is True if the
        fact-checker could not run on at least one PR.
    """
    claude_bin = shutil.which("claude")
    if not claude_bin:
        # Graceful degradation: without the CLI nothing is checked,
        # but any_failed=True flags the run for manual review.
        log.warning("Fact-check: claude CLI not found, skipping")
        return ["Fact-check: claude CLI not found, skipped"], True

    txt_files = [f for f in output_files if f.suffix == ".txt"]
    # Skip non-PR files like "Headlines Evaluation.md"
    # PR files are the .txt files (the actual press releases)
    if not txt_files:
        return [], False

    status_lines: list[str] = []
    any_failed = False

    for i, txt_file in enumerate(txt_files):
        label = "PR %s" % chr(65 + i) # PR A, PR B, etc.
        try:
            original = txt_file.read_text(encoding="utf-8")
            if not original.strip():
                # Empty file -> nothing to verify, no status line emitted.
                continue

            prompt = build_fact_check_prompt(
                original, company_name, url, topic, keyword
            )

            # Headless Claude invocation. Only web tools are allowed, so
            # the checker returns text on stdout rather than editing files.
            cmd = [
                claude_bin,
                "-p", prompt,
                "--output-format", "text",
                "--permission-mode", "bypassPermissions",
                "--allowedTools", "WebSearch,WebFetch",
                "--max-turns", "10",
                "--model", FACT_CHECK_MODEL,
            ]

            log.info("Fact-checking %s: %s", label, txt_file.name)
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=timeout,
                cwd=str(txt_file.parent),  # run from the PR's own folder
            )

            if result.returncode != 0:
                # Non-zero exit: record failure and move on to the next PR.
                log.warning(
                    "Fact-check %s failed (exit %d): %s",
                    label, result.returncode, (result.stderr or "")[:500],
                )
                status_lines.append(
                    "Fact-check %s: could not run -- manual review recommended" % label
                )
                any_failed = True
                continue

            # Parse the [NO_ERRORS]/[CORRECTED] protocol; falls back to
            # the original text on anything unexpected.
            corrected, status, changes = apply_fact_check(result.stdout, original)

            if status == "corrected":
                # Write corrections back in place, preserving the filename.
                txt_file.write_text(corrected, encoding="utf-8")
                log.info("Fact-check %s: corrections applied", label)
                line = "Fact-check %s: corrections applied" % label
                if changes:
                    line += "\n %s" % changes
                status_lines.append(line)
            elif status == "clean":
                log.info("Fact-check %s: no errors found", label)
                status_lines.append("Fact-check %s: no errors found" % label)
            else:
                # "skipped" -- unparseable or rejected output; file untouched.
                log.warning("Fact-check %s: skipped (unparseable output)", label)
                status_lines.append(
                    "Fact-check %s: could not run -- manual review recommended" % label
                )
                any_failed = True

        except subprocess.TimeoutExpired:
            log.warning("Fact-check %s timed out after %ds", label, timeout)
            status_lines.append(
                "Fact-check %s: timed out -- manual review recommended" % label
            )
            any_failed = True
        except Exception as e:
            # Catch-all keeps one bad PR from aborting the whole batch.
            log.warning("Fact-check %s error: %s", label, e)
            status_lines.append(
                "Fact-check %s: could not run -- manual review recommended" % label
            )
            any_failed = True

    return status_lines, any_failed
|
||||
|
|
@ -1,128 +0,0 @@
|
|||
"""Skill routing: task_type + stage -> skill configuration.
|
||||
|
||||
Each entry maps a (task_type, stage) pair to either:
|
||||
- A Claude Code skill (skill_file, tools, max_turns)
|
||||
- An AutoCora handler (handler="autocora")
|
||||
|
||||
To add a new task type: add an entry here + write the skill .md file.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SkillRoute:
    """One step in a task's pipeline.

    Describes which handler executes the step and where the task moves
    (next stage + ClickUp status) once the step completes.
    """

    next_stage: str  # Stage the task advances to after this step
    next_status: str  # status the task is set to after this step
    handler: str = "claude"  # "claude", "autocora", or "blm" (see SKILL_MAP)
    skill_file: str = ""  # path relative to skills_dir; only set on claude routes
    tools: str = ""  # comma-separated Claude Code --allowedTools
    max_turns: int = 10  # turn budget for the Claude Code invocation
|
||||
|
||||
|
||||
# Tools commonly needed for content work
_CONTENT_TOOLS = "Read,Edit,Write,Bash,Glob,Grep,WebFetch,WebSearch"
_LINK_TOOLS = "Read,Edit,Write,Bash,Glob,Grep"  # same set minus web tools; not referenced by SKILL_MAP below


# Routing table: task type -> stage name -> SkillRoute.
# Stage keys are compared lowercased and stripped by get_route().
SKILL_MAP: dict[str, dict[str, SkillRoute]] = {
    # Pipeline: run_cora -> outline -> draft -> final
    "Content Creation": {
        "run_cora": SkillRoute(
            handler="autocora",
            next_stage="outline",
            next_status="review",
        ),
        "outline": SkillRoute(
            skill_file="content_outline.md",
            next_stage="draft",
            next_status="review",
            tools=_CONTENT_TOOLS,
            max_turns=20,
        ),
        "draft": SkillRoute(
            skill_file="content_draft.md",
            next_stage="final",
            next_status="review",
            tools=_CONTENT_TOOLS,
            max_turns=30,
        ),
    },
    # Same as Content Creation, with an extra "hidden div" step after draft.
    "On Page Optimization": {
        "run_cora": SkillRoute(
            handler="autocora",
            next_stage="outline",
            next_status="review",
        ),
        "outline": SkillRoute(
            skill_file="content_outline.md",
            next_stage="draft",
            next_status="review",
            tools=_CONTENT_TOOLS,
            max_turns=20,
        ),
        "draft": SkillRoute(
            skill_file="content_draft.md",
            next_stage="hidden div",
            next_status="review",
            tools=_CONTENT_TOOLS,
            max_turns=30,
        ),
        "hidden div": SkillRoute(
            skill_file="content_hidden_div.md",
            next_stage="final",
            next_status="review",
            tools=_CONTENT_TOOLS,
            max_turns=15,
        ),
    },
    # Single Claude step straight to final.
    "Press Release": {
        "draft": SkillRoute(
            skill_file="press_release_prompt.md",
            next_stage="final",
            next_status="review",
            tools=_CONTENT_TOOLS,
            max_turns=25,
        ),
    },
    # Cora report first, then the "blm" handler builds the links.
    "Link Building": {
        "run_cora": SkillRoute(
            handler="autocora",
            next_stage="build",
            next_status="review",
        ),
        "build": SkillRoute(
            handler="blm",
            next_stage="final",
            next_status="complete",
        ),
    },
}
|
||||
|
||||
|
||||
def get_route(task_type: str, stage: str) -> SkillRoute | None:
    """Look up the skill route for a task type + stage.

    Stage matching is case-insensitive and whitespace-tolerant.
    Returns None if no mapping exists.
    """
    stage_key = stage.strip().lower()
    routes_for_type = SKILL_MAP.get(task_type) or {}
    return routes_for_type.get(stage_key)
|
||||
|
||||
|
||||
def get_valid_stages(task_type: str) -> list[str]:
    """Return the list of valid stage names for a task type.

    Unknown task types yield an empty list.
    """
    stages = SKILL_MAP.get(task_type) or {}
    return [name for name in stages]
|
||||
|
||||
|
||||
def get_supported_task_types() -> list[str]:
    """Return all supported task type names."""
    return [task_type for task_type in SKILL_MAP]
|
||||
|
|
@ -1,135 +0,0 @@
|
|||
"""Minimal SQLite persistence for the runner.
|
||||
|
||||
Just a KV store for tracking processed tasks and AutoCora jobs,
|
||||
plus a run log for auditing.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
import threading
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
class StateDB:
|
||||
"""Thread-safe SQLite KV store + run log."""
|
||||
|
||||
def __init__(self, db_path: Path):
|
||||
self._path = db_path
|
||||
self._local = threading.local()
|
||||
self._init_schema()
|
||||
|
||||
@property
|
||||
def _conn(self) -> sqlite3.Connection:
|
||||
if not hasattr(self._local, "conn"):
|
||||
self._local.conn = sqlite3.connect(str(self._path))
|
||||
self._local.conn.row_factory = sqlite3.Row
|
||||
self._local.conn.execute("PRAGMA journal_mode=WAL")
|
||||
return self._local.conn
|
||||
|
||||
def _init_schema(self):
|
||||
self._conn.executescript("""
|
||||
CREATE TABLE IF NOT EXISTS kv_store (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS run_log (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
task_id TEXT NOT NULL,
|
||||
task_name TEXT NOT NULL,
|
||||
task_type TEXT NOT NULL,
|
||||
stage TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
started_at TEXT NOT NULL,
|
||||
finished_at TEXT,
|
||||
result TEXT,
|
||||
error TEXT
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_run_log_task
|
||||
ON run_log(task_id, started_at);
|
||||
""")
|
||||
self._conn.commit()
|
||||
|
||||
# ── KV Store ──
|
||||
|
||||
def kv_set(self, key: str, value: str):
|
||||
self._conn.execute(
|
||||
"INSERT OR REPLACE INTO kv_store (key, value) VALUES (?, ?)",
|
||||
(key, value),
|
||||
)
|
||||
self._conn.commit()
|
||||
|
||||
def kv_get(self, key: str) -> str | None:
|
||||
row = self._conn.execute(
|
||||
"SELECT value FROM kv_store WHERE key = ?", (key,)
|
||||
).fetchone()
|
||||
return row["value"] if row else None
|
||||
|
||||
def kv_set_json(self, key: str, data: Any):
|
||||
self.kv_set(key, json.dumps(data))
|
||||
|
||||
def kv_get_json(self, key: str) -> Any | None:
|
||||
raw = self.kv_get(key)
|
||||
if raw is None:
|
||||
return None
|
||||
return json.loads(raw)
|
||||
|
||||
def kv_scan(self, prefix: str) -> list[tuple[str, str]]:
|
||||
"""Return all KV pairs where key starts with prefix."""
|
||||
rows = self._conn.execute(
|
||||
"SELECT key, value FROM kv_store WHERE key LIKE ?",
|
||||
(prefix + "%",),
|
||||
).fetchall()
|
||||
return [(r["key"], r["value"]) for r in rows]
|
||||
|
||||
def kv_delete(self, key: str):
|
||||
self._conn.execute("DELETE FROM kv_store WHERE key = ?", (key,))
|
||||
self._conn.commit()
|
||||
|
||||
# ── Run Log ──
|
||||
|
||||
def log_run_start(
|
||||
self,
|
||||
task_id: str,
|
||||
task_name: str,
|
||||
task_type: str,
|
||||
stage: str,
|
||||
) -> int:
|
||||
"""Log the start of a task run. Returns the run log ID."""
|
||||
now = _now()
|
||||
cur = self._conn.execute(
|
||||
"""INSERT INTO run_log
|
||||
(task_id, task_name, task_type, stage, status, started_at)
|
||||
VALUES (?, ?, ?, ?, 'running', ?)""",
|
||||
(task_id, task_name, task_type, stage, now),
|
||||
)
|
||||
self._conn.commit()
|
||||
return cur.lastrowid # type: ignore[return-value]
|
||||
|
||||
def log_run_finish(
|
||||
self, run_id: int, status: str, result: str | None = None, error: str | None = None
|
||||
):
|
||||
"""Update a run log entry with the outcome."""
|
||||
now = _now()
|
||||
self._conn.execute(
|
||||
"""UPDATE run_log
|
||||
SET status = ?, finished_at = ?, result = ?, error = ?
|
||||
WHERE id = ?""",
|
||||
(status, now, result, error, run_id),
|
||||
)
|
||||
self._conn.commit()
|
||||
|
||||
def get_recent_runs(self, limit: int = 20) -> list[dict]:
|
||||
"""Get the most recent run log entries."""
|
||||
rows = self._conn.execute(
|
||||
"SELECT * FROM run_log ORDER BY started_at DESC LIMIT ?",
|
||||
(limit,),
|
||||
).fetchall()
|
||||
return [dict(r) for r in rows]
|
||||
|
||||
|
||||
def _now() -> str:
|
||||
return datetime.now(UTC).isoformat()
|
||||
|
|
@ -139,15 +139,6 @@ ntfy:
|
|||
priority: high
|
||||
tags: clipboard
|
||||
|
||||
# Google Drive (for client deliverables)
|
||||
google_drive:
|
||||
root_folder_id: "" # Set to your "CheddahBot Deliverables" folder ID
|
||||
enabled: false
|
||||
|
||||
# Gmail API (for draft creation and reply tracking)
|
||||
gmail_api:
|
||||
enabled: false
|
||||
|
||||
# Multi-agent configuration
|
||||
# Each agent gets its own personality, tool whitelist, and memory scope.
|
||||
# The first agent is the default. Omit this section for single-agent mode.
|
||||
|
|
|
|||
|
|
@ -5,8 +5,7 @@
|
|||
```bash
|
||||
uv run python scripts/create_clickup_task.py --name "LINKS - keyword" --client "Client Name" \
|
||||
--category "Link Building" --due-date 2026-03-18 --tag mar26 --time-estimate 2h \
|
||||
--field "Keyword=keyword" --field "IMSURL=https://example.com" --field "LB Method=Cora Backlinks" \
|
||||
--dependency TASK_ID
|
||||
--field "Keyword=keyword" --field "IMSURL=https://example.com" --field "LB Method=Cora Backlinks"
|
||||
```
|
||||
|
||||
## Defaults
|
||||
|
|
@ -21,23 +20,11 @@ uv run python scripts/create_clickup_task.py --name "LINKS - keyword" --client "
|
|||
|
||||
Any field can be set via `--field "Name=Value"`. Dropdowns are auto-resolved by name (case-insensitive).
|
||||
|
||||
## Stage Field (Required for ClickUp Runner)
|
||||
|
||||
Every task must have a **Stage** set or the runner will skip it. Set Stage to the first stage for the task type:
|
||||
|
||||
| Task Type | Initial Stage |
|
||||
|-----------|---------------|
|
||||
| Content Creation | `run_cora` |
|
||||
| On Page Optimization | `run_cora` |
|
||||
| Press Release | `draft` |
|
||||
| Link Building | `run_cora` |
|
||||
|
||||
## Task Types
|
||||
|
||||
### Link Building
|
||||
- **Prefix**: `LINKS - {keyword}`
|
||||
- **Work Category**: "Link Building"
|
||||
- **Stage**: `run_cora`
|
||||
- **Required fields**: Keyword, IMSURL
|
||||
- **LB Method**: default "Cora Backlinks"
|
||||
- **CLIFlags**: only add `--tier1-count N` when count is specified
|
||||
|
|
@ -48,7 +35,6 @@ Every task must have a **Stage** set or the runner will skip it. Set Stage to th
|
|||
### On Page Optimization
|
||||
- **Prefix**: `OPT - {keyword}`
|
||||
- **Work Category**: "On Page Optimization"
|
||||
- **Stage**: `run_cora`
|
||||
- **Required fields**: Keyword, IMSURL
|
||||
- **time estimate**: 3h
|
||||
-
|
||||
|
|
@ -56,33 +42,20 @@ Every task must have a **Stage** set or the runner will skip it. Set Stage to th
|
|||
### Content Creation
|
||||
- **Prefix**: `CREATE - {keyword}`
|
||||
- **Work Category**: "Content Creation"
|
||||
- **Stage**: `run_cora`
|
||||
- **Required fields**: Keyword
|
||||
- **time estimate**: 4h
|
||||
|
||||
### Press Release
|
||||
- **Prefix**: `PR - {keyword}`
|
||||
- **Work Category**: "Press Release"
|
||||
- **Stage**: `draft`
|
||||
- **Required fields**: Keyword, IMSURL
|
||||
- **Work Category**: "Press Release"
|
||||
- **PR Topic**: if not provided, ask if there is a topic. it can be blank if they respond with none.
|
||||
- **time estimate**: 1.5h
|
||||
- **Headline tone trigger words**: By default, the PR writer assumes the company already offers the capability (awareness tone). To get announcement-style headlines (Announces, Launches, Introduces), include one of these words in the task name or PR Topic:
|
||||
- **Introduces** -- company is introducing something new
|
||||
- **New Product** -- new product or service launch
|
||||
- **Launch** -- launching something new
|
||||
- **Actual News** -- explicit override for announcement tone
|
||||
|
||||
## Chat Tool
|
||||
|
||||
The `clickup_create_task` tool provides the same capabilities via CheddahBot UI. Arbitrary custom fields are passed as JSON via `custom_fields_json`.
|
||||
|
||||
## Dependencies and Due Dates
|
||||
|
||||
When a CREATE (NEW) task exists, LINKS and PR tasks that need the new page URL should be **blocked by** the CREATE task using `--dependency`. Their due date should be **one week after** the CREATE task's due date. For example, if CREATE is due 4/9, the LINKS and PR tasks should be due 4/16.
|
||||
|
||||
OPT tasks already have a URL, so LINKS paired with an OPT does **not** need a dependency -- just the +1 week due date offset.
|
||||
|
||||
## Client Folder Lookup
|
||||
|
||||
Tasks are created in the "Overall" list inside the client's folder. Folder name is matched case-insensitively.
|
||||
|
|
|
|||
|
|
@ -19,8 +19,6 @@ dependencies = [
|
|||
"jinja2>=3.1.6",
|
||||
"python-multipart>=0.0.22",
|
||||
"sse-starlette>=3.3.3",
|
||||
"google-auth-oauthlib>=1.3.1",
|
||||
"google-api-python-client>=2.194.0",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
|
|
|
|||
|
|
@ -1,253 +0,0 @@
|
|||
"""Poll Gmail for client replies and update ClickUp tasks.
|
||||
|
||||
Searches for email threads containing "Ref: CU-<task_id>" where the latest
|
||||
message is from someone other than the bot. Posts the reply text as a ClickUp
|
||||
comment and moves the task to "After Client Feedback" status.
|
||||
|
||||
Usage:
|
||||
uv run python scripts/check_gmail_replies.py # normal run
|
||||
uv run python scripts/check_gmail_replies.py --dry-run # preview only
|
||||
uv run python scripts/check_gmail_replies.py --since 48 # look back 48 hours
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from email.utils import parseaddr
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
from googleapiclient.discovery import build # noqa: E402
|
||||
|
||||
from cheddahbot.clickup import ClickUpClient # noqa: E402
|
||||
from cheddahbot.google_auth import get_credentials # noqa: E402
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
STATE_FILE = Path("data/gmail_sync_state.json")
|
||||
REF_PATTERN = re.compile(r"Ref:\s*CU-(\w+)")
|
||||
AFTER_FEEDBACK_STATUS = "after client feedback"
|
||||
|
||||
|
||||
def _load_state() -> dict:
    """Load sync state from disk; empty dict when missing or corrupt."""
    if not STATE_FILE.exists():
        return {}
    try:
        return json.loads(STATE_FILE.read_text())
    except (json.JSONDecodeError, OSError):
        log.warning("Corrupt state file, starting fresh")
        return {}
|
||||
|
||||
|
||||
def _save_state(state: dict) -> None:
    """Persist sync state to disk, creating the parent dir as needed."""
    STATE_FILE.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(state, indent=2)
    STATE_FILE.write_text(serialized)
|
||||
|
||||
|
||||
def _get_message_body(payload: dict) -> str:
|
||||
"""Extract plain text body from a Gmail message payload."""
|
||||
# Simple single-part message
|
||||
if payload.get("mimeType") == "text/plain" and "data" in payload.get("body", {}):
|
||||
return base64.urlsafe_b64decode(payload["body"]["data"]).decode("utf-8", errors="replace")
|
||||
|
||||
# Multipart message -- find the text/plain part
|
||||
for part in payload.get("parts", []):
|
||||
if part.get("mimeType") == "text/plain" and "data" in part.get("body", {}):
|
||||
return base64.urlsafe_b64decode(part["body"]["data"]).decode("utf-8", errors="replace")
|
||||
# Nested multipart
|
||||
if part.get("parts"):
|
||||
result = _get_message_body(part)
|
||||
if result:
|
||||
return result
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def _extract_reply_text(body: str) -> str:
|
||||
"""Extract just the new reply content (above the quoted text).
|
||||
|
||||
Looks for common reply markers like "On ... wrote:" and takes everything above.
|
||||
Falls back to the full body if no marker is found.
|
||||
"""
|
||||
# Common reply markers
|
||||
markers = [
|
||||
r"\nOn .+ wrote:\s*\n", # Gmail-style
|
||||
r"\n-{3,}\s*Original Message\s*-{3,}", # Outlook-style
|
||||
r"\nFrom:\s+.+\nSent:\s+", # Outlook-style
|
||||
]
|
||||
for marker in markers:
|
||||
match = re.search(marker, body, re.IGNORECASE)
|
||||
if match:
|
||||
reply = body[: match.start()].strip()
|
||||
if reply:
|
||||
return reply
|
||||
|
||||
return body.strip()
|
||||
|
||||
|
||||
def main():
    """Poll Gmail for client replies and sync them into ClickUp.

    Finds inbox threads whose bodies contain "Ref: CU-<task_id>" and whose
    latest message is from someone other than the authenticated account,
    posts the reply as a ClickUp comment, and moves the task to the
    "after client feedback" status. State (last check time + processed
    message IDs) is persisted via _save_state unless --dry-run is given.
    """
    parser = argparse.ArgumentParser(description="Check Gmail for client replies")
    parser.add_argument("--dry-run", action="store_true", help="Preview without making changes")
    parser.add_argument("--since", type=int, help="Look back N hours (overrides state file)")
    parser.add_argument("--verbose", action="store_true", help="Enable debug logging")
    args = parser.parse_args()

    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    # Determine lookback time
    # NOTE(review): `--since 0` is falsy and falls through to the state
    # file / 24h default -- confirm that is intended.
    state = _load_state()
    if args.since:
        since = datetime.now(UTC) - timedelta(hours=args.since)
    elif "last_check" in state:
        since = datetime.fromisoformat(state["last_check"])
    else:
        # First run with no state: default to the last 24 hours.
        since = datetime.now(UTC) - timedelta(hours=24)

    processed_ids = set(state.get("processed_message_ids", []))
    # Gmail's `after:` operator takes a date, so the cutoff is day-granular.
    since_str = since.strftime("%Y/%m/%d")

    log.info("Checking for replies since %s", since.isoformat())

    # Authenticate and build Gmail service
    creds = get_credentials()
    gmail = build("gmail", "v1", credentials=creds)

    # Get the authenticated user's email for filtering out our own messages
    profile = gmail.users().getProfile(userId="me").execute()
    my_email = profile.get("emailAddress", "").lower()
    log.info("Authenticated as %s", my_email)

    # Search for threads with Ref: CU- pattern
    query = 'in:inbox "Ref: CU-" after:%s' % since_str
    log.info("Gmail search query: %s", query)

    threads_result = gmail.users().threads().list(userId="me", q=query).execute()
    threads = threads_result.get("threads", [])
    log.info("Found %d candidate threads", len(threads))

    # Initialize ClickUp client (None when the token is missing -- the
    # loop then only reports, it cannot post comments or change status).
    clickup_token = os.environ.get("CLICKUP_API_TOKEN", "")
    cu_client = ClickUpClient(api_token=clickup_token) if clickup_token else None

    stats = {"replies_found": 0, "comments_posted": 0, "statuses_updated": 0, "errors": 0}

    for thread_info in threads:
        thread_id = thread_info["id"]
        thread = gmail.users().threads().get(userId="me", id=thread_id).execute()
        messages = thread.get("messages", [])

        if not messages:
            continue

        # Get the latest message in the thread
        latest = messages[-1]
        msg_id = latest["id"]

        # Skip if already processed
        if msg_id in processed_ids:
            continue

        # Check if the latest message is from someone else (not us)
        headers = {h["name"].lower(): h["value"] for h in latest.get("payload", {}).get("headers", [])}
        from_addr = parseaddr(headers.get("from", ""))[1].lower()
        if from_addr == my_email:
            continue  # Our own message, skip

        # Extract the body and find the Ref tag
        body = _get_message_body(latest.get("payload", {}))
        if not body:
            # Check earlier messages in thread for the Ref tag
            # NOTE(review): on this path the latest body was already empty,
            # so re-extracting it below yields "" again and the posted
            # comment will carry empty reply text -- confirm intended.
            for msg in reversed(messages[:-1]):
                body_check = _get_message_body(msg.get("payload", {}))
                ref_match = REF_PATTERN.search(body_check)
                if ref_match:
                    body = _get_message_body(latest.get("payload", {}))
                    task_id = ref_match.group(1)
                    break
            else:
                # No Ref tag anywhere in the thread -> not ours.
                continue
        else:
            # Search the entire thread for the Ref tag (might be in quoted text or earlier message)
            task_id = None
            for msg in messages:
                msg_body = _get_message_body(msg.get("payload", {}))
                ref_match = REF_PATTERN.search(msg_body)
                if ref_match:
                    task_id = ref_match.group(1)
                    break

            if not task_id:
                continue

        reply_text = _extract_reply_text(body)
        reply_date = headers.get("date", "unknown")
        subject = headers.get("subject", "(no subject)")

        stats["replies_found"] += 1
        print("\n--- Reply Found ---")
        print(" From: %s" % from_addr)
        print(" Subject: %s" % subject)
        print(" Date: %s" % reply_date)
        print(" Task: CU-%s" % task_id)
        print(" Reply preview: %s..." % reply_text[:200])

        if not args.dry_run and cu_client:
            # Post comment to ClickUp
            try:
                comment = "Email reply from %s (%s):\n\n%s" % (from_addr, reply_date, reply_text)
                cu_client.add_comment(task_id, comment)
                stats["comments_posted"] += 1
                print(" -> Comment posted to ClickUp")
            except Exception as e:
                log.error("Failed to post comment for task %s: %s", task_id, e)
                stats["errors"] += 1
                continue  # Don't update status or mark processed if comment failed

            # Update task status
            try:
                cu_client.update_task_status(task_id, AFTER_FEEDBACK_STATUS)
                stats["statuses_updated"] += 1
                print(" -> Status set to '%s'" % AFTER_FEEDBACK_STATUS)
            except Exception as e:
                # Comment already landed; a status failure is logged but
                # does not block marking the message processed.
                log.error("Failed to update status for task %s: %s", task_id, e)
                stats["errors"] += 1

        # Mark as processed
        processed_ids.add(msg_id)

    # Save state
    if not args.dry_run:
        state["last_check"] = datetime.now(UTC).isoformat()
        state["processed_message_ids"] = list(processed_ids)[-500:] # Keep last 500
        _save_state(state)

    print("\n--- Summary ---")
    print(" Replies found: %d" % stats["replies_found"])
    if not args.dry_run:
        print(" Comments posted: %d" % stats["comments_posted"])
        print(" Statuses updated: %d" % stats["statuses_updated"])
        print(" Errors: %d" % stats["errors"])
    else:
        print(" (dry-run -- no changes made)")

    if cu_client:
        cu_client.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -22,26 +22,10 @@ sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
|||
from dotenv import load_dotenv
|
||||
|
||||
from cheddahbot.clickup import ClickUpClient
|
||||
from clickup_runner.skill_map import SKILL_MAP
|
||||
|
||||
DEFAULT_ASSIGNEE = 10765627 # Bryan Bigari
|
||||
|
||||
|
||||
def _build_stage_comment(category: str) -> str:
    """Build a pipeline stages comment from SKILL_MAP for a task type.

    Returns "" when the category has no configured pipeline.
    """
    stages = SKILL_MAP.get(category)
    if not stages:
        return ""

    parts = ["Pipeline stages for %s:" % category]
    for index, (stage_name, route) in enumerate(stages.items(), start=1):
        display_handler = "Claude" if route.handler == "claude" else route.handler
        parts.append(
            " %d. %s (%s) -> %s" % (index, stage_name, display_handler, route.next_status)
        )
    first_stage = next(iter(stages))
    parts.append('Set Stage to "%s" to start.' % first_stage)
    return "\n".join(parts)
|
||||
|
||||
|
||||
def _date_to_unix_ms(date_str: str) -> int:
|
||||
"""Convert YYYY-MM-DD to Unix milliseconds (noon UTC).
|
||||
|
||||
|
|
@ -113,12 +97,6 @@ def main():
|
|||
default="",
|
||||
help="Time estimate (e.g. '2h', '30m', '1h30m')",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dependency",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Task ID this task is blocked by (repeatable)",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
api_token = os.environ.get("CLICKUP_API_TOKEN", "")
|
||||
|
|
@ -180,14 +158,6 @@ def main():
|
|||
task_id, list_id, "Work Category", args.category
|
||||
)
|
||||
|
||||
# Add dependencies (blocked by)
|
||||
for dep_id in args.dependency:
|
||||
if not client.add_dependency(task_id, dep_id):
|
||||
print(
|
||||
f"Warning: Failed to add dependency on {dep_id}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
# Set any additional custom fields
|
||||
for field_name, field_value in custom_fields.items():
|
||||
ok = client.set_custom_field_smart(
|
||||
|
|
@ -199,12 +169,6 @@ def main():
|
|||
file=sys.stderr,
|
||||
)
|
||||
|
||||
# Post pipeline stages comment if task type has stages
|
||||
if args.category:
|
||||
stage_comment = _build_stage_comment(args.category)
|
||||
if stage_comment:
|
||||
client.add_comment(task_id, stage_comment)
|
||||
|
||||
print(json.dumps({
|
||||
"id": task_id,
|
||||
"name": args.name,
|
||||
|
|
|
|||
|
|
@ -1,77 +0,0 @@
|
|||
"""Create a Gmail draft for client delivery (manual/retry use).
|
||||
|
||||
Usage:
|
||||
uv run python scripts/create_client_draft.py --company "McCormick Industries" \\
|
||||
--files data/generated/press_releases/mccormick/file1.docx \\
|
||||
--task-id abc123 --type "Press Release"
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
from cheddahbot.config import load_config # noqa: E402
|
||||
from cheddahbot.delivery import deliver_to_client # noqa: E402
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Create client delivery draft email")
|
||||
parser.add_argument("--company", required=True, help="Company name (must match companies.md)")
|
||||
parser.add_argument("--files", nargs="+", required=True, help="Paths to .docx files")
|
||||
parser.add_argument("--task-id", required=True, help="ClickUp task ID for tracking")
|
||||
parser.add_argument("--type", default="Press Release", help="Task type (default: Press Release)")
|
||||
args = parser.parse_args()
|
||||
|
||||
config = load_config()
|
||||
ctx = {"config": config}
|
||||
|
||||
file_paths = [Path(f) for f in args.files]
|
||||
for f in file_paths:
|
||||
if not f.exists():
|
||||
print("ERROR: File not found: %s" % f)
|
||||
sys.exit(1)
|
||||
|
||||
print("Creating client delivery draft...")
|
||||
print(" Company: %s" % args.company)
|
||||
print(" Files: %s" % [str(f) for f in file_paths])
|
||||
print(" Task ID: %s" % args.task_id)
|
||||
print(" Type: %s" % args.type)
|
||||
print()
|
||||
|
||||
result = deliver_to_client(
|
||||
files=file_paths,
|
||||
company_name=args.company,
|
||||
task_id=args.task_id,
|
||||
task_type=args.type,
|
||||
ctx=ctx,
|
||||
)
|
||||
|
||||
if result.doc_links:
|
||||
print("Google Doc links:")
|
||||
for link in result.doc_links:
|
||||
print(" - %s" % link)
|
||||
|
||||
if result.draft_id:
|
||||
print("\nGmail draft created: %s" % result.draft_id)
|
||||
|
||||
if result.errors:
|
||||
print("\nErrors:")
|
||||
for err in result.errors:
|
||||
print(" - %s" % err)
|
||||
|
||||
print("\nDone." if result.success else "\nCompleted with issues.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -1,265 +0,0 @@
|
|||
"""CLI script to create a bundled set of ClickUp tasks (LINKS, PR, NEW, OPT).
|
||||
|
||||
Usage:
|
||||
uv run python scripts/create_task_set.py \
|
||||
--client "RPM Rubber" --keyword "rubber gaskets" \
|
||||
--types OPT,LINKS,PR --url "https://example.com/page" \
|
||||
--tag apr26 --due-date 2026-04-08 \
|
||||
--pr-topic "discuss RPM rubber gasket capabilities"
|
||||
|
||||
uv run python scripts/create_task_set.py \
|
||||
--client "Metal Craft Spinning" --keyword "fan panels" \
|
||||
--types NEW,LINKS --tag may26 --due-date 2026-05-11
|
||||
|
||||
uv run python scripts/create_task_set.py \
|
||||
--client "Hogge Precision" --keyword "swiss machining" \
|
||||
--types LINKS --url "https://example.com/page" \
|
||||
--tag jun26 --due-date 2026-06-08 --articles 5
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from cheddahbot.clickup import ClickUpClient
|
||||
|
||||
DEFAULT_ASSIGNEE = 10765627 # Bryan Bigari
|
||||
VALID_TYPES = {"NEW", "OPT", "LINKS", "PR"}
|
||||
|
||||
# Time estimates in milliseconds
|
||||
TIME_ESTIMATES = {
|
||||
"NEW": 14400000, # 4h
|
||||
"OPT": 7200000, # 2h
|
||||
"LINKS": 9000000, # 2.5h
|
||||
"PR": 5400000, # 1.5h
|
||||
}
|
||||
|
||||
WORK_CATEGORIES = {
|
||||
"NEW": "Content Creation",
|
||||
"OPT": "On Page Optimization",
|
||||
"LINKS": "Link Building",
|
||||
"PR": "Press Release",
|
||||
}
|
||||
|
||||
INITIAL_STAGES = {
|
||||
"NEW": "run_cora",
|
||||
"OPT": "run_cora",
|
||||
"LINKS": "run_cora",
|
||||
"PR": "draft",
|
||||
}
|
||||
|
||||
|
||||
def _date_to_unix_ms(date_str: str) -> int:
|
||||
"""Convert YYYY-MM-DD to Unix milliseconds (noon UTC)."""
|
||||
dt = datetime.strptime(date_str, "%Y-%m-%d").replace(hour=12, tzinfo=UTC)
|
||||
return int(dt.timestamp() * 1000)
|
||||
|
||||
|
||||
def _add_days_ms(unix_ms: int, days: int) -> int:
|
||||
"""Add days to a Unix ms timestamp."""
|
||||
return unix_ms + days * 86400 * 1000
|
||||
|
||||
|
||||
def main():
|
||||
load_dotenv()
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Create a bundled set of ClickUp tasks"
|
||||
)
|
||||
parser.add_argument("--client", required=True, help="Client folder name")
|
||||
parser.add_argument("--keyword", required=True, help="SEO keyword")
|
||||
parser.add_argument(
|
||||
"--types",
|
||||
required=True,
|
||||
help="Comma-separated task types: NEW, OPT, LINKS, PR",
|
||||
)
|
||||
parser.add_argument("--due-date", required=True, help="Content due date (YYYY-MM-DD)")
|
||||
parser.add_argument("--tag", required=True, help="Month tag (e.g. apr26)")
|
||||
parser.add_argument("--url", default="", help="IMSURL (required for OPT)")
|
||||
parser.add_argument("--pr-topic", default="", help="PR topic (required if PR in types)")
|
||||
parser.add_argument("--articles", type=int, default=0, help="Tier-1 article count for LINKS")
|
||||
parser.add_argument("--anchors", default="", help="Custom anchors for LINKS (comma-delimited)")
|
||||
parser.add_argument("--priority", type=int, default=2, help="Priority (default: 2/High)")
|
||||
parser.add_argument("--assignee", type=int, default=DEFAULT_ASSIGNEE, help="ClickUp user ID")
|
||||
parser.add_argument(
|
||||
"--links-tag", default="",
|
||||
help="Override tag for LINKS/PR (if different from content tag)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--pr-tag", default="",
|
||||
help="Override tag for PR (if different from content tag)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--links-date", default="",
|
||||
help="Override due date for LINKS (YYYY-MM-DD, default: content + 7 days)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--pr-date", default="",
|
||||
help="Override due date for PR (YYYY-MM-DD, default: content + 7 days)",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
# Parse and validate types
|
||||
task_types = [t.strip().upper() for t in args.types.split(",")]
|
||||
for t in task_types:
|
||||
if t not in VALID_TYPES:
|
||||
print(f"Error: Invalid type '{t}'. Must be one of: {VALID_TYPES}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
has_content = "NEW" in task_types or "OPT" in task_types
|
||||
|
||||
# Validate required fields
|
||||
if "OPT" in task_types and not args.url:
|
||||
print("Error: --url is required when OPT is in types", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if "PR" in task_types and not args.pr_topic:
|
||||
print("Error: --pr-topic is required when PR is in types", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if not args.url and "NEW" not in task_types:
|
||||
needs_url = [t for t in task_types if t in ("LINKS", "PR")]
|
||||
if needs_url:
|
||||
print(
|
||||
f"Error: --url is required for {needs_url} when NEW is not in types",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
api_token = os.environ.get("CLICKUP_API_TOKEN", "")
|
||||
space_id = os.environ.get("CLICKUP_SPACE_ID", "")
|
||||
if not api_token:
|
||||
print("Error: CLICKUP_API_TOKEN not set", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if not space_id:
|
||||
print("Error: CLICKUP_SPACE_ID not set", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
client = ClickUpClient(api_token=api_token)
|
||||
try:
|
||||
list_id = client.find_list_in_folder(space_id, args.client)
|
||||
if not list_id:
|
||||
print(f"Error: No folder '{args.client}' with 'Overall' list", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
content_due_ms = _date_to_unix_ms(args.due_date)
|
||||
followup_due_ms = _add_days_ms(content_due_ms, 7)
|
||||
|
||||
created = {}
|
||||
new_task_id = None
|
||||
|
||||
# Determine task order: content tasks first (need ID for dependencies)
|
||||
ordered = []
|
||||
for t in ["NEW", "OPT"]:
|
||||
if t in task_types:
|
||||
ordered.append(t)
|
||||
for t in ["LINKS", "PR"]:
|
||||
if t in task_types:
|
||||
ordered.append(t)
|
||||
|
||||
for task_type in ordered:
|
||||
# Task name
|
||||
name = f"{task_type} - {args.keyword}"
|
||||
|
||||
# Due date
|
||||
if task_type in ("NEW", "OPT"):
|
||||
due_ms = content_due_ms
|
||||
elif task_type == "LINKS" and args.links_date:
|
||||
due_ms = _date_to_unix_ms(args.links_date)
|
||||
elif task_type == "PR" and args.pr_date:
|
||||
due_ms = _date_to_unix_ms(args.pr_date)
|
||||
elif has_content:
|
||||
due_ms = followup_due_ms
|
||||
else:
|
||||
due_ms = content_due_ms
|
||||
|
||||
# Tag
|
||||
if task_type == "LINKS" and args.links_tag:
|
||||
tag = args.links_tag
|
||||
elif task_type == "PR" and args.pr_tag:
|
||||
tag = args.pr_tag
|
||||
else:
|
||||
tag = args.tag
|
||||
|
||||
result = client.create_task(
|
||||
list_id=list_id,
|
||||
name=name,
|
||||
status="to do",
|
||||
priority=args.priority,
|
||||
assignees=[args.assignee],
|
||||
due_date=due_ms,
|
||||
tags=[tag],
|
||||
time_estimate=TIME_ESTIMATES[task_type],
|
||||
)
|
||||
task_id = result.get("id", "")
|
||||
task_url = result.get("url", "")
|
||||
|
||||
# Common fields
|
||||
client.set_custom_field_smart(task_id, list_id, "Client", args.client)
|
||||
client.set_custom_field_smart(
|
||||
task_id, list_id, "Work Category", WORK_CATEGORIES[task_type]
|
||||
)
|
||||
client.set_custom_field_smart(
|
||||
task_id, list_id, "Stage", INITIAL_STAGES[task_type]
|
||||
)
|
||||
client.set_custom_field_smart(task_id, list_id, "Delegate to Claude", "true")
|
||||
|
||||
# IMSURL
|
||||
if args.url and task_type != "NEW":
|
||||
client.set_custom_field_smart(task_id, list_id, "IMSURL", args.url)
|
||||
|
||||
# Type-specific fields
|
||||
if task_type == "LINKS":
|
||||
client.set_custom_field_smart(task_id, list_id, "LB Method", "Cora Backlinks")
|
||||
client.set_custom_field_smart(task_id, list_id, "BrandedPlusRatio", "0.80")
|
||||
if args.articles:
|
||||
client.set_custom_field_smart(
|
||||
task_id, list_id, "CLIFlags", f"--tier1-count {args.articles}"
|
||||
)
|
||||
if args.anchors:
|
||||
client.set_custom_field_smart(
|
||||
task_id, list_id, "CustomAnchors", args.anchors
|
||||
)
|
||||
|
||||
if task_type == "PR":
|
||||
client.set_custom_field_smart(task_id, list_id, "PR Topic", args.pr_topic)
|
||||
|
||||
# Track NEW task ID for dependencies
|
||||
if task_type == "NEW":
|
||||
new_task_id = task_id
|
||||
|
||||
# Set dependency if NEW exists and no URL
|
||||
if task_type in ("LINKS", "PR") and new_task_id and not args.url:
|
||||
ok = client.add_dependency(task_id, depends_on=new_task_id)
|
||||
dep_status = "blocked by NEW" if ok else "DEPENDENCY FAILED"
|
||||
else:
|
||||
dep_status = ""
|
||||
|
||||
created[task_type] = {
|
||||
"id": task_id,
|
||||
"name": name,
|
||||
"url": task_url,
|
||||
"dep": dep_status,
|
||||
}
|
||||
|
||||
status_parts = [f"{task_type} created: {task_id}"]
|
||||
if dep_status:
|
||||
status_parts.append(dep_status)
|
||||
print(" | ".join(status_parts))
|
||||
|
||||
# Summary
|
||||
print(json.dumps(created, indent=2))
|
||||
|
||||
finally:
|
||||
client.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -1,64 +0,0 @@
|
|||
"""Fix CLIFlags custom field: replace --tier-one with --tier-1 on all non-closed tasks.
|
||||
|
||||
Usage:
|
||||
uv run python scripts/fix_tier_one_flags.py # dry-run (default)
|
||||
uv run python scripts/fix_tier_one_flags.py --apply # actually update tasks
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from cheddahbot.clickup import ClickUpClient
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Fix --tier-one -> --tier-1 in CLIFlags")
|
||||
parser.add_argument("--apply", action="store_true", help="Actually update tasks (default: dry-run)")
|
||||
args = parser.parse_args()
|
||||
|
||||
token = os.environ["CLICKUP_API_TOKEN"]
|
||||
space_id = os.environ["CLICKUP_SPACE_ID"]
|
||||
|
||||
client = ClickUpClient(api_token=token)
|
||||
|
||||
print(f"Fetching all non-closed tasks from space {space_id}...")
|
||||
tasks = client.get_tasks_from_space(space_id)
|
||||
print(f"Found {len(tasks)} tasks total")
|
||||
|
||||
fix_count = 0
|
||||
for task in tasks:
|
||||
cli_flags = task.custom_fields.get("CLIFlags", "") or ""
|
||||
if "--tier-one" not in cli_flags:
|
||||
continue
|
||||
|
||||
new_flags = cli_flags.replace("--tier-one", "--tier-1")
|
||||
fix_count += 1
|
||||
print(f" [{task.id}] {task.name}")
|
||||
print(f" OLD: {cli_flags}")
|
||||
print(f" NEW: {new_flags}")
|
||||
|
||||
if args.apply:
|
||||
ok = client.set_custom_field_by_name(task.id, "CLIFlags", new_flags)
|
||||
if ok:
|
||||
print(" -> Updated")
|
||||
else:
|
||||
print(" -> FAILED to update")
|
||||
|
||||
print(f"\n{'Updated' if args.apply else 'Would update'} {fix_count} tasks")
|
||||
if not args.apply and fix_count > 0:
|
||||
print("Run with --apply to make changes")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
"""One-time Google OAuth2 setup script.
|
||||
|
||||
Usage:
|
||||
uv run python scripts/google_auth_setup.py
|
||||
uv run python scripts/google_auth_setup.py --test-drive FOLDER_ID
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
from cheddahbot.google_auth import get_credentials # noqa: E402
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Google OAuth2 setup for CheddahBot")
|
||||
parser.add_argument(
|
||||
"--test-drive",
|
||||
metavar="FOLDER_ID",
|
||||
help="Test Drive access by listing files in the given folder",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
print("Authenticating with Google (browser will open on first run)...")
|
||||
creds = get_credentials()
|
||||
print("Authentication successful. Token saved to data/google_token.json")
|
||||
|
||||
# Show granted scopes
|
||||
if hasattr(creds, "scopes") and creds.scopes:
|
||||
print("\nGranted scopes:")
|
||||
for scope in creds.scopes:
|
||||
print(" - %s" % scope)
|
||||
|
||||
if args.test_drive:
|
||||
from googleapiclient.discovery import build
|
||||
|
||||
service = build("drive", "v3", credentials=creds)
|
||||
results = (
|
||||
service.files()
|
||||
.list(
|
||||
q="'%s' in parents and trashed = false" % args.test_drive,
|
||||
fields="files(id, name, mimeType)",
|
||||
pageSize=10,
|
||||
)
|
||||
.execute()
|
||||
)
|
||||
files = results.get("files", [])
|
||||
if files:
|
||||
print("\nFiles in folder %s:" % args.test_drive)
|
||||
for f in files:
|
||||
print(" - %s (%s)" % (f["name"], f["mimeType"]))
|
||||
else:
|
||||
print("\nFolder %s is empty or not accessible." % args.test_drive)
|
||||
|
||||
print("\nSetup complete.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -1,11 +1,7 @@
|
|||
# Company Directory
|
||||
|
||||
## McCormick Industries
|
||||
- **Aliases:** McCormick
|
||||
- **Executive:** Gary Hermsen, CEO
|
||||
- **Email:**
|
||||
- **Opening:**
|
||||
- **CC:**
|
||||
- **PA Org ID:** 19413
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
|
@ -29,7 +25,6 @@
|
|||
- **GBP:**
|
||||
|
||||
## Metal Craft
|
||||
- **Aliases:** Metal Craft Spinning
|
||||
- **Executive:** Kyle, Vice President
|
||||
- **PA Org ID:** 19800
|
||||
- **Website:**
|
||||
|
|
@ -42,21 +37,18 @@
|
|||
- **GBP:**
|
||||
|
||||
## MOD-TRONIC Instruments Limited
|
||||
- **Aliases:** Modtronic
|
||||
- **Executive:** Steven Ruple, President
|
||||
- **PA Org ID:** 19901
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## Krueger Sentry Gauge
|
||||
- **Aliases:** Krueger Sentry
|
||||
- **Executive:** Lee Geurts, Vice President
|
||||
- **PA Org ID:** 20862
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## Chapter 2 Incorporated
|
||||
- **Aliases:** Chapter 2
|
||||
- **Executive:** Kyle Johnston, Senior Engineer
|
||||
- **PA Org ID:** 19517
|
||||
- **Website:**
|
||||
|
|
@ -69,17 +61,15 @@
|
|||
- **GBP:**
|
||||
|
||||
## Renown Electric Motors & Repairs Inc.
|
||||
- **Aliases:** Renown Electric
|
||||
- **Executive:** Jeff Collins, Partner
|
||||
- **PA Org ID:** 19546
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## RPM Industrial Rubber Parts
|
||||
- **Aliases:** RPM Rubber
|
||||
## RPM Mechanical Inc.
|
||||
- **Executive:** Mike McNeil, Vice President
|
||||
- **PA Org ID:** 19395
|
||||
- **Website:** https://www.rpmrubberparts.com/
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## Green Bay Plastics
|
||||
|
|
@ -101,7 +91,6 @@
|
|||
- **GBP:**
|
||||
|
||||
## Axiomatic Global Electronic Solutions
|
||||
- **Aliases:** Axiomatic
|
||||
- **Executive:** Amanda Wilkins, Chief Marketing Officer
|
||||
- **PA Org ID:** 19633
|
||||
- **Website:**
|
||||
|
|
@ -114,7 +103,6 @@
|
|||
- **GBP:**
|
||||
|
||||
## ELIS Manufacturing and Packaging Solutions Inc.
|
||||
- **Aliases:** ELIS Packaging
|
||||
- **Executive:** Keith Vinson, Chief Executive Officer
|
||||
- **PA Org ID:** 19656
|
||||
- **Website:**
|
||||
|
|
@ -127,14 +115,12 @@
|
|||
- **GBP:**
|
||||
|
||||
## FZE Manufacturing
|
||||
- **Aliases:** FZE Manufacturing Solutions
|
||||
- **Executive:** Doug Pribyl, CEO
|
||||
- **PA Org ID:** 22377
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## Machine Specialty & Manufacturing (MSM)
|
||||
- **Aliases:** Machine Specialty
|
||||
- **Executive:** Max Hutson, Vice President of Operations
|
||||
- **PA Org ID:** 19418
|
||||
- **Website:**
|
||||
|
|
@ -146,22 +132,7 @@
|
|||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## Royal Purple Industrial
|
||||
- **Executive:** Rusty Waples, Brand and Product Marketing Director
|
||||
- **PA Org ID:** 23623
|
||||
- **Website:** https://www.royalpurpleind.com/
|
||||
- **GBP:** https://maps.app.goo.gl/wBgq49g3Xs4Y91zP9
|
||||
|
||||
## James Eagen Sons
|
||||
- **Aliases:** James Eagen
|
||||
- **Executive:**
|
||||
- **PA Org ID:**
|
||||
## EVR Products
|
||||
- **Executive:** Gary Waldick, Vice President of EVR Products
|
||||
- **Website:**
|
||||
- **GBP:**
|
||||
|
||||
## EVR Products
|
||||
- **Aliases:** Elasto Valve Rubber (EVR)
|
||||
- **Executive:** Gary Waldick, Vice President of EVR Products
|
||||
- **Website:** evrproducts.com
|
||||
- **GBP:** https://maps.app.goo.gl/bwaEWE5hCkgni1gk6
|
||||
- The company is a custom rubber products supplier based in Sudbury, Ontario
|
||||
|
|
@ -1,168 +0,0 @@
|
|||
# Content Draft -- Autonomous Pipeline
|
||||
|
||||
You are an autonomous content writer. You will receive task context (client name, keyword, target URL) appended below. Your job is to read the approved outline, write the full content draft, optimize it against Cora data, and produce ONE output file ready to paste into Google Docs or WordPress.
|
||||
|
||||
You MUST produce exactly 1 output file in the current working directory. No subdirectories.
|
||||
|
||||
## Step 1: Read the Outline
|
||||
|
||||
The task will have an outline file attached (from the previous pipeline stage). Read it carefully. It has two parts:
|
||||
|
||||
1. **The Outline** (top) -- heading structure, per-section word counts, section descriptions
|
||||
2. **Writer's Reference** (bottom) -- variation placement map, entity checklist, top LSI terms, entity rules
|
||||
|
||||
Both parts drive your writing. Follow the outline structure exactly -- do not add, remove, or reorder sections unless something is clearly broken.
|
||||
|
||||
## Step 2: Fetch Context (if available)
|
||||
|
||||
### 2a. Existing Page Content
|
||||
|
||||
If a target URL is provided AND it is not `seotoollab.com/blank.html`, use the BS4 scraper to get the current page:
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with requests,beautifulsoup4 python competitor_scraper.py "{imsurl}" --output-dir ./working/
|
||||
```
|
||||
|
||||
Read the output to understand:
|
||||
- Current tone and voice (match it in the rewrite)
|
||||
- Existing content worth preserving (specific product details, specs, certifications)
|
||||
- Do NOT copy promotional fluff -- only keep factual content
|
||||
|
||||
If no IMSURL or blank page, write fresh content.
|
||||
|
||||
### 2b. Company Info
|
||||
|
||||
Read `skills/companies.md` if the client is listed there. Use the executive name/title for any quotes or attribution if appropriate for this content type.
|
||||
|
||||
## Step 3: Write the Draft
|
||||
|
||||
Write the full content following the outline exactly.
|
||||
|
||||
### Writing Rules
|
||||
|
||||
- Follow the heading structure from the outline exactly (H1, H2, H3 in order)
|
||||
- Hit each section's word count target (within 10%)
|
||||
- Total word count must match the Cora target from the outline header (within 10%)
|
||||
- Write in clear, scannable paragraphs (max 4 sentences per paragraph)
|
||||
- Use subheadings every 2-4 paragraphs
|
||||
- Match the tone of the existing page if one was scraped. Otherwise write in professional B2B style appropriate to the industry.
|
||||
- Do NOT use first person ("I", "we") in body text unless quoting an executive
|
||||
- Do NOT fabricate certifications, awards, partnerships, or specific claims about the company unless found on their website or in the task description
|
||||
- Include a CTA in the final section (e.g. "Contact [Company] for..." or "Learn more at...")
|
||||
|
||||
### Variation Targets
|
||||
|
||||
Follow the Variation Placement Map from the Writer's Reference section of the outline. These are the top priority:
|
||||
|
||||
- Place each variation in the sections specified
|
||||
- Hit the target count for each variation
|
||||
- Multi-word variations (e.g. "ac drive repair") count toward their component single-word variations too
|
||||
- Do NOT sacrifice readability to force variations in -- they should read naturally
|
||||
|
||||
### Entity Coverage
|
||||
|
||||
Follow the Entity Checklist from the Writer's Reference:
|
||||
|
||||
- **Coverage first**: every entity in the "Must mention" group needs at least 1 mention
|
||||
- **Never remove entity mentions** -- only add. Removing entities can damage variation counts.
|
||||
- **Variations take priority** over entity deficit counts
|
||||
- Outlier entities flagged in the checklist should be used sparingly (2-3x max as noted)
|
||||
|
||||
### LSI Terms
|
||||
|
||||
Weave in the Top 20 LSI Terms from the Writer's Reference naturally throughout the draft. These don't need specific counts -- just make sure they appear where contextually appropriate.
|
||||
|
||||
### Fan-Out Query (FOQ) Section
|
||||
|
||||
After the main content, write the FOQ section from the outline:
|
||||
|
||||
- Each FOQ heading is an H3 phrased as a question
|
||||
- Answer in 2-3 sentences max, self-contained
|
||||
- **Restate the question in the answer**: "How does X work? X works by..." (this format is preferred by featured snippets and LLM citations)
|
||||
- Mark the section: `<!-- FOQ SECTION START -->` and `<!-- FOQ SECTION END -->`
|
||||
- FOQs are excluded from the main word count
|
||||
|
||||
## Step 4: Self-Check
|
||||
|
||||
Before writing the output file, review your own draft:
|
||||
|
||||
- Count words per section (excluding FOQ). Compare against the outline's per-section targets. If any section is over by more than 10%, trim. If under by more than 10%, expand.
|
||||
- Scan for each entity in the "Must mention" list from the Writer's Reference. If any are missing, add them.
|
||||
- Scan for the top LSI terms. If key ones are absent, weave them in where natural.
|
||||
- Re-read for keyword stuffing -- if any paragraph sounds forced, rewrite it.
|
||||
|
||||
## Step 5: Generate Meta Tags
|
||||
|
||||
Add meta tags as an HTML comment block at the very top of the output file.
|
||||
|
||||
### Title Tag
|
||||
|
||||
Format: `Exact Match Keyword | Company Name | Related Search 1 | Related Search 2 | Related Search 3 | Related Search 4`
|
||||
|
||||
Rules:
|
||||
- Start with the exact-match keyword
|
||||
- Then the company name
|
||||
- Then up to 4 related search queries that the content actually answers
|
||||
- Pull these from the keyword variations and fan-out queries
|
||||
- Each related search should represent a real search intent that the page satisfies
|
||||
- Total can be up to ~500 characters -- Google reads far more than it displays
|
||||
|
||||
### Meta Description
|
||||
|
||||
Rules:
|
||||
- 400-500 characters
|
||||
- Must match the actual content on the page -- do not promise what the content doesn't deliver
|
||||
- Optimize for click-through rate (CTR):
|
||||
- Lead with a clear benefit or answer to the searcher's intent
|
||||
- Include specific details that differentiate (years in business, number of brands serviced, certifications)
|
||||
- Use active language that creates urgency or curiosity without being clickbait
|
||||
- Include a soft CTA ("Learn how...", "Find out why...", "Get expert...")
|
||||
- Front-load the most important information (Google may truncate)
|
||||
- Hit the primary keyword, 2-3 key variations, and 2-3 entities naturally
|
||||
- Do NOT just rewrite the intro paragraph -- the meta description is its own piece of copy
|
||||
|
||||
### URL Slug
|
||||
|
||||
Short, keyword-focused. Example: `/ac-drive-repair`
|
||||
|
||||
### Format
|
||||
|
||||
```html
|
||||
<!--
|
||||
META TITLE: Exact Match Keyword | Company Name | Related Search 1 | Related Search 2
|
||||
META DESCRIPTION: 400-500 character description optimized for CTR...
|
||||
URL SLUG: /url-slug-here
|
||||
-->
|
||||
```
|
||||
|
||||
## Step 6: Self-Verification
|
||||
|
||||
Before finishing, verify:
|
||||
|
||||
- [ ] Heading structure matches the outline exactly
|
||||
- [ ] Total word count within 10% of Cora target (excluding FOQs)
|
||||
- [ ] Per-section word counts within 10% of outline targets
|
||||
- [ ] All "Must mention" entities have at least 1 mention
|
||||
- [ ] Key variations hit their target counts (check the placement map)
|
||||
- [ ] Top LSI terms are present
|
||||
- [ ] FOQ section is marked with comment tags and excluded from word count
|
||||
- [ ] Meta title follows the format: keyword | company | related searches
|
||||
- [ ] Meta description is 400-500 characters with CTR optimization
|
||||
- [ ] No fabricated claims about the company
|
||||
- [ ] Content reads naturally -- not keyword-stuffed
|
||||
- [ ] No local file paths in the output
|
||||
- [ ] File pastes cleanly into Google Docs or WordPress
|
||||
|
||||
## Output Files
|
||||
|
||||
You MUST write exactly 1 file to the current working directory. Use the **keyword** from the task context in the filename.
|
||||
|
||||
Example -- if the keyword is "ac drive repair":
|
||||
|
||||
| File | Format | Contents |
|
||||
|------|--------|----------|
|
||||
| `ac drive repair - Draft.md` | Markdown | Meta tags at top (HTML comment), full content with headings, FOQ section at bottom |
|
||||
|
||||
The file should paste directly into Google Docs (as markdown) or WordPress Gutenberg code editor (headings map to blocks). No special formatting, no CSS classes, no wrapper divs.
|
||||
|
||||
Do NOT create any other files. Do NOT create subdirectories.
|
||||
|
|
@ -1,242 +0,0 @@
|
|||
# Content Outline -- Autonomous Pipeline
|
||||
|
||||
You are an autonomous content outline builder. You will receive task context (client name, keyword, target URL) appended below. Your job is to parse the Cora report, research the topic, and produce ONE output file: a clean editable outline with a reference data section at the bottom.
|
||||
|
||||
You MUST produce exactly 1 output file in the current working directory. No subdirectories.
|
||||
|
||||
## Step 1: Parse the Cora Report
|
||||
|
||||
The task will have a Cora .xlsx attached. Download or locate it, then run the Cora parser scripts to extract structured data.
|
||||
|
||||
### 1a. Summary + Structure Targets
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with openpyxl python cora_parser.py "{cora_xlsx_path}" --sheet summary --format json
|
||||
```
|
||||
|
||||
From the summary, extract:
|
||||
- **Word count target** (use `word_count_cluster_target` if available, otherwise `word_count_goal`)
|
||||
- **Keyword variations** list
|
||||
- **Entity count target** (`distinct_entities_target`)
|
||||
- **Density targets** (variation, entity, LSI)
|
||||
|
||||
### 1b. Structure Targets
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with openpyxl python cora_parser.py "{cora_xlsx_path}" --sheet structure --format json
|
||||
```
|
||||
|
||||
Extract heading count targets: H1, H2, H3, H4 counts.
|
||||
|
||||
### 1c. Keyword Variations
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with openpyxl python cora_parser.py "{cora_xlsx_path}" --sheet variations --format json
|
||||
```
|
||||
|
||||
Extract each variation with its page1_max and page1_avg. These are the keyword family -- hitting these targets is the top priority for the draft.
|
||||
|
||||
### 1d. Entities
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with openpyxl python cora_parser.py "{cora_xlsx_path}" --sheet entities --format json
|
||||
```
|
||||
|
||||
Entities are already filtered by correlation (Best of Both <= -0.19) in the parser. From the results, note:
|
||||
- Total relevant entities (the ones that passed the filter)
|
||||
- Which ones have 0 current mentions (coverage gaps)
|
||||
- Max count and deficit for each
|
||||
|
||||
### 1e. LSI Keywords
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with openpyxl python cora_parser.py "{cora_xlsx_path}" --sheet lsi --format json
|
||||
```
|
||||
|
||||
Extract LSI keywords with their correlation and deficit values.
|
||||
|
||||
## Step 2: Research
|
||||
|
||||
### 2a. Fetch Current Page (if IMSURL provided)
|
||||
|
||||
If a target URL is provided AND it is not `seotoollab.com/blank.html`, use the BS4 scraper to get the actual page content -- do NOT use WebFetch (it runs through AI summarization and loses heading structure):
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with requests,beautifulsoup4 python competitor_scraper.py "{imsurl}" --output-dir ./working/
|
||||
```
|
||||
|
||||
Read the output file to understand:
|
||||
- Current heading structure
|
||||
- Current word count
|
||||
- What content exists already
|
||||
- Current style and tone
|
||||
|
||||
If no IMSURL is provided, or if the URL is `seotoollab.com/blank.html` (used as a placeholder for Cora when the real page doesn't exist yet), this is a new page -- skip this step.
|
||||
|
||||
### 2b. Competitor Research
|
||||
|
||||
Use WebSearch to find the top 5-10 competitor pages for the keyword. Use the BS4 scraper to pull the best 3-5:
|
||||
|
||||
```bash
|
||||
cd .claude/skills/content-researcher/scripts && uv run --with requests,beautifulsoup4 python competitor_scraper.py "URL1" "URL2" "URL3" --output-dir ./working/competitor_content/
|
||||
```
|
||||
|
||||
Read the scraped files. Focus on:
|
||||
- What subtopics they cover
|
||||
- How they structure content (H2/H3 patterns)
|
||||
- Common themes everyone covers
|
||||
- Gaps -- what they miss or cover poorly
|
||||
|
||||
### 2c. Fan-Out Queries
|
||||
|
||||
Generate 10-15 search queries representing the topic cluster -- the natural "next searches" someone would run after the primary keyword. These become H3 heading candidates.
|
||||
|
||||
## Step 3: Build the Output File
|
||||
|
||||
The output file has two parts separated by a clear divider. The top is the editable outline. The bottom is reference data for the draft stage.
|
||||
|
||||
### PART 1: The Outline (top of file)
|
||||
|
||||
This is the part the human will read and edit. Keep it **clean and scannable**.
|
||||
|
||||
Format:
|
||||
|
||||
```
|
||||
# [Keyword] -- Content Outline
|
||||
|
||||
**Client:** [name]
|
||||
**Keyword:** [keyword]
|
||||
**Word Count Target:** [number]
|
||||
|
||||
---
|
||||
|
||||
## H1: [Heading text]
|
||||
|
||||
## H2: [Section heading]
|
||||
~[word count] words
|
||||
[1-2 sentence description of what goes here and key points to cover]
|
||||
|
||||
### H3: [Sub-section heading]
|
||||
|
||||
## H2: [Next section heading]
|
||||
~[word count] words
|
||||
[1-2 sentence description]
|
||||
|
||||
...
|
||||
|
||||
### Word Count Total
|
||||
[section-by-section breakdown adding up to Cora target]
|
||||
|
||||
---
|
||||
<!-- FOQ SECTION - excluded from word count -->
|
||||
|
||||
### [Question as heading]?
|
||||
### [Question as heading]?
|
||||
...
|
||||
```
|
||||
|
||||
Rules for the outline:
|
||||
- **Headings only** -- no variation counts, no entity lists, no Cora numbers in this section
|
||||
- Each H2 gets a word count target and a brief description (1-2 sentences max)
|
||||
- H3s are just the heading text, no description needed
|
||||
- Section word counts MUST add up to the Cora total (within 10%)
|
||||
- Fan-out queries go after a `<!-- FOQ SECTION -->` marker, excluded from word count
|
||||
- The human should be able to read this on their phone and rearrange sections easily
|
||||
|
||||
### Structure Rules
|
||||
|
||||
- **H1**: Exactly 1. Contains the exact-match keyword.
|
||||
- **H2 count**: Match the Cora structure target.
|
||||
- **H3 count**: Match the Cora structure target.
|
||||
- **H4**: Only add if Cora shows competitors using them. Low priority.
|
||||
- **H5/H6**: Ignore completely.
|
||||
|
||||
### Heading Content Rules
|
||||
|
||||
- Pack keyword variations into H2 and H3 headings where natural.
|
||||
- Pack relevant entities into headings where natural.
|
||||
- Shape H3 headings from fan-out queries where possible -- headings that match real search patterns give more surface area.
|
||||
|
||||
### Word Count Discipline -- CRITICAL
|
||||
|
||||
Do NOT pad sections. Do NOT exceed the Cora target by more than 10%. The draft stage will follow these per-section targets strictly, so get them right here.
|
||||
|
||||
### PART 2: Writer's Reference (bottom of file)
|
||||
|
||||
After the outline, add a clear divider and the data the draft writer needs. Keep this section compact.
|
||||
|
||||
```
|
||||
---
|
||||
# Writer's Reference -- DO NOT EDIT ABOVE THIS LINE
|
||||
---
|
||||
```
|
||||
|
||||
Include these sections:
|
||||
|
||||
**1. Variation Placement Map**
|
||||
|
||||
Table showing each keyword variation with page1_avg > 0, its target count, and which outline sections it belongs in:
|
||||
|
||||
```
|
||||
| Variation | Target | Sections |
|
||||
|-----------|--------|----------|
|
||||
| ac drive repair | 9 | H1, Section 2, Section 4 |
|
||||
| drive repair | 25 | Section 2, Section 3, Section 4 |
|
||||
```
|
||||
|
||||
Only include variations with page1_avg > 0. Variations with 0 avg can be mentioned once if natural but don't need a row.
|
||||
|
||||
**2. Entity Checklist**
|
||||
|
||||
Just the entity names grouped by priority. No correlation scores, no deficit numbers -- the draft writer doesn't need them:
|
||||
|
||||
```
|
||||
Must mention (1+ times each):
|
||||
- variable frequency drive, vfd, inverter, frequency, ac drives, ...
|
||||
|
||||
Brand names (use in brands section):
|
||||
- allen bradley
|
||||
|
||||
Low priority (mention if natural):
|
||||
- plc, automation
|
||||
```
|
||||
|
||||
Flag any outlier entities with a note: "servo -- competitor catalog inflates this, use 2-3x max"
|
||||
|
||||
**3. Top 20 LSI Terms**
|
||||
|
||||
Just the terms, no tables. The draft writer should weave these in naturally:
|
||||
|
||||
```
|
||||
drive repair, test, inverter, solutions, torque, motor, power, energy, brands, equipment, ...
|
||||
```
|
||||
|
||||
**4. Entity Rules**
|
||||
|
||||
- Never remove entity mentions -- only add. Removing entities can damage variation counts.
|
||||
- Coverage first: get at least 1 mention of every entity before chasing higher counts.
|
||||
- Variations take priority over entity deficit counts.
|
||||
|
||||
## Step 4: Self-Verification
|
||||
|
||||
Before finishing, verify:
|
||||
|
||||
- [ ] Outline heading counts match Cora structure targets (H1=1, H2, H3 counts)
|
||||
- [ ] Every H2 section has an explicit word count target
|
||||
- [ ] Section word counts add up to the Cora total (within 10%)
|
||||
- [ ] Fan-out queries are separated with `<!-- FOQ SECTION -->` marker
|
||||
- [ ] Writer's Reference has variation map, entity checklist, and LSI terms
|
||||
- [ ] Outline section is clean -- no Cora numbers, no variation counts, no entity tables
|
||||
- [ ] No local file paths anywhere in the output
|
||||
|
||||
## Output Files
|
||||
|
||||
You MUST write exactly 1 file to the current working directory. Use the **keyword** from the task context in the filename.
|
||||
|
||||
Example -- if the keyword is "fuel treatment":
|
||||
|
||||
| File | Format | Contents |
|
||||
|------|--------|----------|
|
||||
| `fuel treatment - Outline.md` | Markdown | Clean outline on top, writer's reference data on bottom |
|
||||
|
||||
Do NOT create any other files. Do NOT create subdirectories.
|
||||
|
|
@ -1,135 +0,0 @@
|
|||
---
|
||||
name: create-task-set
|
||||
description: Create a bundled set of ClickUp tasks (LINKS, PR, NEW, OPT) for a client keyword. Use when the user asks to create a task set, task bundle, or multiple related SEO tasks at once.
|
||||
tools: [clickup_create_task, clickup_add_dependency]
|
||||
agents: [default]
|
||||
---
|
||||
|
||||
# Create Task Set
|
||||
|
||||
Creates a coordinated set of ClickUp tasks for a client keyword. The user specifies which task types to create and you build them with the correct fields, time estimates, due dates, and dependencies.
|
||||
|
||||
## Task Types
|
||||
|
||||
| Code | Task Name | Work Category | Time Estimate |
|
||||
|------|-----------|--------------|---------------|
|
||||
| LINKS | `LINKS - {keyword}` | Link Building | 2h 30m (9000000 ms) |
|
||||
| PR | `PR - {keyword}` | Press Release | 1h 30m (5400000 ms) |
|
||||
| NEW | `NEW - {keyword}` | Content Creation | 4h (14400000 ms) |
|
||||
| OPT | `OPT - {keyword}` | On Page Optimization | 2h (7200000 ms) |
|
||||
|
||||
## Required Information (ask if not provided)
|
||||
|
||||
- **client**: Client/folder name (e.g. "RPM Rubber")
|
||||
- **keyword**: The SEO keyword (e.g. "rubber gaskets")
|
||||
- **types**: Which tasks to create -- any combination of LINKS, PR, NEW, OPT
|
||||
- **due_date**: Due date for the content task (YYYY-MM-DD)
|
||||
- **tag**: Month tag (e.g. "apr26")
|
||||
|
||||
## Conditionally Required
|
||||
|
||||
- **pr_topic**: Required ONLY when PR is in the set. The press release topic (different from keyword).
|
||||
- **url**: The IMSURL. Required when OPT is in the set or when LINKS/PR are in the set WITHOUT a NEW task. If NEW is in the set and no URL is provided, LINKS and PR are blocked by the NEW task.
|
||||
|
||||
## Optional
|
||||
|
||||
- **articles**: Number of tier-1 articles for LINKS task. Sets CLIFlags to `--tier1-count {N}`.
|
||||
- **anchors**: Custom anchor texts for LINKS task. Comma-delimited string goes into CustomAnchors field.
|
||||
- **priority**: Default is 2 (High).
|
||||
- **assignee**: Default is 10765627 (Bryan Bigari).
|
||||
|
||||
## Common Fields (set on ALL tasks)
|
||||
|
||||
Every task gets these fields set after creation:
|
||||
- **Client**: Set to the client name (dropdown)
|
||||
- **Delegate to Claude**: Set to `true` (checkbox)
|
||||
- **Tag**: The month tag
|
||||
- **Priority**: High (2) unless overridden
|
||||
- **Assignee**: Bryan unless overridden
|
||||
|
||||
## Due Date Logic
|
||||
|
||||
- The content task (NEW or OPT) gets the provided due date.
|
||||
- LINKS and PR get due date + 7 days (same day of week, one week later).
|
||||
- If there is no content task in the set, all tasks get the provided due date.
|
||||
|
||||
## Dependency Logic
|
||||
|
||||
When NEW is in the set and no URL is provided:
|
||||
- Create the NEW task FIRST.
|
||||
- For each LINKS and PR task in the set, call `clickup_add_dependency` with `blocked_task_id` = the LINKS/PR task ID and `blocker_task_id` = the NEW task ID.
|
||||
- Do NOT set IMSURL on LINKS or PR since the URL does not exist yet.
|
||||
|
||||
When a URL is provided (or OPT is used instead of NEW):
|
||||
- Set IMSURL on all tasks that have the field.
|
||||
- No dependencies needed.
|
||||
|
||||
## Field Reference Per Task Type
|
||||
|
||||
### LINKS (Link Building)
|
||||
- Work Category: "Link Building"
|
||||
- Stage: "run_cora"
|
||||
- LB Method: "Cora Backlinks"
|
||||
- BrandedPlusRatio: "0.80"
|
||||
- IMSURL: the url (if available)
|
||||
- CLIFlags: `--tier1-count {N}` (only if articles specified)
|
||||
- CustomAnchors: comma-delimited anchors (only if provided)
|
||||
- Time estimate: 9000000 ms
|
||||
|
||||
### PR (Press Release)
|
||||
- Work Category: "Press Release"
|
||||
- Stage: "draft"
|
||||
- PR Topic: the pr_topic value
|
||||
- IMSURL: the url (if available)
|
||||
- Time estimate: 5400000 ms
|
||||
|
||||
### NEW (Content Creation)
|
||||
- Work Category: "Content Creation"
|
||||
- Stage: "run_cora"
|
||||
- IMSURL: not set (URL does not exist yet)
|
||||
- Time estimate: 14400000 ms
|
||||
|
||||
### OPT (On Page Optimization)
|
||||
- Work Category: "On Page Optimization"
|
||||
- Stage: "run_cora"
|
||||
- IMSURL: the url (required)
|
||||
- Time estimate: 7200000 ms
|
||||
|
||||
## Execution Steps
|
||||
|
||||
1. Confirm you have all required info for the requested task types. Ask for anything missing.
|
||||
2. Convert the due date to Unix milliseconds (noon UTC): parse YYYY-MM-DD, set to 12:00 UTC, multiply epoch seconds by 1000.
|
||||
3. Calculate the +7 day due date for LINKS/PR if a content task is in the set.
|
||||
4. If NEW is in the set, create it first (you need its task ID for dependencies).
|
||||
5. Create each task using `clickup_create_task` with:
|
||||
- name: `{TYPE} - {keyword}`
|
||||
- client: the client name
|
||||
- work_category: per the table above
|
||||
- status: "to do"
|
||||
- due_date: the appropriate Unix ms value
|
||||
- tags: the tag value
|
||||
- priority: 2 (or override)
|
||||
- assignee: 10765627 (or override)
|
||||
- time_estimate_ms: per the table above
|
||||
- custom_fields_json: JSON object with all type-specific fields plus `{"Delegate to Claude": true}`
|
||||
6. If NEW is in the set with no URL, call `clickup_add_dependency` for each LINKS/PR task.
|
||||
7. Report back with a summary: task names, IDs, URLs, and any dependencies set.
|
||||
|
||||
## Examples
|
||||
|
||||
User: "Create LINKS, PR, NEW for RPM Rubber, keyword rubber gaskets, due April 15, tag apr26, pr topic RPM Rubber launches new gasket product line"
|
||||
|
||||
-> Create 3 tasks:
|
||||
1. `NEW - rubber gaskets` (Content Creation, due 2026-04-15, 4h)
|
||||
2. `LINKS - rubber gaskets` (Link Building, due 2026-04-22, 2.5h, blocked by NEW)
|
||||
3. `PR - rubber gaskets` (Press Release, due 2026-04-22, 1.5h, blocked by NEW)
|
||||
|
||||
User: "Create OPT, LINKS for RPM Rubber, keyword rubber seals, url https://rpmrubber.com/rubber-seals, due April 10, tag apr26"
|
||||
|
||||
-> Create 2 tasks:
|
||||
1. `OPT - rubber seals` (On Page Optimization, due 2026-04-10, 2h, IMSURL set)
|
||||
2. `LINKS - rubber seals` (Link Building, due 2026-04-17, 2.5h, IMSURL set)
|
||||
|
||||
User: "Create LINKS, PR, NEW for RPM Rubber, keyword rubber gaskets, due April 15, tag apr26, pr topic RPM launches gasket line, 4 articles, anchors rubber gaskets,custom rubber gaskets,industrial rubber gaskets"
|
||||
|
||||
-> Same as first example but LINKS also gets CLIFlags=`--tier1-count 4` and CustomAnchors=`rubber gaskets,custom rubber gaskets,industrial rubber gaskets`
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
---
|
||||
task_type: Press Release
|
||||
subject: "Press Releases Ready for Review - {company_name}"
|
||||
---
|
||||
{opening}
|
||||
|
||||
We have prepared press releases for {company_name} and they are ready for your review. Please find them linked below:
|
||||
|
||||
{google_doc_links}
|
||||
|
||||
Please let us know if you would like any changes or if these are approved for distribution.
|
||||
|
||||
Thank you,
|
||||
Bryan
|
||||
|
||||
Ref: CU-{task_id}
|
||||
|
|
@ -1,227 +1,234 @@
|
|||
# Press Release Writer -- Autonomous Pipeline
|
||||
---
|
||||
name: press-release-writer
|
||||
description: Professional press release writing that follows Press Advantage guidelines and journalistic standards. Use when the user asks to write a press release, create a news announcement, draft a PR, or mentions Press Advantage distribution. Automatically generates LSI terms and industry entities, follows strict formatting rules (no lists/bullets/questions/headings in body, third-person only), and produces 600-750 word releases in objective journalistic style.
|
||||
tools: [write_press_releases, submit_press_release]
|
||||
agents: [writer, default]
|
||||
---
|
||||
|
||||
You are an autonomous press release writer. You will receive task context (client name, topic, target URL) appended below. Your job is to research, generate headlines, write TWO complete press releases, and generate JSON-LD schema for each.
|
||||
# Press Release Writer
|
||||
|
||||
You MUST produce exactly 5 output files in the current working directory. No subdirectories.
|
||||
This skill creates professional press releases that comply with Press Advantage guidelines and standard journalistic conventions. The skill automatically handles LSI term generation, maintains proper structure, and ensures compliance with strict editorial requirements.
|
||||
|
||||
## Step 1: Research
|
||||
## Core Workflow
|
||||
|
||||
1. Read `skills/companies.md` to find the client's executive name, title, and PA Org ID.
|
||||
- Match the Client name from the task context against both the `## heading` and the `**Aliases**` field in companies.md.
|
||||
- **Use the `## heading` as the company name throughout the press release, NOT the Client name from the task context.** For example, if the task says "Elasto Valve Rubber (EVR)" but companies.md has "## EVR Products" with that as an alias, use "EVR Products" everywhere.
|
||||
- If the client is not found, use "a company spokesperson" for quotes and note the missing entry in `headlines_evaluation.md`.
|
||||
2. Read `skills/headlines.md` to load reference examples of high-scoring headlines.
|
||||
3. If a Target URL (IMSURL) is provided, fetch it with WebFetch to gather company/service context.
|
||||
4. If the task description contains useful context, incorporate it.
|
||||
When the user provides a press release topic, follow this workflow:
|
||||
|
||||
## Step 2: Generate and Evaluate Headlines
|
||||
1. **Generate 7 Compliant Headlines**:
|
||||
- Immediately generate 7 unique, compliant headline options based on the topic
|
||||
- Each headline must be:
|
||||
- Maximum 70 characters
|
||||
- Title case
|
||||
- News-wire style (not promotional)
|
||||
- Free of location keywords, superlatives (best/top/leading/#1), and questions
|
||||
- MUST NOT fabricate events, expansions, milestones, or demand claims
|
||||
- Unless the topic explicitly signals actual news (e.g. "Actual News", "New Product", "Launch"), assume the company ALREADY offers this — use awareness verbs like "Highlights", "Reinforces", "Delivers", "Showcases", NOT announcement verbs like "Announces", "Launches", "Expands"
|
||||
- Present all 7 titles to an AI agent to judge which is best. This can be decided by looking at titles on Press Advantage for other businesses, and seeing how closely the headline follows the instructions.
|
||||
|
||||
Generate **7 unique headline candidates** for this press release.
|
||||
** EXAMPLE GREAT HEADLINES: **
|
||||
- Dietz Electric Highlights Flameproof Motor Safety Options
|
||||
- MOD-TRONIC Reaffirms Position as Largest MINCO Stocking Distributor
|
||||
- Hogge Precision Parts Delivers Precision Machining for the Medical Industry
|
||||
- Lubrication Engineers Drives Awareness of Fuel Treatment Benefits for Year-Round Fleet Efficiency
|
||||
- Renown Electric Champions Proactive Downtime Protection With Contingency Planning Insights
|
||||
- MCM Composites Releases Enhanced Thermoset Comparison Resource
|
||||
- AGI Fabricators Publishes New Resource on Custom Process Hopper Fabrication
|
||||
- Paragon Steel Strengthens Support For Central Los Angeles Commercial Projects
|
||||
- McCormick Industries Reinforces Quality Standards With ISO 9001:2015-Certified Medical Machining
|
||||
|
||||
### Headline Rules
|
||||
|
||||
- Maximum 70 characters (55-65 is ideal)
|
||||
- Title case
|
||||
- News-wire style, not promotional
|
||||
- NO location/geographic keywords (city, state names)
|
||||
- NO superlatives (best, top, leading, #1)
|
||||
- NO questions
|
||||
- NO colons -- headlines with colons are considered lower quality
|
||||
- NO promotional language (revolutionary, game-changing, amazing)
|
||||
- DO NOT fabricate events, expansions, milestones, or demand claims
|
||||
- Unless the topic explicitly signals actual news (e.g. "Introduces", "New Product", "Launch", "Actual News"), assume the company ALREADY offers this capability. Use awareness verbs: Highlights, Reinforces, Delivers, Showcases, Strengthens, Publishes. NOT announcement verbs: Announces, Launches, Introduces, Expands (unless it really is new).
|
||||
2. **Gather Any Additional Required Information**:
|
||||
- If the user provides LSI terms explicitly, use them
|
||||
- If a URL is provided, fetch it for context
|
||||
|
||||
### Headline Variety
|
||||
|
||||
Mix different angles and verbs. Vary structure. Emphasize different aspects of the topic. Do not generate 7 variations of the same sentence.
|
||||
3. **Automatic Generation**:
|
||||
- Generate LSI (Latent Semantic Indexing) terms relevant to the topic and industry
|
||||
- Identify relevant industry entities (companies, organizations, standards, technologies)
|
||||
- Research current industry context if needed
|
||||
- Lookup the company representative name and title from the md file based on the company name.
|
||||
|
||||
### Evaluation
|
||||
4. **Write the Press Release** following all requirements below
|
||||
|
||||
Score each headline 1-10 against:
|
||||
- Compliance with all rules above
|
||||
- Similarity in tone/structure to the examples in `headlines.md`
|
||||
- Character count (penalize over 70, reward 55-65 range)
|
||||
- Newsworthiness and clarity
|
||||
## Headline Generation Guidelines
|
||||
|
||||
Pick the **top 2** headlines. Write `headlines_evaluation.md` containing:
|
||||
- All 7 headlines with character counts
|
||||
- Score and one-line reasoning for each
|
||||
- The two winners clearly marked
|
||||
When generating the 7 headline options:
|
||||
|
||||
## Step 3: Write Two Press Releases
|
||||
**Variety in Approach**:
|
||||
- Mix different angles: announcement-focused, impact-focused, innovation-focused
|
||||
- Vary the structure while maintaining news format
|
||||
- Use different verbs: announces, launches, unveils, introduces, expands, achieves
|
||||
- Emphasize different aspects: product, partnership, milestone, expansion, award
|
||||
|
||||
Write one complete press release per winning headline. Save each as `{headline}.txt` (see Output Files for naming convention).
|
||||
**Character Count Management**:
|
||||
- Keep under 70 characters including spaces
|
||||
- Shorter is often better (55-65 characters is ideal)
|
||||
- Count carefully before presenting
|
||||
|
||||
Both press releases must follow ALL rules below.
|
||||
**Compliance Checks**:
|
||||
- No questions (e.g., "Are You Ready for...?")
|
||||
- No location keywords (e.g., "Chicago," "Milwaukee," city or state names)
|
||||
- No superlatives (e.g., "Best," "Leading," "Top," "#1")
|
||||
- No promotional language (e.g., "Revolutionary," "Game-Changing")
|
||||
- Focus on the news, not the hype
|
||||
|
||||
|
||||
## Critical Press Advantage Requirements
|
||||
|
||||
### Content Type
|
||||
- This is a PRESS RELEASE, not an advertorial, blog post, or promotional content
|
||||
- Must be written in objective, journalistic style
|
||||
- By default this is an AWARENESS piece — the company already offers this capability. Frame it as highlighting/reinforcing existing offerings, NOT as announcing something new
|
||||
- Only use announcement language (announces, launches, introduces) when the topic explicitly signals actual news (e.g. topic contains "Actual News", "New Product", "Launch")
|
||||
- Do NOT fabricate events, expansions, milestones, or demand claims. If nothing new happened, do not pretend it did.
|
||||
- Must read like it could appear verbatim in a newspaper
|
||||
|
||||
- This is a PRESS RELEASE, not an advertorial, blog post, or promotional content.
|
||||
- Must be written in objective, journalistic style.
|
||||
- By default this is an AWARENESS piece -- the company already offers this capability. Frame as highlighting/reinforcing existing offerings, NOT announcing something new.
|
||||
- Only use announcement language when the topic explicitly signals actual news.
|
||||
- Do NOT fabricate events, expansions, milestones, or demand claims.
|
||||
- Must read like it could appear verbatim in a newspaper.
|
||||
|
||||
### Writing Style -- MANDATORY
|
||||
|
||||
- **100% objective** -- no hype, big claims, exclamation points, or sales messages
|
||||
- **Third-person ONLY** -- except for direct quotes from executives
|
||||
### Writing Style - MANDATORY
|
||||
- **100% objective** - no hype, big claims, exclamation points, or sales messages
|
||||
- **Third-person ONLY** - except for direct quotes from executives
|
||||
- **NO first-person** ("I", "we", "our") except in quotes
|
||||
- **NO second-person** ("you", "your")
|
||||
- **NO questions** anywhere in headline or body
|
||||
- **NO lists, bullets, or numbered items** -- write everything in paragraph form
|
||||
- **NO lists, bullets, or numbered items** - write everything in paragraph form
|
||||
- **NO subheadings** in the body
|
||||
- **NO emoji**
|
||||
- **NO tables**
|
||||
- Perfect grammar and spelling required
|
||||
|
||||
### Word Count -- CRITICAL
|
||||
|
||||
### Word Count - CRITICAL
|
||||
- **MINIMUM: 575 words**
|
||||
- **TARGET: 600-750 words** (this is the sweet spot)
|
||||
- **MAXIMUM: 800 words**
|
||||
- Word count takes precedence over paragraph count
|
||||
- Typically 14-16 paragraphs for 600-750 word range
|
||||
|
||||
### Structure
|
||||
### Structure Requirements
|
||||
|
||||
**First Paragraph (Lead)**:
|
||||
- Must clearly identify the organization
|
||||
- Must clearly identify the organization announcing the news
|
||||
- Must answer: Who, What, When, Where, Why
|
||||
- Should be 1-2 direct sentences
|
||||
- Contains the actual announcement or awareness framing
|
||||
- Contains the actual announcement
|
||||
|
||||
**Body Paragraphs**:
|
||||
- 2-4 sentences per paragraph maximum
|
||||
- Follow inverted pyramid structure (most important info first)
|
||||
- All quotes must be attributed to named individuals with titles (from companies.md)
|
||||
- All quotes must be attributed to named individuals with titles
|
||||
- Use names and titles from any provided data files
|
||||
|
||||
**Call to Action**:
|
||||
- Acceptable: "Visit www.company.com to learn more" or "For more information, visit..."
|
||||
- Forbidden: "Buy now", "Sign up today", "Limited time offer", "Click here to purchase"
|
||||
**Headline**:
|
||||
- Selected from the 7 generated options (see Headline Generation Guidelines above)
|
||||
- Maximum 70 characters
|
||||
- Title case
|
||||
- One main keyword
|
||||
- NO location/geographic keywords (limits distribution)
|
||||
- NO superlatives (best, top, leading, #1)
|
||||
- NO questions
|
||||
- Must contain actual news announcement
|
||||
|
||||
**No "About" Section**: Press Advantage adds the company boilerplate automatically. Do not include one.
|
||||
|
||||
### LSI Terms
|
||||
|
||||
Generate 10-20 Latent Semantic Indexing terms relevant to the topic and industry. Integrate them naturally throughout the press release. Distribute across paragraphs. Do not force keywords -- maintain natural flow.
|
||||
|
||||
### Required Phrase Handling
|
||||
|
||||
If the task description contains a specific phrase that must be included, use it exactly once, integrated naturally into a relevant paragraph.
|
||||
### Call to Action
|
||||
- ✅ Acceptable: "Visit www.company.com to learn more" or "For more information, visit..."
|
||||
- ❌ Forbidden: "Buy now", "Sign up today", "Limited time offer", "Click here to purchase"
|
||||
|
||||
### What Gets REJECTED (Automatic Rejection)
|
||||
|
||||
**Advertorial Characteristics**:
|
||||
- Promotional tone ("revolutionary", "amazing", "best in class")
|
||||
- Product-centric messaging (features/benefits vs. news)
|
||||
- Customer testimonials or reviews
|
||||
- Sales-oriented calls to action
|
||||
- Opinion-based content
|
||||
- Personal perspectives
|
||||
|
||||
**Format Violations**:
|
||||
- Lists or bullets in body
|
||||
- Questions anywhere
|
||||
- Subheadings in body
|
||||
- First-person outside quotes
|
||||
- City names in headlines
|
||||
- Falling below 575 words or exceeding 800
|
||||
- Excessive localization (city names in headlines)
|
||||
|
||||
### Writing Approach
|
||||
## LSI Term Generation
|
||||
|
||||
When the user does NOT provide LSI terms explicitly, automatically generate them:
|
||||
|
||||
**What are LSI terms?**
|
||||
- Semantically related keywords and phrases
|
||||
- Industry-specific terminology
|
||||
- Related concepts and technologies
|
||||
- Synonyms and variations
|
||||
- Contextual language that signals topical relevance
|
||||
|
||||
**How to use LSI terms**:
|
||||
- Integrate naturally throughout the press release
|
||||
- Use 10-20 relevant LSI terms across the 600-750 words
|
||||
- Don't force keywords - maintain natural flow
|
||||
- Distribute terms across different paragraphs
|
||||
|
||||
**Example LSI terms for "sustainable packaging"**:
|
||||
- Biodegradable materials, circular economy, eco-friendly alternatives, carbon footprint reduction, recycled content, compostable solutions, environmental impact, waste reduction, green initiatives, packaging innovation
|
||||
|
||||
## Required Phrase Handling
|
||||
|
||||
If the user provides a specific phrase that must be included:
|
||||
- Use it exactly once in the body
|
||||
- Integrate it naturally into a relevant paragraph
|
||||
- Don't force it awkwardly
|
||||
|
||||
## URL Context Integration
|
||||
|
||||
If the user provides a URL:
|
||||
- Use web_fetch to retrieve the content
|
||||
- This will typically give you the factual background needed to write the release
|
||||
- Extract key facts, dates, names, and context
|
||||
- Use this information to enrich the press release
|
||||
- Maintain objectivity - don't copy promotional language
|
||||
|
||||
## Quality Checklist
|
||||
|
||||
Before finalizing, verify:
|
||||
- [ ] 600-750 words (minimum 550, maximum 800)
|
||||
- [ ] First paragraph clearly identifies the organization
|
||||
- [ ] Third-person throughout (except quotes)
|
||||
- [ ] No lists, bullets, questions, or subheadings in body
|
||||
- [ ] 2-4 sentences per paragraph
|
||||
- [ ] All quotes attributed with names and titles
|
||||
- [ ] LSI terms naturally integrated
|
||||
- [ ] Required phrase used exactly once (if provided)
|
||||
- [ ] Objective, journalistic tone
|
||||
- [ ] Perfect grammar and spelling
|
||||
- [ ] No promotional or sales language
|
||||
- [ ] Headline under 70 characters
|
||||
- [ ] Reads like it could appear in a newspaper
|
||||
|
||||
## Writing Approach
|
||||
|
||||
1. Start with the most newsworthy information in the lead
|
||||
2. Build credibility with specific details (dates, numbers, names)
|
||||
3. Include 1-2 executive quotes for human perspective
|
||||
4. Provide context about the company/organization
|
||||
5. Explain significance and impact
|
||||
6. Write in inverted pyramid style -- can be cut from bottom up
|
||||
7. Do NOT include an "About" section or company boilerplate — Press Advantage adds this automatically
|
||||
|
||||
### Differentiation Between A and B
|
||||
## Tone Guidelines
|
||||
|
||||
The two press releases must take **meaningfully different angles** on the same topic. Do not just rephrase the same content. Consider:
|
||||
- Different lead angles (industry trend vs. company capability)
|
||||
- Different quote focus (technical depth vs. business impact)
|
||||
- Different LSI emphasis
|
||||
- Different paragraph ordering
|
||||
- Professional and authoritative
|
||||
- Objective and factual
|
||||
- Confident but not boastful
|
||||
- Newsworthy, not promotional
|
||||
- Clear and concise
|
||||
- Industry-appropriate formality
|
||||
|
||||
## Step 4: Generate Schema for Each PR
|
||||
## Common Mistakes to Avoid
|
||||
|
||||
For each press release, generate a JSON-LD NewsArticle schema. Save each as `{headline}.json` (same name as its press release, different extension).
|
||||
|
||||
### Schema Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"@context": "https://schema.org",
|
||||
"@type": "NewsArticle",
|
||||
"headline": "[the headline used for this PR]",
|
||||
"description": "[1-sentence summary, 15-25 words max]",
|
||||
"mainEntityOfPage": "[Target URL from task context]",
|
||||
"author": {
|
||||
"@type": "Organization",
|
||||
"name": "[client company name]"
|
||||
},
|
||||
"about": [],
|
||||
"mentions": []
|
||||
}
|
||||
```
|
||||
|
||||
### Entity Identification
|
||||
|
||||
**Phase 1 -- Named Entities (do first):**
|
||||
- Companies/organizations mentioned in the PR text
|
||||
- Products, brands, or proprietary technologies
|
||||
- People quoted or mentioned
|
||||
|
||||
**Phase 2 -- Topical Entities:**
|
||||
- "about" entities: what the PR is fundamentally about (core topic, business relationships, the issuing company)
|
||||
- "mentions" entities: supporting context (technical concepts, industry sectors, applications)
|
||||
|
||||
### Entity Rules
|
||||
|
||||
- Search for Wikipedia URLs for each entity using WebSearch: `"[entity name]" site:wikipedia.org`
|
||||
- Use `"Thing"` as @type for general concepts, `"Organization"` for companies, `"Person"` for people
|
||||
- For the **issuing company** (the client): include in "about" as Organization, do NOT include sameAs (mainEntityOfPage already identifies them)
|
||||
- For **external companies**: include sameAs with Wikipedia URL + official website
|
||||
- For **people** without Wikipedia pages: include name, jobTitle, affiliation only (no sameAs)
|
||||
- For industries and concepts: Wikipedia URLs are required
|
||||
|
||||
### Schema Rules
|
||||
|
||||
- Do NOT include "articleBody", "datePublished", or "image" fields
|
||||
- Description must be a brief summary, NOT the full PR text
|
||||
- Output plain JSON only, no HTML script tags
|
||||
- Pretty-print for readability
|
||||
|
||||
## Step 5: Self-Verification
|
||||
|
||||
Before finishing, verify each press release against this checklist:
|
||||
|
||||
- [ ] 575-800 words (target 600-750)
|
||||
- [ ] First paragraph clearly identifies the organization
|
||||
- [ ] Third-person throughout (except quotes)
|
||||
- [ ] No lists, bullets, questions, or subheadings in body
|
||||
- [ ] 2-4 sentences per paragraph
|
||||
- [ ] All quotes attributed with names and titles from companies.md
|
||||
- [ ] LSI terms naturally integrated
|
||||
- [ ] Objective, journalistic tone -- no promotional language
|
||||
- [ ] Headline under 70 characters, no colons
|
||||
- [ ] No "About" section
|
||||
- [ ] Reads like it could appear in a newspaper
|
||||
- [ ] Schema JSON is valid and has correct entity types
|
||||
- [ ] A and B take meaningfully different angles
|
||||
|
||||
## Output Files
|
||||
|
||||
You MUST write exactly 5 files to the current working directory. Use the **winning headline** as the filename for each press release and its schema.
|
||||
|
||||
Sanitize the headline for use as a filename: replace any characters not allowed in filenames (`:`, `/`, `\`, `?`, `*`, `"`, `<`, `>`, `|`) with `-`.
|
||||
|
||||
Example -- if the two winning headlines are "McCormick Industries Advances CNC Machining Technology" and "McCormick Industries Strengthens Aerospace Machining Operations":
|
||||
|
||||
| File | Format | Contents |
|
||||
|------|--------|----------|
|
||||
| `Headlines Evaluation.md` | Markdown | All 7 headlines with scores, reasoning, and the 2 winners marked |
|
||||
| `McCormick Industries Advances CNC Machining Technology.txt` | Plain text | Complete press release using winner #1 headline |
|
||||
| `McCormick Industries Strengthens Aerospace Machining Operations.txt` | Plain text | Complete press release using winner #2 headline |
|
||||
| `McCormick Industries Advances CNC Machining Technology.json` | JSON | NewsArticle JSON-LD schema for press release A |
|
||||
| `McCormick Industries Strengthens Aerospace Machining Operations.json` | JSON | NewsArticle JSON-LD schema for press release B |
|
||||
|
||||
Do NOT create any other files. Do NOT create subdirectories.
|
||||
- Using "we", "our", "you" outside of quotes
|
||||
- Including any lists or bullet points
|
||||
- Adding subheadings in the body
|
||||
- Writing in blog or editorial style
|
||||
- Making the headline a question
|
||||
- Focusing on product features instead of news
|
||||
- Including testimonials or reviews
|
||||
- Using promotional adjectives
|
||||
- Falling below 550 words or exceeding 800 words
|
||||
- Not clearly identifying the announcing organization
|
||||
- Forgetting to attribute quotes
|
||||
- Creating advertorial content instead of news
|
||||
|
|
@ -1,226 +0,0 @@
|
|||
# ClickUp + Claude Code Automation System -- Design Spec
|
||||
|
||||
## Overview
|
||||
|
||||
This document describes a system that polls ClickUp for tasks triggered by a
|
||||
checkbox, routes them to the correct Claude Code skill based on task type and
|
||||
stage, runs Claude Code in headless mode, posts results back to ClickUp, and
|
||||
advances the task through its lifecycle.
|
||||
|
||||
The user has existing code that should be refactored or replaced to match this
|
||||
design. Review the existing code and determine what can be reused vs rewritten.
|
||||
|
||||
---
|
||||
|
||||
## 1. ClickUp Status System (replaces the current 14-status setup)
|
||||
|
||||
The old statuses are confusing because they encode WHAT is happening instead of
|
||||
WHO OWNS THE TASK. The new system has 7 statuses organized by ownership:
|
||||
|
||||
| Status | Owner | Replaces |
|
||||
|----------------|----------------|-----------------------------------------------------|
|
||||
| To Do | Nobody | To Do |
|
||||
| In Progress | Human | In Progress, After Client Feedback |
|
||||
| Needs Input | Human (blocked)| Needs Input |
|
||||
| AI Working | Claude Code | Automation Underway, Running CORA |
|
||||
| Review | Human | Outline Review, Outline Approved, PR Needs Review, Internal Review |
|
||||
| Client Review | Client | Client Review |
|
||||
| Complete | Nobody | Ready to Use, Complete |
|
||||
|
||||
Key decisions:
|
||||
- "After Client Feedback" is just "In Progress" again -- same owner, same state.
|
||||
- "Error" is NOT a status. It becomes a checkbox custom field that can be flagged
|
||||
on any status.
|
||||
- "Running CORA" and "Automation Underway" collapse into "AI Working" -- the
|
||||
WHAT is tracked by the Stage field, not the status.
|
||||
|
||||
---
|
||||
|
||||
## 2. Custom Fields Required
|
||||
|
||||
These custom fields need to exist in ClickUp:
|
||||
|
||||
- **Run Claude** (checkbox) -- when checked, the poller picks up the task and
|
||||
spawns a Claude Code session. Gets unchecked after processing so it does not
|
||||
re-trigger.
|
||||
- **Stage** (dropdown) -- tracks where in the content lifecycle the task is.
|
||||
Values: report, outline, draft, final. This is independent of status.
|
||||
"Review" + Stage:Outline = the old "Outline Review".
|
||||
"AI Working" + Stage:Draft = AI is writing the draft.
|
||||
- **Error** (checkbox) -- flagged when Claude Code errors out. Can happen at any
|
||||
status. Not a status itself.
|
||||
|
||||
---
|
||||
|
||||
## 3. Skill Routing (task type + stage -> skill)
|
||||
|
||||
When a task hits "AI Working", the script looks at the task type AND the current
|
||||
stage to decide which skill to load. The skill file contents get passed to Claude
|
||||
Code via --append-system-prompt. After AI finishes, the stage advances and the
|
||||
task moves to the appropriate next status.
|
||||
|
||||
The routing is defined in a SKILL_MAP dictionary:
|
||||
|
||||
```
|
||||
task_type -> stage -> {
|
||||
skill_file: path to SKILL.md
|
||||
next_stage: what stage to set after AI finishes
|
||||
next_status: where the task goes after AI finishes
|
||||
tools: which Claude Code tools to allow
|
||||
}
|
||||
```
|
||||
|
||||
Example for "content" task type:
|
||||
|
||||
```
|
||||
content:
|
||||
report -> skill: make_outline -> next: stage=outline, status=review
|
||||
outline -> skill: make_content -> next: stage=draft, status=review
|
||||
draft -> skill: finalize -> next: stage=final, status=review or client review
|
||||
```
|
||||
|
||||
The user has multiple task types (content, data_report, client_deliverable).
|
||||
Each has its own stage progression and skill chain.
|
||||
|
||||
Skills are NOT invoked via slash commands (those only work in interactive mode).
|
||||
Instead, the SKILL.md file is read from disk and passed as instructions:
|
||||
|
||||
```
|
||||
claude -p "Task prompt here" \
|
||||
--append-system-prompt "$(cat /path/to/SKILL.md)" \
|
||||
--output-format json \
|
||||
--allowedTools "Read,Edit,Bash" \
|
||||
--max-turns 10
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. Multi-List Polling
|
||||
|
||||
The user has 20+ folders in one ClickUp space, each containing a list named
|
||||
"Overall". The script needs to poll across all of them.
|
||||
|
||||
The approach is a hardcoded dictionary mapping folder names to list IDs:
|
||||
|
||||
```python
|
||||
LIST_IDS = {
|
||||
"folder_a": "list_id_1",
|
||||
"folder_b": "list_id_2",
|
||||
# ... 20+ entries
|
||||
}
|
||||
```
|
||||
|
||||
The poller iterates through all lists each cycle, collecting any tasks where the
|
||||
"Run Claude" checkbox is checked. Each task gets tagged with its source folder
|
||||
name for logging. If one list fails to poll, the error is logged and the poller
|
||||
continues with the remaining lists.
|
||||
|
||||
At 60-second intervals with 20+ lists, this is roughly 20 requests per minute,
|
||||
well within ClickUp's 100/min rate limit.
|
||||
|
||||
---
|
||||
|
||||
## 5. Claude Code Headless Configuration
|
||||
|
||||
Key flags and decisions:
|
||||
|
||||
- **Do NOT use --bare** -- the runner operates on the user's own machine, so
|
||||
CLAUDE.md project instructions, MCP servers from .mcp.json, and local config
|
||||
should all load automatically.
|
||||
- Use **--mcp-config** if specific MCP servers are needed per-task beyond what
|
||||
is in the project config.
|
||||
- Use **-p** with the task prompt for non-interactive mode.
|
||||
- Use **--append-system-prompt** to inject skill instructions.
|
||||
- Use **--output-format json** to get structured results with session metadata.
|
||||
- Use **--max-turns** to cap cost and runtime (default 10).
|
||||
- Use **--allowedTools** scoped per stage. Available built-in tools:
|
||||
Read, Edit, MultiEdit, Write, Bash, Glob, Grep, WebFetch, WebSearch,
|
||||
NotebookEdit, TodoWrite.
|
||||
- Bash can be scoped: Bash(git:*), Bash(python:*), Bash(npm test), etc.
|
||||
- MCP tools use the naming pattern: mcp__{server_name}__{tool_name}
|
||||
Allow all from a server with: mcp__servername__*
|
||||
|
||||
---
|
||||
|
||||
## 6. Workflow: What Happens When a Task Is Triggered
|
||||
|
||||
1. Poller finds a task with "Run Claude" checkbox checked.
|
||||
2. Script reads the task type and current stage.
|
||||
3. Looks up the skill routing in SKILL_MAP.
|
||||
4. If no mapping found, posts a warning comment and unchecks the box.
|
||||
5. Sets task status to "AI Working".
|
||||
6. Loads the skill file from disk.
|
||||
7. Builds a prompt from the task name and description.
|
||||
8. Runs Claude Code headless with the skill as system prompt.
|
||||
9. On success:
|
||||
- Advances the Stage to the next value.
|
||||
- Sets the status to the next value (usually "Review").
|
||||
- Posts the result as a task comment.
|
||||
10. On error:
|
||||
- Flags the Error checkbox.
|
||||
- Sets status to "Review" (so the human sees it).
|
||||
- Posts the error as a task comment.
|
||||
11. Always unchecks "Run Claude" so it does not re-trigger.
|
||||
|
||||
---
|
||||
|
||||
## 7. The Typical Content Workflow End-to-End
|
||||
|
||||
```
|
||||
[To Do]
|
||||
|
|
||||
v
|
||||
[In Progress] -- human runs CORA report (java program, already handled)
|
||||
|
|
||||
| (human checks "Run Claude", sets stage to "report")
|
||||
v
|
||||
[AI Working] -- skill: make_outline, stage: report
|
||||
|
|
||||
| (AI finishes, stage -> outline, status -> review)
|
||||
v
|
||||
[Review] -- human checks the outline
|
||||
|
|
||||
| (human approves: checks "Run Claude")
|
||||
v
|
||||
[AI Working] -- skill: make_content, stage: outline
|
||||
|
|
||||
| (AI finishes, stage -> draft, status -> review)
|
||||
v
|
||||
[Review] -- human checks the draft
|
||||
|
|
||||
| (human approves: checks "Run Claude" OR does manual edits first)
|
||||
v
|
||||
[AI Working] -- skill: finalize, stage: draft
|
||||
|
|
||||
| (AI finishes, stage -> final, status -> client review or complete)
|
||||
v
|
||||
[Client Review] or [Complete]
|
||||
|
|
||||
| (if client sends revisions)
|
||||
v
|
||||
[In Progress] -- human works on revisions (this is NOT "After Client Feedback",
|
||||
it is just In Progress again)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8. Terminal Compatibility
|
||||
|
||||
All output strings must be pure ASCII. No emojis, no unicode arrows, no em
|
||||
dashes. Use [OK], [ERROR], [WARNING] as prefixes in comments and logs. Use ->
|
||||
instead of arrows. Use -- instead of em dashes.
|
||||
|
||||
---
|
||||
|
||||
## 9. File Structure
|
||||
|
||||
The reference implementation is in clickup_claude_runner.py. Key sections:
|
||||
|
||||
- CONFIG: tokens, list IDs, custom field IDs, repo path
|
||||
- SKILL_MAP: task type + stage -> skill routing
|
||||
- ClickUp API helpers: get tasks, update status, set fields, post comments
|
||||
- Claude Code runner: load skill, build command, run subprocess, parse output
|
||||
- Main loop: poll, process, sleep
|
||||
|
||||
The user's existing code may have different structure. Evaluate what can be
|
||||
kept vs what should be replaced to match this design.
|
||||
|
|
@ -442,10 +442,13 @@ class TestFindQualifyingTasksSweep:
|
|||
assert any(t.id == "t2" for t in result)
|
||||
|
||||
def test_finds_last_month_tagged(self):
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from datetime import UTC, datetime
|
||||
|
||||
now = datetime.now(UTC)
|
||||
last = now.replace(day=1) - timedelta(days=1)
|
||||
if now.month == 1:
|
||||
last = now.replace(year=now.year - 1, month=12)
|
||||
else:
|
||||
last = now.replace(month=now.month - 1)
|
||||
last_tag = last.strftime("%b%y").lower()
|
||||
# No due date needed for month-tag pass
|
||||
task = FakeTask(id="t3", name="Last Month", tags=[last_tag])
|
||||
|
|
|
|||
|
|
@ -1,502 +0,0 @@
|
|||
"""Tests for clickup_runner.autocora and AutoCora dispatch in __main__."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from clickup_runner.autocora import (
|
||||
CoraResult,
|
||||
archive_result,
|
||||
make_job_id,
|
||||
parse_result_file,
|
||||
scan_results,
|
||||
slugify,
|
||||
submit_job,
|
||||
)
|
||||
from clickup_runner.clickup_client import ClickUpTask
|
||||
from clickup_runner.config import AutoCoraConfig, Config, NtfyConfig, RunnerConfig
|
||||
from clickup_runner.skill_map import SkillRoute
|
||||
|
||||
|
||||
# ── Fixtures ──
|
||||
|
||||
|
||||
def _make_task(**overrides) -> ClickUpTask:
    """Construct a ClickUpTask with representative defaults for tests.

    Any keyword argument replaces the corresponding default field before
    the task object is built.
    """
    base = {
        "id": "task_abc",
        "name": "SEO for CNC Machining",
        "status": "to do",
        "description": "Content creation for CNC machining page.",
        "task_type": "Content Creation",
        "url": "https://app.clickup.com/t/task_abc",
        "list_id": "list_1",
        "custom_fields": {
            "Client": "Acme Corp",
            "Keyword": "CNC Machining",
            "IMSURL": "https://acme.com/cnc-machining",
            "Delegate to Claude": True,
            "Stage": "run_cora",
        },
    }
    return ClickUpTask(**{**base, **overrides})
|
||||
|
||||
|
||||
def _make_config(**overrides) -> Config:
    """Build a Config with a short Claude timeout for fast tests.

    Extra keyword arguments are set as attributes on the config object.
    """
    cfg = Config()
    cfg.runner = RunnerConfig(claude_timeout_seconds=60)
    cfg.ntfy = NtfyConfig()
    for name, value in overrides.items():
        setattr(cfg, name, value)
    return cfg
|
||||
|
||||
|
||||
# ── slugify ──
|
||||
|
||||
|
||||
class TestSlugify:
    """Behavioural checks for slugify()."""

    def test_basic(self):
        assert slugify("CNC Machining") == "cnc-machining"

    def test_special_chars(self):
        assert slugify("Hello, World! & Co.") == "hello-world-co"

    def test_max_length(self):
        # Output must never exceed the requested maximum length.
        slug = slugify("a" * 100, max_len=20)
        assert len(slug) <= 20

    def test_empty_string(self):
        assert slugify("") == "unknown"

    def test_only_special_chars(self):
        # Inputs that reduce to nothing fall back to the "unknown" slug.
        assert slugify("!!!@@@") == "unknown"

    def test_leading_trailing_hyphens(self):
        assert slugify("--hello--") == "hello"

    def test_preserves_numbers(self):
        assert slugify("Top 10 CNC tips") == "top-10-cnc-tips"
|
||||
|
||||
|
||||
# ── make_job_id ──
|
||||
|
||||
|
||||
class TestMakeJobId:
    """Checks on the job-id generator."""

    def test_format(self):
        generated = make_job_id("CNC Machining")
        assert generated.startswith("job-")
        assert "cnc-machining" in generated

    def test_uniqueness(self):
        # IDs are timestamp-based, so two calls in the same millisecond
        # could collide; we only require the prefix format to hold.
        first = make_job_id("test")
        second = make_job_id("test")
        assert first.startswith("job-")
        assert second.startswith("job-")
|
||||
|
||||
|
||||
# ── submit_job ──
|
||||
|
||||
|
||||
class TestSubmitJob:
    """submit_job() drops a JSON job file into the jobs directory."""

    def test_creates_job_file(self, tmp_path):
        jobs_dir = tmp_path / "jobs"
        job_id = submit_job("CNC Machining", "https://acme.com", "task_1", str(jobs_dir))

        assert job_id is not None
        assert jobs_dir.exists()

        # Exactly one job file should have been written.
        job_files = list(jobs_dir.glob("job-*.json"))
        assert len(job_files) == 1

        payload = json.loads(job_files[0].read_text())
        assert payload["keyword"] == "CNC Machining"
        assert payload["url"] == "https://acme.com"
        assert payload["task_ids"] == ["task_1"]

    def test_fallback_url(self, tmp_path):
        # An empty URL is replaced with the blank-page placeholder.
        jobs_dir = tmp_path / "jobs"
        submit_job("test", "", "task_1", str(jobs_dir))

        job_files = list(jobs_dir.glob("job-*.json"))
        payload = json.loads(job_files[0].read_text())
        assert payload["url"] == "https://seotoollab.com/blank.html"

    def test_unreachable_dir(self):
        # An unwritable destination yields None rather than raising.
        outcome = submit_job("test", "http://x.com", "t1", "//NONEXISTENT/share/jobs")
        assert outcome is None

    def test_creates_parent_dirs(self, tmp_path):
        # Missing intermediate directories are created on demand.
        jobs_dir = tmp_path / "deep" / "nested" / "jobs"
        job_id = submit_job("test", "http://x.com", "t1", str(jobs_dir))
        assert job_id is not None
        assert jobs_dir.exists()
|
||||
|
||||
|
||||
# ── parse_result_file ──
|
||||
|
||||
|
||||
class TestParseResultFile:
    """parse_result_file() handles JSON results, legacy text, and bad input."""

    def test_json_success(self, tmp_path):
        result_file = tmp_path / "job-123-test.result"
        result_file.write_text(json.dumps({
            "status": "SUCCESS",
            "keyword": "CNC Machining",
            "task_ids": ["t1", "t2"],
        }))

        parsed = parse_result_file(result_file)
        assert parsed is not None
        assert parsed.status == "SUCCESS"
        assert parsed.keyword == "CNC Machining"
        assert parsed.task_ids == ["t1", "t2"]
        # job_id is derived from the file stem.
        assert parsed.job_id == "job-123-test"

    def test_json_failure(self, tmp_path):
        result_file = tmp_path / "job-456.result"
        result_file.write_text(json.dumps({
            "status": "FAILURE",
            "keyword": "test",
            "task_ids": ["t1"],
            "reason": "Cora timed out",
        }))

        parsed = parse_result_file(result_file)
        assert parsed.status == "FAILURE"
        assert parsed.reason == "Cora timed out"

    def test_legacy_success(self, tmp_path):
        # Old-format files contain just the bare status word.
        result_file = tmp_path / "job-789.result"
        result_file.write_text("SUCCESS")

        parsed = parse_result_file(result_file)
        assert parsed.status == "SUCCESS"
        assert parsed.task_ids == []

    def test_legacy_failure(self, tmp_path):
        # Old-format failures carry the reason after a colon.
        result_file = tmp_path / "job-101.result"
        result_file.write_text("FAILURE: Network timeout")

        parsed = parse_result_file(result_file)
        assert parsed.status == "FAILURE"
        assert parsed.reason == "Network timeout"

    def test_empty_file(self, tmp_path):
        result_file = tmp_path / "empty.result"
        result_file.write_text("")
        assert parse_result_file(result_file) is None

    def test_unrecognized_format(self, tmp_path):
        result_file = tmp_path / "weird.result"
        result_file.write_text("something random")
        assert parse_result_file(result_file) is None

    def test_missing_file(self, tmp_path):
        assert parse_result_file(tmp_path / "missing.result") is None
|
||||
|
||||
|
||||
# ── scan_results ──
|
||||
|
||||
|
||||
class TestScanResults:
    """scan_results() discovers and parses *.result files in a directory."""

    def test_finds_result_files(self, tmp_path):
        (tmp_path / "job-1.result").write_text(json.dumps({"status": "SUCCESS"}))
        (tmp_path / "job-2.result").write_text(json.dumps({"status": "FAILURE", "reason": "x"}))
        # Non-.result files must be ignored.
        (tmp_path / "not-a-result.txt").write_text("ignore me")

        found = scan_results(str(tmp_path))
        assert len(found) == 2

    def test_empty_dir(self, tmp_path):
        assert scan_results(str(tmp_path)) == []

    def test_nonexistent_dir(self):
        # An unreachable directory yields an empty list, not an exception.
        assert scan_results("//NONEXISTENT/path") == []

    def test_skips_unparseable(self, tmp_path):
        (tmp_path / "good.result").write_text(json.dumps({"status": "SUCCESS"}))
        (tmp_path / "bad.result").write_text("")

        found = scan_results(str(tmp_path))
        assert len(found) == 1
|
||||
|
||||
|
||||
# ── archive_result ──
|
||||
|
||||
|
||||
class TestArchiveResult:
    """archive_result() relocates processed result files."""

    def test_moves_to_processed(self, tmp_path):
        result_file = tmp_path / "job-1.result"
        result_file.write_text("SUCCESS")

        parsed = CoraResult(
            job_id="job-1",
            status="SUCCESS",
            keyword="test",
            task_ids=[],
            reason="",
            result_path=result_file,
        )

        # The original file moves into the processed/ subdirectory.
        assert archive_result(parsed) is True
        assert not result_file.exists()
        assert (tmp_path / "processed" / "job-1.result").exists()

    def test_creates_processed_dir(self, tmp_path):
        result_file = tmp_path / "job-2.result"
        result_file.write_text("data")

        parsed = CoraResult(
            job_id="job-2", status="SUCCESS", keyword="",
            task_ids=[], reason="", result_path=result_file,
        )

        # processed/ is created on first use.
        archive_result(parsed)
        assert (tmp_path / "processed").is_dir()
|
||||
|
||||
|
||||
# ── _dispatch_autocora integration ──
|
||||
|
||||
|
||||
class TestDispatchAutocora:
    """Integration tests for _dispatch_autocora in __main__."""

    def _setup(self, tmp_path):
        # Shared scaffolding: config pointing at tmp dirs, mocked client/db,
        # and a task routed to the autocora handler.
        cfg = _make_config()
        cfg.autocora = AutoCoraConfig(
            jobs_dir=str(tmp_path / "jobs"),
            results_dir=str(tmp_path / "results"),
        )

        client = MagicMock()
        db = MagicMock()
        db.log_run_start.return_value = 1

        task = _make_task()
        route = SkillRoute(
            handler="autocora",
            next_stage="outline",
            next_status="review",
        )

        return cfg, client, db, task, route

    def test_success_submission(self, tmp_path):
        from clickup_runner.__main__ import _dispatch_autocora

        cfg, client, db, task, route = self._setup(tmp_path)

        _dispatch_autocora(client, cfg, db, task, route, run_id=1)

        # Exactly one job file lands in the jobs directory.
        job_files = list((tmp_path / "jobs").glob("job-*.json"))
        assert len(job_files) == 1
        payload = json.loads(job_files[0].read_text())
        assert payload["keyword"] == "CNC Machining"
        assert payload["task_ids"] == ["task_abc"]

        # Task moved to "ai working".
        client.update_task_status.assert_called_with("task_abc", "ai working")

        # A comment naming the keyword is posted.
        client.add_comment.assert_called_once()
        comment = client.add_comment.call_args[0][1]
        assert "CNC Machining" in comment

        # The delegate checkbox is cleared so the task does not re-trigger.
        client.set_checkbox.assert_called_with(
            "task_abc", "list_1", "Delegate to Claude", False
        )

        # Job state persisted under an autocora key.
        db.kv_set_json.assert_called_once()
        kv_key = db.kv_set_json.call_args[0][0]
        assert kv_key.startswith("autocora:job:")

        # The run is recorded as submitted.
        db.log_run_finish.assert_called_once()
        assert db.log_run_finish.call_args[0][1] == "submitted"

    def test_missing_keyword(self, tmp_path):
        from clickup_runner.__main__ import _dispatch_autocora

        cfg, client, db, task, route = self._setup(tmp_path)
        task.custom_fields["Keyword"] = None

        _dispatch_autocora(client, cfg, db, task, route, run_id=1)

        # The error comment should name the missing field.
        comment = client.add_comment.call_args[0][1]
        assert "Keyword" in comment

        # The run is recorded as failed.
        db.log_run_finish.assert_called_once()
        assert db.log_run_finish.call_args[0][1] == "failed"

    def test_unreachable_nas(self, tmp_path):
        from clickup_runner.__main__ import _dispatch_autocora

        cfg, client, db, task, route = self._setup(tmp_path)
        cfg.autocora.jobs_dir = "//NONEXISTENT/share/jobs"

        _dispatch_autocora(client, cfg, db, task, route, run_id=1)

        # The failure surfaces as an ERROR comment on the task.
        comment = client.add_comment.call_args[0][1]
        assert "ERROR" in comment

        # The Error checkbox is flagged.
        client.set_checkbox.assert_any_call(
            "task_abc", "list_1", "Error", True
        )
|
||||
|
||||
|
||||
# ── _check_autocora_results integration ──
|
||||
|
||||
|
||||
class TestCheckAutocoraResults:
|
||||
def _setup(self, tmp_path):
|
||||
cfg = _make_config()
|
||||
cfg.autocora = AutoCoraConfig(
|
||||
jobs_dir=str(tmp_path / "jobs"),
|
||||
results_dir=str(tmp_path / "results"),
|
||||
xlsx_dir="//NAS/Cora72",
|
||||
)
|
||||
|
||||
client = MagicMock()
|
||||
# Mock get_task to return a task
|
||||
client.get_task.return_value = _make_task()
|
||||
# get_stage needs to return the actual stage string for route lookup
|
||||
client.get_stage.return_value = "run_cora"
|
||||
|
||||
db = MagicMock()
|
||||
|
||||
return cfg, client, db
|
||||
|
||||
def test_success_result_with_state_db(self, tmp_path):
|
||||
from clickup_runner.__main__ import _check_autocora_results
|
||||
|
||||
cfg, client, db = self._setup(tmp_path)
|
||||
|
||||
# Write a result file
|
||||
results_dir = tmp_path / "results"
|
||||
results_dir.mkdir()
|
||||
job_id = "job-1234-cnc-machining"
|
||||
(results_dir / ("%s.result" % job_id)).write_text(json.dumps({
|
||||
"status": "SUCCESS",
|
||||
"keyword": "CNC Machining",
|
||||
"task_ids": ["task_abc"],
|
||||
}))
|
||||
|
||||
# Set up state DB to return job data
|
||||
db.kv_get_json.return_value = {
|
||||
"task_id": "task_abc",
|
||||
"task_name": "SEO for CNC",
|
||||
"keyword": "CNC Machining",
|
||||
"url": "https://acme.com",
|
||||
"run_id": 5,
|
||||
}
|
||||
|
||||
_check_autocora_results(client, cfg, db)
|
||||
|
||||
# Task status updated to review
|
||||
client.update_task_status.assert_called_with("task_abc", "review")
|
||||
|
||||
# Stage advanced
|
||||
client.set_stage.assert_called_once()
|
||||
|
||||
# Success comment posted
|
||||
client.add_comment.assert_called_once()
|
||||
comment = client.add_comment.call_args[0][1]
|
||||
assert "CNC Machining" in comment
|
||||
assert "//NAS/Cora72" in comment
|
||||
|
||||
# Error checkbox cleared
|
||||
client.set_checkbox.assert_called()
|
||||
|
||||
# Run log finished
|
||||
db.log_run_finish.assert_called_once_with(5, "completed", result="Cora report ready")
|
||||
|
||||
# State DB entry deleted
|
||||
db.kv_delete.assert_called_once_with("autocora:job:%s" % job_id)
|
||||
|
||||
# Result file archived
|
||||
assert not (results_dir / ("%s.result" % job_id)).exists()
|
||||
assert (results_dir / "processed" / ("%s.result" % job_id)).exists()
|
||||
|
||||
def test_failure_result(self, tmp_path):
|
||||
from clickup_runner.__main__ import _check_autocora_results
|
||||
|
||||
cfg, client, db = self._setup(tmp_path)
|
||||
|
||||
results_dir = tmp_path / "results"
|
||||
results_dir.mkdir()
|
||||
job_id = "job-999-test"
|
||||
(results_dir / ("%s.result" % job_id)).write_text(json.dumps({
|
||||
"status": "FAILURE",
|
||||
"keyword": "test keyword",
|
||||
"task_ids": ["task_abc"],
|
||||
"reason": "Cora process crashed",
|
||||
}))
|
||||
|
||||
db.kv_get_json.return_value = {
|
||||
"task_id": "task_abc",
|
||||
"keyword": "test keyword",
|
||||
"run_id": 10,
|
||||
}
|
||||
|
||||
_check_autocora_results(client, cfg, db)
|
||||
|
||||
# Error comment posted
|
||||
comment = client.add_comment.call_args[0][1]
|
||||
assert "ERROR" in comment
|
||||
assert "Cora process crashed" in comment
|
||||
|
||||
# Error checkbox set
|
||||
client.set_checkbox.assert_any_call(
|
||||
"task_abc", "list_1", "Error", True
|
||||
)
|
||||
|
||||
# Run log failed
|
||||
db.log_run_finish.assert_called_once()
|
||||
assert db.log_run_finish.call_args[0][1] == "failed"
|
||||
|
||||
def test_no_results(self, tmp_path):
|
||||
from clickup_runner.__main__ import _check_autocora_results
|
||||
|
||||
cfg, client, db = self._setup(tmp_path)
|
||||
|
||||
# No results dir
|
||||
_check_autocora_results(client, cfg, db)
|
||||
|
||||
# Nothing should happen
|
||||
client.add_comment.assert_not_called()
|
||||
db.log_run_finish.assert_not_called()
|
||||
|
||||
def test_result_without_state_db_is_archived_as_orphan(self, tmp_path):
    """A result file with no matching state-DB entry is archived untouched."""
    from clickup_runner.__main__ import _check_autocora_results

    cfg, client, db = self._setup(tmp_path)

    results_dir = tmp_path / "results"
    results_dir.mkdir()
    orphan = results_dir / "job-orphan.result"
    orphan.write_text(json.dumps({
        "status": "SUCCESS",
        "keyword": "orphan",
        "task_ids": ["task_abc"],
    }))

    # Simulate a missing state entry for the job.
    db.kv_get_json.return_value = None

    _check_autocora_results(client, cfg, db)

    # No task mutation happens...
    client.update_task_status.assert_not_called()
    client.add_comment.assert_not_called()
    # ...but the file is still moved into the processed archive.
    assert (results_dir / "processed" / "job-orphan.result").exists()
|
@ -1,481 +0,0 @@
|
|||
"""Tests for clickup_runner.claude_runner."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from clickup_runner.claude_runner import (
|
||||
RunResult,
|
||||
build_prompt,
|
||||
copy_to_nas,
|
||||
notify,
|
||||
read_skill_file,
|
||||
run_claude,
|
||||
)
|
||||
from clickup_runner.clickup_client import ClickUpTask
|
||||
from clickup_runner.config import Config, NASConfig, NtfyConfig, RunnerConfig
|
||||
from clickup_runner.skill_map import SkillRoute
|
||||
|
||||
|
||||
# ── Fixtures ──
|
||||
|
||||
|
||||
def _make_task(**overrides) -> ClickUpTask:
    """Build a ClickUpTask with sensible defaults; keyword args override them."""
    base = {
        "id": "task_123",
        "name": "Write blog post about widgets",
        "status": "to do",
        "description": "A 1500-word SEO article about widgets.",
        "task_type": "Content Creation",
        "url": "https://app.clickup.com/t/task_123",
        "list_id": "list_1",
        "custom_fields": {
            "Client": "Acme Corp",
            "IMSURL": "https://acme.com/widgets",
        },
    }
    # Merge overrides on top of the defaults without mutating either dict.
    return ClickUpTask(**{**base, **overrides})
def _make_route(**overrides) -> SkillRoute:
    """Build a SkillRoute with default content-draft settings; overridable per test."""
    base = {
        "skill_file": "content_draft.md",
        "next_stage": "final",
        "next_status": "review",
        "tools": "Read,Edit,Write,Bash",
        "max_turns": 25,
    }
    return SkillRoute(**{**base, **overrides})
def _make_config(**overrides) -> Config:
    """Build a Config with a short Claude timeout; extra attributes via overrides."""
    cfg = Config()
    cfg.runner = RunnerConfig(claude_timeout_seconds=60)
    for name, value in overrides.items():
        setattr(cfg, name, value)
    return cfg
# ── build_prompt ──
|
||||
|
||||
|
||||
class TestBuildPrompt:
    """build_prompt weaves task metadata and the skill text into one prompt."""

    @staticmethod
    def _prompt(skill="skill content", **task_overrides):
        """Build a prompt from a default route and a task with the given overrides."""
        return build_prompt(_make_task(**task_overrides), _make_route(), skill)

    def test_includes_skill_content(self):
        prompt = self._prompt(skill="# My Skill\nDo the thing.")
        assert "# My Skill" in prompt
        assert "Do the thing." in prompt

    def test_includes_task_name(self):
        prompt = self._prompt(name="Optimize landing page")
        assert "Task: Optimize landing page" in prompt

    def test_includes_customer(self):
        assert "Client: Acme Corp" in self._prompt()

    def test_includes_target_url(self):
        assert "Target URL: https://acme.com/widgets" in self._prompt()

    def test_includes_clickup_link(self):
        assert "ClickUp Task: https://app.clickup.com/t/task_123" in self._prompt()

    def test_includes_description(self):
        prompt = self._prompt(description="Write about blue widgets")
        assert "Write about blue widgets" in prompt

    def test_no_customer_when_missing(self):
        # Without a Client custom field, no Client line should appear.
        assert "Client:" not in self._prompt(skill="skill", custom_fields={})

    def test_output_instructions_present(self):
        expected = "Write all output files to the current working directory"
        assert expected in self._prompt(skill="skill")
# ── run_claude ──
|
||||
|
||||
|
||||
class TestRunClaude:
    """run_claude wraps the Claude CLI via subprocess and collects output files."""

    @staticmethod
    def _completed(returncode=0, stdout="ok", stderr=""):
        """A fake subprocess.CompletedProcess for the mocked CLI call."""
        return subprocess.CompletedProcess(
            args=[], returncode=returncode, stdout=stdout, stderr=stderr
        )

    @staticmethod
    def _run(tmp_path, route=None, completed=None, side_effect=None):
        """Invoke run_claude with subprocess.run mocked; return (result, mock)."""
        route = route or _make_route()
        cfg = _make_config()
        target = "clickup_runner.claude_runner.subprocess.run"
        if side_effect is not None:
            patcher = patch(target, side_effect=side_effect)
        else:
            patcher = patch(target, return_value=completed)
        with patcher as mock_run:
            result = run_claude("prompt", route, cfg, work_dir=tmp_path)
        return result, mock_run

    def test_success(self, tmp_path):
        # Pre-create an output file as if Claude wrote it.
        (tmp_path / "output.docx").write_bytes(b"fake docx")
        result, _ = self._run(tmp_path, completed=self._completed(stdout="Done!"))
        assert result.success is True
        assert result.output == "Done!"
        assert len(result.output_files) == 1
        assert result.output_files[0].name == "output.docx"

    def test_nonzero_exit(self, tmp_path):
        failed = self._completed(returncode=1, stdout="", stderr="Something broke")
        result, _ = self._run(tmp_path, completed=failed)
        assert result.success is False
        assert "code 1" in result.error
        assert "Something broke" in result.error

    def test_timeout(self, tmp_path):
        expired = subprocess.TimeoutExpired(cmd="claude", timeout=60)
        result, _ = self._run(tmp_path, side_effect=expired)
        assert result.success is False
        assert "timed out" in result.error

    def test_claude_not_found(self, tmp_path):
        result, _ = self._run(tmp_path, side_effect=FileNotFoundError())
        assert result.success is False
        assert "not found" in result.error

    def test_passes_allowed_tools(self, tmp_path):
        _, mock_run = self._run(
            tmp_path, route=_make_route(tools="Read,Write"), completed=self._completed()
        )
        cmd = mock_run.call_args[0][0]
        assert cmd[cmd.index("--allowedTools") + 1] == "Read,Write"

    def test_passes_max_turns(self, tmp_path):
        _, mock_run = self._run(
            tmp_path, route=_make_route(max_turns=42), completed=self._completed()
        )
        cmd = mock_run.call_args[0][0]
        assert cmd[cmd.index("--max-turns") + 1] == "42"

    def test_uses_bypass_permissions(self, tmp_path):
        _, mock_run = self._run(tmp_path, completed=self._completed())
        cmd = mock_run.call_args[0][0]
        assert "--permission-mode" in cmd
        assert "bypassPermissions" in cmd

    def test_collects_multiple_files(self, tmp_path):
        for name, body in (("article.md", "content"), ("schema.json", "{}"), ("notes.txt", "notes")):
            (tmp_path / name).write_text(body)
        result, _ = self._run(tmp_path, completed=self._completed(stdout="done"))
        assert len(result.output_files) == 3
        names = [f.name for f in result.output_files]
        assert "article.md" in names
        assert "schema.json" in names
# ── copy_to_nas ──
|
||||
|
||||
|
||||
class TestCopyToNas:
    """copy_to_nas mirrors output files into a per-customer NAS folder."""

    def test_copies_files(self, tmp_path):
        src = tmp_path / "src"
        src.mkdir()
        (src / "file1.txt").write_text("hello")
        (src / "file2.txt").write_text("world")
        nas = tmp_path / "nas"
        nas.mkdir()

        copied = copy_to_nas([src / "file1.txt", src / "file2.txt"], "Acme Corp", str(nas))

        assert len(copied) == 2
        assert (nas / "Acme Corp" / "file1.txt").exists()
        assert (nas / "Acme Corp" / "file2.txt").read_text() == "world"

    def test_skips_when_no_customer(self, tmp_path):
        # An empty customer name disables the copy entirely.
        assert copy_to_nas([], "", str(tmp_path)) == []

    def test_skips_when_no_nas_dir(self, tmp_path):
        # An empty NAS directory disables the copy entirely.
        assert copy_to_nas([], "Acme", "") == []

    def test_handles_unreachable_nas(self, tmp_path):
        source = tmp_path / "file.txt"
        source.write_text("data")
        # A host that cannot exist: the failure should yield [], not raise.
        assert copy_to_nas([source], "Acme", "//NONEXISTENT_HOST/share") == []
# ── read_skill_file ──
|
||||
|
||||
|
||||
class TestReadSkillFile:
    """read_skill_file loads the routed skill markdown from the skills dir."""

    def test_reads_existing_file(self, tmp_path):
        (tmp_path / "my_skill.md").write_text("# Skill\nDo stuff.")
        text = read_skill_file(_make_route(skill_file="my_skill.md"), tmp_path)
        assert "# Skill" in text

    def test_raises_on_missing_file(self, tmp_path):
        missing = _make_route(skill_file="nonexistent.md")
        # The error message should name the offending file.
        with pytest.raises(FileNotFoundError, match="nonexistent.md"):
            read_skill_file(missing, tmp_path)
# ── notify ──
|
||||
|
||||
|
||||
class TestNotify:
    """notify posts to ntfy, choosing the topic (and priority) by severity."""

    @staticmethod
    def _cfg_with_topics():
        """Config with both ntfy topics configured."""
        cfg = _make_config()
        cfg.ntfy = NtfyConfig(
            enabled=True,
            server="https://ntfy.sh",
            error_topic="test-errors",
            success_topic="test-ok",
        )
        return cfg

    def test_sends_error_notification(self):
        with patch("httpx.post") as mock_post:
            notify(self._cfg_with_topics(), "Failed: task", "Something went wrong", is_error=True)

        mock_post.assert_called_once()
        args = mock_post.call_args
        assert "test-errors" in args[0][0]  # error topic in the URL
        assert args[1]["headers"]["Priority"] == "high"

    def test_sends_success_notification(self):
        with patch("httpx.post") as mock_post:
            notify(self._cfg_with_topics(), "Done: task", "All good", is_error=False)

        assert "test-ok" in mock_post.call_args[0][0]

    def test_noop_when_no_topic(self):
        cfg = _make_config()
        cfg.ntfy = NtfyConfig()  # no topics configured

        with patch("httpx.post") as mock_post:
            notify(cfg, "title", "msg", is_error=True)

        mock_post.assert_not_called()
# ── _dispatch_claude integration (via __main__) ──
|
||||
|
||||
|
||||
class TestDispatchClaude:
    """End-to-end _dispatch_claude flow with Claude and ClickUp mocked out."""

    def _setup(self, tmp_path):
        """Create a skills dir, config, mocked client/db, and a default task/route."""
        skills_dir = tmp_path / "skills"
        skills_dir.mkdir()
        (skills_dir / "content_draft.md").write_text("# Draft Skill\nWrite a draft.")

        cfg = _make_config()
        cfg.skills_dir = skills_dir
        cfg.nas = NASConfig(generated_dir="")

        client = MagicMock()
        db = MagicMock()
        db.log_run_start.return_value = 1

        return cfg, client, db, _make_task(attachments=[]), _make_route()

    @staticmethod
    def _dispatch(client, cfg, db, task, route, result):
        """Run _dispatch_claude with run_claude and read_skill_file patched."""
        from clickup_runner.__main__ import _dispatch_claude

        with patch("clickup_runner.__main__.run_claude", return_value=result):
            with patch("clickup_runner.__main__.read_skill_file", return_value="# Skill"):
                _dispatch_claude(client, cfg, db, task, route, run_id=1)

    def test_success_path(self, tmp_path):
        cfg, client, db, task, route = self._setup(tmp_path)
        result = RunResult(
            success=True, output="Draft complete.", output_files=[], work_dir=tmp_path
        )

        self._dispatch(client, cfg, db, task, route, result)

        # Task is claimed first, then advanced and handed to review.
        client.update_task_status.assert_any_call(task.id, "ai working")
        client.set_stage.assert_called_once()
        client.update_task_status.assert_any_call(task.id, "review")
        # A success comment is posted.
        client.add_comment.assert_called_once()
        assert "complete" in client.add_comment.call_args[0][1].lower()
        # Delegate unchecked and Error cleared.
        client.set_checkbox.assert_any_call(task.id, task.list_id, "Delegate to Claude", False)
        client.set_checkbox.assert_any_call(task.id, task.list_id, "Error", False)
        # Run bookkeeping records a completed run.
        db.log_run_finish.assert_called_once_with(1, "completed", result="0 files uploaded")

    def test_error_path_claude_fails(self, tmp_path):
        cfg, client, db, task, route = self._setup(tmp_path)
        result = RunResult(
            success=False,
            output="",
            error="Claude exited with code 1: crash",
            work_dir=tmp_path,
        )

        self._dispatch(client, cfg, db, task, route, result)

        # Error checkbox set and an [ERROR] comment posted with the reason.
        client.set_checkbox.assert_any_call(task.id, task.list_id, "Error", True)
        comment = client.add_comment.call_args[0][1]
        assert "[ERROR]" in comment
        assert "crash" in comment
        db.log_run_finish.assert_called_once_with(
            1, "failed", error="Claude exited with code 1: crash"
        )

    def test_error_path_missing_skill_file(self, tmp_path):
        from clickup_runner.__main__ import _dispatch_claude

        cfg, client, db, task, _ = self._setup(tmp_path)
        route = _make_route(skill_file="nonexistent.md")

        # read_skill_file raising should be handled as a dispatch failure.
        with patch(
            "clickup_runner.__main__.read_skill_file",
            side_effect=FileNotFoundError("Skill file not found: nonexistent.md"),
        ):
            _dispatch_claude(client, cfg, db, task, route, run_id=1)

        client.set_checkbox.assert_any_call(task.id, task.list_id, "Error", True)
        db.log_run_finish.assert_called_once()
        assert db.log_run_finish.call_args[0][1] == "failed"

    def test_uploads_output_files(self, tmp_path):
        cfg, client, db, task, route = self._setup(tmp_path)

        outputs = [tmp_path / "article.docx", tmp_path / "schema.json"]
        outputs[0].write_bytes(b"docx")
        outputs[1].write_text("{}")
        result = RunResult(
            success=True, output="done", output_files=outputs, work_dir=tmp_path
        )
        client.upload_attachment.return_value = True

        self._dispatch(client, cfg, db, task, route, result)

        assert client.upload_attachment.call_count == 2
        db.log_run_finish.assert_called_once_with(1, "completed", result="2 files uploaded")
|
@ -1,341 +0,0 @@
|
|||
"""Tests for clickup_runner.clickup_client.
|
||||
|
||||
Unit tests use respx to mock HTTP. Integration tests hit the real API.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import respx
|
||||
import httpx
|
||||
|
||||
from clickup_runner.clickup_client import ClickUpClient, ClickUpTask, BASE_URL
|
||||
|
||||
|
||||
# ── Fixtures ──
|
||||
|
||||
|
||||
@pytest.fixture
def sample_task_data():
    """A realistic ClickUp API task payload covering every field type we parse."""

    def dropdown(field_id, name, value, options):
        """Build a drop_down custom field with option ids/labels in order."""
        return {
            "id": field_id,
            "name": name,
            "type": "drop_down",
            "value": value,
            "type_config": {
                "options": [
                    {"id": oid, "name": oname, "orderindex": index}
                    for index, (oid, oname) in enumerate(options)
                ]
            },
        }

    return {
        "id": "task_001",
        "name": "Plumber SEO Page - Miami",
        "status": {"status": "to do"},
        "description": "Write optimized content for plumber services in Miami",
        "url": "https://app.clickup.com/t/task_001",
        "due_date": "1711929600000",  # some timestamp
        "list": {"id": "list_100", "name": "Overall"},
        "folder": {"id": "fold_1", "name": "Acme Plumbing"},
        "tags": [{"name": "mar26"}, {"name": "content"}],
        # NOTE: tests index custom_fields[0], so Work Category must stay first.
        "custom_fields": [
            dropdown("cf_wc", "Work Category", "opt_cc", [
                ("opt_cc", "Content Creation"),
                ("opt_pr", "Press Release"),
                ("opt_lb", "Link Building"),
            ]),
            dropdown("cf_stage", "Stage", "opt_outline", [
                ("opt_runcora", "run_cora"),
                ("opt_outline", "outline"),
                ("opt_draft", "draft"),
            ]),
            {"id": "cf_delegate", "name": "Delegate to Claude", "type": "checkbox", "value": True},
            {"id": "cf_error", "name": "Error", "type": "checkbox", "value": False},
            {"id": "cf_keyword", "name": "Keyword", "type": "short_text", "value": "plumber miami"},
            {"id": "cf_imsurl", "name": "IMSURL", "type": "url", "value": "https://acmeplumbing.com/miami"},
        ],
    }
@pytest.fixture
def sample_task(sample_task_data):
    """The sample API payload parsed into a ClickUpTask."""
    return ClickUpTask.from_api(sample_task_data)
# ── ClickUpTask parsing tests ──
|
||||
|
||||
|
||||
class TestClickUpTaskFromApi:
    """ClickUpTask.from_api turns raw API payloads into typed task objects."""

    def test_basic_fields(self, sample_task):
        assert sample_task.id == "task_001"
        assert sample_task.name == "Plumber SEO Page - Miami"
        assert sample_task.status == "to do"
        assert sample_task.list_id == "list_100"
        assert sample_task.folder_name == "Acme Plumbing"

    def test_dropdown_resolved_to_label(self, sample_task):
        # Dropdown option ids are resolved to their human-readable labels.
        assert sample_task.task_type == "Content Creation"
        assert sample_task.custom_fields["Stage"] == "outline"

    def test_checkbox_fields(self, sample_task):
        assert sample_task.custom_fields["Delegate to Claude"] is True
        assert sample_task.custom_fields["Error"] is False

    def test_text_fields(self, sample_task):
        assert sample_task.custom_fields["Keyword"] == "plumber miami"
        assert sample_task.custom_fields["IMSURL"] == "https://acmeplumbing.com/miami"

    def test_tags(self, sample_task):
        for tag in ("mar26", "content"):
            assert tag in sample_task.tags

    def test_due_date(self, sample_task):
        assert sample_task.due_date == "1711929600000"

    def test_missing_due_date(self, sample_task_data):
        sample_task_data["due_date"] = None
        assert ClickUpTask.from_api(sample_task_data).due_date == ""

    def test_missing_custom_fields(self):
        bare = ClickUpTask.from_api({"id": "t1", "name": "test"})
        assert bare.task_type == ""
        assert bare.custom_fields == {}

    def test_unknown_dropdown_value_kept_as_is(self, sample_task_data):
        """If a dropdown value matches no option, the raw value is kept."""
        sample_task_data["custom_fields"][0]["value"] = "unknown_opt_id"
        assert ClickUpTask.from_api(sample_task_data).task_type == "unknown_opt_id"
class TestClickUpTaskHelpers:
    """Convenience accessors on ClickUpTask."""

    def test_get_field_value(self, sample_task):
        assert sample_task.get_field_value("Keyword") == "plumber miami"
        assert sample_task.get_field_value("Nonexistent") is None

    def test_has_xlsx_attachment_false_when_empty(self, sample_task):
        sample_task.attachments = []
        assert not sample_task.has_xlsx_attachment()

    def test_has_xlsx_attachment_true(self, sample_task):
        sample_task.attachments = [{"title": "report.xlsx", "url": "https://..."}]
        assert sample_task.has_xlsx_attachment()

    def test_has_xlsx_attachment_case_insensitive(self, sample_task):
        # Extension matching must ignore case.
        sample_task.attachments = [{"title": "Report.XLSX", "url": "https://..."}]
        assert sample_task.has_xlsx_attachment()
# ── ClickUpClient tests (respx mocked) ──
|
||||
|
||||
|
||||
class TestClientCheckbox:
    """is_checkbox_checked handles booleans, string values, and missing fields."""

    @staticmethod
    def _client():
        return ClickUpClient(api_token="fake")

    def test_is_checkbox_checked_true(self, sample_task):
        assert self._client().is_checkbox_checked(sample_task, "Delegate to Claude")

    def test_is_checkbox_checked_false(self, sample_task):
        assert not self._client().is_checkbox_checked(sample_task, "Error")

    def test_is_checkbox_checked_missing_field(self, sample_task):
        assert not self._client().is_checkbox_checked(sample_task, "Nonexistent")

    def test_is_checkbox_checked_string_true(self, sample_task):
        # The API sometimes returns checkbox values as strings.
        sample_task.custom_fields["Delegate to Claude"] = "true"
        assert self._client().is_checkbox_checked(sample_task, "Delegate to Claude")

    def test_is_checkbox_checked_string_false(self, sample_task):
        sample_task.custom_fields["Delegate to Claude"] = "false"
        assert not self._client().is_checkbox_checked(sample_task, "Delegate to Claude")
class TestClientStage:
    """get_stage reads (and normalizes) the Stage dropdown value."""

    def test_get_stage(self, sample_task):
        assert ClickUpClient(api_token="fake").get_stage(sample_task) == "outline"

    def test_get_stage_empty(self):
        bare = ClickUpTask(id="t1", name="test", status="to do")
        assert ClickUpClient(api_token="fake").get_stage(bare) == ""

    def test_get_stage_custom_field_name(self, sample_task):
        sample_task.custom_fields["Custom Stage"] = "DRAFT"
        client = ClickUpClient(api_token="fake")
        # Lookup honors an alternate field name and lowercases the value.
        assert client.get_stage(sample_task, field_name="Custom Stage") == "draft"
@respx.mock
class TestClientHTTP:
    """HTTP-level client behavior, with every route mocked via respx."""

    @staticmethod
    def _json(status, payload):
        """Shortcut for an httpx JSON response."""
        return httpx.Response(status, json=payload)

    def test_get_task(self):
        respx.get(f"{BASE_URL}/task/t1").mock(
            return_value=self._json(200, {
                "id": "t1",
                "name": "Test",
                "status": {"status": "to do"},
            })
        )
        client = ClickUpClient(api_token="fake")
        fetched = client.get_task("t1")
        assert fetched.id == "t1"
        assert fetched.name == "Test"
        client.close()

    def test_update_task_status(self):
        respx.put(f"{BASE_URL}/task/t1").mock(return_value=self._json(200, {}))
        client = ClickUpClient(api_token="fake")
        assert client.update_task_status("t1", "ai working") is True
        client.close()

    def test_update_task_status_failure(self):
        respx.put(f"{BASE_URL}/task/t1").mock(
            return_value=self._json(404, {"err": "not found"})
        )
        client = ClickUpClient(api_token="fake")
        assert client.update_task_status("t1", "ai working") is False
        client.close()

    def test_add_comment(self):
        respx.post(f"{BASE_URL}/task/t1/comment").mock(return_value=self._json(200, {}))
        client = ClickUpClient(api_token="fake")
        assert client.add_comment("t1", "hello") is True
        client.close()

    def test_get_folders(self):
        payload = {
            "folders": [
                {
                    "id": "f1",
                    "name": "Acme",
                    "lists": [
                        {"id": "l1", "name": "Overall"},
                        {"id": "l2", "name": "Archive"},
                    ],
                }
            ]
        }
        respx.get(f"{BASE_URL}/space/sp1/folder").mock(return_value=self._json(200, payload))
        client = ClickUpClient(api_token="fake")
        folders = client.get_folders("sp1")
        assert len(folders) == 1
        assert folders[0]["name"] == "Acme"
        assert len(folders[0]["lists"]) == 2
        client.close()

    def test_get_tasks_from_overall_lists(self):
        # Two folders; only lists named "Overall" should be queried for tasks.
        respx.get(f"{BASE_URL}/space/sp1/folder").mock(return_value=self._json(200, {
            "folders": [
                {
                    "id": "f1",
                    "name": "Client A",
                    "lists": [
                        {"id": "l1", "name": "Overall"},
                        {"id": "l2", "name": "Archive"},
                    ],
                },
                {
                    "id": "f2",
                    "name": "Client B",
                    "lists": [{"id": "l3", "name": "Overall"}],
                },
            ]
        }))
        respx.get(f"{BASE_URL}/list/l1/task").mock(return_value=self._json(200, {
            "tasks": [{"id": "t1", "name": "Task 1", "status": {"status": "to do"}}]
        }))
        respx.get(f"{BASE_URL}/list/l3/task").mock(return_value=self._json(200, {
            "tasks": [{"id": "t2", "name": "Task 2", "status": {"status": "review"}}]
        }))
        # l2 (Archive) has no mock registered, so hitting it would fail loudly.

        client = ClickUpClient(api_token="fake")
        tasks = client.get_tasks_from_overall_lists("sp1")
        assert len(tasks) == 2
        assert {t.id for t in tasks} == {"t1", "t2"}
        client.close()

    def test_retry_on_5xx(self):
        flaky = respx.put(f"{BASE_URL}/task/t1")
        flaky.side_effect = [
            self._json(500, {"err": "internal"}),
            self._json(200, {}),
        ]
        client = ClickUpClient(api_token="fake")
        # The first attempt 500s; the retry succeeds.
        assert client.update_task_status("t1", "ai working") is True
        client.close()

    def test_no_retry_on_4xx(self):
        respx.put(f"{BASE_URL}/task/t1").mock(
            return_value=self._json(400, {"err": "bad request"})
        )
        client = ClickUpClient(api_token="fake")
        assert client.update_task_status("t1", "ai working") is False
        client.close()

    def test_get_task_attachments(self):
        respx.get(f"{BASE_URL}/task/t1/attachment").mock(return_value=self._json(200, {
            "attachments": [{"id": "a1", "title": "report.xlsx", "url": "https://..."}]
        }))
        client = ClickUpClient(api_token="fake")
        attachments = client.get_task_attachments("t1")
        assert len(attachments) == 1
        assert attachments[0]["title"] == "report.xlsx"
        client.close()
|
@ -1,104 +0,0 @@
|
|||
"""Tests for clickup_runner.config."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import yaml
|
||||
|
||||
from clickup_runner.config import Config, load_config
|
||||
|
||||
|
||||
@pytest.fixture
def yaml_config(tmp_path):
    """Write a config YAML file with clickup/runner/autocora overrides; return its path."""
    settings = {
        "clickup": {
            "space_id": "space_from_yaml",
            "delegate_field_name": "Run It",
            "ai_working_status": "bot working",
        },
        "runner": {
            "poll_interval_seconds": 300,
            "claude_timeout_seconds": 1800,
        },
        "autocora": {
            "jobs_dir": "/tmp/autocora/jobs",
            "xlsx_dir": "/tmp/autocora/xlsx",
        },
    }
    path = tmp_path / "clickup_runner.yaml"
    path.write_text(yaml.dump(settings))
    return path
def test_defaults():
    """With no YAML or env vars, every setting falls back to a sensible default."""
    cfg = Config()
    clickup = cfg.clickup
    assert clickup.delegate_field_name == "Delegate to Claude"
    assert clickup.stage_field_name == "Stage"
    assert clickup.error_field_name == "Error"
    assert clickup.ai_working_status == "ai working"
    assert cfg.runner.poll_interval_seconds == 720
    assert cfg.runner.claude_timeout_seconds == 2700
    assert cfg.autocora.poll_interval_seconds == 120
def test_yaml_overrides(yaml_config, monkeypatch):
    """Values in the YAML file take precedence over code defaults."""
    # Clear env vars so only the YAML layer is exercised.
    for var in ("CLICKUP_SPACE_ID", "CLICKUP_API_TOKEN"):
        monkeypatch.delenv(var, raising=False)

    cfg = load_config(yaml_path=yaml_config)

    assert cfg.clickup.space_id == "space_from_yaml"
    assert cfg.clickup.delegate_field_name == "Run It"
    assert cfg.clickup.ai_working_status == "bot working"
    assert cfg.runner.poll_interval_seconds == 300
    assert cfg.runner.claude_timeout_seconds == 1800
    assert cfg.autocora.jobs_dir == "/tmp/autocora/jobs"
    assert cfg.autocora.xlsx_dir == "/tmp/autocora/xlsx"
def test_env_overrides_yaml(yaml_config, monkeypatch):
    """Environment variables outrank values from the YAML file."""
    monkeypatch.setenv("CLICKUP_API_TOKEN", "pk_env_token")
    monkeypatch.setenv("CLICKUP_SPACE_ID", "space_from_env")

    cfg = load_config(yaml_path=yaml_config)

    assert cfg.clickup.api_token == "pk_env_token"
    # The env var wins over the YAML's space_from_yaml.
    assert cfg.clickup.space_id == "space_from_env"
def test_missing_yaml_uses_defaults(tmp_path):
    """A nonexistent YAML path silently yields the default configuration."""
    cfg = load_config(yaml_path=tmp_path / "nope.yaml")
    assert cfg.clickup.delegate_field_name == "Delegate to Claude"
    assert cfg.runner.poll_interval_seconds == 720
def test_unknown_yaml_keys_ignored(tmp_path, monkeypatch):
    """Unknown YAML keys and sections are dropped rather than erroring."""
    for var in ("CLICKUP_SPACE_ID", "CLICKUP_API_TOKEN"):
        monkeypatch.delenv(var, raising=False)

    path = tmp_path / "clickup_runner.yaml"
    path.write_text(yaml.dump({
        "clickup": {"space_id": "test", "bogus_key": "whatever"},
        "totally_unknown_section": {"foo": "bar"},
    }))

    cfg = load_config(yaml_path=path)

    assert cfg.clickup.space_id == "test"
    # The unknown key must not leak onto the config object.
    assert not hasattr(cfg.clickup, "bogus_key")
def test_db_path_parent_created(tmp_path, monkeypatch):
    """load_config ensures the DB parent directory exists."""
    # Patch ROOT_DIR so the default db_path lands inside tmp_path.
    import clickup_runner.config as config_mod

    project_root = tmp_path / "project"
    project_root.mkdir()
    monkeypatch.setattr(config_mod, "ROOT_DIR", project_root)

    loaded = load_config(yaml_path=tmp_path / "nope.yaml")
    # The parent directory of the default db_path was created on load.
    assert loaded.db_path.parent.exists()
|
||||
|
|
@ -1,150 +0,0 @@
|
|||
"""Tests for clickup_runner.skill_map."""
|
||||
|
||||
import pytest
|
||||
|
||||
from clickup_runner.skill_map import (
|
||||
SKILL_MAP,
|
||||
SkillRoute,
|
||||
get_route,
|
||||
get_supported_task_types,
|
||||
get_valid_stages,
|
||||
)
|
||||
|
||||
|
||||
class TestGetRoute:
    """Route lookups for each supported (task type, stage) combination."""

    def test_content_creation_run_cora(self):
        r = get_route("Content Creation", "run_cora")
        assert r is not None
        assert r.handler == "autocora"
        assert r.next_stage == "outline"
        assert r.next_status == "review"

    def test_content_creation_outline(self):
        r = get_route("Content Creation", "outline")
        assert r is not None
        assert r.handler == "claude"
        assert r.skill_file == "content_outline.md"
        assert r.next_stage == "draft"

    def test_content_creation_draft(self):
        r = get_route("Content Creation", "draft")
        assert r is not None
        assert r.next_stage == "final"
        assert r.max_turns == 30

    def test_on_page_optimization_has_hidden_div(self):
        r = get_route("On Page Optimization", "hidden div")
        assert r is not None
        assert r.skill_file == "content_hidden_div.md"
        assert r.next_stage == "final"

    def test_on_page_draft_goes_to_hidden_div(self):
        r = get_route("On Page Optimization", "draft")
        assert r is not None
        assert r.next_stage == "hidden div"

    def test_press_release_single_stage(self):
        r = get_route("Press Release", "draft")
        assert r is not None
        assert r.skill_file == "press_release_prompt.md"
        assert r.next_stage == "final"
        assert r.next_status == "review"

    def test_press_release_no_run_cora(self):
        """Press releases don't need Cora."""
        assert get_route("Press Release", "run_cora") is None

    def test_link_building_run_cora(self):
        r = get_route("Link Building", "run_cora")
        assert r is not None
        assert r.handler == "autocora"
        assert r.next_stage == "build"

    def test_link_building_build(self):
        r = get_route("Link Building", "build")
        assert r is not None
        assert r.handler == "blm"

    def test_unknown_task_type_returns_none(self):
        assert get_route("Banana Farming", "draft") is None

    def test_unknown_stage_returns_none(self):
        assert get_route("Content Creation", "nonexistent") is None

    def test_stage_is_case_insensitive(self):
        r = get_route("Content Creation", "RUN_CORA")
        assert r is not None
        assert r.handler == "autocora"

    def test_stage_strips_whitespace(self):
        r = get_route("Content Creation", " outline ")
        assert r is not None
        assert r.handler == "claude"
|
||||
|
||||
|
||||
class TestGetValidStages:
    """Stage lists returned per task type."""

    def test_content_creation(self):
        assert get_valid_stages("Content Creation") == ["run_cora", "outline", "draft"]

    def test_on_page_optimization(self):
        stages = get_valid_stages("On Page Optimization")
        assert "hidden div" in stages
        assert len(stages) == 4

    def test_press_release(self):
        assert get_valid_stages("Press Release") == ["draft"]

    def test_link_building(self):
        assert get_valid_stages("Link Building") == ["run_cora", "build"]

    def test_unknown_type(self):
        assert get_valid_stages("Nope") == []
|
||||
|
||||
|
||||
class TestGetSupportedTaskTypes:
    """The supported task-type list contains exactly the four known types."""

    def test_returns_all_four(self):
        supported = get_supported_task_types()
        expected = (
            "Content Creation",
            "On Page Optimization",
            "Press Release",
            "Link Building",
        )
        for task_type in expected:
            assert task_type in supported
        assert len(supported) == 4
|
||||
|
||||
|
||||
class TestSkillRouteDataclass:
    """Construction defaults and immutability of SkillRoute."""

    def test_defaults(self):
        r = SkillRoute(next_stage="x", next_status="y")
        # Unspecified fields fall back to the dataclass defaults.
        assert (r.handler, r.skill_file, r.tools, r.max_turns) == ("claude", "", "", 10)

    def test_frozen(self):
        r = SkillRoute(next_stage="x", next_status="y")
        # Frozen dataclasses raise (a subclass of) AttributeError on assignment.
        with pytest.raises(AttributeError):
            r.next_stage = "z"  # type: ignore[misc]
|
||||
|
||||
|
||||
class TestAllRoutesHaveRequiredFields:
    """Every route in the map should be well-formed."""

    @pytest.mark.parametrize(
        "task_type,stage",
        [(tt, s) for tt, stages in SKILL_MAP.items() for s in stages],
    )
    def test_route_has_required_fields(self, task_type, stage):
        route = get_route(task_type, stage)
        assert route is not None
        assert route.next_stage, f"{task_type}/{stage} missing next_stage"
        assert route.next_status, f"{task_type}/{stage} missing next_status"
        # Claude-handled stages additionally need a skill file and a tool list.
        if route.handler == "claude":
            assert route.skill_file, f"{task_type}/{stage} missing skill_file"
            assert route.tools, f"{task_type}/{stage} missing tools"
|
||||
|
|
@ -1,81 +0,0 @@
|
|||
"""Tests for clickup_runner.state."""
|
||||
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
from clickup_runner.state import StateDB
|
||||
|
||||
|
||||
@pytest.fixture
def db(tmp_path):
    """Fresh StateDB backed by a throwaway SQLite file per test."""
    return StateDB(tmp_path / "test.db")
|
||||
|
||||
|
||||
class TestKVStore:
    """Key/value store operations on StateDB."""

    def test_set_and_get(self, db):
        db.kv_set("key1", "value1")
        assert db.kv_get("key1") == "value1"

    def test_get_missing_key(self, db):
        assert db.kv_get("nope") is None

    def test_set_overwrites(self, db):
        db.kv_set("key1", "v1")
        db.kv_set("key1", "v2")
        assert db.kv_get("key1") == "v2"

    def test_delete(self, db):
        db.kv_set("key1", "v1")
        db.kv_delete("key1")
        assert db.kv_get("key1") is None

    def test_scan(self, db):
        db.kv_set("autocora:job:kw1", "submitted")
        db.kv_set("autocora:job:kw2", "submitted")
        db.kv_set("other:key", "val")
        matches = db.kv_scan("autocora:job:")
        assert len(matches) == 2
        # Only keys under the scanned prefix come back.
        assert {key for key, _ in matches} == {"autocora:job:kw1", "autocora:job:kw2"}

    def test_json_round_trip(self, db):
        payload = {"status": "submitted", "job_id": "job-001", "task_ids": ["t1", "t2"]}
        db.kv_set_json("autocora:job:test", payload)
        assert db.kv_get_json("autocora:job:test") == payload

    def test_json_get_missing(self, db):
        assert db.kv_get_json("nope") is None
|
||||
|
||||
|
||||
class TestRunLog:
    """Run-history logging on StateDB."""

    def test_log_start_and_finish(self, db):
        rid = db.log_run_start("t1", "Test Task", "Content Creation", "outline")
        assert rid > 0

        db.log_run_finish(rid, "completed", result="outline.md created")
        latest = db.get_recent_runs(limit=1)
        assert len(latest) == 1
        row = latest[0]
        assert row["task_id"] == "t1"
        assert row["status"] == "completed"
        assert row["result"] == "outline.md created"
        assert row["error"] is None

    def test_log_error(self, db):
        rid = db.log_run_start("t2", "Failing Task", "Press Release", "draft")
        db.log_run_finish(rid, "error", error="Claude Code exit code 1")
        row = db.get_recent_runs(limit=1)[0]
        assert row["status"] == "error"
        assert "exit code 1" in row["error"]

    def test_recent_runs_ordered(self, db):
        first = db.log_run_start("t1", "First", "PR", "draft")
        db.log_run_finish(first, "completed")
        second = db.log_run_start("t2", "Second", "CC", "outline")
        db.log_run_finish(second, "completed")

        runs = db.get_recent_runs(limit=10)
        # Newest entry comes back first.
        assert runs[0]["task_name"] == "Second"
        assert runs[1]["task_name"] == "First"
|
||||
|
|
@ -1,126 +0,0 @@
|
|||
"""Tests for the adversarial fact-checker helpers in press_release.py."""
|
||||
|
||||
from cheddahbot.tools.press_release import _apply_fact_check, _build_fact_check_prompt
|
||||
|
||||
|
||||
class TestApplyFactCheck:
    """Tests for _apply_fact_check output parsing."""

    ORIGINAL = (
        "Acme Corp Delivers Advanced Widget Solutions\n\n"
        "Acme Corp, a leading manufacturer of widgets, today highlighted "
        "its expanded product line. The company, based in Milwaukee, Wisconsin, "
        "produces over 500 widget variants for industrial applications."
    )

    def test_no_errors_returns_original(self):
        fixed, verdict, notes = _apply_fact_check("[NO_ERRORS]", self.ORIGINAL)
        assert verdict == "clean"
        assert fixed == self.ORIGINAL
        assert notes == ""

    def test_no_errors_with_trailing_whitespace(self):
        fixed, verdict, _ = _apply_fact_check("[NO_ERRORS] \n", self.ORIGINAL)
        assert verdict == "clean"
        assert fixed == self.ORIGINAL

    def test_corrected_with_changes(self):
        edited = self.ORIGINAL.replace("500 widget", "300 widget")
        fc_output = (
            f"[CORRECTED]\n{edited}\n\n"
            "CHANGES:\n1. Changed '500 widget variants' to '300 widget variants' "
            "-- company website lists 300."
        )
        fixed, verdict, notes = _apply_fact_check(fc_output, self.ORIGINAL)
        assert verdict == "corrected"
        assert "300 widget" in fixed
        assert "500" not in fixed
        assert "300 widget variants" in notes

    def test_corrected_without_changes_section(self):
        edited = self.ORIGINAL.replace("500", "300")
        fixed, verdict, notes = _apply_fact_check(f"[CORRECTED]\n{edited}", self.ORIGINAL)
        assert verdict == "corrected"
        assert "300" in fixed
        assert notes == ""

    def test_empty_output_returns_skipped(self):
        fixed, verdict, _ = _apply_fact_check("", self.ORIGINAL)
        assert verdict == "skipped"
        assert fixed == self.ORIGINAL

    def test_none_like_output_returns_skipped(self):
        fixed, verdict, _ = _apply_fact_check(" \n ", self.ORIGINAL)
        assert verdict == "skipped"
        assert fixed == self.ORIGINAL

    def test_garbage_output_returns_skipped(self):
        fixed, verdict, _ = _apply_fact_check(
            "I reviewed the press release and it looks good overall.", self.ORIGINAL
        )
        assert verdict == "skipped"
        assert fixed == self.ORIGINAL

    def test_rejects_oversized_rewrite(self):
        """If fact-checker rewrites too much (>15% word count delta), reject."""
        # Double the content -- way more than 15%
        bloated = self.ORIGINAL + "\n\n" + self.ORIGINAL + "\n\nExtra content here."
        fc_output = f"[CORRECTED]\n{bloated}\n\nCHANGES:\n1. Added more detail."
        fixed, verdict, notes = _apply_fact_check(fc_output, self.ORIGINAL)
        assert verdict == "skipped"
        assert fixed == self.ORIGINAL
        assert "word count delta" in notes

    def test_accepts_minor_word_count_change(self):
        """Small changes (within 15%) should be accepted."""
        # Change one word -- well within 15%
        tweak = self.ORIGINAL.replace("500 widget variants", "480 widget variants")
        fc_output = (
            f"[CORRECTED]\n{tweak}\n\n"
            "CHANGES:\n1. Corrected variant count from 500 to 480."
        )
        fixed, verdict, _ = _apply_fact_check(fc_output, self.ORIGINAL)
        assert verdict == "corrected"
        assert "480" in fixed

    def test_corrected_but_empty_body_returns_skipped(self):
        fixed, verdict, _ = _apply_fact_check("[CORRECTED]\n", self.ORIGINAL)
        assert verdict == "skipped"
        assert fixed == self.ORIGINAL
|
||||
|
||||
|
||||
class TestBuildFactCheckPrompt:
    """Tests for _build_fact_check_prompt structure."""

    def test_includes_ground_truth_data(self):
        prompt = _build_fact_check_prompt(
            "Some PR text here.",
            company_name="Acme Corp",
            url="https://acme.com",
            topic="widgets",
            keyword="industrial widgets",
        )
        for needle in ("Acme Corp", "https://acme.com", "widgets", "industrial widgets"):
            assert needle in prompt
        assert "ground truth" in prompt.lower() or "GROUND TRUTH" in prompt

    def test_includes_pr_text(self):
        prompt = _build_fact_check_prompt(
            "The quick brown fox.",
            company_name="Test",
            url="https://test.com",
            topic="foxes",
            keyword="brown fox",
        )
        assert "The quick brown fox." in prompt

    def test_output_format_instructions(self):
        prompt = _build_fact_check_prompt(
            "Text.", company_name="X", url="u", topic="t", keyword="k"
        )
        for marker in ("[NO_ERRORS]", "[CORRECTED]", "CHANGES:"):
            assert marker in prompt
|
||||
|
|
@ -1,122 +0,0 @@
|
|||
"""Tests for clickup_runner.fact_check module."""
|
||||
|
||||
from clickup_runner.fact_check import apply_fact_check, build_fact_check_prompt
|
||||
|
||||
|
||||
class TestApplyFactCheck:
    """Tests for apply_fact_check output parsing.

    Mirrors the press_release `_apply_fact_check` test suite; string building
    uses f-strings for consistency with that sibling module (the originals here
    used dated %-formatting for byte-identical strings).
    """

    # Ground-truth press release used as the pre-fact-check input.
    ORIGINAL = (
        "Acme Corp Delivers Advanced Widget Solutions\n\n"
        "Acme Corp, a leading manufacturer of widgets, today highlighted "
        "its expanded product line. The company, based in Milwaukee, Wisconsin, "
        "produces over 500 widget variants for industrial applications."
    )

    def test_no_errors_returns_original(self):
        text, status, changes = apply_fact_check("[NO_ERRORS]", self.ORIGINAL)
        assert status == "clean"
        assert text == self.ORIGINAL
        assert changes == ""

    def test_no_errors_with_trailing_whitespace(self):
        text, status, changes = apply_fact_check("[NO_ERRORS] \n", self.ORIGINAL)
        assert status == "clean"
        assert text == self.ORIGINAL

    def test_corrected_with_changes(self):
        corrected_pr = self.ORIGINAL.replace("500 widget", "300 widget")
        raw = (
            f"[CORRECTED]\n{corrected_pr}\n\n"
            "CHANGES:\n1. Changed '500 widget variants' to '300 widget variants' "
            "-- company website lists 300."
        )
        text, status, changes = apply_fact_check(raw, self.ORIGINAL)
        assert status == "corrected"
        assert "300 widget" in text
        assert "500" not in text
        assert "300 widget variants" in changes

    def test_corrected_without_changes_section(self):
        corrected_pr = self.ORIGINAL.replace("500", "300")
        raw = f"[CORRECTED]\n{corrected_pr}"
        text, status, changes = apply_fact_check(raw, self.ORIGINAL)
        assert status == "corrected"
        assert "300" in text
        assert changes == ""

    def test_empty_output_returns_skipped(self):
        text, status, changes = apply_fact_check("", self.ORIGINAL)
        assert status == "skipped"
        assert text == self.ORIGINAL

    def test_whitespace_only_returns_skipped(self):
        text, status, changes = apply_fact_check(" \n ", self.ORIGINAL)
        assert status == "skipped"
        assert text == self.ORIGINAL

    def test_garbage_output_returns_skipped(self):
        # Output with neither marker tag is treated as unusable.
        text, status, changes = apply_fact_check(
            "I reviewed the press release and it looks good overall.", self.ORIGINAL
        )
        assert status == "skipped"
        assert text == self.ORIGINAL

    def test_rejects_oversized_rewrite(self):
        """Corrections that change the word count too much are rejected."""
        bloated = self.ORIGINAL + "\n\n" + self.ORIGINAL + "\n\nExtra content."
        raw = f"[CORRECTED]\n{bloated}\n\nCHANGES:\n1. Added more detail."
        text, status, changes = apply_fact_check(raw, self.ORIGINAL)
        assert status == "skipped"
        assert text == self.ORIGINAL
        assert "word count delta" in changes

    def test_accepts_minor_word_count_change(self):
        """A one-word correction stays well within the size tolerance."""
        minor_edit = self.ORIGINAL.replace("500 widget variants", "480 widget variants")
        raw = (
            f"[CORRECTED]\n{minor_edit}\n\n"
            "CHANGES:\n1. Corrected variant count from 500 to 480."
        )
        text, status, changes = apply_fact_check(raw, self.ORIGINAL)
        assert status == "corrected"
        assert "480" in text

    def test_corrected_empty_body_returns_skipped(self):
        text, status, changes = apply_fact_check("[CORRECTED]\n", self.ORIGINAL)
        assert status == "skipped"
        assert text == self.ORIGINAL
|
||||
|
||||
|
||||
class TestBuildFactCheckPrompt:
    """Tests for build_fact_check_prompt structure."""

    def test_includes_ground_truth_data(self):
        prompt = build_fact_check_prompt(
            "Some PR text.",
            company_name="Acme Corp",
            url="https://acme.com",
            topic="widgets",
            keyword="industrial widgets",
        )
        needles = (
            "Acme Corp",
            "https://acme.com",
            "widgets",
            "industrial widgets",
            "GROUND TRUTH",
        )
        for needle in needles:
            assert needle in prompt

    def test_includes_pr_text(self):
        prompt = build_fact_check_prompt(
            "The quick brown fox.",
            company_name="Test",
            url="https://test.com",
            topic="foxes",
            keyword="brown fox",
        )
        assert "The quick brown fox." in prompt

    def test_output_format_instructions(self):
        prompt = build_fact_check_prompt(
            "Text.", company_name="X", url="u", topic="t", keyword="k"
        )
        for marker in ("[NO_ERRORS]", "[CORRECTED]", "CHANGES:"):
            assert marker in prompt
|
||||
438
uv.lock
438
uv.lock
|
|
@ -315,165 +315,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "2.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/d7/b5b7020a0565c2e9fa8c09f4b5fa6232feb326b8c20081ccded47ea368fd/charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7", size = 309705, upload-time = "2026-04-02T09:26:02.191Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/53/58c29116c340e5456724ecd2fff4196d236b98f3da97b404bc5e51ac3493/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7", size = 206419, upload-time = "2026-04-02T09:26:03.583Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/02/e8146dc6591a37a00e5144c63f29fb7c97a734ea8a111190783c0e60ab63/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e", size = 227901, upload-time = "2026-04-02T09:26:04.738Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/73/77486c4cd58f1267bf17db420e930c9afa1b3be3fe8c8b8ebbebc9624359/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c", size = 222742, upload-time = "2026-04-02T09:26:06.36Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/fa/f74eb381a7d94ded44739e9d94de18dc5edc9c17fb8c11f0a6890696c0a9/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df", size = 214061, upload-time = "2026-04-02T09:26:08.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/92/42bd3cefcf7687253fb86694b45f37b733c97f59af3724f356fa92b8c344/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265", size = 199239, upload-time = "2026-04-02T09:26:09.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/3d/069e7184e2aa3b3cddc700e3dd267413dc259854adc3380421c805c6a17d/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4", size = 210173, upload-time = "2026-04-02T09:26:10.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/51/9d56feb5f2e7074c46f93e0ebdbe61f0848ee246e2f0d89f8e20b89ebb8f/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e", size = 209841, upload-time = "2026-04-02T09:26:12.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/59/893d8f99cc4c837dda1fe2f1139079703deb9f321aabcb032355de13b6c7/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38", size = 200304, upload-time = "2026-04-02T09:26:13.711Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/1d/ee6f3be3464247578d1ed5c46de545ccc3d3ff933695395c402c21fa6b77/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c", size = 229455, upload-time = "2026-04-02T09:26:14.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/bb/8fb0a946296ea96a488928bdce8ef99023998c48e4713af533e9bb98ef07/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b", size = 210036, upload-time = "2026-04-02T09:26:16.478Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/bc/015b2387f913749f82afd4fcba07846d05b6d784dd16123cb66860e0237d/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c", size = 224739, upload-time = "2026-04-02T09:26:17.751Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/ab/63133691f56baae417493cba6b7c641571a2130eb7bceba6773367ab9ec5/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d", size = 216277, upload-time = "2026-04-02T09:26:18.981Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/6d/3be70e827977f20db77c12a97e6a9f973631a45b8d186c084527e53e77a4/charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad", size = 147819, upload-time = "2026-04-02T09:26:20.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/d9/5f67790f06b735d7c7637171bbfd89882ad67201891b7275e51116ed8207/charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00", size = 159281, upload-time = "2026-04-02T09:26:21.74Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/83/6413f36c5a34afead88ce6f66684d943d91f233d76dd083798f9602b75ae/charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1", size = 147843, upload-time = "2026-04-02T09:26:22.901Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, upload-time = "2026-04-02T09:27:07.194Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cheddahbot"
|
||||
version = "0.1.0"
|
||||
|
|
@ -482,8 +323,6 @@ dependencies = [
|
|||
{ name = "beautifulsoup4" },
|
||||
{ name = "croniter" },
|
||||
{ name = "edge-tts" },
|
||||
{ name = "google-api-python-client" },
|
||||
{ name = "google-auth-oauthlib" },
|
||||
{ name = "gradio" },
|
||||
{ name = "httpx" },
|
||||
{ name = "jinja2" },
|
||||
|
|
@ -519,8 +358,6 @@ requires-dist = [
|
|||
{ name = "beautifulsoup4", specifier = ">=4.12" },
|
||||
{ name = "croniter", specifier = ">=2.0" },
|
||||
{ name = "edge-tts", specifier = ">=6.1" },
|
||||
{ name = "google-api-python-client", specifier = ">=2.194.0" },
|
||||
{ name = "google-auth-oauthlib", specifier = ">=1.3.1" },
|
||||
{ name = "gradio", specifier = ">=5.0" },
|
||||
{ name = "httpx", specifier = ">=0.27" },
|
||||
{ name = "jinja2", specifier = ">=3.1.6" },
|
||||
|
|
@ -687,65 +524,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/0c/dca8abb64e7ca4f6b2978769f6fea5ad06686a190cec381f0a796fdcaaba/cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f", size = 3476879, upload-time = "2026-04-08T01:57:38.664Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/ea/075aac6a84b7c271578d81a2f9968acb6e273002408729f2ddff517fed4a/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15", size = 4219700, upload-time = "2026-04-08T01:57:40.625Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/7b/1c55db7242b5e5612b29fc7a630e91ee7a6e3c8e7bf5406d22e206875fbd/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455", size = 4385982, upload-time = "2026-04-08T01:57:42.725Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/da/9870eec4b69c63ef5925bf7d8342b7e13bc2ee3d47791461c4e49ca212f4/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65", size = 4219115, upload-time = "2026-04-08T01:57:44.939Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/72/05aa5832b82dd341969e9a734d1812a6aadb088d9eb6f0430fc337cc5a8f/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968", size = 4385479, upload-time = "2026-04-08T01:57:46.86Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/2a/1b016902351a523aa2bd446b50a5bc1175d7a7d1cf90fe2ef904f9b84ebc/cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4", size = 3412829, upload-time = "2026-04-08T01:57:48.874Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cuda-bindings"
|
||||
version = "12.9.4"
|
||||
|
|
@ -951,89 +729,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-core"
|
||||
version = "2.30.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-auth" },
|
||||
{ name = "googleapis-common-protos" },
|
||||
{ name = "proto-plus" },
|
||||
{ name = "protobuf" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1a/2e/83ca41eb400eb228f9279ec14ed66f6475218b59af4c6daec2d5a509fe83/google_api_core-2.30.2.tar.gz", hash = "sha256:9a8113e1a88bdc09a7ff629707f2214d98d61c7f6ceb0ea38c42a095d02dc0f9", size = 176862, upload-time = "2026-04-02T21:23:44.876Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/84/e1/ebd5100cbb202e561c0c8b59e485ef3bd63fa9beb610f3fdcaea443f0288/google_api_core-2.30.2-py3-none-any.whl", hash = "sha256:a4c226766d6af2580577db1f1a51bf53cd262f722b49731ce7414c43068a9594", size = 173236, upload-time = "2026-04-02T21:23:06.395Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.194.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-api-core" },
|
||||
{ name = "google-auth" },
|
||||
{ name = "google-auth-httplib2" },
|
||||
{ name = "httplib2" },
|
||||
{ name = "uritemplate" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/ab/e83af0eb043e4ccc49571ca7a6a49984e9d00f4e9e6e6f1238d60bc84dce/google_api_python_client-2.194.0.tar.gz", hash = "sha256:db92647bd1a90f40b79c9618461553c2b20b6a43ce7395fa6de07132dc14f023", size = 14443469, upload-time = "2026-04-08T23:07:35.757Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/34/5a624e49f179aa5b0cb87b2ce8093960299030ff40423bfbde09360eb908/google_api_python_client-2.194.0-py3-none-any.whl", hash = "sha256:61eaaac3b8fc8fdf11c08af87abc3d1342d1b37319cc1b57405f86ef7697e717", size = 15016514, upload-time = "2026-04-08T23:07:33.093Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-auth"
|
||||
version = "2.49.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cryptography" },
|
||||
{ name = "pyasn1-modules" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ea/80/6a696a07d3d3b0a92488933532f03dbefa4a24ab80fb231395b9a2a1be77/google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64", size = 333825, upload-time = "2026-03-12T19:30:58.135Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/eb/c6c2478d8a8d633460be40e2a8a6f8f429171997a35a96f81d3b680dec83/google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7", size = 240737, upload-time = "2026-03-12T19:30:53.159Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-auth-httplib2"
|
||||
version = "0.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-auth" },
|
||||
{ name = "httplib2" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ed/99/107612bef8d24b298bb5a7c8466f908ecda791d43f9466f5c3978f5b24c1/google_auth_httplib2-0.3.1.tar.gz", hash = "sha256:0af542e815784cb64159b4469aa5d71dd41069ba93effa006e1916b1dcd88e55", size = 11152, upload-time = "2026-03-30T22:50:26.766Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/97/e9/93afb14d23a949acaa3f4e7cc51a0024671174e116e35f42850764b99634/google_auth_httplib2-0.3.1-py3-none-any.whl", hash = "sha256:682356a90ef4ba3d06548c37e9112eea6fc00395a11b0303a644c1a86abc275c", size = 9534, upload-time = "2026-03-30T22:49:03.384Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-auth-oauthlib"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-auth" },
|
||||
{ name = "requests-oauthlib" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a6/82/62482931dcbe5266a2680d0da17096f2aab983ecb320277d9556700ce00e/google_auth_oauthlib-1.3.1.tar.gz", hash = "sha256:14c22c7b3dd3d06dbe44264144409039465effdd1eef94f7ce3710e486cc4bfa", size = 21663, upload-time = "2026-03-30T22:49:56.408Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/e0/cb454a95f460903e39f101e950038ec24a072ca69d0a294a6df625cc1627/google_auth_oauthlib-1.3.1-py3-none-any.whl", hash = "sha256:1a139ef23f1318756805b0e95f655c238bffd29655329a2978218248da4ee7f8", size = 19247, upload-time = "2026-03-30T20:02:23.894Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "googleapis-common-protos"
|
||||
version = "1.74.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/20/18/a746c8344152d368a5aac738d4c857012f2c5d1fd2eac7e17b647a7861bd/googleapis_common_protos-1.74.0.tar.gz", hash = "sha256:57971e4eeeba6aad1163c1f0fc88543f965bb49129b8bb55b2b7b26ecab084f1", size = 151254, upload-time = "2026-04-02T21:23:26.679Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/b0/be5d3329badb9230b765de6eea66b73abd5944bdeb5afb3562ddcd80ae84/googleapis_common_protos-1.74.0-py3-none-any.whl", hash = "sha256:702216f78610bb510e3f12ac3cafd281b7ac45cc5d86e90ad87e4d301a3426b5", size = 300743, upload-time = "2026-04-02T21:22:49.108Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gradio"
|
||||
version = "6.5.1"
|
||||
|
|
@ -1150,18 +845,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httplib2"
|
||||
version = "0.31.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyparsing" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c1/1f/e86365613582c027dda5ddb64e1010e57a3d53e99ab8a72093fa13d565ec/httplib2-0.31.2.tar.gz", hash = "sha256:385e0869d7397484f4eab426197a4c020b606edd43372492337c0b4010ae5d24", size = 250800, upload-time = "2026-01-23T11:04:44.165Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/90/fd509079dfcab01102c0fdd87f3a9506894bc70afcf9e9785ef6b2b3aff6/httplib2-0.31.2-py3-none-any.whl", hash = "sha256:dbf0c2fa3862acf3c55c078ea9c0bc4481d7dc5117cae71be9514912cf9f8349", size = 91099, upload-time = "2026-01-23T11:04:42.78Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
|
|
@ -1867,15 +1550,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "oauthlib"
|
||||
version = "3.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "2.21.0"
|
||||
|
|
@ -2239,63 +1913,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proto-plus"
|
||||
version = "1.27.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "protobuf" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/81/0d/94dfe80193e79d55258345901acd2917523d56e8381bc4dee7fd38e3868a/proto_plus-1.27.2.tar.gz", hash = "sha256:b2adde53adadf75737c44d3dcb0104fde65250dfc83ad59168b4aa3e574b6a24", size = 57204, upload-time = "2026-03-26T22:18:57.174Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/84/f3/1fba73eeffafc998a25d59703b63f8be4fe8a5cb12eaff7386a0ba0f7125/proto_plus-1.27.2-py3-none-any.whl", hash = "sha256:6432f75893d3b9e70b9c412f1d2f03f65b11fb164b793d14ae2ca01821d22718", size = 50450, upload-time = "2026-03-26T22:13:42.927Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "protobuf"
|
||||
version = "7.34.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6b/6b/a0e95cad1ad7cc3f2c6821fcab91671bd5b78bd42afb357bb4765f29bc41/protobuf-7.34.1.tar.gz", hash = "sha256:9ce42245e704cc5027be797c1db1eb93184d44d1cdd71811fb2d9b25ad541280", size = 454708, upload-time = "2026-03-20T17:34:47.036Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/11/3325d41e6ee15bf1125654301211247b042563bcc898784351252549a8ad/protobuf-7.34.1-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8b2cc79c4d8f62b293ad9b11ec3aebce9af481fa73e64556969f7345ebf9fc7", size = 429247, upload-time = "2026-03-20T17:34:37.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/9d/aa69df2724ff63efa6f72307b483ce0827f4347cc6d6df24b59e26659fef/protobuf-7.34.1-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:5185e0e948d07abe94bb76ec9b8416b604cfe5da6f871d67aad30cbf24c3110b", size = 325753, upload-time = "2026-03-20T17:34:38.751Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/e8/d174c91fd48e50101943f042b09af9029064810b734e4160bbe282fa1caa/protobuf-7.34.1-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:403b093a6e28a960372b44e5eb081775c9b056e816a8029c61231743d63f881a", size = 340198, upload-time = "2026-03-20T17:34:39.871Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/1b/3b431694a4dc6d37b9f653f0c64b0a0d9ec074ee810710c0c3da21d67ba7/protobuf-7.34.1-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ff40ce8cd688f7265326b38d5a1bed9bfdf5e6723d49961432f83e21d5713e4", size = 324267, upload-time = "2026-03-20T17:34:41.1Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/29/64de04a0ac142fb685fd09999bc3d337943fb386f3a0ec57f92fd8203f97/protobuf-7.34.1-cp310-abi3-win32.whl", hash = "sha256:34b84ce27680df7cca9f231043ada0daa55d0c44a2ddfaa58ec1d0d89d8bf60a", size = 426628, upload-time = "2026-03-20T17:34:42.536Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/87/cb5e585192a22b8bd457df5a2c16a75ea0db9674c3a0a39fc9347d84e075/protobuf-7.34.1-cp310-abi3-win_amd64.whl", hash = "sha256:e97b55646e6ce5cbb0954a8c28cd39a5869b59090dfaa7df4598a7fba869468c", size = 437901, upload-time = "2026-03-20T17:34:44.112Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/95/608f665226bca68b736b79e457fded9a2a38c4f4379a4a7614303d9db3bc/protobuf-7.34.1-py3-none-any.whl", hash = "sha256:bb3812cd53aefea2b028ef42bd780f5b96407247f20c6ef7c679807e9d188f11", size = 170715, upload-time = "2026-03-20T17:34:45.384Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyasn1"
|
||||
version = "0.6.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyasn1-modules"
|
||||
version = "0.4.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyasn1" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.12.5"
|
||||
|
|
@ -2426,15 +2043,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.3.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "9.0.2"
|
||||
|
|
@ -2676,34 +2284,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/95/e4/a3b9480c78cf8ee86626cb06f8d931d74d775897d44201ccb813097ae697/regex-2026.1.15-cp314-cp314t-win_arm64.whl", hash = "sha256:ca89c5e596fc05b015f27561b3793dc2fa0917ea0d7507eebb448efd35274a70", size = 274837, upload-time = "2026-01-14T23:17:23.146Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.33.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "charset-normalizer" },
|
||||
{ name = "idna" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests-oauthlib"
|
||||
version = "2.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "oauthlib" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "respx"
|
||||
version = "0.22.0"
|
||||
|
|
@ -3295,24 +2875,6 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uritemplate"
|
||||
version = "4.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.6.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.40.0"
|
||||
|
|
|
|||
Loading…
Reference in New Issue