diff --git a/README.md b/README.md index 7afd0e6..63626c2 100644 --- a/README.md +++ b/README.md @@ -197,9 +197,11 @@ Coda doesn't just respond — it shows up on its own. Scheduled tasks run in the **Repo Sync** — Coda pulls the latest changes from all configured repositories every 5 minutes, so it's always working with current code. -Schedules are registered automatically on app startup — no manual setup needed. They're idempotent, so restarting the app just updates existing schedules. To trigger a triage manually: `POST /triage-issues`. +**Proactive Agno Post** — every 30 minutes, Coda posts a short Agno update grounded in recent repo activity (merged PRs, open PRs, commits) or a concrete file spotlight from the local `agno` clone. It supports posting directly to a channel or replying inside a configured Slack thread. -For issue triage to post to Slack, set `TRIAGE_CHANNEL` in your env to the target channel ID (right-click channel in Slack → View details → copy ID). +Schedules are registered automatically on app startup — no manual setup needed. They're idempotent, so restarting the app just updates existing schedules. Manual triggers: `POST /triage-issues`, `POST /digest`, and `POST /proactive-agno-post`. + +For issue triage to post to Slack, set `TRIAGE_CHANNEL` in your env to the target channel ID (right-click channel in Slack → View details → copy ID). For proactive Agno posts, set `PROACTIVE_POST_ENABLED=true`, set `PROACTIVE_POST_CHANNEL` to your target Slack channel ID, and optionally set `PROACTIVE_POST_THREAD_TS` to a thread timestamp to keep updates in-thread. You can also build your own scheduled tasks — automatic PR review when new PRs are opened, stale branch alerts, or convention drift detection. See `tasks/` for examples. 
@@ -324,6 +326,10 @@ python -m evals.run --category security | `REPOS_DIR` | No | Path to cloned repos (default: /repos) | | `TRIAGE_CHANNEL` | No | Slack channel ID for daily issue triage | | `DIGEST_CHANNEL` | No | Slack channel ID for daily activity digest | +| `PROACTIVE_POST_ENABLED` | No | Enables the 30-minute proactive Agno Slack post schedule | +| `PROACTIVE_POST_CHANNEL` | No | Slack channel ID for proactive Agno posts | +| `PROACTIVE_POST_THREAD_TS` | No | Optional Slack thread timestamp for in-thread proactive posts | +| `PROACTIVE_POST_REPO` | No | Repo name to source proactive updates from (default: `agno`) | | `JWT_VERIFICATION_KEY` | Production | RBAC public key from [os.agno.com](https://os.agno.com?utm_source=github&utm_medium=example-repo&utm_campaign=agent-example&utm_content=coda&utm_term=agentos) | ## Security diff --git a/app/main.py b/app/main.py index cc6a7bb..1350dc5 100644 --- a/app/main.py +++ b/app/main.py @@ -17,6 +17,7 @@ from coda.team import coda from db import get_postgres_db from tasks.daily_digest import run_daily_digest +from tasks.proactive_agno_post import run_proactive_agno_post from tasks.review_issues import run_daily_triage from tasks.sync_repos import sync_all_repos @@ -81,6 +82,15 @@ def _register_schedules() -> None: description="Daily activity digest — merged PRs, open PRs, new/stale issues", if_exists="update", ) + if getenv("PROACTIVE_POST_ENABLED", "false").lower() == "true" and getenv("SLACK_TOKEN"): + mgr.create( + name="proactive-agno-post", + cron="*/30 * * * *", + endpoint="/proactive-agno-post", + timezone="UTC", + description="Proactive Agno Slack update every 30 minutes", + if_exists="update", + ) @asynccontextmanager @@ -133,6 +143,13 @@ def daily_digest() -> dict[str, str]: return {"status": "ok"} +@app.post("/proactive-agno-post") +def proactive_agno_post() -> dict[str, str]: + """Run proactive Agno posting from recent repo activity or repo spotlight.""" + run_proactive_agno_post() + return {"status": 
"ok"} + + if __name__ == "__main__": agent_os.serve( app="app.main:app", diff --git a/compose.yaml b/compose.yaml index 9b0bb5f..d41a88d 100644 --- a/compose.yaml +++ b/compose.yaml @@ -48,6 +48,10 @@ services: - SLACK_SIGNING_SECRET=${SLACK_SIGNING_SECRET:-} - TRIAGE_CHANNEL=${TRIAGE_CHANNEL:-} - DIGEST_CHANNEL=${DIGEST_CHANNEL:-} + - PROACTIVE_POST_ENABLED=${PROACTIVE_POST_ENABLED:-false} + - PROACTIVE_POST_CHANNEL=${PROACTIVE_POST_CHANNEL:-C09GL0WK0SU} + - PROACTIVE_POST_THREAD_TS=${PROACTIVE_POST_THREAD_TS:-1775243740.375219} + - PROACTIVE_POST_REPO=${PROACTIVE_POST_REPO:-agno} # Repos config - REPOS_DIR=/repos depends_on: diff --git a/docs/SPEC.md b/docs/SPEC.md index e3e89ab..32b7f38 100644 --- a/docs/SPEC.md +++ b/docs/SPEC.md @@ -107,6 +107,7 @@ Background tasks on a cron schedule via Agno ScheduleManager. - **Repo sync:** pulls latest changes every 5 minutes (`POST /sync`) - **Daily issue triage:** classifies new issues and posts to Slack (`POST /triage-issues`) - **Daily digest:** morning activity summary — merged PRs, open PRs, new/stale issues (`POST /digest`) +- **Proactive Agno post:** every 30 minutes, posts a repo-grounded Agno update to Slack or a configured thread (`POST /proactive-agno-post`) - **Startup sync:** repos are synced on application startup ### 10. Daily Issue Triage @@ -157,6 +158,30 @@ review, new issues, and stale issues. Pure GitHub API — no agent involved. - Requires `GITHUB_ACCESS_TOKEN` and `SLACK_TOKEN`. - Repos are read from `repos.yaml`. +### 12. Proactive Agno Post + +Half-hour Slack update grounded in the `agno` repository. The task prefers +recent merged PRs, then active open PRs, then local git commits, and finally a +real file spotlight from the local repo clone so it never falls back to generic filler. 
+ +**Pipeline:** Fetch (GitHub API + local git) → Select → Dedupe → Format → Post (Slack SDK) + +**Behavior:** +- Targets one configured repo (default: `agno`) +- Posts to Slack channel `C09GL0WK0SU` by default +- Supports optional thread replies via `PROACTIVE_POST_THREAD_TS` +- Stores a lightweight fingerprint in `/repos/.coda-state/` to avoid repeating the same signal in the same 30-minute bucket + +**Schedule:** Every 30 minutes UTC. Register with `python -m tasks.proactive_agno_post --schedule`. + +**Manual trigger:** `POST /proactive-agno-post` or `python -m tasks.proactive_agno_post`. + +**Configuration:** +- Set `PROACTIVE_POST_ENABLED=true` to register and run it +- Set `PROACTIVE_POST_CHANNEL` to the Slack channel ID +- Set `PROACTIVE_POST_THREAD_TS` to reply in a thread instead of top-level posting +- Set `PROACTIVE_POST_REPO` to change the source repo (defaults to `agno`) + ## Agents ### Coda (Team Leader) diff --git a/example.env b/example.env index 5591709..20b4b57 100644 --- a/example.env +++ b/example.env @@ -62,6 +62,13 @@ SLACK_SIGNING_SECRET= # DIGEST_CHANNEL= +# Enable proactive Agno updates every 30 minutes. +# Uses the configured Slack channel ID, and can optionally post into a thread. 
+# PROACTIVE_POST_ENABLED=false +# PROACTIVE_POST_CHANNEL=C09GL0WK0SU +# PROACTIVE_POST_THREAD_TS=1775243740.375219 +# PROACTIVE_POST_REPO=agno + # ------------------------------------------ # Optional: Database # ------------------------------------------ diff --git a/pyproject.toml b/pyproject.toml index a9b8be7..2e10bea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ dependencies = [ ] [project.optional-dependencies] -dev = ["mypy", "ruff"] +dev = ["mypy", "ruff", "pytest"] [build-system] requires = ["setuptools"] diff --git a/tasks/proactive_agno_post.py b/tasks/proactive_agno_post.py new file mode 100644 index 0000000..8384921 --- /dev/null +++ b/tasks/proactive_agno_post.py @@ -0,0 +1,462 @@ +""" +Proactive Agno Post +=================== + +Posts a short Agno update to Slack every 30 minutes, grounded in recent +repo activity or a concrete file spotlight from the local agno clone. + +Manual trigger: + python -m tasks.proactive_agno_post + +Register/update schedule: + python -m tasks.proactive_agno_post --schedule +""" + +from __future__ import annotations + +import argparse +import hashlib +import json +import logging +import re +import subprocess +from datetime import datetime, timezone +from os import getenv +from pathlib import Path +from typing import Any + +import httpx +from slack_sdk import WebClient +from slack_sdk.errors import SlackApiError + +from coda.settings import REPOS_DIR +from tasks.sync_repos import load_repos_config + +log = logging.getLogger(__name__) + +GITHUB_API = "https://api.github.com" +DEFAULT_REPO = "agno" +DEFAULT_CHANNEL = "C09GL0WK0SU" +DEFAULT_THREAD_TS = "1775243740.375219" +RECENT_ACTIVITY_LIMIT = 6 +SPOTLIGHT_FILES = [ + "README.md", + "libs/agno/agno/agent/agent.py", + "libs/agno/agno/team/team.py", + "libs/agno/agno/tools/function.py", + "libs/agno/agno/memory/v2/memory.py", + "libs/agno/agno/os/app.py", +] + + +# --------------------------------------------------------------------------- +# Helpers +# 
--------------------------------------------------------------------------- +def _parse_owner_repo(url: str) -> str: + """Extract 'owner/repo' from a GitHub URL.""" + match = re.search(r"github\.com[:/](.+?)(?:\.git)?$", url.rstrip("/")) + if not match: + raise ValueError(f"Cannot parse GitHub owner/repo from: {url}") + return match.group(1) + + +def _github_headers() -> dict[str, str]: + token = getenv("GITHUB_ACCESS_TOKEN", "") + headers: dict[str, str] = {"Accept": "application/vnd.github+json"} + if token: + headers["Authorization"] = f"Bearer {token}" + return headers + + +def _repo_name_from_url(url: str) -> str: + return url.rstrip("/").split("/")[-1].removesuffix(".git") + + +def _load_repo_config(repo_name: str) -> dict[str, Any] | None: + for repo in load_repos_config(): + url = repo.get("url", "") + if url and _repo_name_from_url(url) == repo_name: + return repo + return None + + +def _repo_path(repo_name: str) -> Path: + return REPOS_DIR / repo_name + + +def _run_git(repo_path: Path, *args: str) -> str: + result = subprocess.run( + ["git", *args], + cwd=repo_path, + capture_output=True, + text=True, + timeout=30, + check=True, + ) + return result.stdout.strip() + + +def _get_recent_commits(repo_path: Path, limit: int = RECENT_ACTIVITY_LIMIT) -> list[dict[str, str]]: + output = _run_git( + repo_path, + "log", + f"--max-count={limit}", + "--pretty=format:%H%x1f%s%x1f%an%x1f%cI", + "--name-only", + ) + commits: list[dict[str, str]] = [] + current: dict[str, str] | None = None + files: list[str] = [] + + for line in output.splitlines() + [""]: + if "\x1f" in line: + if current is not None: + current["files"] = ", ".join(files[:4]) + commits.append(current) + sha, subject, author, committed_at = line.split("\x1f") + current = { + "sha": sha, + "short_sha": sha[:7], + "subject": subject, + "author": author, + "committed_at": committed_at, + } + files = [] + elif current is not None and line.strip(): + files.append(line.strip()) + + return commits + + +def 
_get_file_summary(repo_path: Path, relative_path: str) -> dict[str, str] | None: + target = repo_path / relative_path + if not target.exists() or not target.is_file(): + return None + + content = target.read_text(encoding="utf-8", errors="ignore").splitlines() + non_empty = [line.strip() for line in content if line.strip()] + if not non_empty: + return None + + headline = non_empty[0] + detail = "" + for line in non_empty[1:]: + if len(line) > 20: + detail = line + break + return { + "path": relative_path, + "headline": headline[:140], + "detail": detail[:220], + } + + +def _pick_spotlight(repo_path: Path, run_bucket: str) -> dict[str, str] | None: + if not SPOTLIGHT_FILES: + return None + index = int(hashlib.sha256(run_bucket.encode()).hexdigest(), 16) % len(SPOTLIGHT_FILES) + for offset in range(len(SPOTLIGHT_FILES)): + candidate = SPOTLIGHT_FILES[(index + offset) % len(SPOTLIGHT_FILES)] + summary = _get_file_summary(repo_path, candidate) + if summary: + return summary + return None + + +def fetch_recent_merged_pr(owner_repo: str) -> dict[str, Any] | None: + try: + with httpx.Client(timeout=20) as client: + resp = client.get( + f"{GITHUB_API}/repos/{owner_repo}/pulls", + headers=_github_headers(), + params={ + "state": "closed", + "sort": "updated", + "direction": "desc", + "per_page": 20, + "page": 1, + }, + ) + resp.raise_for_status() + except Exception: + log.exception("Failed to fetch merged PRs for %s", owner_repo) + return None + + for pr in resp.json(): + if pr.get("merged_at"): + return { + "type": "merged_pr", + "number": pr["number"], + "title": pr["title"], + "url": pr["html_url"], + "author": pr["user"]["login"], + "merged_at": pr["merged_at"], + } + return None + + +def fetch_recent_open_pr(owner_repo: str) -> dict[str, Any] | None: + try: + with httpx.Client(timeout=20) as client: + resp = client.get( + f"{GITHUB_API}/repos/{owner_repo}/pulls", + headers=_github_headers(), + params={ + "state": "open", + "sort": "updated", + "direction": "desc", + 
"per_page": 10, + "page": 1, + }, + ) + resp.raise_for_status() + except Exception: + log.exception("Failed to fetch open PRs for %s", owner_repo) + return None + + for pr in resp.json(): + if pr.get("draft", False): + continue + return { + "type": "open_pr", + "number": pr["number"], + "title": pr["title"], + "url": pr["html_url"], + "author": pr["user"]["login"], + "updated_at": pr["updated_at"], + } + return None + + +def _half_hour_bucket(now: datetime | None = None) -> str: + now = now or datetime.now(timezone.utc) + minute_bucket = "00" if now.minute < 30 else "30" + return now.strftime(f"%Y-%m-%dT%H:{minute_bucket}Z") + + +def select_post_signal(repo_name: str, now: datetime | None = None) -> dict[str, Any] | None: + repo_config = _load_repo_config(repo_name) + if not repo_config: + log.warning("Repo '%s' not found in repos.yaml", repo_name) + return None + + url = repo_config.get("url") + if not url: + log.warning("Repo '%s' has no URL configured", repo_name) + return None + + owner_repo = _parse_owner_repo(url) + repo_path = _repo_path(repo_name) + if not repo_path.exists(): + log.warning("Repo clone missing at %s", repo_path) + return None + + merged_pr = fetch_recent_merged_pr(owner_repo) + if merged_pr: + return merged_pr + + open_pr = fetch_recent_open_pr(owner_repo) + if open_pr: + return open_pr + + try: + commits = _get_recent_commits(repo_path) + except Exception: + log.exception("Failed to inspect local git history for %s", repo_name) + commits = [] + + if commits: + commit = commits[0] + return { + "type": "commit", + "sha": commit["sha"], + "short_sha": commit["short_sha"], + "subject": commit["subject"], + "author": commit["author"], + "committed_at": commit["committed_at"], + "files": commit.get("files", ""), + "url": f"https://github.com/{owner_repo}/commit/{commit['sha']}", + } + + spotlight = _pick_spotlight(repo_path, _half_hour_bucket(now)) + if spotlight: + return { + "type": "spotlight", + "path": spotlight["path"], + "headline": 
spotlight["headline"], + "detail": spotlight["detail"], + "url": f"https://github.com/{owner_repo}/blob/main/{spotlight['path']}", + } + + return None + + +def _signal_fingerprint(signal: dict[str, Any], bucket: str) -> str: + identity = { + "bucket": bucket, + "type": signal.get("type"), + "url": signal.get("url"), + "number": signal.get("number"), + "sha": signal.get("sha"), + "path": signal.get("path"), + } + return hashlib.sha256(json.dumps(identity, sort_keys=True).encode()).hexdigest() + + +def _dedupe_state_path(repo_name: str) -> Path: + state_dir = REPOS_DIR / ".coda-state" + state_dir.mkdir(exist_ok=True) + return state_dir / f"proactive-post-{repo_name}.json" + + +def should_post_signal(repo_name: str, signal: dict[str, Any], bucket: str) -> bool: + state_path = _dedupe_state_path(repo_name) + fingerprint = _signal_fingerprint(signal, bucket) + if not state_path.exists(): + return True + + try: + state = json.loads(state_path.read_text()) + except Exception: + log.warning("Failed to read proactive post dedupe state at %s; proceeding", state_path) + return True + + return state.get("fingerprint") != fingerprint + + +def mark_signal_posted(repo_name: str, signal: dict[str, Any], bucket: str) -> None: + state_path = _dedupe_state_path(repo_name) + payload = { + "bucket": bucket, + "fingerprint": _signal_fingerprint(signal, bucket), + "signal_type": signal.get("type"), + "url": signal.get("url"), + "posted_at": datetime.now(timezone.utc).isoformat(), + } + state_path.write_text(json.dumps(payload, indent=2)) + + +def build_proactive_post(repo_name: str, signal: dict[str, Any]) -> str: + intro = "Agno update 👀" + if signal["type"] == "merged_pr": + return ( + f"{intro}\n" + f"Recent merge: <{signal['url']}|#{signal['number']} {signal['title']}> by @{signal['author']}.\n" + f"Why it matters: this is a fresh change landing in `{repo_name}`, so it's a good place to see what Agno is evolving right now." 
+ ) + if signal["type"] == "open_pr": + return ( + f"{intro}\n" + f"Open PR to watch: <{signal['url']}|#{signal['number']} {signal['title']}> by @{signal['author']}.\n" + f"Why it matters: this is active work in flight on Agno, useful if you want a concrete thread to review or learn from." + ) + if signal["type"] == "commit": + files = signal.get("files") or "repo internals" + return ( + f"{intro}\n" + f"Commit highlight: <{signal['url']}|{signal['short_sha']}> {signal['subject']} — @{signal['author']}.\n" + f"Touched: `{files}`\n" + f"Why it matters: this points to the latest concrete code movement in Agno rather than a generic summary." + ) + if signal["type"] == "spotlight": + detail = f" {signal['detail']}" if signal.get("detail") else "" + return ( + f"{intro}\n" + f"Repo spotlight: <{signal['url']}|{signal['path']}>\n" + f"{signal['headline']}{detail}\n" + f"Why it matters: this keeps the post grounded in a real Agno file even when recent activity is quiet." + ) + raise ValueError(f"Unsupported signal type: {signal['type']}") + + +def post_to_slack(message: str) -> None: + token = getenv("SLACK_TOKEN", "") + channel = getenv("PROACTIVE_POST_CHANNEL", DEFAULT_CHANNEL) + thread_ts = getenv("PROACTIVE_POST_THREAD_TS", DEFAULT_THREAD_TS) + + if not token or not channel: + log.warning("SLACK_TOKEN or PROACTIVE_POST_CHANNEL not set — printing to stdout") + print(message) + return + + payload: dict[str, Any] = {"channel": channel, "text": message, "mrkdwn": True} + if thread_ts: + payload["thread_ts"] = thread_ts + + try: + client = WebClient(token=token) + client.chat_postMessage(**payload) + if thread_ts: + log.info("Posted proactive Agno update to Slack channel %s thread %s", channel, thread_ts) + else: + log.info("Posted proactive Agno update to Slack channel %s", channel) + except SlackApiError as e: + error = e.response.get("error", "unknown") + if error == "channel_not_found": + log.error("Channel '%s' not found. Use channel ID (e.g. 
C0XXXXXXX), not name.", channel) + elif error == "not_in_channel": + log.error("Bot not in channel '%s'. Run /invite @Coda first.", channel) + elif error == "invalid_auth": + log.error("SLACK_TOKEN is invalid or expired.") + elif error == "thread_not_found": + log.error("Slack thread '%s' was not found in channel '%s'.", thread_ts, channel) + else: + log.error("Slack API error: %s", error) + log.info("Falling back to stdout:") + print(message) + + +# --------------------------------------------------------------------------- +# Main entry +# --------------------------------------------------------------------------- +def run_proactive_agno_post() -> None: + """Fetch → select → dedupe → format → post a proactive Agno update.""" + enabled = getenv("PROACTIVE_POST_ENABLED", "false").lower() == "true" + repo_name = getenv("PROACTIVE_POST_REPO", DEFAULT_REPO) + bucket = _half_hour_bucket() + + if not enabled: + log.info("PROACTIVE_POST_ENABLED is false; skipping proactive Agno post") + return + + signal = select_post_signal(repo_name) + if not signal: + log.warning("No proactive Agno signal available for repo '%s'", repo_name) + return + + if not should_post_signal(repo_name, signal, bucket): + log.info("Skipping proactive Agno post for repo '%s'; duplicate signal in bucket %s", repo_name, bucket) + return + + message = build_proactive_post(repo_name, signal) + post_to_slack(message) + mark_signal_posted(repo_name, signal, bucket) + + +# --------------------------------------------------------------------------- +# CLI + Schedule +# --------------------------------------------------------------------------- +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s") + + parser = argparse.ArgumentParser(description="Scheduled proactive Agno Slack posts") + parser.add_argument("--schedule", action="store_true", help="Register/update the schedule") + args = parser.parse_args() + + if args.schedule: + from 
agno.scheduler import ScheduleManager + + from db import get_postgres_db + + mgr = ScheduleManager(get_postgres_db()) + schedule = mgr.create( + name="proactive-agno-post", + cron="*/30 * * * *", + endpoint="/proactive-agno-post", + timezone="UTC", + description="Post a proactive Agno update to Slack every 30 minutes", + if_exists="update", + ) + print(f"Schedule ready: {schedule.name} (next: {schedule.next_run_at})") + else: + run_proactive_agno_post() \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..2ffdd85 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +import sys +from pathlib import Path + +ROOT = Path(__file__).resolve().parents[1] +if str(ROOT) not in sys.path: + sys.path.insert(0, str(ROOT)) \ No newline at end of file diff --git a/tests/test_proactive_agno_post.py b/tests/test_proactive_agno_post.py new file mode 100644 index 0000000..10329fe --- /dev/null +++ b/tests/test_proactive_agno_post.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +import json +from pathlib import Path + +from tasks.proactive_agno_post import ( + _half_hour_bucket, + build_proactive_post, + mark_signal_posted, + should_post_signal, +) + + +def test_build_proactive_post_for_commit() -> None: + message = build_proactive_post( + "agno", + { + "type": "commit", + "sha": "abc123456789", + "short_sha": "abc1234", + "subject": "Improve agent memory wiring", + "author": "alice", + "files": "libs/agno/agno/memory/v2/memory.py", + "url": "https://github.com/agno-agi/agno/commit/abc123456789", + }, + ) + + assert "Agno update 👀" in message + assert "Improve agent memory wiring" in message + assert "memory.py" in message + + +def test_build_proactive_post_for_spotlight() -> None: + message = build_proactive_post( + "agno", + { + "type": "spotlight", + "path": "README.md", + "headline": "# Agno", + "detail": "Build multi-agent systems with memory, knowledge, and tools.", + 
"url": "https://github.com/agno-agi/agno/blob/main/README.md", + }, + ) + + assert "Repo spotlight" in message + assert "README.md" in message + assert "Build multi-agent systems" in message + + +def test_dedupe_state_blocks_repeat_in_same_bucket(tmp_path: Path, monkeypatch) -> None: + monkeypatch.setenv("REPOS_DIR", str(tmp_path)) + from tasks import proactive_agno_post as module + + module.REPOS_DIR = tmp_path + signal = { + "type": "commit", + "sha": "abc123456789", + "short_sha": "abc1234", + "subject": "Improve agent memory wiring", + "author": "alice", + "files": "libs/agno/agno/memory/v2/memory.py", + "url": "https://github.com/agno-agi/agno/commit/abc123456789", + } + bucket = "2026-04-03T19:00Z" + + assert should_post_signal("agno", signal, bucket) is True + mark_signal_posted("agno", signal, bucket) + assert should_post_signal("agno", signal, bucket) is False + + state_path = tmp_path / ".coda-state" / "proactive-post-agno.json" + payload = json.loads(state_path.read_text()) + assert payload["bucket"] == bucket + assert payload["signal_type"] == "commit" + + +def test_half_hour_bucket_rounds_down() -> None: + from datetime import datetime, timezone + + assert _half_hour_bucket(datetime(2026, 4, 3, 19, 4, tzinfo=timezone.utc)) == "2026-04-03T19:00Z" + assert _half_hour_bucket(datetime(2026, 4, 3, 19, 48, tzinfo=timezone.utc)) == "2026-04-03T19:30Z" \ No newline at end of file