@orchagent/cli 0.3.64 → 0.3.66
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -0,0 +1,884 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* GitHub Weekly Summary template — file constants.
|
|
4
|
+
*
|
|
5
|
+
* Extracted from the working agent at agents/github-weekly-summary/.
|
|
6
|
+
* Python files are EXACT copies. orchagent.json and README.md use {{name}} substitution.
|
|
7
|
+
*/
|
|
8
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
9
|
+
exports.AVAILABLE_TEMPLATES = exports.TEMPLATE_README = exports.TEMPLATE_ENV_EXAMPLE = exports.TEMPLATE_WEEKLY_SUMMARY_PROMPT = exports.TEMPLATE_REQUIREMENTS_TXT = exports.TEMPLATE_MODELS_PY = exports.TEMPLATE_ANALYST_PY = exports.TEMPLATE_ACTIVITY_STORE_PY = exports.TEMPLATE_GITHUB_FETCHER_PY = exports.TEMPLATE_CONFIG_PY = exports.TEMPLATE_MAIN_PY = exports.TEMPLATE_MANIFEST = void 0;
|
|
10
|
+
// ─── orchagent.json ──────────────────────────────────────────────────────────
|
|
11
|
+
exports.TEMPLATE_MANIFEST = `{
|
|
12
|
+
"name": "{{name}}",
|
|
13
|
+
"version": "v1",
|
|
14
|
+
"type": "agent",
|
|
15
|
+
"description": "Weekly GitHub activity summary delivered to Discord. Uses Claude to analyse commits, PRs, and issues — surfaces patterns, risks, and trends.",
|
|
16
|
+
"runtime": {
|
|
17
|
+
"command": "python main.py"
|
|
18
|
+
},
|
|
19
|
+
"required_secrets": [
|
|
20
|
+
"ORCHAGENT_API_KEY",
|
|
21
|
+
"DISCORD_WEBHOOK_URL",
|
|
22
|
+
"ANTHROPIC_API_KEY",
|
|
23
|
+
"GITHUB_REPOS"
|
|
24
|
+
],
|
|
25
|
+
"bundle": {
|
|
26
|
+
"include": ["*.py", "prompts/*.md", "requirements.txt"],
|
|
27
|
+
"exclude": ["tests/", "__pycache__", "*.pyc", ".pytest_cache", ".env"]
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
`;
|
|
31
|
+
// ─── main.py ─────────────────────────────────────────────────────────────────
|
|
32
|
+
exports.TEMPLATE_MAIN_PY = `"""GitHub Weekly Summary Agent -- main entrypoint.
|
|
33
|
+
|
|
34
|
+
A scheduled on-demand agent that:
|
|
35
|
+
1. Fetches GitHub activity (commits, PRs, issues, reviews) via the orchagent proxy
|
|
36
|
+
2. Analyses the data with Claude to produce an intelligent summary
|
|
37
|
+
3. Posts the summary to Discord via webhook
|
|
38
|
+
4. Exits
|
|
39
|
+
|
|
40
|
+
Triggered weekly by orchagent cron scheduling. Runs in an E2B sandbox.
|
|
41
|
+
"""
|
|
42
|
+
|
|
43
|
+
import asyncio
|
|
44
|
+
import json
|
|
45
|
+
import logging
|
|
46
|
+
import sys
|
|
47
|
+
|
|
48
|
+
import httpx
|
|
49
|
+
|
|
50
|
+
from config import Config
|
|
51
|
+
from github_fetcher import GitHubFetcher
|
|
52
|
+
from activity_store import ActivityStore
|
|
53
|
+
from analyst import Analyst
|
|
54
|
+
|
|
55
|
+
# Configure logging
|
|
56
|
+
logging.basicConfig(
|
|
57
|
+
level=logging.INFO,
|
|
58
|
+
format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
|
|
59
|
+
stream=sys.stdout,
|
|
60
|
+
)
|
|
61
|
+
logger = logging.getLogger("agent.main")
|
|
62
|
+
|
|
63
|
+
MAX_DISCORD_LENGTH = 1900
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
async def post_to_discord(webhook_url: str, content: str):
|
|
67
|
+
"""Post a message to Discord via webhook. Splits if over limit."""
|
|
68
|
+
chunks = _split_message(content)
|
|
69
|
+
|
|
70
|
+
async with httpx.AsyncClient(timeout=30.0) as client:
|
|
71
|
+
for chunk in chunks:
|
|
72
|
+
payload = {"content": chunk}
|
|
73
|
+
response = await client.post(webhook_url, json=payload)
|
|
74
|
+
if response.status_code >= 400:
|
|
75
|
+
logger.error(
|
|
76
|
+
"Discord webhook failed (%d): %s",
|
|
77
|
+
response.status_code,
|
|
78
|
+
response.text[:200],
|
|
79
|
+
)
|
|
80
|
+
raise RuntimeError("Discord webhook returned %d" % response.status_code)
|
|
81
|
+
logger.info("Posted chunk (%d chars) to Discord", len(chunk))
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _split_message(text: str) -> list[str]:
|
|
85
|
+
"""Split a message into chunks that fit within Discord's limit."""
|
|
86
|
+
if len(text) <= MAX_DISCORD_LENGTH:
|
|
87
|
+
return [text]
|
|
88
|
+
|
|
89
|
+
chunks = []
|
|
90
|
+
remaining = text
|
|
91
|
+
while remaining:
|
|
92
|
+
if len(remaining) <= MAX_DISCORD_LENGTH:
|
|
93
|
+
chunks.append(remaining)
|
|
94
|
+
break
|
|
95
|
+
|
|
96
|
+
# Try paragraph boundary, then newline, then space, then hard split
|
|
97
|
+
split_at = remaining.rfind("\\n\\n", 0, MAX_DISCORD_LENGTH)
|
|
98
|
+
if split_at == -1:
|
|
99
|
+
split_at = remaining.rfind("\\n", 0, MAX_DISCORD_LENGTH)
|
|
100
|
+
if split_at == -1:
|
|
101
|
+
split_at = remaining.rfind(" ", 0, MAX_DISCORD_LENGTH)
|
|
102
|
+
if split_at == -1:
|
|
103
|
+
split_at = MAX_DISCORD_LENGTH
|
|
104
|
+
|
|
105
|
+
chunks.append(remaining[:split_at])
|
|
106
|
+
remaining = remaining[split_at:].lstrip()
|
|
107
|
+
|
|
108
|
+
return [c for c in chunks if c.strip()]
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
async def run():
|
|
112
|
+
"""Main execution: fetch activity, analyse, post summary."""
|
|
113
|
+
logger.info("Starting GitHub Weekly Summary Agent")
|
|
114
|
+
|
|
115
|
+
# Load config
|
|
116
|
+
config = Config()
|
|
117
|
+
logger.info("Config: %d repos, team=%s, model=%s", len(config.github_repos), config.team_name, config.model)
|
|
118
|
+
|
|
119
|
+
# Fetch GitHub activity
|
|
120
|
+
fetcher = GitHubFetcher(
|
|
121
|
+
gateway_url=config.orchagent_gateway_url,
|
|
122
|
+
api_key=config.orchagent_api_key,
|
|
123
|
+
)
|
|
124
|
+
store = ActivityStore(fetcher=fetcher, repos=config.github_repos)
|
|
125
|
+
await store.refresh(days=14)
|
|
126
|
+
|
|
127
|
+
if not store.window or (
|
|
128
|
+
not store.window.commits
|
|
129
|
+
and not store.window.pull_requests
|
|
130
|
+
and not store.window.issues
|
|
131
|
+
):
|
|
132
|
+
logger.warning("No activity found in the last 14 days. Posting minimal summary.")
|
|
133
|
+
summary = "No GitHub activity detected in the last 14 days across %s." % ", ".join(config.github_repos)
|
|
134
|
+
else:
|
|
135
|
+
# Generate intelligent summary
|
|
136
|
+
analyst = Analyst(
|
|
137
|
+
api_key=config.anthropic_api_key,
|
|
138
|
+
model=config.model,
|
|
139
|
+
team_name=config.team_name,
|
|
140
|
+
)
|
|
141
|
+
summary = await analyst.generate_weekly_summary(store)
|
|
142
|
+
|
|
143
|
+
# Post to Discord
|
|
144
|
+
from datetime import datetime, timezone
|
|
145
|
+
|
|
146
|
+
header = "**Weekly Development Summary -- %s**\\n\\n" % datetime.now(timezone.utc).strftime("%d %b %Y")
|
|
147
|
+
await post_to_discord(config.discord_webhook_url, header + summary)
|
|
148
|
+
|
|
149
|
+
logger.info("Done. Summary posted to Discord.")
|
|
150
|
+
|
|
151
|
+
# Output for orchagent run history
|
|
152
|
+
result = {
|
|
153
|
+
"status": "success",
|
|
154
|
+
"repos": config.github_repos,
|
|
155
|
+
"commits": len(store.window.commits) if store.window else 0,
|
|
156
|
+
"pull_requests": len(store.window.pull_requests) if store.window else 0,
|
|
157
|
+
"issues": len(store.window.issues) if store.window else 0,
|
|
158
|
+
"summary_length": len(summary),
|
|
159
|
+
}
|
|
160
|
+
print(json.dumps(result))
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
if __name__ == "__main__":
|
|
164
|
+
asyncio.run(run())
|
|
165
|
+
`;
|
|
166
|
+
// ─── config.py ───────────────────────────────────────────────────────────────
|
|
167
|
+
exports.TEMPLATE_CONFIG_PY = `"""Configuration -- loads and validates all env vars on startup."""
|
|
168
|
+
|
|
169
|
+
import os
|
|
170
|
+
import sys
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
class Config:
|
|
174
|
+
"""Agent configuration loaded from environment variables."""
|
|
175
|
+
|
|
176
|
+
def __init__(self):
|
|
177
|
+
# Secrets (required)
|
|
178
|
+
self.orchagent_api_key = _require("ORCHAGENT_API_KEY")
|
|
179
|
+
self.discord_webhook_url = _require("DISCORD_WEBHOOK_URL")
|
|
180
|
+
self.anthropic_api_key = _require("ANTHROPIC_API_KEY")
|
|
181
|
+
|
|
182
|
+
# orchagent gateway URL (default to production)
|
|
183
|
+
self.orchagent_gateway_url = os.getenv(
|
|
184
|
+
"ORCHAGENT_GATEWAY_URL", "https://api.orchagent.io"
|
|
185
|
+
)
|
|
186
|
+
|
|
187
|
+
# GitHub repos to track (required)
|
|
188
|
+
repos_raw = _require("GITHUB_REPOS")
|
|
189
|
+
self.github_repos = [r.strip() for r in repos_raw.split(",") if r.strip()]
|
|
190
|
+
|
|
191
|
+
# Team name (optional, used in prompts)
|
|
192
|
+
self.team_name = os.getenv("TEAM_NAME", "Team")
|
|
193
|
+
|
|
194
|
+
# LLM model
|
|
195
|
+
self.model = os.getenv("CLAUDE_MODEL", "claude-sonnet-4-5-20250929")
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def _require(name: str) -> str:
|
|
199
|
+
"""Get a required env var or exit with a clear error."""
|
|
200
|
+
value = os.getenv(name)
|
|
201
|
+
if not value:
|
|
202
|
+
print(
|
|
203
|
+
"FATAL: Required environment variable %s is not set." % name,
|
|
204
|
+
file=sys.stderr,
|
|
205
|
+
)
|
|
206
|
+
sys.exit(1)
|
|
207
|
+
return value
|
|
208
|
+
`;
|
|
209
|
+
// ─── github_fetcher.py ───────────────────────────────────────────────────────
|
|
210
|
+
exports.TEMPLATE_GITHUB_FETCHER_PY = `"""Fetch GitHub activity via the orchagent GitHub Activity Proxy."""
|
|
211
|
+
|
|
212
|
+
import logging
|
|
213
|
+
from datetime import datetime, timezone, timedelta
|
|
214
|
+
|
|
215
|
+
import httpx
|
|
216
|
+
|
|
217
|
+
from models import Commit, PullRequest, Issue, ActivityWindow
|
|
218
|
+
|
|
219
|
+
logger = logging.getLogger("agent.github_fetcher")
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
class GitHubFetcher:
|
|
223
|
+
"""Fetches GitHub activity through the orchagent gateway proxy."""
|
|
224
|
+
|
|
225
|
+
def __init__(self, gateway_url: str, api_key: str):
|
|
226
|
+
self.base_url = f"{gateway_url.rstrip('/')}/github/activity"
|
|
227
|
+
self.headers = {"Authorization": f"Bearer {api_key}"}
|
|
228
|
+
|
|
229
|
+
async def fetch_all_activity(
|
|
230
|
+
self, repos: list[str], days: int = 14
|
|
231
|
+
) -> ActivityWindow:
|
|
232
|
+
"""Fetch commits, PRs, and issues for all repos within the time window."""
|
|
233
|
+
since = datetime.now(timezone.utc) - timedelta(days=days)
|
|
234
|
+
since_iso = since.strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
235
|
+
|
|
236
|
+
all_commits = []
|
|
237
|
+
all_prs = []
|
|
238
|
+
all_issues = []
|
|
239
|
+
|
|
240
|
+
async with httpx.AsyncClient(timeout=30.0) as client:
|
|
241
|
+
for repo in repos:
|
|
242
|
+
owner, name = repo.split("/", 1)
|
|
243
|
+
|
|
244
|
+
commits = await self._fetch_commits(client, owner, name, since_iso)
|
|
245
|
+
all_commits.extend(commits)
|
|
246
|
+
|
|
247
|
+
prs = await self._fetch_pulls(client, owner, name)
|
|
248
|
+
# Filter PRs updated since the window
|
|
249
|
+
prs = [p for p in prs if p.updated_at >= since_iso]
|
|
250
|
+
# Fetch reviews for each PR
|
|
251
|
+
for pr in prs:
|
|
252
|
+
pr.reviews = await self._fetch_reviews(client, owner, name, pr.number)
|
|
253
|
+
all_prs.extend(prs)
|
|
254
|
+
|
|
255
|
+
issues = await self._fetch_issues(client, owner, name, since_iso)
|
|
256
|
+
all_issues.extend(issues)
|
|
257
|
+
|
|
258
|
+
now = datetime.now(timezone.utc)
|
|
259
|
+
return ActivityWindow(
|
|
260
|
+
repos=repos,
|
|
261
|
+
commits=all_commits,
|
|
262
|
+
pull_requests=all_prs,
|
|
263
|
+
issues=all_issues,
|
|
264
|
+
fetched_at=now,
|
|
265
|
+
period_start=since,
|
|
266
|
+
period_end=now,
|
|
267
|
+
)
|
|
268
|
+
|
|
269
|
+
async def _fetch_commits(
|
|
270
|
+
self, client: httpx.AsyncClient, owner: str, repo: str, since: str
|
|
271
|
+
) -> list[Commit]:
|
|
272
|
+
"""Fetch commits from the proxy."""
|
|
273
|
+
data = await self._get(
|
|
274
|
+
client, f"/repos/{owner}/{repo}/commits", {"since": since, "per_page": 100}
|
|
275
|
+
)
|
|
276
|
+
if not isinstance(data, list):
|
|
277
|
+
return []
|
|
278
|
+
|
|
279
|
+
commits = []
|
|
280
|
+
for item in data:
|
|
281
|
+
commit_data = item.get("commit", {})
|
|
282
|
+
author = item.get("author") or {}
|
|
283
|
+
commit_author = commit_data.get("author", {})
|
|
284
|
+
commits.append(
|
|
285
|
+
Commit(
|
|
286
|
+
sha=item.get("sha", "")[:8],
|
|
287
|
+
author_login=author.get("login", commit_author.get("name", "unknown")),
|
|
288
|
+
author_name=commit_author.get("name", "unknown"),
|
|
289
|
+
message=commit_data.get("message", "").split("\\n")[0], # First line only
|
|
290
|
+
date=commit_author.get("date", ""),
|
|
291
|
+
repo=f"{owner}/{repo}",
|
|
292
|
+
)
|
|
293
|
+
)
|
|
294
|
+
return commits
|
|
295
|
+
|
|
296
|
+
async def _fetch_pulls(
|
|
297
|
+
self, client: httpx.AsyncClient, owner: str, repo: str
|
|
298
|
+
) -> list[PullRequest]:
|
|
299
|
+
"""Fetch pull requests from the proxy."""
|
|
300
|
+
data = await self._get(
|
|
301
|
+
client,
|
|
302
|
+
f"/repos/{owner}/{repo}/pulls",
|
|
303
|
+
{"state": "all", "sort": "updated", "direction": "desc", "per_page": 100},
|
|
304
|
+
)
|
|
305
|
+
if not isinstance(data, list):
|
|
306
|
+
return []
|
|
307
|
+
|
|
308
|
+
prs = []
|
|
309
|
+
for item in data:
|
|
310
|
+
user = item.get("user", {})
|
|
311
|
+
# Determine if merged
|
|
312
|
+
state = item.get("state", "open")
|
|
313
|
+
if state == "closed" and item.get("merged_at"):
|
|
314
|
+
state = "merged"
|
|
315
|
+
|
|
316
|
+
prs.append(
|
|
317
|
+
PullRequest(
|
|
318
|
+
number=item.get("number", 0),
|
|
319
|
+
title=item.get("title", ""),
|
|
320
|
+
author_login=user.get("login", "unknown"),
|
|
321
|
+
state=state,
|
|
322
|
+
created_at=item.get("created_at", ""),
|
|
323
|
+
updated_at=item.get("updated_at", ""),
|
|
324
|
+
merged_at=item.get("merged_at"),
|
|
325
|
+
closed_at=item.get("closed_at"),
|
|
326
|
+
comments_count=item.get("comments", 0),
|
|
327
|
+
review_comments_count=item.get("review_comments", 0),
|
|
328
|
+
additions=item.get("additions", 0),
|
|
329
|
+
deletions=item.get("deletions", 0),
|
|
330
|
+
repo=f"{owner}/{repo}",
|
|
331
|
+
)
|
|
332
|
+
)
|
|
333
|
+
return prs
|
|
334
|
+
|
|
335
|
+
async def _fetch_issues(
|
|
336
|
+
self, client: httpx.AsyncClient, owner: str, repo: str, since: str
|
|
337
|
+
) -> list[Issue]:
|
|
338
|
+
"""Fetch issues (not PRs) from the proxy."""
|
|
339
|
+
data = await self._get(
|
|
340
|
+
client,
|
|
341
|
+
f"/repos/{owner}/{repo}/issues",
|
|
342
|
+
{"state": "all", "sort": "updated", "direction": "desc", "since": since, "per_page": 100},
|
|
343
|
+
)
|
|
344
|
+
if not isinstance(data, list):
|
|
345
|
+
return []
|
|
346
|
+
|
|
347
|
+
issues = []
|
|
348
|
+
for item in data:
|
|
349
|
+
# GitHub issues endpoint also returns PRs -- skip them
|
|
350
|
+
if "pull_request" in item:
|
|
351
|
+
continue
|
|
352
|
+
|
|
353
|
+
user = item.get("user", {})
|
|
354
|
+
labels = [l.get("name", "") for l in item.get("labels", [])]
|
|
355
|
+
issues.append(
|
|
356
|
+
Issue(
|
|
357
|
+
number=item.get("number", 0),
|
|
358
|
+
title=item.get("title", ""),
|
|
359
|
+
author_login=user.get("login", "unknown"),
|
|
360
|
+
state=item.get("state", "open"),
|
|
361
|
+
created_at=item.get("created_at", ""),
|
|
362
|
+
updated_at=item.get("updated_at", ""),
|
|
363
|
+
closed_at=item.get("closed_at"),
|
|
364
|
+
comments_count=item.get("comments", 0),
|
|
365
|
+
labels=labels,
|
|
366
|
+
repo=f"{owner}/{repo}",
|
|
367
|
+
)
|
|
368
|
+
)
|
|
369
|
+
return issues
|
|
370
|
+
|
|
371
|
+
async def _fetch_reviews(
|
|
372
|
+
self, client: httpx.AsyncClient, owner: str, repo: str, pull_number: int
|
|
373
|
+
) -> list[dict]:
|
|
374
|
+
"""Fetch reviews for a specific PR."""
|
|
375
|
+
data = await self._get(
|
|
376
|
+
client, f"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", {"per_page": 100}
|
|
377
|
+
)
|
|
378
|
+
if not isinstance(data, list):
|
|
379
|
+
return []
|
|
380
|
+
|
|
381
|
+
return [
|
|
382
|
+
{
|
|
383
|
+
"reviewer": item.get("user", {}).get("login", "unknown"),
|
|
384
|
+
"state": item.get("state", ""),
|
|
385
|
+
"submitted_at": item.get("submitted_at", ""),
|
|
386
|
+
}
|
|
387
|
+
for item in data
|
|
388
|
+
]
|
|
389
|
+
|
|
390
|
+
async def _get(self, client: httpx.AsyncClient, path: str, params: dict) -> list | dict:
|
|
391
|
+
"""Make a GET request to the proxy with retry on 429."""
|
|
392
|
+
url = f"{self.base_url}{path}"
|
|
393
|
+
try:
|
|
394
|
+
response = await client.get(url, headers=self.headers, params=params)
|
|
395
|
+
|
|
396
|
+
if response.status_code == 429:
|
|
397
|
+
logger.warning("Rate limited by proxy, waiting 60s before retry")
|
|
398
|
+
import asyncio
|
|
399
|
+
await asyncio.sleep(60)
|
|
400
|
+
response = await client.get(url, headers=self.headers, params=params)
|
|
401
|
+
|
|
402
|
+
if response.status_code >= 400:
|
|
403
|
+
logger.error("Proxy returned %d for %s: %s", response.status_code, path, response.text[:200])
|
|
404
|
+
return []
|
|
405
|
+
|
|
406
|
+
return response.json()
|
|
407
|
+
except httpx.TimeoutException:
|
|
408
|
+
logger.error("Timeout fetching %s", path)
|
|
409
|
+
return []
|
|
410
|
+
except Exception as e:
|
|
411
|
+
logger.error("Error fetching %s: %s", path, str(e))
|
|
412
|
+
return []
|
|
413
|
+
`;
|
|
414
|
+
// ─── activity_store.py ───────────────────────────────────────────────────────
|
|
415
|
+
exports.TEMPLATE_ACTIVITY_STORE_PY = `"""Rolling activity window with stats computation and LLM serialisation."""
|
|
416
|
+
|
|
417
|
+
import logging
|
|
418
|
+
from datetime import datetime, timezone, timedelta
|
|
419
|
+
from collections import defaultdict
|
|
420
|
+
|
|
421
|
+
from models import ActivityWindow, Commit, PullRequest, Issue
|
|
422
|
+
from github_fetcher import GitHubFetcher
|
|
423
|
+
|
|
424
|
+
logger = logging.getLogger("agent.activity_store")
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
class ActivityStore:
|
|
428
|
+
"""Maintains a rolling window of GitHub activity."""
|
|
429
|
+
|
|
430
|
+
def __init__(self, fetcher: GitHubFetcher, repos: list[str]):
|
|
431
|
+
self.fetcher = fetcher
|
|
432
|
+
self.repos = repos
|
|
433
|
+
self.window: ActivityWindow | None = None
|
|
434
|
+
|
|
435
|
+
def is_stale(self, max_age_minutes: int = 60) -> bool:
|
|
436
|
+
"""Check if the activity data needs refreshing."""
|
|
437
|
+
if self.window is None or self.window.fetched_at is None:
|
|
438
|
+
return True
|
|
439
|
+
age = datetime.now(timezone.utc) - self.window.fetched_at
|
|
440
|
+
return age > timedelta(minutes=max_age_minutes)
|
|
441
|
+
|
|
442
|
+
async def refresh(self, days: int = 14) -> ActivityWindow:
|
|
443
|
+
"""Fetch fresh activity data from GitHub."""
|
|
444
|
+
logger.info("Refreshing activity data for %d repos (%d day window)", len(self.repos), days)
|
|
445
|
+
self.window = await self.fetcher.fetch_all_activity(self.repos, days=days)
|
|
446
|
+
logger.info(
|
|
447
|
+
"Fetched: %d commits, %d PRs, %d issues",
|
|
448
|
+
len(self.window.commits),
|
|
449
|
+
len(self.window.pull_requests),
|
|
450
|
+
len(self.window.issues),
|
|
451
|
+
)
|
|
452
|
+
return self.window
|
|
453
|
+
|
|
454
|
+
async def ensure_fresh(self, max_age_minutes: int = 60) -> ActivityWindow:
|
|
455
|
+
"""Refresh if stale, return current window."""
|
|
456
|
+
if self.is_stale(max_age_minutes):
|
|
457
|
+
await self.refresh()
|
|
458
|
+
return self.window
|
|
459
|
+
|
|
460
|
+
def compute_stats(self) -> dict:
|
|
461
|
+
"""Compute summary statistics from the current window."""
|
|
462
|
+
if not self.window:
|
|
463
|
+
return {}
|
|
464
|
+
|
|
465
|
+
now = datetime.now(timezone.utc)
|
|
466
|
+
week_ago = now - timedelta(days=7)
|
|
467
|
+
two_weeks_ago = now - timedelta(days=14)
|
|
468
|
+
|
|
469
|
+
# Commits per author
|
|
470
|
+
commits_by_author = defaultdict(int)
|
|
471
|
+
commits_this_week = 0
|
|
472
|
+
commits_last_week = 0
|
|
473
|
+
for c in self.window.commits:
|
|
474
|
+
commits_by_author[c.author_login] += 1
|
|
475
|
+
if c.date >= week_ago.strftime("%Y-%m-%dT%H:%M:%SZ"):
|
|
476
|
+
commits_this_week += 1
|
|
477
|
+
elif c.date >= two_weeks_ago.strftime("%Y-%m-%dT%H:%M:%SZ"):
|
|
478
|
+
commits_last_week += 1
|
|
479
|
+
|
|
480
|
+
# PR stats
|
|
481
|
+
prs_opened = [p for p in self.window.pull_requests if p.created_at >= week_ago.strftime("%Y-%m-%dT%H:%M:%SZ")]
|
|
482
|
+
prs_merged = [p for p in self.window.pull_requests if p.state == "merged" and p.merged_at and p.merged_at >= week_ago.strftime("%Y-%m-%dT%H:%M:%SZ")]
|
|
483
|
+
prs_open = [p for p in self.window.pull_requests if p.state == "open"]
|
|
484
|
+
|
|
485
|
+
# Stale PRs (open > 5 days with no update)
|
|
486
|
+
stale_threshold = (now - timedelta(days=5)).strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
487
|
+
stale_prs = [p for p in prs_open if p.updated_at < stale_threshold]
|
|
488
|
+
|
|
489
|
+
# Review turnaround (time from PR creation to first review)
|
|
490
|
+
review_turnarounds = []
|
|
491
|
+
for pr in self.window.pull_requests:
|
|
492
|
+
if pr.reviews:
|
|
493
|
+
first_review = min(pr.reviews, key=lambda r: r.get("submitted_at", ""))
|
|
494
|
+
submitted = first_review.get("submitted_at", "")
|
|
495
|
+
if submitted and pr.created_at:
|
|
496
|
+
try:
|
|
497
|
+
created = datetime.fromisoformat(pr.created_at.replace("Z", "+00:00"))
|
|
498
|
+
reviewed = datetime.fromisoformat(submitted.replace("Z", "+00:00"))
|
|
499
|
+
hours = (reviewed - created).total_seconds() / 3600
|
|
500
|
+
review_turnarounds.append(hours)
|
|
501
|
+
except (ValueError, TypeError):
|
|
502
|
+
pass
|
|
503
|
+
|
|
504
|
+
avg_review_turnaround = (
|
|
505
|
+
sum(review_turnarounds) / len(review_turnarounds)
|
|
506
|
+
if review_turnarounds
|
|
507
|
+
else None
|
|
508
|
+
)
|
|
509
|
+
|
|
510
|
+
# Issues
|
|
511
|
+
issues_opened = [i for i in self.window.issues if i.created_at >= week_ago.strftime("%Y-%m-%dT%H:%M:%SZ")]
|
|
512
|
+
issues_closed = [i for i in self.window.issues if i.state == "closed" and i.closed_at and i.closed_at >= week_ago.strftime("%Y-%m-%dT%H:%M:%SZ")]
|
|
513
|
+
|
|
514
|
+
return {
|
|
515
|
+
"period": "last 7 days",
|
|
516
|
+
"commits_total": len(self.window.commits),
|
|
517
|
+
"commits_this_week": commits_this_week,
|
|
518
|
+
"commits_last_week": commits_last_week,
|
|
519
|
+
"commits_by_author": dict(commits_by_author),
|
|
520
|
+
"prs_opened_this_week": len(prs_opened),
|
|
521
|
+
"prs_merged_this_week": len(prs_merged),
|
|
522
|
+
"prs_currently_open": len(prs_open),
|
|
523
|
+
"stale_prs": [{"number": p.number, "title": p.title, "repo": p.repo, "author": p.author_login, "days_since_update": (now - datetime.fromisoformat(p.updated_at.replace("Z", "+00:00"))).days} for p in stale_prs],
|
|
524
|
+
"avg_review_turnaround_hours": round(avg_review_turnaround, 1) if avg_review_turnaround else None,
|
|
525
|
+
"issues_opened_this_week": len(issues_opened),
|
|
526
|
+
"issues_closed_this_week": len(issues_closed),
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
def serialise_for_llm(self) -> str:
|
|
530
|
+
"""Serialise the activity window into a structured text block for LLM context."""
|
|
531
|
+
if not self.window:
|
|
532
|
+
return "No activity data available."
|
|
533
|
+
|
|
534
|
+
stats = self.compute_stats()
|
|
535
|
+
lines = []
|
|
536
|
+
|
|
537
|
+
lines.append(f"## GitHub Activity -- {stats['period']}")
|
|
538
|
+
lines.append(f"Repos: {', '.join(self.window.repos)}")
|
|
539
|
+
lines.append("")
|
|
540
|
+
|
|
541
|
+
# Commit summary
|
|
542
|
+
lines.append(f"### Commits: {stats['commits_this_week']} this week, {stats['commits_last_week']} last week")
|
|
543
|
+
if stats["commits_by_author"]:
|
|
544
|
+
for author, count in sorted(stats["commits_by_author"].items(), key=lambda x: -x[1]):
|
|
545
|
+
lines.append(f" - {author}: {count} commits")
|
|
546
|
+
lines.append("")
|
|
547
|
+
|
|
548
|
+
# Recent commit messages
|
|
549
|
+
lines.append("### Recent Commits (last 7 days)")
|
|
550
|
+
week_ago = (datetime.now(timezone.utc) - timedelta(days=7)).strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
551
|
+
recent_commits = [c for c in self.window.commits if c.date >= week_ago][:20]
|
|
552
|
+
for c in recent_commits:
|
|
553
|
+
lines.append(f" - [{c.repo}] {c.author_login}: {c.message}")
|
|
554
|
+
if not recent_commits:
|
|
555
|
+
lines.append(" (none)")
|
|
556
|
+
lines.append("")
|
|
557
|
+
|
|
558
|
+
# PR summary
|
|
559
|
+
lines.append(f"### Pull Requests: {stats['prs_opened_this_week']} opened, {stats['prs_merged_this_week']} merged, {stats['prs_currently_open']} currently open")
|
|
560
|
+
if stats["avg_review_turnaround_hours"] is not None:
|
|
561
|
+
lines.append(f" Average review turnaround: {stats['avg_review_turnaround_hours']}h")
|
|
562
|
+
lines.append("")
|
|
563
|
+
|
|
564
|
+
# Open PRs
|
|
565
|
+
open_prs = [p for p in self.window.pull_requests if p.state == "open"]
|
|
566
|
+
if open_prs:
|
|
567
|
+
lines.append("### Open Pull Requests")
|
|
568
|
+
for p in open_prs:
|
|
569
|
+
review_status = "no reviews"
|
|
570
|
+
if p.reviews:
|
|
571
|
+
states = [r["state"] for r in p.reviews]
|
|
572
|
+
if "APPROVED" in states:
|
|
573
|
+
review_status = "approved"
|
|
574
|
+
elif "CHANGES_REQUESTED" in states:
|
|
575
|
+
review_status = "changes requested"
|
|
576
|
+
else:
|
|
577
|
+
review_status = f"{len(p.reviews)} review(s)"
|
|
578
|
+
lines.append(f" - #{p.number} [{p.repo}] \\"{p.title}\\" by {p.author_login} ({review_status})")
|
|
579
|
+
lines.append("")
|
|
580
|
+
|
|
581
|
+
# Merged PRs
|
|
582
|
+
merged_prs = [p for p in self.window.pull_requests if p.state == "merged"]
|
|
583
|
+
if merged_prs:
|
|
584
|
+
lines.append("### Recently Merged")
|
|
585
|
+
for p in merged_prs[:10]:
|
|
586
|
+
lines.append(f" - #{p.number} [{p.repo}] \\"{p.title}\\" by {p.author_login}")
|
|
587
|
+
lines.append("")
|
|
588
|
+
|
|
589
|
+
# Stale PRs
|
|
590
|
+
if stats["stale_prs"]:
|
|
591
|
+
lines.append("### Stale PRs (open > 5 days, no recent activity)")
|
|
592
|
+
for sp in stats["stale_prs"]:
|
|
593
|
+
lines.append(f" - #{sp['number']} [{sp['repo']}] \\"{sp['title']}\\" by {sp['author']} ({sp['days_since_update']} days idle)")
|
|
594
|
+
lines.append("")
|
|
595
|
+
|
|
596
|
+
# Issues
|
|
597
|
+
lines.append(f"### Issues: {stats['issues_opened_this_week']} opened, {stats['issues_closed_this_week']} closed this week")
|
|
598
|
+
open_issues = [i for i in self.window.issues if i.state == "open"]
|
|
599
|
+
if open_issues:
|
|
600
|
+
for i in open_issues[:10]:
|
|
601
|
+
label_str = f" [{', '.join(i.labels)}]" if i.labels else ""
|
|
602
|
+
lines.append(f" - #{i.number} [{i.repo}] \\"{i.title}\\"{label_str}")
|
|
603
|
+
lines.append("")
|
|
604
|
+
|
|
605
|
+
return "\\n".join(lines)
|
|
606
|
+
`;
|
|
607
|
+
// ─── analyst.py ──────────────────────────────────────────────────────────────
|
|
608
|
+
exports.TEMPLATE_ANALYST_PY = `"""LLM analyst -- generates intelligent weekly summaries."""
|
|
609
|
+
|
|
610
|
+
import logging
|
|
611
|
+
import os
|
|
612
|
+
|
|
613
|
+
import anthropic
|
|
614
|
+
|
|
615
|
+
from activity_store import ActivityStore
|
|
616
|
+
|
|
617
|
+
logger = logging.getLogger("agent.analyst")
|
|
618
|
+
|
|
619
|
+
|
|
620
|
+
class Analyst:
|
|
621
|
+
"""Uses Claude to reason about GitHub activity."""
|
|
622
|
+
|
|
623
|
+
def __init__(self, api_key: str, model: str, team_name: str):
|
|
624
|
+
self.client = anthropic.Anthropic(api_key=api_key)
|
|
625
|
+
self.model = model
|
|
626
|
+
self.team_name = team_name
|
|
627
|
+
self._summary_prompt = _load_prompt("prompts/weekly_summary.md")
|
|
628
|
+
|
|
629
|
+
async def generate_weekly_summary(self, store: ActivityStore) -> str:
|
|
630
|
+
"""Generate an intelligent weekly summary from the activity window."""
|
|
631
|
+
activity_data = store.serialise_for_llm()
|
|
632
|
+
repos = ", ".join(store.repos)
|
|
633
|
+
|
|
634
|
+
system_prompt = self._summary_prompt.replace(
|
|
635
|
+
"{team_name}", self.team_name
|
|
636
|
+
).replace(
|
|
637
|
+
"{repos}", repos
|
|
638
|
+
).replace(
|
|
639
|
+
"{activity_data}", activity_data
|
|
640
|
+
)
|
|
641
|
+
|
|
642
|
+
logger.info("Generating weekly summary (%d chars of activity data)", len(activity_data))
|
|
643
|
+
|
|
644
|
+
response = self.client.messages.create(
|
|
645
|
+
model=self.model,
|
|
646
|
+
max_tokens=1500,
|
|
647
|
+
messages=[
|
|
648
|
+
{"role": "user", "content": "Write the weekly development summary."}
|
|
649
|
+
],
|
|
650
|
+
system=system_prompt,
|
|
651
|
+
)
|
|
652
|
+
|
|
653
|
+
summary = response.content[0].text
|
|
654
|
+
logger.info(
|
|
655
|
+
"Summary generated: %d chars, %d input tokens, %d output tokens",
|
|
656
|
+
len(summary),
|
|
657
|
+
response.usage.input_tokens,
|
|
658
|
+
response.usage.output_tokens,
|
|
659
|
+
)
|
|
660
|
+
return summary
|
|
661
|
+
|
|
662
|
+
|
|
663
|
+
def _load_prompt(path: str) -> str:
|
|
664
|
+
"""Load a prompt template from file."""
|
|
665
|
+
base_dir = os.path.dirname(os.path.abspath(__file__))
|
|
666
|
+
full_path = os.path.join(base_dir, path)
|
|
667
|
+
with open(full_path, "r") as f:
|
|
668
|
+
return f.read()
|
|
669
|
+
`;
|
|
670
|
+
// ─── models.py ───────────────────────────────────────────────────────────────
|
|
671
|
+
exports.TEMPLATE_MODELS_PY = `"""Data models for normalised GitHub activity."""
|
|
672
|
+
|
|
673
|
+
from dataclasses import dataclass, field
|
|
674
|
+
from datetime import datetime
|
|
675
|
+
|
|
676
|
+
|
|
677
|
+
@dataclass
|
|
678
|
+
class Commit:
|
|
679
|
+
sha: str
|
|
680
|
+
author_login: str
|
|
681
|
+
author_name: str
|
|
682
|
+
message: str
|
|
683
|
+
date: str
|
|
684
|
+
repo: str
|
|
685
|
+
|
|
686
|
+
|
|
687
|
+
@dataclass
|
|
688
|
+
class PullRequest:
|
|
689
|
+
number: int
|
|
690
|
+
title: str
|
|
691
|
+
author_login: str
|
|
692
|
+
state: str # open, closed, merged
|
|
693
|
+
created_at: str
|
|
694
|
+
updated_at: str
|
|
695
|
+
merged_at: str | None
|
|
696
|
+
closed_at: str | None
|
|
697
|
+
comments_count: int
|
|
698
|
+
review_comments_count: int
|
|
699
|
+
additions: int
|
|
700
|
+
deletions: int
|
|
701
|
+
repo: str
|
|
702
|
+
reviews: list[dict] = field(default_factory=list)
|
|
703
|
+
|
|
704
|
+
|
|
705
|
+
@dataclass
|
|
706
|
+
class Issue:
|
|
707
|
+
number: int
|
|
708
|
+
title: str
|
|
709
|
+
author_login: str
|
|
710
|
+
state: str # open, closed
|
|
711
|
+
created_at: str
|
|
712
|
+
updated_at: str
|
|
713
|
+
closed_at: str | None
|
|
714
|
+
comments_count: int
|
|
715
|
+
labels: list[str]
|
|
716
|
+
repo: str
|
|
717
|
+
|
|
718
|
+
|
|
719
|
+
@dataclass
|
|
720
|
+
class ActivityWindow:
|
|
721
|
+
repos: list[str]
|
|
722
|
+
commits: list[Commit]
|
|
723
|
+
pull_requests: list[PullRequest]
|
|
724
|
+
issues: list[Issue]
|
|
725
|
+
fetched_at: datetime | None = None
|
|
726
|
+
period_start: datetime | None = None
|
|
727
|
+
period_end: datetime | None = None
|
|
728
|
+
`;
|
|
729
|
+
// ─── requirements.txt ────────────────────────────────────────────────────────
|
|
730
|
+
exports.TEMPLATE_REQUIREMENTS_TXT = `httpx>=0.27.0
|
|
731
|
+
anthropic>=0.40.0
|
|
732
|
+
`;
|
|
733
|
+
// ─── prompts/weekly_summary.md ───────────────────────────────────────────────
// LLM system/user prompt for the weekly summary. Single-brace placeholders
// ({team_name}, {repos}, {activity_data}) are filled by the generated Python
// agent at runtime — presumably via str.format; verify in the analyst module.
// Keep braces single here: this is Markdown/plain text, not a JS template hole.
exports.TEMPLATE_WEEKLY_SUMMARY_PROMPT = `You are a senior engineering manager analysing your team's GitHub activity for the past week. Your job is to write a concise, insightful weekly summary that a CTO or team lead would actually want to read on Monday morning.

Rules:
- INTERPRET, don't just list. "3 PRs merged" is useless. "The auth refactor shipped -- 3 PRs merged across 2 repos" is useful.
- Highlight what SHIPPED (merged PRs, significant commits). This is the headline.
- Flag RISKS: stale PRs, unreviewed changes, unusual patterns (e.g. someone suddenly only doing docs, or a single person doing all reviews).
- Note TRENDS if visible: is velocity up or down vs last week? Is review turnaround getting slower?
- Keep it to 3-4 short paragraphs. No bullet dumps. Write like a human, not a report generator.
- If there's not much activity, say so honestly. Don't inflate.
- Use PR numbers (#42) and author names when citing specifics.
- End with 1-2 things to watch this week (stale PRs, upcoming deadlines implied by activity patterns).

Team: {team_name}
Repos: {repos}

{activity_data}
`;
|
|
751
|
+
// ─── .env.example ────────────────────────────────────────────────────────────
// Developer-facing env template. {{name}} is replaced with the scaffolded
// agent's name (see file header). The four required keys mirror
// "required_secrets" in the manifest; keep the two lists in sync.
exports.TEMPLATE_ENV_EXAMPLE = `# Required secrets -- add these via: orch secrets set NAME VALUE
# Or in the web dashboard: Settings > Secrets

# orchagent API key -- create after publishing:
# orch agent-keys create <your-org>/{{name}}
ORCHAGENT_API_KEY=

# Discord webhook URL -- create at: Server Settings > Integrations > Webhooks > New Webhook
DISCORD_WEBHOOK_URL=

# Anthropic API key -- get from: https://console.anthropic.com/settings/keys
ANTHROPIC_API_KEY=

# Comma-separated GitHub repos to track (owner/repo format)
# The GitHub owner must match the account connected via: orch github connect
GITHUB_REPOS=myorg/my-repo

# Optional settings
TEAM_NAME=My Team
CLAUDE_MODEL=claude-sonnet-4-5-20250929
`;
|
|
773
|
+
// ─── README.md ───────────────────────────────────────────────────────────────
// README written into the scaffolded project; {{name}} is substituted (see
// file header). Backticks inside the Markdown are escaped as \` so they do not
// terminate this JS template literal — preserve that escaping when editing.
exports.TEMPLATE_README = `# {{name}}

A scheduled AI agent that analyses your team's GitHub activity and delivers intelligent weekly summaries to Discord.

**What it does:** Every week (on your schedule), this agent fetches commits, PRs, and issues from your GitHub repos, uses Claude to identify patterns, risks, and trends, then posts an insightful summary to your Discord channel. Not a formatted list -- real analysis.

## Quick Start (5 minutes)

### 1. Connect GitHub

\`\`\`bash
orch github connect
\`\`\`

Grant access to the repos you want to track. The agent uses orchagent's GitHub proxy -- no personal access tokens needed.

### 2. Publish

\`\`\`bash
orch publish
\`\`\`

### 3. Add secrets

Add these in the orchagent web dashboard (**Settings > Secrets**), or via CLI:

\`\`\`bash
orch secrets set ORCHAGENT_API_KEY <key>   # Run: orch agent-keys create <your-org>/{{name}}
orch secrets set DISCORD_WEBHOOK_URL <url> # Discord > Server Settings > Integrations > Webhooks
orch secrets set ANTHROPIC_API_KEY <key>   # https://console.anthropic.com/settings/keys
orch secrets set GITHUB_REPOS owner/repo   # Comma-separated: owner/repo1,owner/repo2
\`\`\`

Optional:
\`\`\`bash
orch secrets set TEAM_NAME "My Team"
orch secrets set CLAUDE_MODEL claude-sonnet-4-5-20250929
\`\`\`

### 4. Test run

\`\`\`bash
orch run <your-org>/{{name}}
\`\`\`

Check your Discord channel -- the summary should appear within ~30 seconds.

### 5. Schedule

\`\`\`bash
orch schedule create <your-org>/{{name}} --cron "0 9 * * 1" --timezone "Europe/London"
\`\`\`

This runs every Monday at 9am. Adjust the cron and timezone:
- \`0 9 * * 1\` -- Monday 9am
- \`0 9 * * 1-5\` -- Every weekday 9am
- \`0 17 * * 5\` -- Friday 5pm

### Done!

View runs and logs:

\`\`\`bash
orch logs # Recent runs
orch schedule list # Your schedules
orch schedule trigger <schedule-id> # Manual trigger
\`\`\`

## How It Works

\`\`\`
main.py (runs once per trigger, then exits)
|-- GitHub fetcher -> orchagent GitHub Activity Proxy
|   \`-- Commits, PRs, issues, reviews (14-day window)
|-- Activity store -> Stats computation
|   \`-- Commits/author, PR turnaround, stale PRs, trends
|-- Analyst -> Claude LLM call
|   \`-- Intelligent narrative summary (not a list)
\`-- Discord webhook POST
    \`-- Summary delivered to your channel
\`\`\`

The agent analyses 14 days of data (not just 7) so it can detect week-over-week trends.

## Customisation

### Prompt tuning
Edit \`prompts/weekly_summary.md\` to change what the summary focuses on. The prompt controls the entire character of the output.

### Multiple repos
Set \`GITHUB_REPOS\` to a comma-separated list: \`org/repo1,org/repo2,org/repo3\`

### Multiple channels
To post to different Discord channels, deploy multiple instances with different \`DISCORD_WEBHOOK_URL\` secrets (use separate workspaces).

## Cost

~$0.01 per run (Claude Sonnet API call + E2B sandbox). At weekly frequency: ~$0.05/month.

## Troubleshooting

| Problem | Fix |
|---------|-----|
| "MISSING_SECRETS" error | Add all required secrets in Settings > Secrets |
| "GitHub App not installed" | Run \`orch github connect\` and grant repo access |
| Empty summary | Check \`GITHUB_REPOS\` format -- must be \`owner/repo\`, not just \`repo\` |
| Discord webhook 400/404 | Regenerate webhook in Discord server settings |
| No runs appearing | Check \`orch logs\` and \`orch schedule list\` |
`;
|
|
883
|
+
// ─── Available templates registry ────────────────────────────────────────────
|
|
884
|
+
exports.AVAILABLE_TEMPLATES = ['github-weekly-summary'];
|