@misterhuydo/sentinel 1.5.63 → 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cairn/.hint-lock +1 -1
- package/.cairn/session.json +2 -2
- package/package.json +1 -1
- package/python/scripts/__pycache__/fix_ask_codebase_context.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_ask_codebase_stdin.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_chain_slack.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_fstring.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_knowledge_cache.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_knowledge_cache_staleness.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_merge_confirm.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_permission_messages.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_check_head_detect.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_msg_newlines.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_tracking_boss.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_tracking_db.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_tracking_main.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_project_isolation.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_system_prompt.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_two_bugs.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/patch_chain_release.cpython-311.pyc +0 -0
- package/python/sentinel/__init__.py +1 -1
- package/python/sentinel/__pycache__/__init__.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/cairn_client.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/cicd_trigger.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/config_loader.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/dependency_manager.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/dev_watcher.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/fix_engine.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/git_manager.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/health_checker.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/issue_watcher.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/log_fetcher.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/log_parser.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/log_syncer.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/main.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/notify.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/repo_router.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/repo_task_engine.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/reporter.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/sentinel_boss.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/sentinel_dev.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/slack_bot.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/state_store.cpython-311.pyc +0 -0
- package/python/sentinel/cairn_client.py +30 -11
- package/python/sentinel/fix_engine.py +200 -46
- package/python/sentinel/git_manager.py +335 -0
- package/python/sentinel/main.py +320 -6
- package/python/sentinel/state_store.py +121 -0
- package/python/tests/test_cairn_client.py +72 -0
- package/python/tests/test_fix_engine_cmd.py +53 -0
- package/python/tests/test_fix_engine_json.py +95 -0
- package/python/tests/test_fix_engine_prompt.py +93 -0
- package/python/tests/test_multi_repo_apply.py +254 -0
- package/python/tests/test_multi_repo_publish.py +175 -0
- package/python/tests/test_patch_parser.py +250 -0
- package/python/tests/test_project_lock.py +85 -0
- package/python/tests/test_state_store.py +87 -0
|
GIT_TIMEOUT = 60


# Any "repos/<name>/" occurrence; group 1 captures the repo name
# (allowed chars: letters, digits, ".", "_", "-").
_REPOS_PREFIX_RE = re.compile(
    r"\brepos/([A-Za-z0-9._-]+)/"
)
# Optional `# Affected repos: a, b, c` header (case-insensitive,
# whitespace-tolerant, matched anywhere in the text via MULTILINE).
_AFFECTED_HEADER_RE = re.compile(
    r"^\s*#\s*affected\s+repos\s*:\s*(.+?)\s*$",
    re.IGNORECASE | re.MULTILINE,
)


def parse_multi_repo_patch(combined_patch: str) -> dict:
    """Split a combined diff (paths prefixed `repos/<name>/`) into per-repo sub-patches.

    Every `repos/<repo>/` occurrence is stripped from each repo's sub-patch so
    the result can be `git apply`-ed from that repo's root.

    Args:
        combined_patch: Full text of the combined diff, optionally preceded by
            free text and/or a `# Affected repos: a, b` header line.

    Returns:
        {
            "affected_repos": list[str],  # declared header order, then first-seen
            "patches": dict[str, str],    # repo_name -> sub-patch text
            "summary": str,               # free text above the header / first diff
        }
    """
    if not combined_patch:
        return {"affected_repos": [], "patches": {}, "summary": ""}

    # 1. The optional header fixes the preferred repo ordering.
    header_match = _AFFECTED_HEADER_RE.search(combined_patch)
    declared_names: list[str] = []
    if header_match is not None:
        declared_names = [
            part.strip()
            for part in header_match.group(1).split(",")
            if part.strip()
        ]

    # 2. Free-text summary = everything above the header, or above the first
    #    diff when no header exists. Useful for PR bodies / Slack messages.
    if header_match is not None:
        summary_end = header_match.start()
    else:
        first_diff_at = combined_patch.find("diff --git")
        summary_end = first_diff_at if first_diff_at >= 0 else len(combined_patch)
    summary = combined_patch[:summary_end].strip()

    # 3. Split the body on `diff --git` markers; any preamble that is not a
    #    diff section is dropped.
    body = combined_patch[header_match.end():] if header_match else combined_patch
    sections = [
        s
        for s in re.split(r"^(?=diff --git )", body, flags=re.MULTILINE)
        if s.startswith("diff --git ")
    ]

    # 4. Attribute each section to the repo named by its first repos/<name>/
    #    match, and strip that prefix everywhere so the diff applies from the
    #    repo root. Sections with no recognisable prefix are ignored.
    discovery_order: list[str] = []
    by_repo: dict[str, list[str]] = {}
    for section in sections:
        hit = _REPOS_PREFIX_RE.search(section)
        if hit is None:
            continue
        repo_name = hit.group(1)
        cleaned = re.sub(
            rf"\brepos/{re.escape(repo_name)}/", "", section,
        )
        if repo_name not in by_repo:
            by_repo[repo_name] = []
            discovery_order.append(repo_name)
        by_repo[repo_name].append(cleaned)

    patches = {name: "".join(pieces) for name, pieces in by_repo.items()}

    # 5. Final order: declared header first (keeping only repos that actually
    #    produced a diff), then any repo seen only via diff sections.
    final_order: list[str] = []
    for name in declared_names:
        if name in patches and name not in final_order:
            final_order.append(name)
    for name in discovery_order:
        if name not in final_order:
            final_order.append(name)

    return {"affected_repos": final_order, "patches": patches, "summary": summary}
|
|
104
|
+
|
|
25
105
|
|
|
26
106
|
class MissingToolError(Exception):
    """Raised when a required build tool (e.g. mvn, gradle) is not installed."""
|
|
@@ -180,6 +260,174 @@ def apply_and_commit(
|
|
|
180
260
|
return "committed", commit_hash
|
|
181
261
|
|
|
182
262
|
|
|
263
|
+
def apply_and_commit_multi(
    event: ErrorEvent,
    patch_path: Path,
    all_repos: list[RepoConfig],
    cfg: SentinelConfig,
) -> list[dict]:
    """Apply a multi-repo patch atomically at the dry-run stage, per-repo afterwards.

    The combined patch is split with parse_multi_repo_patch(); each affected
    repo's sub-patch is then dry-run independently. If ANY dry-run fails, every
    repo is marked "aborted" with no mutations.

    If all dry-runs pass, the patches are applied + tested + committed
    sequentially per repo. Per-repo failures in this phase only mark that repo
    as "failed" — they don't roll back commits already made in earlier repos.

    Returns: list of result dicts, one per affected repo, ordered by apply_order:
        {
            "repo_name": str,
            "repo": RepoConfig,
            "status": "committed" | "failed" | "aborted",
            "commit_hash": str,       # filled if committed
            "branch": str,            # filled if committed
            "apply_order": int,       # 0 = library, higher = consumer
            "reason": str,            # filled if failed/aborted
            "sub_patch_path": Path,   # per-repo .diff written for traceability
        }
    """
    combined = patch_path.read_text(encoding="utf-8", errors="replace")
    parsed = parse_multi_repo_patch(combined)

    if not parsed["affected_repos"]:
        logger.warning(
            "Patch %s declares no recognised repos — nothing to apply",
            event.fingerprint[:8],
        )
        return []

    # Resolve each declared repo against the project config; unknown names are
    # skipped with a warning rather than failing the whole batch.
    configs_by_name = {r.repo_name: r for r in all_repos}
    targets = []  # list of (repo_name, RepoConfig, sub_patch_text)
    for name in parsed["affected_repos"]:
        repo_cfg = configs_by_name.get(name)
        if repo_cfg is None:
            logger.warning(
                "Patch references unknown repo '%s' — not in project config; skipping",
                name,
            )
            continue
        targets.append((name, repo_cfg, parsed["patches"][name]))

    if not targets:
        return []

    # Write per-repo sub-patch files for traceability + reuse by `git apply`.
    patch_files: dict[str, Path] = {}
    for name, _repo_cfg, sub_text in targets:
        out_path = patch_path.parent / f"{event.fingerprint}-{name}.diff"
        out_path.write_text(sub_text, encoding="utf-8")
        patch_files[name] = out_path

    def _all_aborted(reason: str) -> list[dict]:
        # One "aborted" record per target, preserving apply order.
        return [
            {
                "repo_name": name, "repo": repo_cfg, "status": "aborted",
                "commit_hash": "", "branch": "", "apply_order": order,
                "reason": reason,
                "sub_patch_path": patch_files[name],
            }
            for order, (name, repo_cfg, _s) in enumerate(targets)
        ]

    # Protected-path veto runs on the combined patch.
    if _check_protected_paths(patch_path):
        return _all_aborted("patch touches protected path")

    # ── PHASE 1: dry-run every repo. Atomic abort on any failure. ─────────
    failures: list[str] = []
    for name, repo_cfg, _s in targets:
        env = _git_env(repo_cfg)
        diff_file = patch_files[name]
        # Discard stale changes from any prior aborted attempt.
        _git(["checkout", "."], cwd=repo_cfg.local_path, env=env)
        # Sync with upstream before checking applicability.
        pulled = _git(["pull", "--rebase", "origin", repo_cfg.branch],
                      cwd=repo_cfg.local_path, env=env)
        if pulled.returncode != 0:
            failures.append(f"{name}: git pull failed: {pulled.stderr.strip()[:200]}")
            continue
        # `--check` verifies the patch without touching the working tree.
        checked = _git(["apply", "--check", "--ignore-whitespace", str(diff_file)],
                       cwd=repo_cfg.local_path, env=env)
        if checked.returncode != 0:
            failures.append(f"{name}: dry-run failed: {checked.stderr.strip()[:200]}")

    if failures:
        reason = "; ".join(failures)
        logger.error(
            "Multi-repo dry-run aborted for %s: %s",
            event.fingerprint[:8], reason,
        )
        return _all_aborted(reason)

    # ── PHASE 2: per-repo apply + test + commit. Independent per repo. ────
    results: list[dict] = []
    for order, (name, repo_cfg, _s) in enumerate(targets):
        env = _git_env(repo_cfg)
        diff_file = patch_files[name]
        # Pessimistic default; flipped to "committed" only on full success.
        record = {
            "repo_name": name, "repo": repo_cfg, "status": "failed",
            "commit_hash": "", "branch": "", "apply_order": order,
            "reason": "", "sub_patch_path": diff_file,
        }

        applied = _git(["apply", "--ignore-whitespace", str(diff_file)],
                       cwd=repo_cfg.local_path, env=env)
        if applied.returncode != 0:
            record["reason"] = f"apply failed: {applied.stderr.strip()[:200]}"
            results.append(record)
            continue

        try:
            tests_ok = _run_tests(repo_cfg, repo_cfg.local_path)
        except (MissingToolError, MavenAuthError, MavenArtifactNotFoundError) as exc:
            record["reason"] = f"build error: {type(exc).__name__}: {str(exc)[:200]}"
            _git(["checkout", "."], cwd=repo_cfg.local_path, env=env)
            results.append(record)
            continue

        if not tests_ok:
            record["reason"] = "tests failed"
            _git(["checkout", "."], cwd=repo_cfg.local_path, env=env)
            results.append(record)
            continue

        headline = event.short_summary()[:60]
        commit_msg = (
            f"fix(sentinel): {headline} [auto]\n\n"
            f"Error fingerprint: {event.fingerprint}\n"
            f"Source: {event.source}\n"
        )
        if len(targets) > 1:
            commit_msg += f"Multi-repo: part {order + 1}/{len(targets)}\n"
        committed = _git(["commit", "-am", commit_msg],
                         cwd=repo_cfg.local_path, env=env)
        if committed.returncode != 0:
            record["reason"] = f"commit failed: {committed.stderr.strip()[:200]}"
            _git(["checkout", "."], cwd=repo_cfg.local_path, env=env)
            results.append(record)
            continue

        commit_hash = _git(["rev-parse", "HEAD"],
                           cwd=repo_cfg.local_path, env=env).stdout.strip()
        # CHANGELOG write is best-effort; a failure must not undo the commit.
        try:
            _append_changelog(repo_cfg, event, commit_hash)
        except Exception as exc:
            logger.warning("CHANGELOG append failed for %s: %s", name, exc)
        record.update({
            "status": "committed",
            "commit_hash": commit_hash,
            "branch": repo_cfg.branch,
        })
        logger.info(
            "Multi-repo: committed %s in %s for %s",
            commit_hash[:8], name, event.fingerprint[:8],
        )
        results.append(record)

    return results
|
|
429
|
+
|
|
430
|
+
|
|
183
431
|
def _run_tests(repo: RepoConfig, local_path: str) -> bool:
|
|
184
432
|
"""Run the repo's test suite. Return True if passing."""
|
|
185
433
|
if (Path(local_path) / "pom.xml").exists():
|
|
@@ -402,6 +650,88 @@ def publish(
|
|
|
402
650
|
return branch, pr_url
|
|
403
651
|
|
|
404
652
|
|
|
653
|
+
def publish_multi(
    event: ErrorEvent,
    results: list[dict],
    cfg: SentinelConfig,
) -> list[dict]:
    """Push committed branches and open per-repo PRs (or push direct on auto_commit).

    Mutates each "committed" entry in `results` in-place, setting `pr_url` and
    `branch`. Entries with status != "committed" are left untouched.

    For multi-repo fixes (>1 committed entry), each PR's body notes the sibling
    repos by name so reviewers can locate the matching branch.
    """
    committed = [r for r in results if r["status"] == "committed"]
    if not committed:
        return results

    committed_names = [r["repo_name"] for r in committed]
    is_multi = len(committed) > 1

    for record in committed:
        repo_cfg: RepoConfig = record["repo"]
        sha: str = record["commit_hash"]
        env = _git_env(repo_cfg)
        workdir = repo_cfg.local_path
        direct_push = resolve_auto_commit(repo_cfg, cfg)

        # PR flow only: skip if a remote fix branch for this fingerprint exists.
        if not direct_push and remote_fix_exists(repo_cfg, event.fingerprint, cfg):
            logger.info(
                "publish_multi: remote fix branch already exists for %s in %s — skipping",
                event.fingerprint[:8], repo_cfg.repo_name,
            )
            record["pr_url"] = ""
            continue

        # auto_commit: push straight to the repo's main branch, no PR.
        if direct_push:
            pushed = _git(["push", "origin", repo_cfg.branch], cwd=workdir, env=env)
            if pushed.returncode != 0:
                logger.error(
                    "publish_multi: git push failed in %s:\n%s",
                    repo_cfg.repo_name, pushed.stderr,
                )
            record["branch"] = repo_cfg.branch
            record["pr_url"] = ""
            continue

        # PR flow: create/update a fix branch, push it, restore the base branch.
        fix_branch = f"{cfg.project_name or 'sentinel'}/fix-{event.fingerprint[:8]}"
        _git(["checkout", "-B", fix_branch], cwd=workdir, env=env)
        pushed = _git(["push", "-u", "origin", fix_branch], cwd=workdir, env=env)
        if pushed.returncode != 0:
            logger.error(
                "publish_multi: branch push failed in %s:\n%s",
                repo_cfg.repo_name, pushed.stderr,
            )
            _git(["checkout", repo_cfg.branch], cwd=workdir, env=env)
            record["branch"] = fix_branch
            record["pr_url"] = ""
            continue
        _git(["checkout", repo_cfg.branch], cwd=workdir, env=env)

        # Build the multi-repo cross-reference paragraph (sibling repo names only;
        # actual URLs are added in a follow-up two-pass update if/when implemented).
        extra_body = ""
        if is_multi:
            siblings = [n for n in committed_names if n != repo_cfg.repo_name]
            extra_body = (
                "### Multi-repo fix\n"
                "This is part of a coordinated fix across multiple repositories.\n"
                f"Sibling repos with the same fingerprint (`{event.fingerprint[:8]}`) "
                f"on branch `{fix_branch}`:\n"
                + "\n".join(f"- `{n}`" for n in siblings)
                + "\n\n_Merge order: lower apply_order first._"
            )

        record["branch"] = fix_branch
        record["pr_url"] = _open_github_pr(
            event, repo_cfg, cfg, fix_branch, sha, extra_body=extra_body
        )

    return results
|
|
733
|
+
|
|
734
|
+
|
|
405
735
|
def _alert_github_token_error(cfg: "SentinelConfig", owner_repo: str, status: int, hint: str = "") -> None:
|
|
406
736
|
"""Fire a Slack alert when GitHub API rejects the token during PR creation."""
|
|
407
737
|
msg = (
|
|
@@ -423,6 +753,7 @@ def _open_github_pr(
|
|
|
423
753
|
cfg: SentinelConfig,
|
|
424
754
|
branch: str,
|
|
425
755
|
commit_hash: str,
|
|
756
|
+
extra_body: str = "",
|
|
426
757
|
) -> str:
|
|
427
758
|
if not cfg.github_token:
|
|
428
759
|
logger.warning("GITHUB_TOKEN not set — cannot open PR")
|
|
@@ -445,6 +776,10 @@ def _open_github_pr(
|
|
|
445
776
|
f"### Commits in this fix\n"
|
|
446
777
|
f"| SHA (short) | Description |\n|---|---|\n"
|
|
447
778
|
f"| `{commit_hash[:8]}` | fix(sentinel): {event.short_summary()[:60]} |\n\n"
|
|
779
|
+
)
|
|
780
|
+
if extra_body:
|
|
781
|
+
body += extra_body + "\n\n"
|
|
782
|
+
body += (
|
|
448
783
|
f"---\n"
|
|
449
784
|
f"_To apply: merge this PR. To reject: close it. "
|
|
450
785
|
f"Sentinel will not retry this fingerprint for 24 h._"
|