@misterhuydo/sentinel 1.5.63 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cairn/session.json +2 -2
- package/package.json +1 -1
- package/python/scripts/__pycache__/fix_ask_codebase_context.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_ask_codebase_stdin.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_chain_slack.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_fstring.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_knowledge_cache.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_knowledge_cache_staleness.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_merge_confirm.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_permission_messages.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_check_head_detect.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_msg_newlines.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_tracking_boss.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_tracking_db.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_pr_tracking_main.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_project_isolation.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_system_prompt.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/fix_two_bugs.cpython-311.pyc +0 -0
- package/python/scripts/__pycache__/patch_chain_release.cpython-311.pyc +0 -0
- package/python/sentinel/__init__.py +1 -1
- package/python/sentinel/__pycache__/__init__.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/cairn_client.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/cicd_trigger.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/config_loader.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/dependency_manager.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/dev_watcher.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/fix_engine.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/git_manager.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/health_checker.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/issue_watcher.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/log_fetcher.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/log_parser.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/log_syncer.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/main.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/notify.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/repo_router.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/repo_task_engine.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/reporter.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/sentinel_boss.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/sentinel_dev.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/slack_bot.cpython-311.pyc +0 -0
- package/python/sentinel/__pycache__/state_store.cpython-311.pyc +0 -0
- package/python/sentinel/cairn_client.py +30 -11
- package/python/sentinel/fix_engine.py +182 -43
- package/python/sentinel/git_manager.py +335 -0
- package/python/sentinel/main.py +189 -5
- package/python/sentinel/state_store.py +121 -0
- package/python/tests/test_cairn_client.py +72 -0
- package/python/tests/test_fix_engine_json.py +95 -0
- package/python/tests/test_fix_engine_prompt.py +93 -0
- package/python/tests/test_multi_repo_apply.py +254 -0
- package/python/tests/test_multi_repo_publish.py +175 -0
- package/python/tests/test_patch_parser.py +250 -0
- package/python/tests/test_project_lock.py +85 -0
- package/python/tests/test_state_store.py +87 -0
package/python/sentinel/git_manager.py CHANGED

@@ -22,6 +22,86 @@ logger = logging.getLogger(__name__)
 
 GIT_TIMEOUT = 60
 
+# Matches any "repos/<name>/" occurrence at start-of-line or after whitespace.
+# Group 1 captures the repo name (allowed chars: letters, digits, ._-).
+_REPOS_PREFIX_RE = re.compile(
+    r"\brepos/([A-Za-z0-9._-]+)/"
+)
+# Header line listing affected repos (case-insensitive). Whitespace tolerant.
+_AFFECTED_HEADER_RE = re.compile(
+    r"^\s*#\s*affected\s+repos\s*:\s*(.+?)\s*$",
+    re.IGNORECASE | re.MULTILINE,
+)
+
+
+def parse_multi_repo_patch(combined_patch: str) -> dict:
+    """Split a combined diff (paths prefixed `repos/<name>/`) into per-repo sub-patches.
+
+    The `repos/<name>/` prefix is stripped from every path reference inside each
+    sub-patch so it can be `git apply`-ed from the corresponding repo root.
+
+    Returns:
+        {
+            "affected_repos": list[str],  # declared header order, then first-seen
+            "patches": dict[str, str],    # repo_name -> sub-patch
+            "summary": str,               # any free text above the header / first diff
+        }
+    """
+    if not combined_patch:
+        return {"affected_repos": [], "patches": {}, "summary": ""}
+
+    # 1. Optional `# Affected repos: a, b, c` header gives declared order.
+    declared: list[str] = []
+    m_hdr = _AFFECTED_HEADER_RE.search(combined_patch)
+    if m_hdr:
+        declared = [s.strip() for s in m_hdr.group(1).split(",") if s.strip()]
+
+    # 2. Free-text summary = everything above the header (or above the first diff
+    #    if no header). Useful for PR bodies / Slack messages.
+    if m_hdr:
+        cut = m_hdr.start()
+    else:
+        first_diff = combined_patch.find("diff --git")
+        cut = first_diff if first_diff >= 0 else len(combined_patch)
+    summary = combined_patch[:cut].strip()
+
+    # 3. Split body by `diff --git` markers; non-matching preamble is dropped.
+    body = combined_patch[m_hdr.end():] if m_hdr else combined_patch
+    chunks = re.split(r"^(?=diff --git )", body, flags=re.MULTILINE)
+    chunks = [c for c in chunks if c.startswith("diff --git ")]
+
+    # 4. For each chunk, derive the repo name from the first repos/<name>/
+    #    match. Strip every repos/<repo>/ occurrence in the chunk so the diff
+    #    is applyable from the repo root.
+    seen_order: list[str] = []
+    grouped: dict[str, list[str]] = {}
+    for chunk in chunks:
+        m = _REPOS_PREFIX_RE.search(chunk)
+        if not m:
+            continue
+        repo = m.group(1)
+        stripped = re.sub(
+            rf"\brepos/{re.escape(repo)}/", "", chunk,
+        )
+        if repo not in grouped:
+            grouped[repo] = []
+            seen_order.append(repo)
+        grouped[repo].append(stripped)
+
+    patches = {repo: "".join(parts) for repo, parts in grouped.items()}
+
+    # 5. Order: declared header first (only repos that actually produced a diff),
+    #    then any repo that appeared only via diffs.
+    ordered: list[str] = []
+    for r in declared:
+        if r in patches and r not in ordered:
+            ordered.append(r)
+    for r in seen_order:
+        if r not in ordered:
+            ordered.append(r)
+
+    return {"affected_repos": ordered, "patches": patches, "summary": summary}
+
+
 class MissingToolError(Exception):
     """Raised when a required build tool (e.g. mvn, gradle) is not installed."""
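For orientation, a minimal usage sketch of the new parser. Everything in it is invented for illustration (repo names, file paths, and the `sentinel.git_manager` import path, which is only inferred from the package layout above):

```python
from sentinel.git_manager import parse_multi_repo_patch  # assumed import path

combined = """\
Fix the null default in the shared client and its consumer.
# Affected repos: lib-core, web-api
diff --git a/repos/lib-core/src/client.py b/repos/lib-core/src/client.py
--- a/repos/lib-core/src/client.py
+++ b/repos/lib-core/src/client.py
@@ -1 +1 @@
-x = None
+x = 0
diff --git a/repos/web-api/src/handler.py b/repos/web-api/src/handler.py
--- a/repos/web-api/src/handler.py
+++ b/repos/web-api/src/handler.py
@@ -1 +1 @@
-y = None
+y = 0
"""

parsed = parse_multi_repo_patch(combined)
assert parsed["affected_repos"] == ["lib-core", "web-api"]     # header order wins
assert "repos/lib-core/" not in parsed["patches"]["lib-core"]  # prefix stripped
print(parsed["summary"])  # the free text above the header, reusable in PR bodies
```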
@@ -180,6 +260,174 @@ apply_and_commit(
     return "committed", commit_hash
 
 
+def apply_and_commit_multi(
+    event: ErrorEvent,
+    patch_path: Path,
+    all_repos: list[RepoConfig],
+    cfg: SentinelConfig,
+) -> list[dict]:
+    """Apply a multi-repo patch atomically at the dry-run stage, per-repo afterwards.
+
+    The combined patch is split with parse_multi_repo_patch(); each affected
+    repo's sub-patch is then dry-run independently. If ANY dry-run fails, every
+    repo is marked "aborted" with no mutations.
+
+    If all dry-runs pass, the patches are applied + tested + committed
+    sequentially per repo. Per-repo failures in this phase only mark that repo
+    as "failed" — they don't roll back commits already made in earlier repos.
+
+    Returns: list of result dicts, one per affected repo, ordered by apply_order:
+        {
+            "repo_name": str,
+            "repo": RepoConfig,
+            "status": "committed" | "failed" | "aborted",
+            "commit_hash": str,       # filled if committed
+            "branch": str,            # filled if committed
+            "apply_order": int,       # 0 = library, higher = consumer
+            "reason": str,            # filled if failed/aborted
+            "sub_patch_path": Path,   # per-repo .diff written for traceability
+        }
+    """
+    combined = patch_path.read_text(encoding="utf-8", errors="replace")
+    parsed = parse_multi_repo_patch(combined)
+
+    if not parsed["affected_repos"]:
+        logger.warning(
+            "Patch %s declares no recognised repos — nothing to apply",
+            event.fingerprint[:8],
+        )
+        return []
+
+    repo_map = {r.repo_name: r for r in all_repos}
+    affected = []  # list of (repo_name, RepoConfig, sub_patch_text)
+    for i, name in enumerate(parsed["affected_repos"]):
+        if name not in repo_map:
+            logger.warning(
+                "Patch references unknown repo '%s' — not in project config; skipping",
+                name,
+            )
+            continue
+        affected.append((name, repo_map[name], parsed["patches"][name]))
+
+    if not affected:
+        return []
+
+    # Write per-repo sub-patch files for traceability + reuse by `git apply`.
+    sub_patch_files: dict[str, Path] = {}
+    for name, _repo, sub in affected:
+        sub_path = patch_path.parent / f"{event.fingerprint}-{name}.diff"
+        sub_path.write_text(sub, encoding="utf-8")
+        sub_patch_files[name] = sub_path
+
+    # Protected-path veto runs on the combined patch.
+    if _check_protected_paths(patch_path):
+        return [
+            {
+                "repo_name": name, "repo": repo, "status": "aborted",
+                "commit_hash": "", "branch": "", "apply_order": i,
+                "reason": "patch touches protected path",
+                "sub_patch_path": sub_patch_files[name],
+            }
+            for i, (name, repo, _sub) in enumerate(affected)
+        ]
+
+    # ── PHASE 1: dry-run every repo. Atomic abort on any failure. ─────────
+    dry_run_failures: list[str] = []
+    for name, repo, _sub in affected:
+        env = _git_env(repo)
+        sub_path = sub_patch_files[name]
+        # Discard stale changes from any prior aborted attempt
+        _git(["checkout", "."], cwd=repo.local_path, env=env)
+        # Pull latest
+        r = _git(["pull", "--rebase", "origin", repo.branch],
+                 cwd=repo.local_path, env=env)
+        if r.returncode != 0:
+            dry_run_failures.append(f"{name}: git pull failed: {r.stderr.strip()[:200]}")
+            continue
+        # Dry-run
+        r = _git(["apply", "--check", "--ignore-whitespace", str(sub_path)],
+                 cwd=repo.local_path, env=env)
+        if r.returncode != 0:
+            dry_run_failures.append(f"{name}: dry-run failed: {r.stderr.strip()[:200]}")
+
+    if dry_run_failures:
+        reason = "; ".join(dry_run_failures)
+        logger.error(
+            "Multi-repo dry-run aborted for %s: %s",
+            event.fingerprint[:8], reason,
+        )
+        return [
+            {
+                "repo_name": name, "repo": repo, "status": "aborted",
+                "commit_hash": "", "branch": "", "apply_order": i,
+                "reason": reason,
+                "sub_patch_path": sub_patch_files[name],
+            }
+            for i, (name, repo, _sub) in enumerate(affected)
+        ]
+
+    # ── PHASE 2: per-repo apply + test + commit. Independent per repo. ────
+    results: list[dict] = []
+    for i, (name, repo, _sub) in enumerate(affected):
+        env = _git_env(repo)
+        sub_path = sub_patch_files[name]
+        entry = {
+            "repo_name": name, "repo": repo, "status": "failed",
+            "commit_hash": "", "branch": "", "apply_order": i,
+            "reason": "", "sub_patch_path": sub_path,
+        }
+
+        r = _git(["apply", "--ignore-whitespace", str(sub_path)],
+                 cwd=repo.local_path, env=env)
+        if r.returncode != 0:
+            entry["reason"] = f"apply failed: {r.stderr.strip()[:200]}"
+            results.append(entry); continue
+
+        try:
+            tests_ok = _run_tests(repo, repo.local_path)
+        except (MissingToolError, MavenAuthError, MavenArtifactNotFoundError) as _te:
+            entry["reason"] = f"build error: {type(_te).__name__}: {str(_te)[:200]}"
+            _git(["checkout", "."], cwd=repo.local_path, env=env)
+            results.append(entry); continue
+
+        if not tests_ok:
+            entry["reason"] = "tests failed"
+            _git(["checkout", "."], cwd=repo.local_path, env=env)
+            results.append(entry); continue
+
+        summary = event.short_summary()[:60]
+        commit_msg = (
+            f"fix(sentinel): {summary} [auto]\n\n"
+            f"Error fingerprint: {event.fingerprint}\n"
+            f"Source: {event.source}\n"
+        )
+        if len(affected) > 1:
+            commit_msg += f"Multi-repo: part {i + 1}/{len(affected)}\n"
+        r = _git(["commit", "-am", commit_msg], cwd=repo.local_path, env=env)
+        if r.returncode != 0:
+            entry["reason"] = f"commit failed: {r.stderr.strip()[:200]}"
+            _git(["checkout", "."], cwd=repo.local_path, env=env)
+            results.append(entry); continue
+
+        commit_hash = _git(["rev-parse", "HEAD"], cwd=repo.local_path, env=env).stdout.strip()
+        try:
+            _append_changelog(repo, event, commit_hash)
+        except Exception as _ce:
+            logger.warning("CHANGELOG append failed for %s: %s", name, _ce)
+        entry.update({
+            "status": "committed",
+            "commit_hash": commit_hash,
+            "branch": repo.branch,
+        })
+        logger.info(
+            "Multi-repo: committed %s in %s for %s",
+            commit_hash[:8], name, event.fingerprint[:8],
+        )
+        results.append(entry)
+
+    return results
+
+
 def _run_tests(repo: RepoConfig, local_path: str) -> bool:
     """Run the repo's test suite. Return True if passing."""
     if (Path(local_path) / "pom.xml").exists():
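The two-phase contract is easiest to see from the consumer side. The helper below is not part of the package; it only folds the documented result dicts into a one-line summary, under the statuses the docstring defines:

```python
def summarize_multi_results(results: list[dict]) -> str:
    """Fold apply_and_commit_multi()-style result dicts into one status line."""
    if not results:
        return "legacy single-repo patch (no repos/<name>/ prefix)"
    if all(r["status"] == "aborted" for r in results):
        # Phase 1 (dry-run) aborts atomically: no repo was mutated.
        return f"aborted: {results[0]['reason']}"
    parts = []
    for r in sorted(results, key=lambda r: r["apply_order"]):
        if r["status"] == "committed":
            parts.append(f"{r['repo_name']}@{r['commit_hash'][:8]}")
        else:
            # Phase 2 failures are per-repo; earlier commits stay in place.
            parts.append(f"{r['repo_name']}:{r['status']}")
    return ", ".join(parts)

print(summarize_multi_results([
    {"repo_name": "lib-core", "status": "committed",
     "commit_hash": "deadbeefcafe", "apply_order": 0, "reason": ""},
    {"repo_name": "web-api", "status": "failed",
     "commit_hash": "", "apply_order": 1, "reason": "tests failed"},
]))  # -> lib-core@deadbeef, web-api:failed
```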
@@ -402,6 +650,88 @@ publish(
     return branch, pr_url
 
 
+def publish_multi(
+    event: ErrorEvent,
+    results: list[dict],
+    cfg: SentinelConfig,
+) -> list[dict]:
+    """Push committed branches and open per-repo PRs (or push direct on auto_commit).
+
+    Mutates each "committed" entry in `results` in-place, setting `pr_url` and
+    `branch`. Entries with status != "committed" are left untouched.
+
+    For multi-repo fixes (>1 committed entry), each PR's body notes the sibling
+    repos by name so reviewers can locate the matching branch.
+    """
+    committed = [r for r in results if r["status"] == "committed"]
+    if not committed:
+        return results
+
+    repo_names = [r["repo_name"] for r in committed]
+    multi_repo = len(committed) > 1
+
+    for entry in committed:
+        repo: RepoConfig = entry["repo"]
+        commit_hash: str = entry["commit_hash"]
+        env = _git_env(repo)
+        local_path = repo.local_path
+        auto_commit = resolve_auto_commit(repo, cfg)
+
+        if not auto_commit:
+            if remote_fix_exists(repo, event.fingerprint, cfg):
+                logger.info(
+                    "publish_multi: remote fix branch already exists for %s in %s — skipping",
+                    event.fingerprint[:8], repo.repo_name,
+                )
+                entry["pr_url"] = ""
+                continue
+
+        if auto_commit:
+            r = _git(["push", "origin", repo.branch], cwd=local_path, env=env)
+            if r.returncode != 0:
+                logger.error(
+                    "publish_multi: git push failed in %s:\n%s",
+                    repo.repo_name, r.stderr,
+                )
+            entry["branch"] = repo.branch
+            entry["pr_url"] = ""
+            continue
+
+        branch_name = f"{cfg.project_name or 'sentinel'}/fix-{event.fingerprint[:8]}"
+        _git(["checkout", "-B", branch_name], cwd=local_path, env=env)
+        r = _git(["push", "-u", "origin", branch_name], cwd=local_path, env=env)
+        if r.returncode != 0:
+            logger.error(
+                "publish_multi: branch push failed in %s:\n%s",
+                repo.repo_name, r.stderr,
+            )
+            _git(["checkout", repo.branch], cwd=local_path, env=env)
+            entry["branch"] = branch_name
+            entry["pr_url"] = ""
+            continue
+        _git(["checkout", repo.branch], cwd=local_path, env=env)
+
+        # Build the multi-repo cross-reference paragraph (sibling repo names only;
+        # actual URLs are added in a follow-up two-pass update if/when implemented).
+        extra_body = ""
+        if multi_repo:
+            siblings = [n for n in repo_names if n != repo.repo_name]
+            extra_body = (
+                "### Multi-repo fix\n"
+                "This is part of a coordinated fix across multiple repositories.\n"
+                f"Sibling repos with the same fingerprint (`{event.fingerprint[:8]}`) "
+                f"on branch `{branch_name}`:\n"
+                + "\n".join(f"- `{n}`" for n in siblings)
+                + "\n\n_Merge order: lower apply_order first._"
+            )
+
+        pr_url = _open_github_pr(event, repo, cfg, branch_name, commit_hash, extra_body=extra_body)
+        entry["branch"] = branch_name
+        entry["pr_url"] = pr_url
+
+    return results
+
+
 def _alert_github_token_error(cfg: "SentinelConfig", owner_repo: str, status: int, hint: str = "") -> None:
     """Fire a Slack alert when GitHub API rejects the token during PR creation."""
     msg = (
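Because every committed repo reuses the same `<project>/fix-<fp8>` branch name, the fingerprint plus the sibling repo names is enough for a reviewer to locate the matching branches. A standalone rendering of that cross-reference block, with all values invented for illustration:

```python
fingerprint = "a1b2c3d4e5f6a7b8"
branch_name = f"myproject/fix-{fingerprint[:8]}"  # same name in every repo
repo_names = ["lib-core", "web-api", "worker"]    # all committed repos
current = "web-api"                               # repo whose PR is being opened

siblings = [n for n in repo_names if n != current]
extra_body = (
    "### Multi-repo fix\n"
    "This is part of a coordinated fix across multiple repositories.\n"
    f"Sibling repos with the same fingerprint (`{fingerprint[:8]}`) "
    f"on branch `{branch_name}`:\n"
    + "\n".join(f"- `{n}`" for n in siblings)
    + "\n\n_Merge order: lower apply_order first._"
)
print(extra_body)
```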
@@ -423,6 +753,7 @@ _open_github_pr(
     cfg: SentinelConfig,
     branch: str,
     commit_hash: str,
+    extra_body: str = "",
 ) -> str:
     if not cfg.github_token:
         logger.warning("GITHUB_TOKEN not set — cannot open PR")
@@ -445,6 +776,10 @@ _open_github_pr(
         f"### Commits in this fix\n"
         f"| SHA (short) | Description |\n|---|---|\n"
         f"| `{commit_hash[:8]}` | fix(sentinel): {event.short_summary()[:60]} |\n\n"
+    )
+    if extra_body:
+        body += extra_body + "\n\n"
+    body += (
         f"---\n"
         f"_To apply: merge this PR. To reject: close it. "
         f"Sentinel will not retry this fingerprint for 24 h._"

package/python/sentinel/main.py CHANGED

@@ -20,10 +20,13 @@ import sys
 from datetime import datetime, timezone
 from pathlib import Path
 
-from .cairn_client import ensure_installed as cairn_installed, index_repo
+from .cairn_client import ensure_installed as cairn_installed, index_repo, init_project_root
 from .config_loader import ConfigLoader, SentinelConfig, resolve_auto_commit, resolve_auto_release
 from .fix_engine import generate_fix
-from .git_manager import
+from .git_manager import (
+    apply_and_commit, publish, apply_and_commit_multi, publish_multi,
+    _git_env, MissingToolError, MavenAuthError, poll_open_prs,
+)
 from .cicd_trigger import trigger as cicd_trigger
 from .log_fetcher import fetch_all
 from .log_parser import parse_all, scan_all_for_markers, ErrorEvent
@@ -45,6 +48,18 @@ logger = logging.getLogger("sentinel")
 
 _report_requested = False
 
+# Per-project asyncio.Lock — serialises fix generation + apply within one
+# project so two concurrent claude sessions never race on the working tree.
+# Keyed by cfg.project_name; populated lazily on first acquire.
+_project_locks: dict[str, "asyncio.Lock"] = {}
+
+
+def _project_lock(project_name: str) -> "asyncio.Lock":
+    """Return (and lazily create) the asyncio.Lock for a project."""
+    if project_name not in _project_locks:
+        _project_locks[project_name] = asyncio.Lock()
+    return _project_locks[project_name]
+
 
 def _on_sigusr1(*_):
     global _report_requested
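A self-contained sketch of what this per-project serialisation buys (the lock map mirrors `_project_locks` above; project names are invented). Lazy creation without a guard is safe here because the dict is only touched from the single event-loop thread:

```python
import asyncio

_project_locks: dict[str, asyncio.Lock] = {}

def _project_lock(project_name: str) -> asyncio.Lock:
    if project_name not in _project_locks:
        _project_locks[project_name] = asyncio.Lock()
    return _project_locks[project_name]

async def fix(project: str, n: int) -> None:
    async with _project_lock(project):
        print(f"{project}: fix {n} start")
        await asyncio.sleep(0.1)          # stand-in for generate+apply+publish
        print(f"{project}: fix {n} end")  # never interleaves within one project

async def main() -> None:
    # The two "alpha" fixes run back-to-back; "beta" overlaps them freely.
    await asyncio.gather(fix("alpha", 1), fix("alpha", 2), fix("beta", 1))

asyncio.run(main())
```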
@@ -271,10 +286,37 @@ async def _handle_error(event: ErrorEvent, cfg_loader: ConfigLoader, store: StateStore
         logger.debug("Fix already attempted recently for %s", event.fingerprint)
         return
 
-    #
+    # Per-project lock — serialise fix generation+apply+publish so two
+    # concurrent claude sessions never race on the working tree of any repo.
+    async with _project_lock(sentinel.project_name or "_default"):
+        await _generate_apply_publish(
+            event, repo, sentinel, store, cfg_loader,
+            _progress, auto_commit, auto_release,
+        )
+
+
+async def _generate_apply_publish(
+    event: ErrorEvent,
+    repo,
+    sentinel: SentinelConfig,
+    store: StateStore,
+    cfg_loader: ConfigLoader,
+    _progress,
+    auto_commit: bool,
+    auto_release: bool,
+):
+    """Generate the fix and apply it via the multi-repo flow (with single-repo fallback).
+
+    A patch from claude with `repos/<name>/...` paths goes through the
+    multi-repo apply (atomic dry-run, per-repo commit). If the patch has no
+    such prefix (legacy single-repo), we fall back to apply_and_commit/publish.
+    """
     _progress(":brain: Analyzing with Claude Code...")
     patches_dir = Path(sentinel.workspace_dir).resolve() / "patches"
-
+    all_repos = list(cfg_loader.repos.values())
+    status, patch_path, marker = generate_fix(
+        event, repo, sentinel, patches_dir, store, all_repos=all_repos,
+    )
 
     if status != "patch" or patch_path is None:
         outcome = "skipped" if status in ("skip", "needs_human") else "failed"
@@ -295,8 +337,137 @@ async def _handle_error(event: ErrorEvent, cfg_loader: ConfigLoader, store: StateStore
     })
     return
 
-    # ──
+    # ── Try multi-repo apply first; fall back to single-repo on legacy patches ──
     _progress(":gear: Applying patch and running tests...")
+    multi_results = apply_and_commit_multi(event, patch_path, all_repos, sentinel)
+
+    if not multi_results:
+        # Legacy single-repo patch (no `repos/<name>/` prefix) — use existing flow.
+        logger.info(
+            "main: patch for %s has no repos/<name>/ prefix — falling back to single-repo apply",
+            event.fingerprint[:8],
+        )
+        await _apply_publish_single(
+            event, repo, sentinel, store, cfg_loader,
+            _progress, auto_commit, auto_release, patch_path, marker,
+        )
+        return
+
+    # ── Multi-repo flow ────────────────────────────────────────────────────
+    multi_results = publish_multi(event, multi_results, sentinel)
+
+    committed = [r for r in multi_results if r["status"] == "committed"]
+    failed = [r for r in multi_results if r["status"] != "committed"]
+
+    # Record per-repo state in fix_repos for Boss/reporter visibility.
+    for r in multi_results:
+        pr_state = "open" if r.get("pr_url") else ("merged" if r["status"] == "committed" and auto_commit else "")
+        try:
+            store.record_fix_repo(
+                event.fingerprint, r["repo_name"],
+                branch=r.get("branch", ""), commit_hash=r.get("commit_hash", ""),
+                pr_url=r.get("pr_url", ""),
+                pr_state=pr_state if r["status"] == "committed" else r["status"],
+                apply_order=r.get("apply_order", 0),
+            )
+        except Exception as _se:
+            logger.warning("record_fix_repo failed for %s: %s", r["repo_name"], _se)
+
+    # Summary fixes-table row uses the primary repo (where the error originated)
+    # so existing single-row queries (get_open_prs, recent fixes) still work.
+    primary = next((r for r in multi_results if r["repo_name"] == repo.repo_name), None)
+    if primary and primary["status"] == "committed":
+        store.record_fix(
+            event.fingerprint,
+            "applied" if auto_commit else "pending",
+            patch_path=str(patch_path),
+            commit_hash=primary["commit_hash"],
+            branch=primary.get("branch", ""),
+            pr_url=primary.get("pr_url", ""),
+            repo_name=repo.repo_name,
+            sentinel_marker=marker,
+        )
+    else:
+        store.record_fix(event.fingerprint, "failed", repo_name=repo.repo_name)
+
+    # ── Surface the result ─────────────────────────────────────────────────
+    if not committed:
+        reasons = "; ".join(f"{r['repo_name']}: {r['reason']}" for r in failed)
+        _progress(f":x: Multi-repo fix aborted — {reasons[:300]}")
+        send_failure_notification(sentinel, {
+            "source": event.source,
+            "message": event.message,
+            "repo_name": repo.repo_name,
+            "reason": f"Multi-repo apply failed: {reasons[:400]}",
+            "body": event.full_text()[:500],
+        })
+        return
+
+    if failed:
+        _progress(
+            f":warning: Multi-repo fix PARTIAL — committed in "
+            f"{', '.join(r['repo_name'] for r in committed)}; "
+            f"failed in {', '.join(r['repo_name'] for r in failed)}"
+        )
+
+    pr_lines = []
+    for r in committed:
+        if r.get("pr_url"):
+            pr_lines.append(f" • `{r['repo_name']}` → {r['pr_url']}")
+        else:
+            pr_lines.append(f" • `{r['repo_name']}` → pushed to `{r.get('branch','main')}` "
+                            f"(`{r.get('commit_hash','')[:8]}`)")
+    if pr_lines:
+        _progress(":white_check_mark: Fix complete:\n" + "\n".join(pr_lines))
+
+    # Pending release tracking for auto_commit-without-auto_release per repo.
+    if auto_commit and not auto_release:
+        for r in committed:
+            if r.get("commit_hash") and r.get("branch"):
+                store.add_pending_release(
+                    r["repo_name"], r["commit_hash"], r["branch"],
+                    description=event.message[:120],
+                    fingerprint=event.fingerprint,
+                )
+
+    # Single fix notification using the primary repo's row (back-compat with reporter).
+    if primary and primary["status"] == "committed":
+        send_fix_notification(sentinel, {
+            "source": event.source,
+            "severity": event.severity,
+            "fingerprint": event.fingerprint,
+            "first_seen": str(event.timestamp),
+            "message": event.message,
+            "stack_trace": getattr(event, "stack_trace", ""),
+            "repo_name": repo.repo_name,
+            "commit_hash": primary.get("commit_hash", ""),
+            "branch": primary.get("branch", ""),
+            "pr_url": primary.get("pr_url", ""),
+            "auto_commit": auto_commit,
+            "files_changed": [],
+        })
+
+    # CI/CD trigger — only fire for the primary repo for now (multi-repo cascade is v2).
+    if auto_commit and auto_release and primary and primary["status"] == "committed":
+        ok = cicd_trigger(repo, store, event.fingerprint)
+        if ok and repo.cicd_type.lower() in ("jenkins_release", "jenkins-release"):
+            _run_cascade(repo, sentinel, cfg_loader)
+
+
+async def _apply_publish_single(
+    event: ErrorEvent,
+    repo,
+    sentinel: SentinelConfig,
+    store: StateStore,
+    cfg_loader: ConfigLoader,
+    _progress,
+    auto_commit: bool,
+    auto_release: bool,
+    patch_path: Path,
+    marker: str,
+):
+    """Legacy single-repo apply+publish flow — used when the patch has no
+    `repos/<name>/` prefix (older claude output, manual issues, etc.)."""
     try:
         commit_status, commit_hash = apply_and_commit(event, patch_path, repo, sentinel)
     except MavenAuthError as e:
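The `pr_state` ternary above packs four cases into one expression. Unrolled, the mapping recorded in `fix_repos` looks like the helper below (written here only to make the cases explicit; it is not part of the package):

```python
def derive_pr_state(status: str, pr_url: str, auto_commit: bool) -> str:
    if status != "committed":
        return status                           # "failed" / "aborted" recorded verbatim
    if pr_url:
        return "open"                           # PR opened, awaiting review
    return "merged" if auto_commit else ""      # direct push counts as merged

assert derive_pr_state("aborted", "", True) == "aborted"
assert derive_pr_state("committed", "https://github.com/acme/web-api/pull/42", False) == "open"
assert derive_pr_state("committed", "", True) == "merged"
```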
@@ -823,6 +994,19 @@ async def _startup_checks(cfg_loader: ConfigLoader) -> dict:
     if not cairn_installed():
         results["warnings"].append("Cairn not found — run: npm install -g @misterhuydo/cairn-mcp")
 
+    # Initialise the project-root .cairn/ so child sub-repos federate up to it.
+    # Once the project root + every sub-repo has its own .cairn/, Claude can
+    # see across all repos via Cairn's parent walk-up — required for multi-repo
+    # fix generation to work.
+    project_root = str(Path(cfg_loader.sentinel.workspace_dir).parent)
+    if init_project_root(project_root):
+        logger.info("Cairn federation root initialised at %s", project_root)
+    else:
+        results["warnings"].append(
+            f"Cairn project-root init failed at {project_root} — multi-repo "
+            "visibility may be degraded. See logs."
+        )
+
     for name, repo in cfg_loader.repos.items():
         local = Path(repo.local_path)
         if not local.exists():