@misterhuydo/sentinel 1.4.83 → 1.4.84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.cairn/.hint-lock CHANGED
@@ -1 +1 @@
1
- 2026-03-27T02:03:40.951Z
1
+ 2026-03-27T03:42:45.385Z
@@ -1,6 +1,6 @@
1
1
  {
2
- "message": "Auto-checkpoint at 2026-03-27T02:18:31.246Z",
3
- "checkpoint_at": "2026-03-27T02:18:31.247Z",
2
+ "message": "Auto-checkpoint at 2026-03-27T03:40:21.932Z",
3
+ "checkpoint_at": "2026-03-27T03:40:21.934Z",
4
4
  "active_files": [],
5
5
  "notes": [],
6
6
  "mtime_snapshot": {}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@misterhuydo/sentinel",
3
- "version": "1.4.83",
3
+ "version": "1.4.84",
4
4
  "description": "Sentinel — Autonomous DevOps Agent installer and manager",
5
5
  "bin": {
6
6
  "sentinel": "./bin/sentinel.js"
@@ -30,7 +30,7 @@ class MissingToolError(Exception):
30
30
  self.tool = tool
31
31
 
32
32
  # Files that must never be modified by Sentinel
33
- _PROTECTED_PATHS = {".github/", "Jenkinsfile", "pom.xml"}
33
+ _PROTECTED_PATHS = {".github/", "Jenkinsfile"}
34
34
 
35
35
 
36
36
  def _git(args: list[str], cwd: str, env: dict | None = None, timeout: int = GIT_TIMEOUT) -> subprocess.CompletedProcess:
@@ -402,3 +402,88 @@ def _open_github_pr(
402
402
  else:
403
403
  logger.error("Failed to open PR (%s): %s", resp.status_code, resp.text[:300])
404
404
  return ""
405
+
406
+
407
def poll_open_prs(store, github_token: str) -> list[dict]:
    """
    Check GitHub for the current state of all pending PRs in the state store.

    For each pending PR:
    - merged → update status to 'merged'
    - closed without merge → update status to 'skipped' (rejected by admin)
    - still open → leave as-is

    Args:
        store: state store exposing get_open_prs() and a _conn() context manager.
        github_token: GitHub API token; when empty the check is skipped entirely.

    Returns:
        A list of change dicts: {fingerprint, pr_url, new_status}.
    """
    import re as _re

    open_prs = store.get_open_prs()
    if not open_prs:
        return []

    if not github_token:
        logger.debug("poll_open_prs: no GITHUB_TOKEN — skipping PR status check")
        return []

    headers = {
        "Authorization": f"Bearer {github_token}",
        "Accept": "application/vnd.github+json",
    }

    changes = []
    for fix in open_prs:
        pr_url = fix.get("pr_url", "")
        # Defensive: a row without a fingerprint cannot be updated — skip it
        # instead of raising KeyError and aborting the whole polling loop.
        fingerprint = fix.get("fingerprint", "")
        if not fingerprint:
            logger.debug("poll_open_prs: row missing fingerprint for %s", pr_url)
            continue

        # Extract owner/repo/number from https://github.com/<owner>/<repo>/pull/<number>
        m = _re.search(r"github\.com/([^/]+/[^/]+)/pull/(\d+)", pr_url)
        if not m:
            logger.debug("poll_open_prs: cannot parse PR URL %s", pr_url)
            continue

        owner_repo = m.group(1)
        pr_number = m.group(2)

        try:
            resp = requests.get(
                f"https://api.github.com/repos/{owner_repo}/pulls/{pr_number}",
                headers=headers,
                timeout=15,
            )
        except Exception as e:
            logger.warning("poll_open_prs: request failed for %s: %s", pr_url, e)
            continue

        if resp.status_code == 404:
            logger.warning("poll_open_prs: PR not found (deleted?): %s", pr_url)
            continue
        if resp.status_code != 200:
            logger.warning("poll_open_prs: unexpected %s for %s", resp.status_code, pr_url)
            continue

        try:
            data = resp.json()
        except ValueError:
            # Malformed / non-JSON body — treat like any other transient API failure.
            logger.warning("poll_open_prs: invalid JSON response for %s", pr_url)
            continue
        state = data.get("state")  # "open" | "closed"
        merged = data.get("merged", False)

        if state == "open":
            continue  # still pending

        new_status = "merged" if merged else "skipped"

        # NOTE(review): reaches into the store's private connection helper —
        # consider a public store.update_fix_status() API instead.
        with store._conn() as conn:
            conn.execute(
                "UPDATE fixes SET status=? WHERE fingerprint=? AND status='pending'",
                (new_status, fingerprint),
            )

        logger.info(
            "poll_open_prs: PR #%s %s → %s (fp=%s)",
            pr_number, owner_repo, new_status, fingerprint[:8],
        )
        changes.append({
            "fingerprint": fingerprint,
            "pr_url": pr_url,
            "new_status": new_status,
        })

    return changes
@@ -163,3 +163,76 @@ def mark_done(issue_file: Path) -> None:
163
163
  dest = done_dir / f"{issue_file.stem}-{int(time.time())}{issue_file.suffix}"
164
164
  issue_file.rename(dest)
165
165
  logger.info("Issue archived: %s -> .done/%s", issue_file.name, dest.name)
166
+
167
+
168
def cancel_issue(issues_dir: Path, keyword: str) -> dict:
    """
    Cancel a pending issue (not yet picked up by the poll cycle).

    Searches issues/ for a file whose TARGET_REPO header matches keyword first,
    then falls back to full-content match. Moves the file to issues/.cancelled/.

    Returns {"cancelled": filename} or {"error": reason}.
    """
    # Pending issues: visible regular files, newest first.
    pending = [p for p in issues_dir.iterdir() if p.is_file() and not p.name.startswith(".")]
    pending.sort(key=lambda p: p.stat().st_mtime, reverse=True)
    if not pending:
        return {"error": "No pending issues found"}

    needle = keyword.lower()
    by_repo: list = []     # matched via TARGET_REPO header (priority)
    by_content: list = []  # matched anywhere in the file body (fallback)
    for path in pending:
        try:
            text = path.read_text(encoding="utf-8", errors="replace")
        except OSError:
            continue  # unreadable file — best-effort, skip it
        # Only the first TARGET_REPO header line is consulted; if one exists,
        # the content fallback is not applied to this file.
        header = next(
            (ln for ln in text.splitlines()
             if ln.strip().upper().startswith("TARGET_REPO")),
            None,
        )
        if header is not None:
            # Accept both "TARGET_REPO: x" and "TARGET_REPO=x" forms.
            repo_value = header.split(":", 1)[-1].split("=", 1)[-1].strip().lower()
            if needle in repo_value:
                by_repo.append(path)
        elif needle in text.lower():
            by_content.append(path)

    hits = by_repo or by_content
    if not hits:
        return {"error": f"No pending issue matches '{keyword}' — it may already be running or finished"}

    victim = hits[0]
    graveyard = issues_dir / ".cancelled"
    graveyard.mkdir(exist_ok=True)
    destination = graveyard / victim.name
    if destination.exists():
        # Avoid clobbering an earlier cancellation of a same-named file.
        destination = graveyard / f"{victim.stem}-{int(time.time())}{victim.suffix}"
    victim.rename(destination)
    logger.info("Issue cancelled: %s -> .cancelled/%s", victim.name, destination.name)
    return {"cancelled": victim.name}
216
+
217
+
218
def purge_old_issues(issues_dir: Path, keep_days: int = 30) -> int:
    """
    Delete files older than keep_days from issues/.done/ and issues/.cancelled/.
    Returns the number of files deleted.
    """
    threshold = time.time() - keep_days * 86400
    removed = 0
    for archive in (issues_dir / ".done", issues_dir / ".cancelled"):
        if not archive.exists():
            continue
        # Regular files whose mtime predates the retention cutoff.
        stale = (f for f in archive.iterdir() if f.is_file() and f.stat().st_mtime < threshold)
        for victim in stale:
            try:
                victim.unlink()
            except OSError:
                continue  # best-effort: leave undeletable files in place
            removed += 1
    if removed:
        logger.info("Purged %d old issue archive file(s) (>%d days)", removed, keep_days)
    return removed
@@ -23,11 +23,11 @@ from pathlib import Path
23
23
  from .cairn_client import ensure_installed as cairn_installed, index_repo
24
24
  from .config_loader import ConfigLoader, SentinelConfig
25
25
  from .fix_engine import generate_fix
26
- from .git_manager import apply_and_commit, publish, _git_env, MissingToolError
26
+ from .git_manager import apply_and_commit, publish, _git_env, MissingToolError, poll_open_prs
27
27
  from .cicd_trigger import trigger as cicd_trigger
28
28
  from .log_fetcher import fetch_all
29
29
  from .log_parser import parse_all, scan_all_for_markers, ErrorEvent
30
- from .issue_watcher import scan_issues, mark_done, IssueEvent
30
+ from .issue_watcher import scan_issues, mark_done, purge_old_issues, IssueEvent
31
31
  from .repo_router import route
32
32
  from .reporter import build_and_send, send_fix_notification, send_failure_notification, send_confirmed_notification, send_regression_notification, send_startup_notification, send_upgrade_notification
33
33
  from .notify import notify_fix_blocked, notify_fix_applied, notify_missing_tool, notify_tool_installing, notify_cascade_started, notify_cascade_result
@@ -96,7 +96,12 @@ def _auto_install_if_safe(
96
96
  return False
97
97
 
98
98
  repo_p = Path(repo_path)
99
- if not any((repo_p / bf).exists() for bf in spec["build_files"]):
99
+ # Search root and one level of subdirectories (e.g. frontend/yarn.lock)
100
+ def _build_file_exists(bf: str) -> bool:
101
+ if (repo_p / bf).exists():
102
+ return True
103
+ return any(sub.is_dir() and (sub / bf).exists() for sub in repo_p.iterdir())
104
+ if not any(_build_file_exists(bf) for bf in spec["build_files"]):
100
105
  logger.info(
101
106
  "Tool '%s' is whitelisted but no matching build file found in %s — skipping",
102
107
  tool, repo_path,
@@ -281,8 +286,9 @@ async def _handle_error(event: ErrorEvent, cfg_loader: ConfigLoader, store: Stat
281
286
 
282
287
  # ── Issue pipeline ────────────────────────────────────────────────────────────
283
288
 
284
- async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: StateStore):
285
- """Process a single issue file from the issues/ directory."""
289
+ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: StateStore) -> dict | None:
290
+ """Process a single issue file from the issues/ directory.
291
+ Returns a result dict {submitter, repo_name, status, summary} or None if skipped."""
286
292
  sentinel = cfg_loader.sentinel
287
293
 
288
294
  if Path("SENTINEL_PAUSE").exists():
@@ -310,32 +316,51 @@ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: Stat
310
316
  )
311
317
  return # Leave the file so admin can add the header
312
318
 
319
+ # Post "working on" to channel so everyone can see progress.
320
+ from .notify import slack_alert as _slack_alert
321
+ _submitter = getattr(event, "submitter_user_id", "")
322
+ _started_msg = (
323
+ f":hammer: Working on *<@{_submitter}>*'s request — *{repo.repo_name}*\n"
324
+ f"_{event.message[:120]}_"
325
+ ) if _submitter else (
326
+ f":hammer: Working on *{repo.repo_name}*\n_{event.message[:120]}_"
327
+ )
328
+ _slack_alert(sentinel.slack_bot_token, sentinel.slack_channel, _started_msg)
329
+
313
330
  try:
314
331
  patches_dir = Path(sentinel.workspace_dir).resolve() / "patches"
315
- status, patch_path, marker = generate_fix(event, repo, sentinel, patches_dir, store)
332
+ _loop = asyncio.get_event_loop()
333
+
334
+ # Run blocking subprocess work in thread executor so Boss stays responsive
335
+ status, patch_path, marker = await _loop.run_in_executor(
336
+ None, generate_fix, event, repo, sentinel, patches_dir, store
337
+ )
338
+
339
+ submitter_uid = getattr(event, "submitter_user_id", "")
316
340
 
317
341
  if status != "patch" or patch_path is None:
318
342
  store.record_fix(event.fingerprint, "skipped" if status in ("skip", "needs_human") else "failed",
319
343
  repo_name=repo.repo_name)
320
- # For user-submitted issues: always notify (person is waiting)
321
- submitter_uid = getattr(event, "submitter_user_id", "")
322
344
  reason_text = marker if status == "needs_human" else f"Claude Code returned {status.upper()}"
323
345
  notify_fix_blocked(sentinel, event.source, event.message,
324
346
  reason=reason_text, repo_name=repo.repo_name,
325
347
  submitter_user_id=submitter_uid)
326
348
  mark_done(event.issue_file)
327
- return
349
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
350
+ "status": "blocked", "summary": reason_text[:120], "pr_url": ""}
328
351
 
329
- commit_status, commit_hash = apply_and_commit(event, patch_path, repo, sentinel)
352
+ commit_status, commit_hash = await _loop.run_in_executor(
353
+ None, apply_and_commit, event, patch_path, repo, sentinel
354
+ )
330
355
  if commit_status != "committed":
331
356
  store.record_fix(event.fingerprint, "failed", repo_name=repo.repo_name)
332
- submitter_uid = getattr(event, "submitter_user_id", "")
333
357
  notify_fix_blocked(sentinel, event.source, event.message,
334
358
  reason="Patch was generated but commit/tests failed",
335
359
  repo_name=repo.repo_name,
336
360
  submitter_user_id=submitter_uid)
337
361
  mark_done(event.issue_file)
338
- return
362
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
363
+ "status": "blocked", "summary": "Commit/tests failed", "pr_url": ""}
339
364
 
340
365
  branch, pr_url = publish(event, repo, sentinel, commit_hash)
341
366
  store.record_fix(
@@ -362,7 +387,6 @@ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: Stat
362
387
  "auto_publish": repo.auto_publish,
363
388
  "files_changed": [],
364
389
  })
365
- submitter_uid = getattr(event, "submitter_user_id", "")
366
390
  notify_fix_applied(sentinel, event.source, event.message,
367
391
  repo_name=repo.repo_name, branch=branch, pr_url=pr_url,
368
392
  submitter_user_id=submitter_uid)
@@ -373,30 +397,41 @@ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: Stat
373
397
  if ok and repo.cicd_type.lower() in ("jenkins_release", "jenkins-release"):
374
398
  _run_cascade(repo, sentinel, cfg_loader)
375
399
 
400
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
401
+ "status": "done", "summary": event.message[:120], "pr_url": pr_url}
402
+
376
403
  except MissingToolError as e:
377
404
  logger.warning("Missing tool for %s: %s", event.source, e)
378
405
  submitter_uid = getattr(event, "submitter_user_id", "")
379
- if not _auto_install_if_safe(e.tool, repo.local_path, sentinel, repo.repo_name, event.source):
406
+ installed = await _loop.run_in_executor(
407
+ None, _auto_install_if_safe, e.tool, repo.local_path, sentinel, repo.repo_name, event.source
408
+ )
409
+ if not installed:
380
410
  notify_missing_tool(sentinel, e.tool, repo.repo_name, event.source, submitter_uid)
381
411
  store.record_fix(event.fingerprint, "failed", repo_name=repo.repo_name)
382
412
  mark_done(event.issue_file)
383
- return
413
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
414
+ "status": "blocked", "summary": f"Missing tool: {e.tool}", "pr_url": ""}
384
415
  # Tool installed — retry apply_and_commit once
385
416
  try:
386
- commit_status, commit_hash = apply_and_commit(event, patch_path, repo, sentinel)
417
+ commit_status, commit_hash = await _loop.run_in_executor(
418
+ None, apply_and_commit, event, patch_path, repo, sentinel
419
+ )
387
420
  except MissingToolError as e2:
388
421
  logger.error("Still missing tool after auto-install: %s", e2)
389
422
  notify_missing_tool(sentinel, e2.tool, repo.repo_name, event.source, submitter_uid)
390
423
  store.record_fix(event.fingerprint, "failed", repo_name=repo.repo_name)
391
424
  mark_done(event.issue_file)
392
- return
425
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
426
+ "status": "blocked", "summary": f"Missing tool: {e2.tool}", "pr_url": ""}
393
427
  if commit_status != "committed":
394
428
  store.record_fix(event.fingerprint, "failed", repo_name=repo.repo_name)
395
429
  notify_fix_blocked(sentinel, event.source, event.message,
396
430
  reason="Patch was generated but commit/tests failed after tool install",
397
431
  repo_name=repo.repo_name, submitter_user_id=submitter_uid)
398
432
  mark_done(event.issue_file)
399
- return
433
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
434
+ "status": "blocked", "summary": "Commit/tests failed after tool install", "pr_url": ""}
400
435
  branch, pr_url = publish(event, repo, sentinel, commit_hash)
401
436
  store.record_fix(
402
437
  event.fingerprint,
@@ -420,11 +455,16 @@ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: Stat
420
455
  ok = cicd_trigger(repo, store, event.fingerprint)
421
456
  if ok and repo.cicd_type.lower() in ("jenkins_release", "jenkins-release"):
422
457
  _run_cascade(repo, sentinel, cfg_loader)
458
+ return {"submitter": submitter_uid, "repo_name": repo.repo_name,
459
+ "status": "done", "summary": event.message[:120], "pr_url": pr_url}
423
460
 
424
461
  except Exception:
425
462
  logger.exception("Unexpected error processing issue %s — archiving to prevent retry loop", event.source)
426
463
  store.record_fix(event.fingerprint, "failed", repo_name=repo.repo_name)
427
464
  mark_done(event.issue_file)
465
+ return {"submitter": getattr(event, "submitter_user_id", ""),
466
+ "repo_name": repo.repo_name if repo else event.target_repo,
467
+ "status": "blocked", "summary": "Unexpected error — check logs", "pr_url": ""}
428
468
 
429
469
 
430
470
  async def poll_cycle(cfg_loader: ConfigLoader, store: StateStore):
@@ -483,14 +523,51 @@ async def poll_cycle(cfg_loader: ConfigLoader, store: StateStore):
483
523
  quiet_hours, fix["fingerprint"], fix.get("repo_name"))
484
524
  send_confirmed_notification(cfg_loader.sentinel, confirmed)
485
525
 
526
+ # ── PR status sync — detect merges/rejections done via GitHub UI ─────────
527
+ pr_changes = poll_open_prs(store, cfg_loader.sentinel.github_token)
528
+ for ch in pr_changes:
529
+ if ch["new_status"] == "merged":
530
+ logger.info("PR merged externally: %s (fp=%s)", ch["pr_url"], ch["fingerprint"][:8])
531
+ elif ch["new_status"] == "skipped":
532
+ logger.info("PR closed/rejected: %s (fp=%s) — will not retry for 24h",
533
+ ch["pr_url"], ch["fingerprint"][:8])
534
+
486
535
  # ── Issues directory (always checked) ────────────────────────────────────
536
+ purge_old_issues(Path(".") / "issues")
487
537
  issues = scan_issues(Path("."))
488
538
  if issues:
489
539
  logger.info("%d issue file(s) found in issues/", len(issues))
490
- await asyncio.gather(
540
+ issue_results = await asyncio.gather(
491
541
  *[_handle_issue(e, cfg_loader, store) for e in issues],
492
542
  return_exceptions=True,
493
543
  )
544
+ # Send one DM per submitter summarising everything that just ran
545
+ from collections import defaultdict
546
+ from .notify import slack_dm as _slack_dm
547
+ by_submitter: dict[str, list] = defaultdict(list)
548
+ for r in issue_results:
549
+ if isinstance(r, dict) and r.get("submitter"):
550
+ by_submitter[r["submitter"]].append(r)
551
+ for uid, results in by_submitter.items():
552
+ done = [r for r in results if r["status"] == "done"]
553
+ blocked = [r for r in results if r["status"] == "blocked"]
554
+ lines = []
555
+ for r in done:
556
+ pr = f" — <{r['pr_url']}|PR>" if r.get("pr_url") else ""
557
+ lines.append(f":white_check_mark: *{r['repo_name']}*{pr}")
558
+ for r in blocked:
559
+ lines.append(f":x: *{r['repo_name']}* — {r['summary']}")
560
+ if done and blocked:
561
+ header = f":sentinel: *All done — {len(done)} succeeded, {len(blocked)} need attention:*"
562
+ elif blocked:
563
+ header = f":sentinel: *All done — {len(blocked)} need attention:*"
564
+ else:
565
+ header = f":sentinel: *All done — {len(done)} completed successfully:*"
566
+ _slack_dm(
567
+ cfg_loader.sentinel.slack_bot_token,
568
+ uid,
569
+ f"{header}\n" + "\n".join(lines),
570
+ )
494
571
 
495
572
 
496
573
  # -- Health URL checks -------------------------------------------------------
@@ -204,17 +204,11 @@ def notify_fix_blocked(
204
204
  f"*Reason:*\n{short_reason}"
205
205
  )
206
206
 
207
+ # Post to channel — mention submitter if known, otherwise broadcast
207
208
  if submitter_user_id:
208
- if getattr(cfg, "slack_dm_submitter", True):
209
- slack_dm(cfg.slack_bot_token, submitter_user_id, slack_text)
210
209
  slack_alert(cfg.slack_bot_token, cfg.slack_channel, f"<@{submitter_user_id}> {slack_text}")
211
210
  else:
212
- # No known submitter — broadcast to the whole channel
213
- slack_alert(
214
- cfg.slack_bot_token,
215
- cfg.slack_channel,
216
- f"<!channel> {slack_text}",
217
- )
211
+ slack_alert(cfg.slack_bot_token, cfg.slack_channel, f"<!channel> {slack_text}")
218
212
 
219
213
  # Always email admins
220
214
  try:
@@ -257,18 +251,24 @@ def notify_missing_tool(
257
251
  Notify admins that a build tool is missing on this server.
258
252
  Prompts them to ask Boss to install it.
259
253
  """
254
+ _INSTALL_STEPS = {
255
+ "yarn": "1. SSH into the server\n2. Run: `npm install -g yarn`\n3. Verify: `yarn --version`",
256
+ "node": "1. SSH into the server\n2. Run: `curl -fsSL https://rpm.nodesource.com/setup_22.x | sudo bash -`\n3. Run: `sudo yum install -y nodejs`\n4. Verify: `node --version`",
257
+ "npm": "1. SSH into the server\n2. Run: `sudo yum install -y nodejs npm` (or `apt-get install -y nodejs npm`)\n3. Verify: `npm --version`",
258
+ "mvn": "1. SSH into the server\n2. Run: `sudo yum install -y maven` (or `apt-get install -y maven`)\n3. Verify: `mvn --version`",
259
+ "gradle": "1. SSH into the server\n2. Run: `sudo yum install -y gradle` (or `apt-get install -y gradle`)\n3. Verify: `gradle --version`",
260
+ "make": "1. SSH into the server\n2. Run: `sudo yum install -y make` (or `apt-get install -y make`)\n3. Verify: `make --version`",
261
+ }
262
+ steps = _INSTALL_STEPS.get(tool, f"1. SSH into the server\n2. Install `{tool}` using your system package manager\n3. Verify it runs with `{tool} --version`")
260
263
  repo_line = f" for *{repo_name}*" if repo_name else ""
261
264
  slack_text = (
262
265
  f":wrench: *Build tool missing{repo_line}*\n"
263
- f"*Source:* {source}\n"
264
- f"The fix was generated but tests couldn't run because `{tool}` is not installed on this server.\n\n"
265
- f"Ask me to install it:\n"
266
- f"> @Sentinel install {tool}\n\n"
267
- f"Once installed, re-raise the issue to apply the fix."
266
+ f"*Missing:* `{tool}`\n"
267
+ f"The fix was generated but couldn't run because `{tool}` is not installed on this server.\n\n"
268
+ f"*How to fix:*\n{steps}\n\n"
269
+ f"Then tell me: `retry {repo_name or source}`"
268
270
  )
269
271
  if submitter_user_id:
270
- if getattr(cfg, "slack_dm_submitter", True):
271
- slack_dm(cfg.slack_bot_token, submitter_user_id, slack_text)
272
272
  slack_alert(cfg.slack_bot_token, cfg.slack_channel, f"<@{submitter_user_id}> {slack_text}")
273
273
  else:
274
274
  slack_alert(cfg.slack_bot_token, cfg.slack_channel, f"<!channel> {slack_text}")
@@ -301,13 +301,9 @@ def notify_fix_applied(
301
301
  + (f"{action_line}\n" if action_line else "")
302
302
  ).rstrip()
303
303
 
304
- if submitter_user_id:
305
- if getattr(cfg, "slack_dm_submitter", True):
306
- slack_dm(cfg.slack_bot_token, submitter_user_id, slack_text)
307
- channel_text = f"<@{submitter_user_id}> {slack_text}"
308
- slack_alert(cfg.slack_bot_token, cfg.slack_channel, channel_text)
309
- else:
310
- slack_alert(cfg.slack_bot_token, cfg.slack_channel, slack_text)
304
+ # Post to channel — mention submitter if known
305
+ channel_text = f"<@{submitter_user_id}> {slack_text}" if submitter_user_id else slack_text
306
+ slack_alert(cfg.slack_bot_token, cfg.slack_channel, channel_text)
311
307
 
312
308
 
313
309
  def notify_cascade_started(
@@ -226,6 +226,7 @@ reply with a grouped summary like this:
226
226
  *Issues & fix management*
227
227
  • `create_issue` — deliver a fix/task to this project
228
228
  • `retry_issue` — re-queue a failed or skipped fix
229
+ • `cancel_issue` — cancel a pending issue before Sentinel picks it up
229
230
  • `get_fix_details` — full details of a specific fix
230
231
  • `trigger_poll` — run a log-fetch + fix cycle right now
231
232
 
@@ -425,7 +426,7 @@ When to act vs. when to ask:
425
426
  list_recent_commits) → call immediately without asking permission. Never say "Want me to check?" —
426
427
  just check and report results. If you think it's worth doing, do it.
427
428
  - Write/action tools (create_issue, trigger_poll, pull_repo, pause_sentinel, install_tool, merge_pr,
428
- retry_issue, post_file) → act immediately for clear commands; confirm only when intent is ambiguous
429
+ retry_issue, cancel_issue, post_file) → act immediately for clear commands; confirm only when intent is ambiguous
429
430
  (e.g. unclear which project or repo to target).
430
431
  - Explaining a tool ("what does X do?") → explain naturally, then offer to run it if relevant.
431
432
  - NEVER gate investigation on user approval. If diagnosing a problem, run all relevant read tools
@@ -493,7 +494,7 @@ Sentinel is a 24/7 autonomous agent. Act on clear signals without asking permiss
493
494
 
494
495
  Autonomous action policy — Sentinel acts, humans review outcomes:
495
496
  - Read/investigate tools → always act immediately, no confirmation needed.
496
- - create_issue, retry_issue, trigger_poll, install_tool, pull_repo → act immediately when
497
+ - create_issue, retry_issue, cancel_issue, trigger_poll, install_tool, pull_repo → act immediately when
497
498
  intent is clear. Only pause for genuinely ambiguous target (unknown project/repo).
498
499
  - merge_pr → confirm once: "Merging PR #N for <repo> to <branch> — confirm?" Then act.
499
500
  - pause_sentinel → confirm once (halts all monitoring).
@@ -664,6 +665,29 @@ _TOOLS = [
664
665
  "required": ["project"],
665
666
  },
666
667
  },
668
+ {
669
+ "name": "cancel_issue",
670
+ "description": (
671
+ "Cancel a pending issue that is still waiting in the queue (not yet picked up). "
672
+ "Moves the file to issues/.cancelled/ so Sentinel never processes it. "
673
+ "If the issue is already running it cannot be cancelled — it will block naturally. "
674
+ "Use when the user says: 'stop', 'cancel that', 'don't process X', 'cancel the TypeLib retry'."
675
+ ),
676
+ "input_schema": {
677
+ "type": "object",
678
+ "properties": {
679
+ "project": {
680
+ "type": "string",
681
+ "description": "Project short name (e.g. '1881'). Required.",
682
+ },
683
+ "keyword": {
684
+ "type": "string",
685
+ "description": "Keyword to match the pending issue (repo name, error keyword, etc.)",
686
+ },
687
+ },
688
+ "required": ["project", "keyword"],
689
+ },
690
+ },
667
691
  {
668
692
  "name": "list_pending_prs",
669
693
  "description": "List all open Sentinel PRs awaiting admin review.",
@@ -1718,30 +1742,45 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
1718
1742
  if len(project_dirs) > 1 and project_arg:
1719
1743
  return json.dumps({"error": f"Ambiguous project '{project_arg}' — matches: {[_read_project_name(d) for d in project_dirs]}"})
1720
1744
 
1721
- project_dir = project_dirs[0]
1722
- done_dir = project_dir / "issues" / ".done"
1723
- if not done_dir.exists():
1745
+ project_dir = project_dirs[0]
1746
+ issues_base = project_dir / "issues"
1747
+ # Scan both .done/ and .cancelled/ so users can recover issues they cancelled by mistake
1748
+ archive_dirs = [issues_base / d for d in (".done", ".cancelled") if (issues_base / d).exists()]
1749
+ if not archive_dirs:
1724
1750
  return json.dumps({"error": "No archived issues found — issues/.done/ does not exist"})
1725
1751
 
1726
- # Find all archived issue files, newest first
1752
+ # Find all archived issue files across both dirs, newest first
1727
1753
  candidates = sorted(
1728
- [f for f in done_dir.iterdir() if f.is_file() and not f.name.startswith(".")],
1754
+ [f for d in archive_dirs for f in d.iterdir() if f.is_file() and not f.name.startswith(".")],
1729
1755
  key=lambda f: f.stat().st_mtime,
1730
1756
  reverse=True,
1731
1757
  )
1732
1758
  if not candidates:
1733
- return json.dumps({"error": "No archived issues found in issues/.done/"})
1759
+ return json.dumps({"error": "No archived issues found in issues/.done/ or issues/.cancelled/"})
1734
1760
 
1735
1761
  # Filter by keyword if provided
1736
1762
  if keyword:
1737
- matched = []
1763
+ # Priority 1: TARGET_REPO header matches keyword (exact repo name match)
1764
+ repo_matched = []
1765
+ # Priority 2: full content contains keyword
1766
+ content_matched = []
1738
1767
  for f in candidates:
1739
1768
  try:
1740
1769
  content = f.read_text(encoding="utf-8", errors="replace")
1741
- if keyword in content.lower():
1742
- matched.append(f)
1770
+ content_lower = content.lower()
1771
+ # Check if TARGET_REPO line matches (e.g. "TARGET_REPO=1881-SSOLoginWebApp")
1772
+ for line in content.splitlines():
1773
+ if line.strip().upper().startswith("TARGET_REPO="):
1774
+ repo_val = line.split("=", 1)[1].strip().lower()
1775
+ if keyword in repo_val:
1776
+ repo_matched.append(f)
1777
+ break
1778
+ else:
1779
+ if keyword in content_lower:
1780
+ content_matched.append(f)
1743
1781
  except OSError:
1744
1782
  pass
1783
+ matched = repo_matched if repo_matched else content_matched
1745
1784
  if not matched:
1746
1785
  return json.dumps({"error": f"No archived issues match keyword '{keyword}'"})
1747
1786
  candidates = matched
@@ -1767,6 +1806,67 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
1767
1806
  })
1768
1807
 
1769
1808
 
1809
+ if name == "cancel_issue":
1810
+ import re as _re
1811
+ project_arg = inputs.get("project", "").strip()
1812
+ keyword = inputs.get("keyword", "").strip()
1813
+
1814
+ project_dirs = _find_project_dirs(project_arg) if project_arg else _find_project_dirs()
1815
+ if not project_dirs:
1816
+ return json.dumps({"error": f"No project found matching '{project_arg}'"})
1817
+ if len(project_dirs) > 1 and project_arg:
1818
+ return json.dumps({"error": f"Ambiguous project '{project_arg}' — matches: {[_read_project_name(d) for d in project_dirs]}"})
1819
+
1820
+ project_dir = project_dirs[0]
1821
+ issues_dir = project_dir / "issues"
1822
+ if not issues_dir.exists():
1823
+ return json.dumps({"error": "No issues/ directory found for this project"})
1824
+
1825
+ from .issue_watcher import cancel_issue as _cancel_issue
1826
+ result = _cancel_issue(issues_dir, keyword)
1827
+ if "error" in result:
1828
+ return json.dumps(result)
1829
+
1830
+ # Option B: owner can cancel their own; admins can cancel anyone's;
1831
+ # system issues (no submitter) are admin-only.
1832
+ cancelled_file = (issues_dir / ".cancelled" / result["cancelled"])
1833
+ submitter_uid = ""
1834
+ if cancelled_file.exists():
1835
+ for line in cancelled_file.read_text(encoding="utf-8", errors="replace").splitlines():
1836
+ if line.strip().upper().startswith("SUBMITTED_BY:"):
1837
+ m = _re.search(r"\((\w+)\)", line)
1838
+ if m:
1839
+ submitter_uid = m.group(1)
1840
+ break
1841
+
1842
+ if submitter_uid:
1843
+ if user_id != submitter_uid and not is_admin:
1844
+ # Undo: move file back to issues/
1845
+ import shutil as _shutil
1846
+ _shutil.move(str(cancelled_file), str(issues_dir / result["cancelled"]))
1847
+ return json.dumps({
1848
+ "error": (
1849
+ f"Permission denied — this issue was submitted by <@{submitter_uid}>. "
1850
+ f"Only they or an admin can cancel it."
1851
+ )
1852
+ })
1853
+ else:
1854
+ # No submitter = system-generated issue, admin only
1855
+ if not is_admin:
1856
+ import shutil as _shutil
1857
+ _shutil.move(str(cancelled_file), str(issues_dir / result["cancelled"]))
1858
+ return json.dumps({"error": "This issue was not submitted via Slack — only admins can cancel it."})
1859
+
1860
+ project_label = _read_project_name(project_dir.resolve())
1861
+ logger.info("Boss cancel_issue: cancelled '%s' for %s by user %s", result["cancelled"], project_label, user_id)
1862
+ return json.dumps({
1863
+ "status": "cancelled",
1864
+ "project": project_label,
1865
+ "file": result["cancelled"],
1866
+ "note": "Moved to issues/.cancelled/ — Sentinel will not process it.",
1867
+ })
1868
+
1869
+
1770
1870
  if name == "get_fix_details":
1771
1871
  fp = inputs["fingerprint"]
1772
1872
  fix = store.get_confirmed_fix(fp) or store.get_marker_seen_fix(fp)