bingo-light 2.1.2 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.en.md +17 -7
- package/README.md +30 -4
- package/bingo-light +321 -11
- package/bingo_core/__init__.py +9 -1
- package/bingo_core/decisions.py +167 -0
- package/bingo_core/dep.py +385 -25
- package/bingo_core/dep_fork.py +268 -0
- package/bingo_core/models.py +1 -0
- package/bingo_core/repo.py +1031 -9
- package/bingo_core/semantic.py +85 -0
- package/bingo_core/state.py +1 -1
- package/bingo_core/team.py +170 -0
- package/completions/bingo-light.bash +14 -4
- package/completions/bingo-light.fish +23 -2
- package/completions/bingo-light.zsh +18 -2
- package/mcp-server.py +245 -7
- package/package.json +1 -1
package/bingo_core/repo.py
CHANGED
|
@@ -8,6 +8,7 @@ import json
|
|
|
8
8
|
import os
|
|
9
9
|
import re
|
|
10
10
|
import shlex
|
|
11
|
+
import shutil
|
|
11
12
|
import subprocess
|
|
12
13
|
import tempfile
|
|
13
14
|
from datetime import datetime, timezone
|
|
@@ -30,9 +31,12 @@ from bingo_core.exceptions import (
|
|
|
30
31
|
DirtyTreeError,
|
|
31
32
|
)
|
|
32
33
|
from bingo_core.models import ConflictInfo
|
|
34
|
+
from bingo_core.semantic import classify_conflict
|
|
35
|
+
from bingo_core.decisions import DecisionMemory, detect_resolution_strategy
|
|
33
36
|
from bingo_core.git import Git
|
|
34
37
|
from bingo_core.config import Config
|
|
35
38
|
from bingo_core.state import State
|
|
39
|
+
from bingo_core.team import TeamState
|
|
36
40
|
|
|
37
41
|
|
|
38
42
|
class Repo:
|
|
@@ -43,6 +47,8 @@ class Repo:
|
|
|
43
47
|
self.git = Git(self.path)
|
|
44
48
|
self.config = Config(self.path)
|
|
45
49
|
self.state = State(self.path)
|
|
50
|
+
self.team = TeamState(self.path, git=self.git)
|
|
51
|
+
self.decisions = DecisionMemory(self.path)
|
|
46
52
|
|
|
47
53
|
# -- Internal helpers --
|
|
48
54
|
|
|
@@ -212,6 +218,361 @@ class Repo:
|
|
|
212
218
|
pass
|
|
213
219
|
return ""
|
|
214
220
|
|
|
221
|
+
_PATCH_SUBJECT_RE = re.compile(
    r"^\[bl\]\s+([a-zA-Z0-9][a-zA-Z0-9_-]*)\s*:\s*(.*)$"
)
_MESSAGE_MAX = 2048

def _build_patch_intent(self) -> dict:
    """Assemble patch-intent context for a rebase in progress.

    Returns a dict with: name, subject, message, message_truncated,
    original_sha, original_diff, diff_truncated, meta, stack_position.

    All fields fall back to empty/None rather than raising. This is
    defensive context-gathering for AI consumption, not a validator.
    """
    result = {
        "name": "",
        "subject": "",
        "message": "",
        "message_truncated": False,
        "original_sha": None,
        "original_diff": None,
        "diff_truncated": False,
        "meta": None,
        "stack_position": None,
    }

    msg_file = os.path.join(self.path, ".git", "rebase-merge", "message")
    sha_file = os.path.join(self.path, ".git", "rebase-merge", "stopped-sha")

    # No rebase-merge/message file means no rebase is stopped here;
    # return the empty skeleton rather than raising.
    try:
        with open(msg_file) as f:
            raw_msg = f.read()
    except (IOError, OSError):
        return result

    if len(raw_msg) > self._MESSAGE_MAX:
        result["message"] = raw_msg[: self._MESSAGE_MAX]
        result["message_truncated"] = True
    else:
        result["message"] = raw_msg

    # The first subject line encodes "[bl] <name>: <subject>".
    first_line = raw_msg.split("\n", 1)[0]
    m = self._PATCH_SUBJECT_RE.match(first_line)
    if m:
        result["name"] = m.group(1)
        result["subject"] = m.group(2).strip()

    try:
        with open(sha_file) as f:
            sha = f.read().strip()
        if sha:
            result["original_sha"] = sha
            try:
                diff = self.git.run("show", "--format=", sha)
            except GitError:
                diff = ""
            if diff:
                if len(diff) > MAX_DIFF_SIZE:
                    result["original_diff"] = diff[:MAX_DIFF_SIZE]
                    result["diff_truncated"] = True
                else:
                    result["original_diff"] = diff
    except (IOError, OSError):
        pass

    # Meta and stack position only make sense once a patch name was
    # parsed: _PATCH_SUBJECT_RE never yields an empty name, so scanning
    # the stack without one can never match and only wastes git calls.
    if result["name"]:
        try:
            result["meta"] = self.state.patch_meta_get(result["name"])
        except Exception:
            result["meta"] = None

        try:
            c = self._load()
            base = self._patches_base(c)
            if base:
                try:
                    log_output = self.git.run(
                        "rev-list", "--reverse",
                        f"{base}..{c['patches_branch']}"
                    )
                except GitError:
                    log_output = ""
                shas = log_output.splitlines()
                subjects = []
                for s in shas:
                    try:
                        subj = self.git.run(
                            "log", "-1", "--format=%s", s
                        ).strip()
                    except GitError:
                        subj = ""
                    subjects.append(subj)
                for idx, subj in enumerate(subjects, start=1):
                    m2 = self._PATCH_SUBJECT_RE.match(subj)
                    if m2 and m2.group(1) == result["name"]:
                        result["stack_position"] = {
                            "index": idx,
                            "total": len(subjects),
                        }
                        break
        except Exception:
            pass  # stack position is best-effort context only

    return result
|
|
325
|
+
|
|
326
|
+
def _auto_dep_apply(self) -> Optional[dict]:
    """Auto-apply dependency patches after a successful sync.

    Returns dep apply result dict, or None if no dep patches configured.
    """
    # A missing .bingo-deps directory means nothing is configured.
    if not os.path.isdir(os.path.join(self.path, ".bingo-deps")):
        return None
    try:
        from bingo_core.dep import DepManager
        return DepManager(self.path).apply()
    except Exception as e:
        # Best-effort: surface the failure on stderr and in the result,
        # but never let dep application break the sync itself.
        import sys as _sys
        print(f"warning: auto dep-apply failed: {e}", file=_sys.stderr)
        return {"ok": False, "warning": f"dep apply failed: {e}"}
|
|
342
|
+
|
|
343
|
+
# Lock file basenames that should be auto-resolved during sync
|
|
344
|
+
_LOCK_FILES = {"package-lock.json", "yarn.lock", "pnpm-lock.yaml"}
|
|
345
|
+
|
|
346
|
+
# Lock file -> package manager command
|
|
347
|
+
_LOCK_MANAGERS = {
|
|
348
|
+
"package-lock.json": ["npm", "install", "--package-lock-only"],
|
|
349
|
+
"yarn.lock": ["yarn", "install", "--mode", "update-lockfile"],
|
|
350
|
+
"pnpm-lock.yaml": ["pnpm", "install", "--lockfile-only"],
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
# Verification command templates by file extension.
|
|
354
|
+
# Each value is (template, kind). {path} is replaced with shlex.quote(file).
|
|
355
|
+
# Templates pass the path as argv[1] so nested quoting is not a concern.
|
|
356
|
+
_VERIFY_HINTS_BY_EXT = {
|
|
357
|
+
".py": ("python3 -m py_compile {path}", "syntax"),
|
|
358
|
+
".json": ("python3 -c 'import json,sys; json.load(open(sys.argv[1]))' {path}", "parse"),
|
|
359
|
+
".yml": ("python3 -c 'import yaml,sys; yaml.safe_load(open(sys.argv[1]))' {path}", "parse"),
|
|
360
|
+
".yaml": ("python3 -c 'import yaml,sys; yaml.safe_load(open(sys.argv[1]))' {path}", "parse"),
|
|
361
|
+
".toml": ("python3 -c 'import tomllib,sys; tomllib.load(open(sys.argv[1],\"rb\"))' {path}", "parse"),
|
|
362
|
+
".sh": ("bash -n {path}", "syntax"),
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
_PR_NUMBER_RE = re.compile(r"(?:#|pull request #)(\d+)")
|
|
366
|
+
|
|
367
|
+
def _build_patch_dependencies(self, current_name: str) -> Optional[dict]:
    """Find later patches in the stack that touch the same files as current.

    Useful for detecting cascade risk: if we're resolving a conflict in
    patch A, and patches B/C/D build on A's changes to the same files,
    the AI should consider whether its merge choice will cascade.

    Returns {current_patch, dependents: [{name, subject, position,
    overlapping_files}]} or None if no stack info available.
    """
    if not current_name:
        return None
    try:
        cfg = self._load()
        base = self._patches_base(cfg)
        if not base:
            return None
        try:
            rev_list = self.git.run(
                "rev-list", "--reverse",
                f"{base}..{cfg['patches_branch']}"
            )
        except GitError:
            return None
        commit_shas = rev_list.splitlines()
        if not commit_shas:
            return None

        # Gather position, name, subject, and touched files for every
        # commit whose subject matches the patch naming convention.
        stack = []
        for position, commit in enumerate(commit_shas, start=1):
            try:
                subject = self.git.run(
                    "log", "-1", "--format=%s", commit
                ).strip()
            except GitError:
                continue
            match = self._PATCH_SUBJECT_RE.match(subject)
            if match is None:
                continue
            try:
                name_only = self.git.run(
                    "show", "--format=", "--name-only", commit
                )
            except GitError:
                name_only = ""
            stack.append({
                "index": position,
                "name": match.group(1),
                "subject": match.group(2).strip(),
                "files": {ln for ln in name_only.splitlines() if ln},
            })

        # Locate the patch currently being resolved.
        current = None
        for entry in stack:
            if entry["name"] == current_name:
                current = entry
                break
        if current is None:
            return None

        # Any later patch sharing a file with the current one is a
        # dependent whose resolution may cascade.
        dependents = [
            {
                "name": entry["name"],
                "subject": entry["subject"],
                "position": entry["index"],
                "overlapping_files": sorted(current["files"] & entry["files"]),
            }
            for entry in stack
            if entry["index"] > current["index"]
            and current["files"] & entry["files"]
        ]
        return {
            "current_patch": current_name,
            "dependents": dependents,
        }
    except Exception:
        return None
|
|
445
|
+
|
|
446
|
+
def _build_upstream_context(self, conflicted_files: List[str]) -> Optional[dict]:
    """Find upstream commits that modified the conflicting files.

    Uses .bingo/.undo-tracking (pre-sync upstream position) as the
    baseline and the current tracking branch as the target. For each
    conflicted file, lists upstream commits between those two points.

    Returns a dict {range, total_commits, commits_touching_conflicts}
    or None if the comparison range cannot be established.
    """
    try:
        _head, old_tracking = self.state.load_undo()
    except Exception:
        return None
    if not old_tracking:
        return None

    try:
        cfg = self._load()
    except Exception:
        return None
    # During a conflict, _sync_locked rolls back the tracking branch,
    # so we use the remote-tracking ref (upstream/<branch>) which
    # reflects the fetched target position.
    new_tracking = self.git.rev_parse(f"upstream/{cfg['upstream_branch']}")
    if not new_tracking:
        new_tracking = self.git.rev_parse(cfg["tracking_branch"])
    if not new_tracking or new_tracking == old_tracking:
        return None

    # Unit-separator delimited: sha, short sha, author, epoch, subject.
    commit_map: dict = {}
    FMT = "%H%x1f%h%x1f%an%x1f%at%x1f%s"
    for fname in conflicted_files:
        try:
            out = self.git.run(
                "log",
                f"--format={FMT}",
                f"{old_tracking}..{new_tracking}",
                "--",
                fname,
            )
        except GitError:
            continue
        for row in out.splitlines():
            fields = row.split("\x1f")
            if len(fields) != 5:
                continue
            sha, short, author, ts, subject = fields
            record = commit_map.get(sha)
            if record is None:
                record = {
                    "sha": sha,
                    "short_sha": short,
                    "author": author,
                    "timestamp": int(ts) if ts.isdigit() else 0,
                    "subject": subject,
                    "files": [],
                    "pr": None,
                }
                commit_map[sha] = record
            if fname not in record["files"]:
                record["files"].append(fname)
            if record["pr"] is None:
                pr_match = self._PR_NUMBER_RE.search(subject)
                if pr_match:
                    record["pr"] = pr_match.group(1)

    total = 0
    try:
        total = self.git.rev_list_count(f"{old_tracking}..{new_tracking}")
    except Exception:
        pass

    newest_first = sorted(
        commit_map.values(), key=lambda x: x["timestamp"], reverse=True
    )
    return {
        "range": f"{old_tracking[:7]}..{new_tracking[:7]}",
        "total_commits": total,
        "commits_touching_conflicts": newest_first,
    }
|
|
525
|
+
|
|
526
|
+
def _verify_hints_for(self, files: List[str]) -> List[dict]:
    """Generate per-file verification commands by extension.

    Returns a list of dicts: {"file": str, "command": str, "kind": str}.
    Files with unknown extensions are silently skipped. Paths are passed
    through shlex.quote to stay shell-safe.
    """
    hints: List[dict] = []
    for path in files:
        ext = os.path.splitext(path)[1].lower()
        known = self._VERIFY_HINTS_BY_EXT.get(ext)
        if known is None:
            continue
        template, kind = known
        hints.append({
            "file": path,
            "command": template.format(path=shlex.quote(path)),
            "kind": kind,
        })
    return hints
|
|
543
|
+
|
|
544
|
+
def _resolve_lock_files(self, unresolved: List[str]) -> List[str]:
    """Auto-resolve lock file conflicts by accepting theirs + regenerating.

    Returns the list of still-unresolved files (lock files removed).
    """
    lock_conflicts = [
        path for path in unresolved
        if os.path.basename(path) in self._LOCK_FILES
    ]
    if not lock_conflicts:
        return unresolved

    for lock_path in lock_conflicts:
        # Accept upstream version
        self.git.run_ok("checkout", "--theirs", "--", lock_path)
        self.git.run_ok("add", lock_path)

        # Try to regenerate via package manager
        basename = os.path.basename(lock_path)
        regen_cmd = self._LOCK_MANAGERS.get(basename)
        workdir = os.path.dirname(os.path.join(self.path, lock_path)) or self.path
        has_manifest = os.path.isfile(os.path.join(workdir, "package.json"))
        if regen_cmd and shutil.which(regen_cmd[0]) and has_manifest:
            try:
                subprocess.run(
                    regen_cmd, cwd=workdir,
                    capture_output=True, text=True, timeout=120,
                )
                # Re-add after regeneration
                self.git.run_ok("add", lock_path)
            except (subprocess.TimeoutExpired, OSError):
                pass  # Keep the theirs version

    handled = set(lock_conflicts)
    return [path for path in unresolved if path not in handled]
|
|
575
|
+
|
|
215
576
|
def _build_conflict_result(
|
|
216
577
|
self,
|
|
217
578
|
conflicted_files: List[str],
|
|
@@ -328,6 +689,7 @@ class Repo:
|
|
|
328
689
|
theirs=theirs,
|
|
329
690
|
conflict_count=conflict_count,
|
|
330
691
|
merge_hint=merge_hint,
|
|
692
|
+
semantic_class=classify_conflict(ours, theirs, filepath),
|
|
331
693
|
)
|
|
332
694
|
|
|
333
695
|
def _record_sync(self, c: dict, behind: int, saved_tracking: str) -> None:
|
|
@@ -607,9 +969,12 @@ class Repo:
|
|
|
607
969
|
"reason": reason,
|
|
608
970
|
}
|
|
609
971
|
|
|
610
|
-
def doctor(self) -> dict:
|
|
972
|
+
def doctor(self, report: bool = False) -> dict:
|
|
611
973
|
"""Run health checks on the repository.
|
|
612
974
|
|
|
975
|
+
Args:
|
|
976
|
+
report: If True, include extended checks (team locks, expiry, deps).
|
|
977
|
+
|
|
613
978
|
Returns {"ok": True/False, "issues": N, "checks": [...]}
|
|
614
979
|
"""
|
|
615
980
|
c = self._load()
|
|
@@ -735,6 +1100,66 @@ class Repo:
|
|
|
735
1100
|
else:
|
|
736
1101
|
_check("config", "fail", "missing")
|
|
737
1102
|
|
|
1103
|
+
# Extended checks (--report mode)
|
|
1104
|
+
if report:
|
|
1105
|
+
# Stale locks
|
|
1106
|
+
locks = self.team.list_locks()
|
|
1107
|
+
if locks:
|
|
1108
|
+
now = datetime.now(timezone.utc)
|
|
1109
|
+
for lock in locks:
|
|
1110
|
+
locked_at = lock.get("locked_at", "")
|
|
1111
|
+
if locked_at:
|
|
1112
|
+
try:
|
|
1113
|
+
lock_dt = datetime.strptime(
|
|
1114
|
+
locked_at, "%Y-%m-%dT%H:%M:%SZ"
|
|
1115
|
+
).replace(tzinfo=timezone.utc)
|
|
1116
|
+
days = (now - lock_dt).days
|
|
1117
|
+
if days > 7:
|
|
1118
|
+
_check(
|
|
1119
|
+
f"stale_lock:{lock['patch']}",
|
|
1120
|
+
"warn",
|
|
1121
|
+
f"locked by {lock['owner']} for {days}d",
|
|
1122
|
+
)
|
|
1123
|
+
except ValueError:
|
|
1124
|
+
pass
|
|
1125
|
+
if not any(c_item["name"].startswith("stale_lock:") for c_item in checks):
|
|
1126
|
+
_check("team_locks", "pass", f"{len(locks)} active lock(s)")
|
|
1127
|
+
else:
|
|
1128
|
+
_check("team_locks", "pass", "no locks")
|
|
1129
|
+
|
|
1130
|
+
# Expired patches
|
|
1131
|
+
try:
|
|
1132
|
+
expire_result = self.patch_expire()
|
|
1133
|
+
n_expired = len(expire_result.get("expired", []))
|
|
1134
|
+
n_expiring = len(expire_result.get("expiring_soon", []))
|
|
1135
|
+
if n_expired > 0:
|
|
1136
|
+
_check("expired_patches", "warn", f"{n_expired} expired patch(es)")
|
|
1137
|
+
elif n_expiring > 0:
|
|
1138
|
+
_check("expiring_patches", "warn", f"{n_expiring} expiring soon")
|
|
1139
|
+
else:
|
|
1140
|
+
_check("patch_expiry", "pass", "none expired")
|
|
1141
|
+
except Exception:
|
|
1142
|
+
pass
|
|
1143
|
+
|
|
1144
|
+
# Dependency patches health
|
|
1145
|
+
dep_dir = os.path.join(self.path, ".bingo-deps")
|
|
1146
|
+
if os.path.isdir(dep_dir):
|
|
1147
|
+
try:
|
|
1148
|
+
from bingo_core.dep import DepManager
|
|
1149
|
+
dm = DepManager(self.path)
|
|
1150
|
+
dep_status = dm.status()
|
|
1151
|
+
if dep_status.get("ok"):
|
|
1152
|
+
total = dep_status.get("total_patches", 0)
|
|
1153
|
+
healthy = dep_status.get("healthy", 0)
|
|
1154
|
+
if healthy == total:
|
|
1155
|
+
_check("dep_patches", "pass", f"{total} patch(es) healthy")
|
|
1156
|
+
else:
|
|
1157
|
+
_check("dep_patches", "warn", f"{total - healthy}/{total} need attention")
|
|
1158
|
+
else:
|
|
1159
|
+
_check("dep_patches", "warn", dep_status.get("error", "unknown"))
|
|
1160
|
+
except Exception:
|
|
1161
|
+
pass
|
|
1162
|
+
|
|
738
1163
|
return {"ok": issues == 0, "issues": issues, "checks": checks}
|
|
739
1164
|
|
|
740
1165
|
def diff(self) -> dict:
|
|
@@ -857,7 +1282,9 @@ class Repo:
|
|
|
857
1282
|
def conflict_analyze(self) -> dict:
|
|
858
1283
|
"""Analyze current rebase conflicts.
|
|
859
1284
|
|
|
860
|
-
Returns structured info about each conflicted file
|
|
1285
|
+
Returns structured info about each conflicted file plus
|
|
1286
|
+
patch-intent context and per-file verification hints when
|
|
1287
|
+
a rebase is in progress.
|
|
861
1288
|
"""
|
|
862
1289
|
self._ensure_git_repo()
|
|
863
1290
|
|
|
@@ -871,11 +1298,23 @@ class Repo:
|
|
|
871
1298
|
current_patch = self._current_rebase_patch()
|
|
872
1299
|
conflicts = [self._extract_conflict(f) for f in conflicted]
|
|
873
1300
|
|
|
874
|
-
|
|
1301
|
+
patch_intent = self._build_patch_intent()
|
|
1302
|
+
verify = {
|
|
1303
|
+
"test_command": self.config.get("test.command") or None,
|
|
1304
|
+
"file_hints": self._verify_hints_for(conflicted),
|
|
1305
|
+
}
|
|
1306
|
+
upstream_context = self._build_upstream_context(conflicted)
|
|
1307
|
+
patch_dependencies = self._build_patch_dependencies(
|
|
1308
|
+
patch_intent.get("name", "") if patch_intent else ""
|
|
1309
|
+
)
|
|
1310
|
+
|
|
1311
|
+
result = {
|
|
875
1312
|
"ok": True,
|
|
876
1313
|
"in_rebase": True,
|
|
877
1314
|
"current_patch": current_patch,
|
|
878
1315
|
"conflicts": [c.to_dict() for c in conflicts],
|
|
1316
|
+
"patch_intent": patch_intent,
|
|
1317
|
+
"verify": verify,
|
|
879
1318
|
"resolution_steps": [
|
|
880
1319
|
"1. Read ours (upstream) and theirs (your patch) for each conflict",
|
|
881
1320
|
"2. Write the merged file content (include both changes where possible)",
|
|
@@ -885,8 +1324,38 @@ class Repo:
|
|
|
885
1324
|
"6. To abort instead: git rebase --abort",
|
|
886
1325
|
],
|
|
887
1326
|
}
|
|
1327
|
+
if upstream_context is not None:
|
|
1328
|
+
result["upstream_context"] = upstream_context
|
|
1329
|
+
if patch_dependencies is not None:
|
|
1330
|
+
result["patch_dependencies"] = patch_dependencies
|
|
1331
|
+
|
|
1332
|
+
# Decision memory: look up previous resolutions for this patch.
|
|
1333
|
+
patch_name = patch_intent.get("name", "") if patch_intent else ""
|
|
1334
|
+
if patch_name:
|
|
1335
|
+
memory_entries = []
|
|
1336
|
+
for conflict in conflicts:
|
|
1337
|
+
prior = self.decisions.lookup(
|
|
1338
|
+
patch_name,
|
|
1339
|
+
file=conflict.file,
|
|
1340
|
+
semantic_class=conflict.semantic_class,
|
|
1341
|
+
limit=3,
|
|
1342
|
+
)
|
|
1343
|
+
if prior:
|
|
1344
|
+
memory_entries.append({
|
|
1345
|
+
"file": conflict.file,
|
|
1346
|
+
"semantic_class": conflict.semantic_class,
|
|
1347
|
+
"previous_decisions": prior,
|
|
1348
|
+
})
|
|
1349
|
+
if memory_entries:
|
|
1350
|
+
result["decision_memory"] = {
|
|
1351
|
+
"patch": patch_name,
|
|
1352
|
+
"entries": memory_entries,
|
|
1353
|
+
}
|
|
1354
|
+
return result
|
|
888
1355
|
|
|
889
|
-
def conflict_resolve(
|
|
1356
|
+
def conflict_resolve(
|
|
1357
|
+
self, file_path: str, content: str = "", verify: bool = False
|
|
1358
|
+
) -> dict:
|
|
890
1359
|
"""Resolve a single conflicted file and continue rebase if possible.
|
|
891
1360
|
|
|
892
1361
|
Args:
|
|
@@ -925,6 +1394,9 @@ class Repo:
|
|
|
925
1394
|
f"Unmerged files: {', '.join(unmerged) if unmerged else '(none)'}"
|
|
926
1395
|
)
|
|
927
1396
|
|
|
1397
|
+
# Capture pre-resolve conflict snapshot (for decision memory).
|
|
1398
|
+
pre_conflict = self._extract_conflict(rel_path)
|
|
1399
|
+
|
|
928
1400
|
# Write content if provided
|
|
929
1401
|
if content:
|
|
930
1402
|
full_path = str(resolved)
|
|
@@ -938,6 +1410,41 @@ class Repo:
|
|
|
938
1410
|
if not self.git.run_ok("add", rel_path):
|
|
939
1411
|
raise BingoError(f"Failed to stage file: {rel_path}")
|
|
940
1412
|
|
|
1413
|
+
# Record decision memory (best-effort; silent on failure).
|
|
1414
|
+
try:
|
|
1415
|
+
intent = self._build_patch_intent()
|
|
1416
|
+
patch_name = intent.get("name", "") if intent else ""
|
|
1417
|
+
if patch_name:
|
|
1418
|
+
resolved_content = content
|
|
1419
|
+
if not resolved_content:
|
|
1420
|
+
try:
|
|
1421
|
+
with open(str(resolved)) as f:
|
|
1422
|
+
resolved_content = f.read()
|
|
1423
|
+
except (IOError, OSError):
|
|
1424
|
+
resolved_content = ""
|
|
1425
|
+
strategy = detect_resolution_strategy(
|
|
1426
|
+
resolved_content, pre_conflict.ours, pre_conflict.theirs
|
|
1427
|
+
)
|
|
1428
|
+
# Pick the first upstream commit touching this file as the
|
|
1429
|
+
# "triggering" upstream change (best-effort context).
|
|
1430
|
+
uc = self._build_upstream_context([rel_path])
|
|
1431
|
+
upstream_sha = None
|
|
1432
|
+
upstream_subject = None
|
|
1433
|
+
if uc and uc.get("commits_touching_conflicts"):
|
|
1434
|
+
top = uc["commits_touching_conflicts"][0]
|
|
1435
|
+
upstream_sha = top.get("sha")
|
|
1436
|
+
upstream_subject = top.get("subject")
|
|
1437
|
+
self.decisions.record(
|
|
1438
|
+
patch_name,
|
|
1439
|
+
file=rel_path,
|
|
1440
|
+
semantic_class=pre_conflict.semantic_class,
|
|
1441
|
+
resolution_strategy=strategy,
|
|
1442
|
+
upstream_sha=upstream_sha,
|
|
1443
|
+
upstream_subject=upstream_subject,
|
|
1444
|
+
)
|
|
1445
|
+
except Exception:
|
|
1446
|
+
pass # memory is best-effort; never block rebase on it
|
|
1447
|
+
|
|
941
1448
|
# Check remaining unmerged files
|
|
942
1449
|
remaining = self.git.ls_files_unmerged()
|
|
943
1450
|
if remaining:
|
|
@@ -983,12 +1490,35 @@ class Repo:
|
|
|
983
1490
|
"sync_complete": False,
|
|
984
1491
|
}
|
|
985
1492
|
# Rebase fully complete
|
|
986
|
-
|
|
1493
|
+
result_dict = {
|
|
987
1494
|
"ok": True,
|
|
988
1495
|
"resolved": rel_path,
|
|
989
1496
|
"rebase_continued": True,
|
|
990
1497
|
"sync_complete": True,
|
|
991
1498
|
}
|
|
1499
|
+
if verify:
|
|
1500
|
+
test_cmd = self.config.get("test.command")
|
|
1501
|
+
if not test_cmd:
|
|
1502
|
+
result_dict["verify_result"] = {
|
|
1503
|
+
"skipped": True,
|
|
1504
|
+
"reason": "no test.command configured",
|
|
1505
|
+
}
|
|
1506
|
+
else:
|
|
1507
|
+
try:
|
|
1508
|
+
t = self.test()
|
|
1509
|
+
vr = {
|
|
1510
|
+
"test": t.get("test", "fail"),
|
|
1511
|
+
"command": t.get("command", test_cmd),
|
|
1512
|
+
}
|
|
1513
|
+
if t.get("output"):
|
|
1514
|
+
vr["output"] = t["output"]
|
|
1515
|
+
result_dict["verify_result"] = vr
|
|
1516
|
+
except BingoError as e:
|
|
1517
|
+
result_dict["verify_result"] = {
|
|
1518
|
+
"skipped": True,
|
|
1519
|
+
"reason": str(e),
|
|
1520
|
+
}
|
|
1521
|
+
return result_dict
|
|
992
1522
|
|
|
993
1523
|
# rebase --continue failed -- check why
|
|
994
1524
|
new_unmerged = self.git.ls_files_unmerged()
|
|
@@ -1187,15 +1717,22 @@ class Repo:
|
|
|
1187
1717
|
"test_error": str(e),
|
|
1188
1718
|
}
|
|
1189
1719
|
|
|
1190
|
-
|
|
1720
|
+
result = {
|
|
1191
1721
|
"ok": True,
|
|
1192
1722
|
"synced": True,
|
|
1193
1723
|
"behind_before": behind,
|
|
1194
1724
|
"patches_rebased": patch_count,
|
|
1195
1725
|
}
|
|
1726
|
+
dep = self._auto_dep_apply()
|
|
1727
|
+
if dep is not None:
|
|
1728
|
+
result["dep_apply"] = dep
|
|
1729
|
+
return result
|
|
1196
1730
|
|
|
1197
1731
|
# Rebase failed -- check if rerere auto-resolved
|
|
1198
1732
|
unresolved = self.git.ls_files_unmerged()
|
|
1733
|
+
# Auto-resolve lock file conflicts (package-lock.json, yarn.lock, etc.)
|
|
1734
|
+
if unresolved:
|
|
1735
|
+
unresolved = self._resolve_lock_files(unresolved)
|
|
1199
1736
|
if not unresolved:
|
|
1200
1737
|
# rerere resolved everything -- try to continue
|
|
1201
1738
|
rerere_ok = True
|
|
@@ -1233,13 +1770,17 @@ class Repo:
|
|
|
1233
1770
|
"rerere_resolved": True,
|
|
1234
1771
|
},
|
|
1235
1772
|
)
|
|
1236
|
-
|
|
1773
|
+
result = {
|
|
1237
1774
|
"ok": True,
|
|
1238
1775
|
"synced": True,
|
|
1239
1776
|
"behind_before": behind,
|
|
1240
1777
|
"patches_rebased": patch_count,
|
|
1241
1778
|
"rerere_resolved": True,
|
|
1242
1779
|
}
|
|
1780
|
+
dep = self._auto_dep_apply()
|
|
1781
|
+
if dep is not None:
|
|
1782
|
+
result["dep_apply"] = dep
|
|
1783
|
+
return result
|
|
1243
1784
|
|
|
1244
1785
|
# Rollback tracking branch
|
|
1245
1786
|
self.git.run_ok("branch", "-f", c["tracking_branch"], saved_tracking)
|
|
@@ -1349,13 +1890,17 @@ class Repo:
|
|
|
1349
1890
|
# Clean rebase
|
|
1350
1891
|
self.state.clear_circuit_breaker()
|
|
1351
1892
|
self._record_sync(c, behind, saved_tracking)
|
|
1352
|
-
|
|
1893
|
+
result = {
|
|
1353
1894
|
"ok": True,
|
|
1354
1895
|
"action": "synced",
|
|
1355
1896
|
"behind_before": behind,
|
|
1356
1897
|
"patches_rebased": patch_count,
|
|
1357
1898
|
"conflicts_resolved": 0,
|
|
1358
1899
|
}
|
|
1900
|
+
dep = self._auto_dep_apply()
|
|
1901
|
+
if dep is not None:
|
|
1902
|
+
result["dep_apply"] = dep
|
|
1903
|
+
return result
|
|
1359
1904
|
|
|
1360
1905
|
# Enter conflict resolution loop
|
|
1361
1906
|
conflicts_resolved = 0
|
|
@@ -1391,6 +1936,21 @@ class Repo:
|
|
|
1391
1936
|
if not unresolved:
|
|
1392
1937
|
continue
|
|
1393
1938
|
|
|
1939
|
+
# Auto-resolve lock file conflicts before reporting
|
|
1940
|
+
unresolved = self._resolve_lock_files(unresolved)
|
|
1941
|
+
if not unresolved:
|
|
1942
|
+
# Lock files were the only conflicts — try to continue
|
|
1943
|
+
env = os.environ.copy()
|
|
1944
|
+
env["GIT_EDITOR"] = "true"
|
|
1945
|
+
cont_result = subprocess.run(
|
|
1946
|
+
["git", "rebase", "--continue"],
|
|
1947
|
+
cwd=self.path,
|
|
1948
|
+
capture_output=True, text=True, env=env,
|
|
1949
|
+
)
|
|
1950
|
+
if cont_result.returncode == 0:
|
|
1951
|
+
conflicts_resolved += 1
|
|
1952
|
+
continue
|
|
1953
|
+
|
|
1394
1954
|
# Real unresolved conflicts -- report and stop
|
|
1395
1955
|
self.git.run_ok("branch", "-f", c["tracking_branch"], saved_tracking)
|
|
1396
1956
|
|
|
@@ -1415,13 +1975,17 @@ class Repo:
|
|
|
1415
1975
|
# If we get here, all conflicts were auto-resolved by rerere
|
|
1416
1976
|
self.state.clear_circuit_breaker()
|
|
1417
1977
|
self._record_sync(c, behind, saved_tracking)
|
|
1418
|
-
|
|
1978
|
+
result = {
|
|
1419
1979
|
"ok": True,
|
|
1420
1980
|
"action": "synced_with_rerere",
|
|
1421
1981
|
"behind_before": behind,
|
|
1422
1982
|
"patches_rebased": patch_count,
|
|
1423
1983
|
"conflicts_auto_resolved": conflicts_resolved,
|
|
1424
1984
|
}
|
|
1985
|
+
dep = self._auto_dep_apply()
|
|
1986
|
+
if dep is not None:
|
|
1987
|
+
result["dep_apply"] = dep
|
|
1988
|
+
return result
|
|
1425
1989
|
|
|
1426
1990
|
def undo(self) -> dict:
|
|
1427
1991
|
"""Undo the last sync operation.
|
|
@@ -1664,6 +2228,15 @@ class Repo:
|
|
|
1664
2228
|
if m:
|
|
1665
2229
|
pname = m.group(1)
|
|
1666
2230
|
|
|
2231
|
+
# Lock enforcement — check by parsed name or by target
|
|
2232
|
+
lock_name = pname or target
|
|
2233
|
+
if lock_name and self.team.is_locked_by_other(lock_name):
|
|
2234
|
+
lock = self.team.get_lock(lock_name)
|
|
2235
|
+
raise BingoError(
|
|
2236
|
+
f"Patch '{lock_name}' is locked by {lock['owner']}. "
|
|
2237
|
+
"They must unlock it first."
|
|
2238
|
+
)
|
|
2239
|
+
|
|
1667
2240
|
if self.git.current_branch() != c["patches_branch"]:
|
|
1668
2241
|
self.git.run("checkout", c["patches_branch"])
|
|
1669
2242
|
|
|
@@ -1695,6 +2268,17 @@ class Repo:
|
|
|
1695
2268
|
)
|
|
1696
2269
|
hash_val = self._resolve_patch(c, target)
|
|
1697
2270
|
|
|
2271
|
+
# Lock enforcement — check by parsed name or by target
|
|
2272
|
+
subject_chk = self.git.run("log", "-1", "--format=%s", hash_val)
|
|
2273
|
+
m_chk = re.match(r"^\[bl\] ([^:]+):", subject_chk)
|
|
2274
|
+
lock_name_chk = m_chk.group(1) if m_chk else target
|
|
2275
|
+
if lock_name_chk and self.team.is_locked_by_other(lock_name_chk):
|
|
2276
|
+
lock = self.team.get_lock(lock_name_chk)
|
|
2277
|
+
raise BingoError(
|
|
2278
|
+
f"Patch '{lock_name_chk}' is locked by {lock['owner']}. "
|
|
2279
|
+
"They must unlock it first."
|
|
2280
|
+
)
|
|
2281
|
+
|
|
1698
2282
|
has_staged = not self.git.run_ok("diff", "--cached", "--quiet")
|
|
1699
2283
|
if not has_staged:
|
|
1700
2284
|
raise BingoError(
|
|
@@ -2048,6 +2632,444 @@ class Repo:
|
|
|
2048
2632
|
self.state.patch_meta_set(target, key, value)
|
|
2049
2633
|
return {"ok": True, "patch": target, "set": key, "value": value}
|
|
2050
2634
|
|
|
2635
|
+
# -- Team / Locking --
|
|
2636
|
+
|
|
2637
|
+
def patch_lock(self, name: str, reason: str = "") -> dict:
    """Lock a patch for exclusive editing.

    Returns {"ok": True, "patch": ..., "owner": ..., "locked_at": ...}
    """
    cfg = self._load()
    # Raises if the patch doesn't exist; we only lock known patches.
    self._resolve_patch(cfg, name)
    return self.team.lock(name, reason=reason)
|
|
2646
|
+
|
|
2647
|
+
def patch_unlock(self, name: str, force: bool = False) -> dict:
|
|
2648
|
+
"""Unlock a patch.
|
|
2649
|
+
|
|
2650
|
+
Returns {"ok": True, "patch": ..., "owner": ...}
|
|
2651
|
+
"""
|
|
2652
|
+
c = self._load()
|
|
2653
|
+
self._resolve_patch(c, name)
|
|
2654
|
+
return self.team.unlock(name, force=force)
|
|
2655
|
+
|
|
2656
|
+
# -- Smart Patch Management --
|
|
2657
|
+
|
|
2658
|
+
    def patch_check(self, name: str = "") -> dict:
        """Check if patches are still needed (obsolescence detection).

        For each patch, checks whether upstream now contains equivalent changes.
        Heuristic: apply patch diff to current upstream — if it produces no change,
        the patch is obsolete.

        Args:
            name: When non-empty, restrict the check to this single patch.

        Returns {"ok": True, "patches": [{"name", "status", "reason"}]}
        Raises:
            BingoError: if a specific ``name`` was given but no patch matches it.
        """
        c = self._load()
        base = self._patches_base(c)
        # No recorded base means there is no patch stack to analyze.
        if not base:
            return {"ok": True, "patches": [], "count": 0}

        patches = self.git.log_patches(base, c["patches_branch"])
        if not patches:
            return {"ok": True, "patches": [], "count": 0}

        # If a specific name given, filter
        if name:
            patches = [p for p in patches if p.name == name]
            if not patches:
                raise BingoError(f"Patch '{name}' not found.")

        # Get current upstream tip
        tracking = c.get("tracking_branch", DEFAULT_TRACKING)
        upstream_head = self.git.rev_parse(tracking)
        # Without a resolvable tracking ref we cannot compare — report every
        # patch as "unknown" rather than guessing.
        if not upstream_head:
            return {
                "ok": True,
                "patches": [
                    {"name": p.name, "status": "unknown", "reason": "No upstream tracking branch"}
                    for p in patches
                ],
                "count": len(patches),
            }

        results = []
        for p in patches:
            try:
                # Get the files this patch touches
                diff_output = self.git.run(
                    "diff", "--name-only", f"{p.hash}^", p.hash, check=False
                )
                patch_files = [f for f in diff_output.splitlines() if f.strip()]

                if not patch_files:
                    results.append({"name": p.name, "status": "active", "reason": "No files changed"})
                    continue

                # Check if upstream already contains equivalent changes
                # If it applies but produces no diff, the changes are already upstream
                try:
                    # Check if the patch's changes already exist at upstream:
                    # every touched file must be byte-identical between the
                    # patch commit and the upstream tip.
                    all_match = True
                    for pf in patch_files:
                        # Get file content at patch commit
                        # NOTE(review): ``check=False`` presumably suppresses the
                        # raise on failure, so these ``except GitError`` arms may
                        # be unreachable — confirm Git.run's contract.
                        try:
                            content_at_patch = self.git.run(
                                "show", f"{p.hash}:{pf}", check=False
                            )
                        except GitError:
                            content_at_patch = ""

                        # Get file content at upstream
                        try:
                            content_at_upstream = self.git.run(
                                "show", f"{tracking}:{pf}", check=False
                            )
                        except GitError:
                            content_at_upstream = ""

                        # If upstream already has the same content as post-patch,
                        # this patch is obsolete for this file
                        if content_at_upstream != content_at_patch:
                            all_match = False
                            break

                    if all_match:
                        results.append({
                            "name": p.name,
                            "status": "obsolete",
                            "reason": "Upstream contains equivalent changes",
                        })
                    else:
                        # Check if upstream changed same files (potential conflict)
                        upstream_changed = self.git.run(
                            "diff", "--name-only", base, tracking, "--", *patch_files,
                            check=False,
                        )
                        if upstream_changed.strip():
                            results.append({
                                "name": p.name,
                                "status": "active",
                                "reason": "Upstream also modified these files — review recommended",
                            })
                        else:
                            results.append({
                                "name": p.name,
                                "status": "active",
                                "reason": "Patch still applies unique changes",
                            })
                except GitError:
                    # Comparison failed mid-way — keep the patch, flag it.
                    results.append({"name": p.name, "status": "active", "reason": "Could not compare"})

            except GitError:
                # Could not even enumerate the patch's files.
                results.append({"name": p.name, "status": "unknown", "reason": "Error analyzing patch"})

        return {"ok": True, "patches": results, "count": len(results)}
|
|
2767
|
+
|
|
2768
|
+
def patch_upstream(self, name: str) -> dict:
|
|
2769
|
+
"""Export a patch as a clean PR-ready diff for upstream submission.
|
|
2770
|
+
|
|
2771
|
+
Strips [bl] prefix and git metadata — produces a clean diff + description.
|
|
2772
|
+
|
|
2773
|
+
Returns {"ok": True, "patch": ..., "diff": ..., "description": ..., "files": [...]}
|
|
2774
|
+
"""
|
|
2775
|
+
c = self._load()
|
|
2776
|
+
hash_val = self._resolve_patch(c, name)
|
|
2777
|
+
|
|
2778
|
+
# Get commit subject and strip [bl] prefix
|
|
2779
|
+
subject = self.git.run("log", "-1", "--format=%s", hash_val)
|
|
2780
|
+
description = subject
|
|
2781
|
+
m = re.match(r"^\[bl\] [^:]+:\s*(.*)", subject)
|
|
2782
|
+
if m:
|
|
2783
|
+
description = m.group(1)
|
|
2784
|
+
|
|
2785
|
+
# Get commit body (if any)
|
|
2786
|
+
body = self.git.run("log", "-1", "--format=%b", hash_val, check=False).strip()
|
|
2787
|
+
if body:
|
|
2788
|
+
description = f"{description}\n\n{body}"
|
|
2789
|
+
|
|
2790
|
+
# Generate clean diff
|
|
2791
|
+
diff = self.git.run("diff", f"{hash_val}^", hash_val, check=False)
|
|
2792
|
+
|
|
2793
|
+
# Get file list
|
|
2794
|
+
files_output = self.git.run(
|
|
2795
|
+
"diff", "--name-only", f"{hash_val}^", hash_val, check=False
|
|
2796
|
+
)
|
|
2797
|
+
files = [f for f in files_output.splitlines() if f.strip()]
|
|
2798
|
+
|
|
2799
|
+
# Get stats
|
|
2800
|
+
stat = self.git.run(
|
|
2801
|
+
"diff", "--stat", f"{hash_val}^", hash_val, check=False
|
|
2802
|
+
).strip()
|
|
2803
|
+
|
|
2804
|
+
return {
|
|
2805
|
+
"ok": True,
|
|
2806
|
+
"patch": name,
|
|
2807
|
+
"diff": diff,
|
|
2808
|
+
"description": description,
|
|
2809
|
+
"files": files,
|
|
2810
|
+
"stats": stat,
|
|
2811
|
+
}
|
|
2812
|
+
|
|
2813
|
+
def patch_expire(self) -> dict:
|
|
2814
|
+
"""List patches that have passed or are approaching their expiry date.
|
|
2815
|
+
|
|
2816
|
+
Returns {"ok": True, "expired": [...], "expiring_soon": [...], "active": [...]}
|
|
2817
|
+
"""
|
|
2818
|
+
c = self._load()
|
|
2819
|
+
base = self._patches_base(c)
|
|
2820
|
+
if not base:
|
|
2821
|
+
return {"ok": True, "expired": [], "expiring_soon": [], "active": [], "count": 0}
|
|
2822
|
+
|
|
2823
|
+
patches = self.git.log_patches(base, c["patches_branch"])
|
|
2824
|
+
now = datetime.now(timezone.utc)
|
|
2825
|
+
expired = []
|
|
2826
|
+
expiring_soon = []
|
|
2827
|
+
active = []
|
|
2828
|
+
|
|
2829
|
+
for p in patches:
|
|
2830
|
+
meta = self.state.patch_meta_get(p.name)
|
|
2831
|
+
expires_str = meta.get("expires")
|
|
2832
|
+
if not expires_str:
|
|
2833
|
+
active.append({"name": p.name, "expires": None, "status": "no_expiry"})
|
|
2834
|
+
continue
|
|
2835
|
+
|
|
2836
|
+
try:
|
|
2837
|
+
expires_dt = datetime.strptime(expires_str, "%Y-%m-%d").replace(
|
|
2838
|
+
tzinfo=timezone.utc
|
|
2839
|
+
)
|
|
2840
|
+
except ValueError:
|
|
2841
|
+
try:
|
|
2842
|
+
expires_dt = datetime.strptime(
|
|
2843
|
+
expires_str, "%Y-%m-%dT%H:%M:%SZ"
|
|
2844
|
+
).replace(tzinfo=timezone.utc)
|
|
2845
|
+
except ValueError:
|
|
2846
|
+
active.append({"name": p.name, "expires": expires_str, "status": "invalid_date"})
|
|
2847
|
+
continue
|
|
2848
|
+
|
|
2849
|
+
days_left = (expires_dt - now).days
|
|
2850
|
+
entry = {"name": p.name, "expires": expires_str, "days_left": days_left}
|
|
2851
|
+
|
|
2852
|
+
if days_left < 0:
|
|
2853
|
+
entry["status"] = "expired"
|
|
2854
|
+
expired.append(entry)
|
|
2855
|
+
elif days_left <= 7:
|
|
2856
|
+
entry["status"] = "expiring_soon"
|
|
2857
|
+
expiring_soon.append(entry)
|
|
2858
|
+
else:
|
|
2859
|
+
entry["status"] = "active"
|
|
2860
|
+
active.append(entry)
|
|
2861
|
+
|
|
2862
|
+
return {
|
|
2863
|
+
"ok": True,
|
|
2864
|
+
"expired": expired,
|
|
2865
|
+
"expiring_soon": expiring_soon,
|
|
2866
|
+
"active": active,
|
|
2867
|
+
"count": len(expired) + len(expiring_soon),
|
|
2868
|
+
}
|
|
2869
|
+
|
|
2870
|
+
def patch_stats(self) -> dict:
|
|
2871
|
+
"""Get health metrics for all patches.
|
|
2872
|
+
|
|
2873
|
+
Returns {"ok": True, "patches": [{"name", "age_days", "files", "insertions",
|
|
2874
|
+
"deletions", "sync_conflicts"}]}
|
|
2875
|
+
"""
|
|
2876
|
+
c = self._load()
|
|
2877
|
+
base = self._patches_base(c)
|
|
2878
|
+
if not base:
|
|
2879
|
+
return {"ok": True, "patches": [], "count": 0}
|
|
2880
|
+
|
|
2881
|
+
patches = self.git.log_patches(base, c["patches_branch"])
|
|
2882
|
+
now = datetime.now(timezone.utc)
|
|
2883
|
+
|
|
2884
|
+
# Load sync history for conflict frequency analysis
|
|
2885
|
+
sync_history = self.state.get_sync_history()
|
|
2886
|
+
syncs = sync_history.get("syncs", [])
|
|
2887
|
+
|
|
2888
|
+
results = []
|
|
2889
|
+
for p in patches:
|
|
2890
|
+
meta = self.state.patch_meta_get(p.name)
|
|
2891
|
+
|
|
2892
|
+
# Compute age
|
|
2893
|
+
created_str = meta.get("created", "")
|
|
2894
|
+
age_days = -1
|
|
2895
|
+
if created_str:
|
|
2896
|
+
try:
|
|
2897
|
+
created_dt = datetime.strptime(
|
|
2898
|
+
created_str, "%Y-%m-%dT%H:%M:%SZ"
|
|
2899
|
+
).replace(tzinfo=timezone.utc)
|
|
2900
|
+
age_days = (now - created_dt).days
|
|
2901
|
+
except ValueError:
|
|
2902
|
+
pass
|
|
2903
|
+
|
|
2904
|
+
# Count sync conflicts — look for syncs where this patch name
|
|
2905
|
+
# appeared and the sync had issues
|
|
2906
|
+
sync_count = 0
|
|
2907
|
+
for sync in syncs:
|
|
2908
|
+
for sp in sync.get("patches", []):
|
|
2909
|
+
if sp.get("name") == p.name:
|
|
2910
|
+
sync_count += 1
|
|
2911
|
+
|
|
2912
|
+
# Lock info
|
|
2913
|
+
lock = self.team.get_lock(p.name)
|
|
2914
|
+
|
|
2915
|
+
entry = {
|
|
2916
|
+
"name": p.name,
|
|
2917
|
+
"age_days": age_days,
|
|
2918
|
+
"files": p.files,
|
|
2919
|
+
"insertions": p.insertions,
|
|
2920
|
+
"deletions": p.deletions,
|
|
2921
|
+
"status": meta.get("status", "permanent"),
|
|
2922
|
+
"owner": meta.get("owner", ""),
|
|
2923
|
+
"locked_by": lock["owner"] if lock else "",
|
|
2924
|
+
"syncs_survived": sync_count,
|
|
2925
|
+
}
|
|
2926
|
+
results.append(entry)
|
|
2927
|
+
|
|
2928
|
+
return {"ok": True, "patches": results, "count": len(results)}
|
|
2929
|
+
|
|
2930
|
+
# -- Report --
|
|
2931
|
+
|
|
2932
|
+
    def report(self) -> dict:
        """Generate a comprehensive markdown health report.

        Aggregates status, patches, stats, expiry, locks, history, and deps.
        Each section is best-effort: a failure in one section is swallowed so
        the rest of the report can still be produced.

        Returns {"ok": True, "report": "<markdown>", "summary": {...}}
        """
        now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
        lines = ["# Fork Health Report", f"Generated: {now}", ""]

        # Collected alert strings and summary numbers, filled in per section.
        alerts = []
        patch_count = 0
        behind = 0

        # Overview — the one section that reports its own error inline
        # instead of being silently skipped.
        try:
            st = self.status()
            behind = st.get("behind", 0)
            patch_count = st.get("patch_count", 0)
            upstream = st.get("upstream_url", "?")
            branch = st.get("upstream_branch", "?")
            action = st.get("recommended_action", "?")
            lines.append("## Overview")
            lines.append(f"- Upstream: {upstream} ({branch})")
            lines.append(f"- Behind: {behind} commit(s)")
            lines.append(f"- Patches: {patch_count}")
            lines.append(f"- Recommended action: {action}")
            last_sync = st.get("last_sync", "")
            if last_sync:
                lines.append(f"- Last sync: {last_sync}")
            lines.append("")
        except Exception as e:
            lines.append(f"## Overview\n- Error: {e}\n")

        # Patch Stack — tabulates per-patch metrics from patch_stats().
        try:
            stats = self.patch_stats()
            stat_patches = stats.get("patches", [])
            if stat_patches:
                lines.append("## Patch Stack")
                lines.append("| # | Name | Age | Size | Syncs | Status | Owner |")
                lines.append("|---|------|-----|------|-------|--------|-------|")
                for i, p in enumerate(stat_patches, 1):
                    age = f"{p['age_days']}d" if p.get("age_days", -1) >= 0 else "?"
                    size = f"+{p.get('insertions', 0)}/-{p.get('deletions', 0)}"
                    syncs = str(p.get("syncs_survived", 0))
                    status = p.get("status", "")
                    owner = p.get("owner", "") or p.get("locked_by", "") or "-"
                    lines.append(f"| {i} | {p['name']} | {age} | {size} | {syncs} | {status} | {owner} |")
                lines.append("")
        except Exception:
            # Best-effort: skip the section on any failure.
            pass

        # Expiry — contributes alert lines only, no section of its own.
        try:
            expire = self.patch_expire()
            expired = expire.get("expired", [])
            expiring = expire.get("expiring_soon", [])
            for e in expired:
                alerts.append(f"[EXPIRED] patch \"{e['name']}\" expired {e['expires']}")
            for e in expiring:
                alerts.append(f"[EXPIRING] patch \"{e['name']}\" expires {e['expires']} ({e['days_left']}d left)")
        except Exception:
            pass

        # Team locks — flag locks held for more than 7 days as stale.
        try:
            locks = self.team.list_locks()
            if locks:
                now_dt = datetime.now(timezone.utc)
                for lock in locks:
                    locked_at = lock.get("locked_at", "")
                    if locked_at:
                        try:
                            lock_dt = datetime.strptime(
                                locked_at, "%Y-%m-%dT%H:%M:%SZ"
                            ).replace(tzinfo=timezone.utc)
                            days = (now_dt - lock_dt).days
                            if days > 7:
                                alerts.append(
                                    f"[STALE LOCK] patch \"{lock['patch']}\" "
                                    f"locked by {lock['owner']} for {days}d"
                                )
                        except ValueError:
                            # Unparseable lock timestamp — cannot judge staleness.
                            pass
        except Exception:
            pass

        # Alerts — emitted after all alert-producing sections ran.
        if alerts:
            lines.append("## Alerts")
            for a in alerts:
                lines.append(f"- {a}")
            lines.append("")

        # Sync History (last 5)
        try:
            history = self.state.get_sync_history()
            syncs = history.get("syncs", [])
            if syncs:
                lines.append("## Sync History (last 5)")
                for sync in syncs[-5:]:
                    ts = sync.get("timestamp", "?")
                    n = sync.get("upstream_commits_integrated", 0)
                    p_count = len(sync.get("patches", []))
                    lines.append(f"- {ts}: {n} upstream commit(s), {p_count} patch(es)")
                lines.append("")
        except Exception:
            pass

        # Dependencies — only when a .bingo-deps directory exists; the import
        # is deferred so repos without dep management never load DepManager.
        try:
            dep_dir = os.path.join(self.path, ".bingo-deps")
            if os.path.isdir(dep_dir):
                from bingo_core.dep import DepManager
                dm = DepManager(self.path)
                dep_list = dm.list_patches()
                dep_pkgs = dep_list.get("packages", [])
                if dep_pkgs:
                    lines.append("## Dependencies")
                    for pkg in dep_pkgs:
                        pname = pkg.get("name", "?")
                        ver = pkg.get("version", "?")
                        n_patches = len(pkg.get("patches", []))
                        lines.append(f"- {pname}@{ver}: {n_patches} patch(es)")
                    lines.append("")
        except Exception:
            pass

        report_text = "\n".join(lines)

        return {
            "ok": True,
            "report": report_text,
            "summary": {
                "patches": patch_count,
                "behind": behind,
                "alerts": len(alerts),
            },
        }
|
|
3072
|
+
|
|
2051
3073
|
# -- Config --
|
|
2052
3074
|
|
|
2053
3075
|
def config_get(self, key: str) -> dict:
|