bingo-light 2.1.2 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.en.md +17 -7
- package/README.md +30 -4
- package/bingo-light +321 -11
- package/bingo_core/__init__.py +9 -1
- package/bingo_core/decisions.py +167 -0
- package/bingo_core/dep.py +385 -25
- package/bingo_core/dep_fork.py +268 -0
- package/bingo_core/models.py +1 -0
- package/bingo_core/repo.py +1031 -9
- package/bingo_core/semantic.py +85 -0
- package/bingo_core/state.py +1 -1
- package/bingo_core/team.py +170 -0
- package/completions/bingo-light.bash +14 -4
- package/completions/bingo-light.fish +23 -2
- package/completions/bingo-light.zsh +18 -2
- package/mcp-server.py +245 -7
- package/package.json +1 -1
package/bingo_core/decisions.py
ADDED
@@ -0,0 +1,167 @@
+"""
+bingo_core.decisions — per-patch conflict-resolution memory.
+
+Complements git rerere (which keys by literal conflict text) with a
+pattern-level memory: records how patch X was resolved against upstream
+commit Y, keyed by (patch_name, file, semantic_class). When the same
+patch conflicts again in a similar pattern, previous decisions are
+surfaced to the AI during conflict-analyze so it can consider the
+prior choice.
+
+Storage: .bingo/decisions/<patch-name>.json, one file per patch.
+This avoids hot contention and keeps each patch's history isolated.
+
+Python 3.8+ stdlib only. No external dependencies.
+"""
+
+from __future__ import annotations
+
+import json
+import os
+import re
+from datetime import datetime, timezone
+from typing import List, Optional
+
+# Patch name constraint mirrors PATCH_NAME_RE to keep filenames safe.
+_SAFE_PATCH_NAME = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9_-]*$")
+MAX_DECISIONS_PER_PATCH = 50
+
+
+class DecisionMemory:
+    """Per-patch decision log stored under .bingo/decisions/."""
+
+    def __init__(self, repo_path: str):
+        self.repo_path = repo_path
+        self.dir = os.path.join(repo_path, ".bingo", "decisions")
+
+    def _path_for(self, patch_name: str) -> Optional[str]:
+        if not patch_name or not _SAFE_PATCH_NAME.match(patch_name):
+            return None
+        return os.path.join(self.dir, f"{patch_name}.json")
+
+    def _load_all(self, patch_name: str) -> List[dict]:
+        path = self._path_for(patch_name)
+        if not path or not os.path.isfile(path):
+            return []
+        try:
+            with open(path) as f:
+                data = json.load(f)
+        except (IOError, OSError, json.JSONDecodeError):
+            return []
+        decisions = data.get("decisions", [])
+        return decisions if isinstance(decisions, list) else []
+
+    def _save_all(self, patch_name: str, decisions: List[dict]) -> None:
+        path = self._path_for(patch_name)
+        if not path:
+            return
+        os.makedirs(self.dir, exist_ok=True)
+        tmp = path + ".tmp"
+        try:
+            with open(tmp, "w") as f:
+                json.dump(
+                    {"patch": patch_name, "decisions": decisions},
+                    f, indent=2,
+                )
+            os.replace(tmp, path)
+        except (IOError, OSError):
+            try:
+                os.unlink(tmp)
+            except OSError:
+                pass
+
+    def record(
+        self,
+        patch_name: str,
+        file: str,
+        semantic_class: str,
+        resolution_strategy: str,
+        upstream_sha: Optional[str] = None,
+        upstream_subject: Optional[str] = None,
+        notes: str = "",
+    ) -> None:
+        """Append one decision for a patch.
+
+        Silently no-ops if patch_name is empty or invalid. The newest
+        MAX_DECISIONS_PER_PATCH entries are retained; older entries are
+        dropped FIFO to keep files bounded.
+        """
+        if not patch_name or not _SAFE_PATCH_NAME.match(patch_name):
+            return
+        entry = {
+            "timestamp": datetime.now(timezone.utc).strftime(
+                "%Y-%m-%dT%H:%M:%SZ"
+            ),
+            "file": file,
+            "semantic_class": semantic_class,
+            "resolution_strategy": resolution_strategy,
+            "upstream_sha": upstream_sha,
+            "upstream_subject": upstream_subject,
+            "notes": notes,
+        }
+        decisions = self._load_all(patch_name)
+        decisions.append(entry)
+        if len(decisions) > MAX_DECISIONS_PER_PATCH:
+            decisions = decisions[-MAX_DECISIONS_PER_PATCH:]
+        self._save_all(patch_name, decisions)
+
+    def lookup(
+        self,
+        patch_name: str,
+        file: Optional[str] = None,
+        semantic_class: Optional[str] = None,
+        limit: int = 5,
+    ) -> List[dict]:
+        """Return up to `limit` previous decisions, most-recent first,
+        ranked by relevance to the given file/semantic_class.
+
+        Ranking: +2 if file matches, +1 if semantic_class matches.
+        Ties broken by recency.
+        """
+        decisions = self._load_all(patch_name)
+        if not decisions:
+            return []
+
+        def score(d: dict) -> tuple:
+            relevance = 0
+            if file and d.get("file") == file:
+                relevance += 2
+            if semantic_class and d.get("semantic_class") == semantic_class:
+                relevance += 1
+            return (relevance, d.get("timestamp", ""))
+
+        ranked = sorted(decisions, key=score, reverse=True)
+        result = []
+        for d in ranked[:limit]:
+            # Add a human-readable relevance tag
+            tag = []
+            if file and d.get("file") == file:
+                tag.append("same_file")
+            if semantic_class and d.get("semantic_class") == semantic_class:
+                tag.append("same_class")
+            entry = dict(d)
+            entry["relevance"] = "+".join(tag) if tag else "recent"
+            result.append(entry)
+        return result
+
+
+def detect_resolution_strategy(
+    resolved_content: str, ours: str, theirs: str
+) -> str:
+    """Classify how a resolution was produced by comparing bytes.
+
+    Returns "keep_ours" / "keep_theirs" / "manual". Exact-match required
+    because partial merges still count as manual. Empty `resolved_content`
+    returns "manual" (caller didn't tell us what was written).
+    """
+    if not resolved_content:
+        return "manual"
+    # Strip trailing whitespace on both sides to be forgiving of newline diffs.
+    r = resolved_content.rstrip()
+    o = (ours or "").rstrip()
+    t = (theirs or "").rstrip()
+    if r == o:
+        return "keep_ours"
+    if r == t:
+        return "keep_theirs"
+    return "manual"
package/bingo_core/dep.py
CHANGED
@@ -26,6 +26,7 @@ import shutil
 import subprocess
 import tempfile
 from dataclasses import dataclass, field
+from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Tuple


@@ -525,6 +526,226 @@ class DepManager:
         self._save_config()
         return {"ok": True, "package": package, "dropped": patch_name or "all"}

+    # ── Override Management ─────────────────────────────────────────────────
+
+    def _read_package_json(self) -> Optional[dict]:
+        """Read package.json from cwd. Returns None if not found."""
+        pj_path = os.path.join(self.cwd, "package.json")
+        if not os.path.isfile(pj_path):
+            return None
+        try:
+            with open(pj_path) as f:
+                return json.load(f)
+        except (json.JSONDecodeError, IOError):
+            return None
+
+    def _write_package_json(self, data: dict) -> None:
+        """Atomically write package.json preserving 2-space indent."""
+        pj_path = os.path.join(self.cwd, "package.json")
+        fd, tmp = tempfile.mkstemp(suffix=".tmp", dir=self.cwd)
+        try:
+            with os.fdopen(fd, "w") as f:
+                json.dump(data, f, indent=2)
+                f.write("\n")
+            os.replace(tmp, pj_path)
+        except Exception:
+            try:
+                os.unlink(tmp)
+            except FileNotFoundError:
+                pass
+            raise
+
+    def _load_overrides_tracking(self) -> dict:
+        """Load .bingo-deps/overrides.json tracking data."""
+        path = os.path.join(self.cwd, DEP_DIR, "overrides.json")
+        if not os.path.isfile(path):
+            return {"overrides": {}}
+        try:
+            with open(path) as f:
+                return json.load(f)
+        except (json.JSONDecodeError, IOError):
+            return {"overrides": {}}
+
+    def _save_overrides_tracking(self, data: dict) -> None:
+        """Write .bingo-deps/overrides.json."""
+        os.makedirs(os.path.join(self.cwd, DEP_DIR), exist_ok=True)
+        path = os.path.join(self.cwd, DEP_DIR, "overrides.json")
+        with open(path, "w") as f:
+            json.dump(data, f, indent=2)
+            f.write("\n")
+
+    def override_list(self) -> dict:
+        """List npm/yarn overrides with tracked reasons.
+
+        Returns {"ok": True, "overrides": [...], "count": N}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": True, "overrides": [], "count": 0, "note": "No package.json"}
+
+        # npm uses "overrides", yarn uses "resolutions"
+        overrides = pj.get("overrides", {})
+        resolutions = pj.get("resolutions", {})
+        all_ovs = {}
+        for pkg, ver in overrides.items():
+            all_ovs[pkg] = {"version": ver if isinstance(ver, str) else json.dumps(ver), "source": "overrides"}
+        for pkg, ver in resolutions.items():
+            if pkg not in all_ovs:
+                all_ovs[pkg] = {"version": ver, "source": "resolutions"}
+
+        # Merge with tracking data
+        tracking = self._load_overrides_tracking()
+        result = []
+        for pkg, info in all_ovs.items():
+            tracked = tracking.get("overrides", {}).get(pkg, {})
+            result.append({
+                "package": pkg,
+                "version": info["version"],
+                "source": info["source"],
+                "reason": tracked.get("reason", ""),
+                "created": tracked.get("created", ""),
+                "tracked": bool(tracked),
+            })
+
+        return {"ok": True, "overrides": result, "count": len(result)}
+
+    def override_check(self) -> dict:
+        """Check if npm overrides are still needed.
+
+        Reads package-lock.json to determine what version the tree resolves to.
+        Returns {"ok": True, "overrides": [{"package", "status", "reason"}]}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": True, "overrides": [], "count": 0}
+
+        overrides = pj.get("overrides", {})
+        resolutions = pj.get("resolutions", {})
+        all_ovs = dict(overrides)
+        all_ovs.update(resolutions)
+
+        if not all_ovs:
+            return {"ok": True, "overrides": [], "count": 0}
+
+        # Try reading package-lock.json for resolved versions
+        lock_path = os.path.join(self.cwd, "package-lock.json")
+        lock_data: Optional[dict] = None
+        if os.path.isfile(lock_path):
+            try:
+                with open(lock_path) as f:
+                    lock_data = json.load(f)
+            except (json.JSONDecodeError, IOError):
+                pass
+
+        results = []
+        for pkg, override_ver in all_ovs.items():
+            if not isinstance(override_ver, str):
+                results.append({
+                    "package": pkg,
+                    "override_version": json.dumps(override_ver),
+                    "status": "complex",
+                    "reason": "Nested override — manual check required",
+                })
+                continue
+
+            # Look up in lock file
+            resolved_ver = None
+            if lock_data:
+                # npm v2/v3 lock format: packages["node_modules/<pkg>"].version
+                packages = lock_data.get("packages", {})
+                lock_key = f"node_modules/{pkg}"
+                if lock_key in packages:
+                    resolved_ver = packages[lock_key].get("version", "")
+
+            if resolved_ver is None:
+                results.append({
+                    "package": pkg,
+                    "override_version": override_ver,
+                    "status": "unknown",
+                    "reason": "Cannot determine resolved version",
+                })
+            elif resolved_ver == override_ver:
+                # Lock resolved to override version — could be redundant
+                # Check if the package's parent requires a different version
+                # by looking at the dependency entry in lock file
+                # If the lock resolves to the same version, the override
+                # may no longer be needed
+                results.append({
+                    "package": pkg,
+                    "override_version": override_ver,
+                    "resolved_version": resolved_ver,
+                    "status": "redundant",
+                    "reason": "Lock resolves to override version — may no longer be needed",
+                })
+            else:
+                results.append({
+                    "package": pkg,
+                    "override_version": override_ver,
+                    "resolved_version": resolved_ver,
+                    "status": "active",
+                    "reason": f"Override forcing {override_ver} (tree wants {resolved_ver})",
+                })
+
+        redundant = sum(1 for r in results if r["status"] == "redundant")
+        return {"ok": True, "overrides": results, "count": len(results), "redundant": redundant}
+
+    def override_add(self, package: str, version: str, reason: str = "") -> dict:
+        """Add an npm override with reason tracking.
+
+        Returns {"ok": True, "package": ..., "version": ...}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": False, "error": "No package.json found"}
+
+        # Detect yarn vs npm
+        yarn_lock = os.path.isfile(os.path.join(self.cwd, "yarn.lock"))
+        field = "resolutions" if yarn_lock else "overrides"
+
+        if field not in pj:
+            pj[field] = {}
+        pj[field][package] = version
+        self._write_package_json(pj)
+
+        # Track reason
+        tracking = self._load_overrides_tracking()
+        tracking.setdefault("overrides", {})[package] = {
+            "version": version,
+            "reason": reason,
+            "created": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+            "manager_field": field,
+        }
+        self._save_overrides_tracking(tracking)
+
+        return {"ok": True, "package": package, "version": version, "field": field}
+
+    def override_drop(self, package: str) -> dict:
+        """Remove an npm override.
+
+        Returns {"ok": True, "package": ..., "dropped": True}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": False, "error": "No package.json found"}
+
+        dropped = False
+        for pj_key in ("overrides", "resolutions"):
+            if pj_key in pj and package in pj[pj_key]:
+                del pj[pj_key][package]
+                if not pj[pj_key]:
+                    del pj[pj_key]
+                dropped = True
+        if dropped:
+            self._write_package_json(pj)
+
+        # Remove tracking
+        tracking = self._load_overrides_tracking()
+        if package in tracking.get("overrides", {}):
+            del tracking["overrides"][package]
+            self._save_overrides_tracking(tracking)
+
+        return {"ok": True, "package": package, "dropped": dropped}
+

 # ─── Diff Utilities ──────────────────────────────────────────────────────────

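A hedged sketch of the workflow these new methods enable. The DepManager constructor is outside this diff, so `dm` is taken as a given instance; the package name, version, and reason are hypothetical:

    def audit_overrides(dm) -> None:
        """Sketch: pin, list, and prune npm/yarn overrides via a DepManager
        instance (constructed elsewhere; its constructor is not in this diff)."""
        # Pin a transitive dependency; writes "overrides" (npm) or
        # "resolutions" (yarn) into package.json and records the reason
        # in .bingo-deps/overrides.json.
        dm.override_add("lodash", "4.17.21", reason="security fix pin")

        # Enumerate overrides together with their tracked reasons.
        for ov in dm.override_list()["overrides"]:
            print(ov["package"], ov["version"], ov["source"],
                  ov["reason"] or "(untracked)")

        # Audit against package-lock.json: "redundant" means the lock already
        # resolves to the pinned version, so the override can likely be dropped.
        for ov in dm.override_check()["overrides"]:
            if ov["status"] == "redundant":
                dm.override_drop(ov["package"])
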
@@ -610,36 +831,175 @@ def _apply_patch(patch_path: str, target_dir: str) -> Tuple[bool, str]:


 def _apply_patch_python(patch_path: str, target_dir: str) -> Tuple[bool, str]:
-    """Pure-Python
+    """Pure-Python unified diff patch application.
+
+    Parses unified diff format and applies hunks to target files.
+    Supports: context matching, fuzzy offset (±3 lines), new/deleted files.
+    Processes hunks in reverse order to avoid line number cascading.
+    """
+    import re
+
     try:
         with open(patch_path) as f:
-
+            patch_lines = f.readlines()
     except OSError as e:
         return (False, str(e))

-    # Parse
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # Parse into file-level diffs
+    file_diffs: List[dict] = []
+    i = 0
+    while i < len(patch_lines):
+        line = patch_lines[i]
+
+        # Find --- a/... and +++ b/... pair
+        if line.startswith("--- "):
+            if i + 1 < len(patch_lines) and patch_lines[i + 1].startswith("+++ "):
+                old_path = line[4:].strip()
+                new_line = patch_lines[i + 1]
+                # Strip p2: +++ b/<pkg>/<file> -> <file>
+                new_path_raw = new_line[6:].strip()
+                parts = new_path_raw.split("/", 1)
+                rel_path = parts[1] if len(parts) > 1 else parts[0]
+
+                is_new = old_path == "/dev/null" or old_path.endswith("/dev/null")
+                is_delete = new_line.strip().endswith("/dev/null")
+
+                # Collect hunks for this file
+                hunks: List[dict] = []
+                i += 2
+                while i < len(patch_lines):
+                    hunk_line = patch_lines[i]
+                    m = re.match(
+                        r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@',
+                        hunk_line,
+                    )
+                    if m:
+                        old_start = int(m.group(1))
+                        old_count = int(m.group(2)) if m.group(2) is not None else 1
+                        new_start = int(m.group(3))
+                        new_count = int(m.group(4)) if m.group(4) is not None else 1
+                        hunk_body: List[str] = []
+                        i += 1
+                        while i < len(patch_lines):
+                            hl = patch_lines[i]
+                            if hl.startswith((" ", "+", "-")):
+                                hunk_body.append(hl)
+                                i += 1
+                            elif hl.startswith("\\ No newline"):
+                                i += 1  # skip no-newline marker
+                            else:
+                                break
+                        hunks.append({
+                            "old_start": old_start,
+                            "old_count": old_count,
+                            "new_start": new_start,
+                            "new_count": new_count,
+                            "lines": hunk_body,
+                        })
+                    elif hunk_line.startswith("--- ") or hunk_line.startswith("diff "):
+                        break  # next file diff
+                    else:
+                        i += 1
+
+                file_diffs.append({
+                    "path": rel_path,
+                    "is_new": is_new,
+                    "is_delete": is_delete,
+                    "hunks": hunks,
+                })
+                continue
+        i += 1
+
+    if not file_diffs:
+        return (False, "No file diffs found in patch")
+
+    # Apply each file diff
+    for fd in file_diffs:
+        target_file = os.path.join(target_dir, fd["path"])
+
+        if fd["is_delete"]:
+            try:
+                os.remove(target_file)
+            except FileNotFoundError:
+                pass
+            continue
+
+        if fd["is_new"]:
+            os.makedirs(os.path.dirname(target_file) or ".", exist_ok=True)
+            new_lines: List[str] = []
+            for hunk in fd["hunks"]:
+                for hl in hunk["lines"]:
+                    if hl.startswith("+"):
+                        new_lines.append(hl[1:])
+                    elif hl.startswith(" "):
+                        new_lines.append(hl[1:])
+            with open(target_file, "w") as f:
+                f.writelines(new_lines)
+            continue
+
+        # Existing file — read, apply hunks in reverse, write
+        if not os.path.isfile(target_file):
+            return (False, f"File not found: {fd['path']}")
+
+        with open(target_file) as f:
+            file_lines = f.readlines()
+
+        # Process hunks in reverse order to preserve line numbers
+        for hunk in reversed(fd["hunks"]):
+            old_start = hunk["old_start"] - 1  # 0-indexed
+            hunk_lines = hunk["lines"]
+
+            # Build expected old lines and new lines
+            old_expected: List[str] = []
+            new_replacement: List[str] = []
+            for hl in hunk_lines:
+                if hl.startswith(" "):
+                    old_expected.append(hl[1:])
+                    new_replacement.append(hl[1:])
+                elif hl.startswith("-"):
+                    old_expected.append(hl[1:])
+                elif hl.startswith("+"):
+                    new_replacement.append(hl[1:])
+
+            # Try exact match first, then fuzzy offset ±3
+            match_pos = -1
+            for offset in range(0, 4):
+                for sign in (0, -1, 1) if offset == 0 else (-1, 1):
+                    pos = old_start + offset * sign
+                    if pos < 0 or pos + len(old_expected) > len(file_lines):
+                        continue
+                    chunk = file_lines[pos:pos + len(old_expected)]
+                    if _lines_match(chunk, old_expected):
+                        match_pos = pos
+                        break
+                if match_pos >= 0:
+                    break
+
+            if match_pos < 0:
+                context = old_expected[0].rstrip() if old_expected else "(empty)"
+                return (
+                    False,
+                    f"Hunk failed for {fd['path']} at line {hunk['old_start']}: "
+                    f"context mismatch near '{context}'",
+                )
+
+            # Apply: replace old lines with new lines
+            file_lines[match_pos:match_pos + len(old_expected)] = new_replacement
+
+        with open(target_file, "w") as f:
+            f.writelines(file_lines)
+
+    return (True, "")
+
+
+def _lines_match(actual: List[str], expected: List[str]) -> bool:
+    """Compare lines ignoring trailing whitespace differences."""
+    if len(actual) != len(expected):
+        return False
+    for a, e in zip(actual, expected):
+        if a.rstrip("\n\r") != e.rstrip("\n\r"):
+            return False
+    return True


 def _is_binary(path: str) -> bool:
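
A small end-to-end exercise of the new pure-Python patcher on a scratch directory. The file and patch text are hypothetical, and the reading of _apply_patch_python as a fallback to _apply_patch is inferred from its name; the "b/pkg/..." prefix exercises the two-component (-p2 style) path stripping shown above:

    # Sketch: apply a tiny unified diff with the pure-Python fallback.
    import os
    import tempfile

    from bingo_core.dep import _apply_patch_python

    work = tempfile.mkdtemp()
    with open(os.path.join(work, "hello.txt"), "w") as f:
        f.write("one\ntwo\nthree\n")

    patch = os.path.join(work, "fix.patch")
    with open(patch, "w") as f:
        f.write(
            "--- a/pkg/hello.txt\n"
            "+++ b/pkg/hello.txt\n"
            "@@ -1,3 +1,3 @@\n"
            " one\n"
            "-two\n"
            "+TWO\n"
            " three\n"
        )

    ok, err = _apply_patch_python(patch, work)
    assert ok, err  # hello.txt now contains "one\nTWO\nthree\n"
    # A context mismatch (beyond the ±3-line fuzzy window) instead returns
    # (False, "Hunk failed for hello.txt at line 1: context mismatch near ...").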
|