bingo-light 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +522 -0
- package/README.zh-CN.md +534 -0
- package/bin/cli.js +46 -0
- package/bin/mcp.js +45 -0
- package/bingo-light +1094 -0
- package/bingo_core/__init__.py +77 -0
- package/bingo_core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/_entry.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/config.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/exceptions.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/git.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/models.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/repo.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/setup.cpython-313.pyc +0 -0
- package/bingo_core/__pycache__/state.cpython-313.pyc +0 -0
- package/bingo_core/config.py +110 -0
- package/bingo_core/exceptions.py +48 -0
- package/bingo_core/git.py +194 -0
- package/bingo_core/models.py +37 -0
- package/bingo_core/repo.py +2376 -0
- package/bingo_core/setup.py +549 -0
- package/bingo_core/state.py +306 -0
- package/completions/bingo-light.bash +118 -0
- package/completions/bingo-light.fish +197 -0
- package/completions/bingo-light.zsh +169 -0
- package/mcp-server.py +788 -0
- package/package.json +34 -0
|
@@ -0,0 +1,2376 @@
|
|
|
1
|
+
"""
|
|
2
|
+
bingo_core.repo — Repo class: top-level facade with ALL bingo-light commands.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import re
|
|
10
|
+
import shlex
|
|
11
|
+
import subprocess
|
|
12
|
+
import tempfile
|
|
13
|
+
from datetime import datetime, timezone
|
|
14
|
+
from typing import List, Optional
|
|
15
|
+
|
|
16
|
+
from bingo_core import (
|
|
17
|
+
PATCH_PREFIX,
|
|
18
|
+
DEFAULT_TRACKING,
|
|
19
|
+
DEFAULT_PATCHES,
|
|
20
|
+
MAX_DIFF_SIZE,
|
|
21
|
+
PATCH_NAME_RE,
|
|
22
|
+
PATCH_NAME_MAX,
|
|
23
|
+
MAX_RESOLVE_ITER,
|
|
24
|
+
RERERE_MAX_ITER,
|
|
25
|
+
)
|
|
26
|
+
from bingo_core.exceptions import (
|
|
27
|
+
BingoError,
|
|
28
|
+
GitError,
|
|
29
|
+
NotGitRepoError,
|
|
30
|
+
DirtyTreeError,
|
|
31
|
+
)
|
|
32
|
+
from bingo_core.models import ConflictInfo
|
|
33
|
+
from bingo_core.git import Git
|
|
34
|
+
from bingo_core.config import Config
|
|
35
|
+
from bingo_core.state import State
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class Repo:
|
|
39
|
+
"""Top-level facade with ALL bingo-light commands."""
|
|
40
|
+
|
|
41
|
+
def __init__(self, path: Optional[str] = None):
    """Bind the facade to *path* (defaults to the current working directory)."""
    root = path or os.getcwd()
    self.path = root
    # One collaborator per concern: git plumbing, config file, local state.
    self.git = Git(root)
    self.config = Config(root)
    self.state = State(root)
|
|
46
|
+
|
|
47
|
+
# -- Internal helpers --
|
|
48
|
+
|
|
49
|
+
def _ensure_git_repo(self) -> None:
    """Verify we are inside a git work tree; best-effort unshallow a shallow clone.

    Raises:
        NotGitRepoError: when the path is not inside a git repository.
    """
    inside = self.git.run_ok("rev-parse", "--is-inside-work-tree")
    if not inside:
        raise NotGitRepoError()
    # A shallow clone lacks history needed for merge-base work; try to
    # deepen it, but never let a fetch failure break the caller.
    try:
        shallow = self.git.run("rev-parse", "--is-shallow-repository", check=False)
    except Exception:
        return
    if shallow != "true":
        return
    try:
        self.git.run_ok("fetch", "--unshallow")
    except Exception:
        pass
|
|
60
|
+
|
|
61
|
+
def _load(self) -> dict:
    """Load config, raising NotInitializedError if needed.

    Returns dict with upstream_url, upstream_branch, patches_branch, tracking_branch.

    Raises:
        NotGitRepoError: via _ensure_git_repo() when not inside a git repo.
        BingoError: when .bingolight is tracked by git (see guard below).
    """
    self._ensure_git_repo()
    # Guard: reject .bingolight if tracked by git (possible upstream injection).
    # ls-files --error-unmatch exits 0 only when the path is tracked.
    result = self.git.run_unchecked("ls-files", "--error-unmatch", ".bingolight")
    if result.returncode == 0:
        raise BingoError(
            ".bingolight is tracked by git. This is a security risk — "
            "upstream may have injected it. Run: git rm --cached .bingolight"
        )
    c = self.config.load()
    # Auto-fix stale tracking branch (e.g. after manual conflict resolution)
    # before handing the config to the caller, so downstream merge-base
    # computations see a consistent tracking position.
    self._fix_stale_tracking(c)
    return c
|
|
78
|
+
|
|
79
|
+
def _ensure_clean(self) -> None:
    """Raise DirtyTreeError unless the working tree has no pending changes."""
    if self.git.is_clean():
        return
    raise DirtyTreeError()
|
|
83
|
+
|
|
84
|
+
def _patches_base(self, c: dict) -> Optional[str]:
    """Return the merge base of the tracking and patches branches (None if absent)."""
    tracking = c["tracking_branch"]
    patches = c["patches_branch"]
    return self.git.merge_base(tracking, patches)
|
|
87
|
+
|
|
88
|
+
def _in_rebase(self) -> bool:
|
|
89
|
+
"""Check if a rebase is in progress."""
|
|
90
|
+
return os.path.isdir(
|
|
91
|
+
os.path.join(self.path, ".git", "rebase-merge")
|
|
92
|
+
) or os.path.isdir(os.path.join(self.path, ".git", "rebase-apply"))
|
|
93
|
+
|
|
94
|
+
def _fix_stale_tracking(self, c: dict) -> None:
    """Auto-fix tracking branch after manual conflict resolution.

    If a sync was rolled back on conflict and user completed rebase manually,
    tracking branch may be stale. Detect and fix.
    """
    # Never touch branches while a rebase or an undo window is active —
    # both states mean the branches are deliberately out of position.
    if self._in_rebase():
        return
    if self.state.is_undo_active():
        return

    tracking_pos = self.git.rev_parse(c["tracking_branch"])
    upstream_pos = self.git.rev_parse(f"upstream/{c['upstream_branch']}")
    if not tracking_pos or not upstream_pos:
        return
    # Tracking already matches upstream: nothing to fix.
    if tracking_pos == upstream_pos:
        return

    # Count non-[bl] commits in tracking..patches
    # Only auto-fix if there are exactly the expected non-bl commits
    # (upstream commits that were merged manually after conflict resolution).
    # If there are too many, something else is wrong -- don't touch it.
    try:
        log_output = self.git.run(
            "log",
            "--format=%s",
            f"{c['tracking_branch']}..{c['patches_branch']}",
        )
        non_bl_count = 0
        total_count = 0
        for line in log_output.splitlines():
            if not line:
                continue
            total_count += 1
            # Subjects not prefixed "[bl] " are assumed to be upstream
            # commits pulled in by a manual rebase.
            if not line.startswith("[bl] "):
                non_bl_count += 1
        # Only auto-advance if non-bl commits exist AND they don't
        # outnumber bl commits (heuristic: user manually resolved a sync)
        if 0 < non_bl_count <= total_count // 2 + 1:
            # Force-move tracking to the upstream tip.
            self.git.run(
                "branch", "-f", c["tracking_branch"], upstream_pos
            )
    except GitError:
        # Best effort only — a failed log/branch command leaves state as-is.
        pass
|
|
138
|
+
|
|
139
|
+
def _resolve_patch(self, c: dict, target: str) -> str:
    """Resolve a patch target (name or 1-based index) to a commit hash.

    Resolution order: numeric index into the stack, then exact "[bl] name:"
    subject match, then unique partial match on the patch name portion.

    Raises BingoError if not found.
    """
    base = self._patches_base(c)
    if not base:
        raise BingoError("No patches found.")

    try:
        # Oldest-first list of patch commits above the merge base.
        commits_output = self.git.run(
            "rev-list", "--reverse", f"{base}..{c['patches_branch']}"
        )
    except GitError:
        raise BingoError("No patches found.")

    if not commits_output:
        raise BingoError("No patches found.")

    commits = commits_output.splitlines()

    # Try as index first
    if target.isdigit():
        idx = int(target)
        if 1 <= idx <= len(commits):
            # Indexes are 1-based for the CLI user.
            return commits[idx - 1]
        raise BingoError(f"Patch index {target} out of range.")

    # Try as exact name
    for h in commits:
        subject = self.git.run("log", "-1", "--format=%s", h)
        if f"{PATCH_PREFIX} {target}:" in subject:
            return h

    # Try as partial match on patch name (not arbitrary substring)
    matches = []
    for h in commits:
        subject = self.git.run("log", "-1", "--format=%s", h)
        # Pull the name out of the "[bl] <name>: <desc>" subject format.
        m = re.match(r"^\[bl\] ([^:]+):", subject)
        if m and target in m.group(1):
            matches.append(h)
    if len(matches) == 1:
        return matches[0]
    if len(matches) > 1:
        # Refuse to guess between multiple candidate patches.
        raise BingoError(
            f"Ambiguous patch target '{target}': matches {len(matches)} patches. "
            "Use the exact name or index number."
        )

    raise BingoError(f"Patch '{target}' not found.")
|
|
189
|
+
|
|
190
|
+
def _validate_patch_name(self, name: str) -> None:
    """Validate a patch name against the length and character rules.

    Raises:
        BingoError: when the name is too long or fails PATCH_NAME_RE.
    """
    if len(name) > PATCH_NAME_MAX:
        raise BingoError(f"Patch name too long (max {PATCH_NAME_MAX} characters).")
    if PATCH_NAME_RE.match(name):
        return
    raise BingoError(
        "Invalid patch name. Use letters, numbers, hyphens, underscores. "
        "Must start with a letter or number."
    )
|
|
199
|
+
|
|
200
|
+
def _current_rebase_patch(self) -> str:
|
|
201
|
+
"""Read the current patch subject from an in-progress rebase.
|
|
202
|
+
|
|
203
|
+
Returns the first line of .git/rebase-merge/message, or "" if
|
|
204
|
+
unavailable. Used by conflict_analyze() and _smart_sync_locked().
|
|
205
|
+
"""
|
|
206
|
+
msg_file = os.path.join(self.path, ".git", "rebase-merge", "message")
|
|
207
|
+
if os.path.isfile(msg_file):
|
|
208
|
+
try:
|
|
209
|
+
with open(msg_file) as f:
|
|
210
|
+
return f.readline().strip()
|
|
211
|
+
except IOError:
|
|
212
|
+
pass
|
|
213
|
+
return ""
|
|
214
|
+
|
|
215
|
+
def _build_conflict_result(
|
|
216
|
+
self,
|
|
217
|
+
conflicted_files: List[str],
|
|
218
|
+
**extra: object,
|
|
219
|
+
) -> dict:
|
|
220
|
+
"""Build a standardized conflict result dict.
|
|
221
|
+
|
|
222
|
+
Extracts conflict details for each file and includes the current
|
|
223
|
+
rebase patch. Callers can pass additional keys via **extra which
|
|
224
|
+
are merged into the returned dict.
|
|
225
|
+
|
|
226
|
+
Always includes:
|
|
227
|
+
ok, conflict, current_patch, conflicted_files,
|
|
228
|
+
conflicts, resolution_steps, abort_cmd
|
|
229
|
+
"""
|
|
230
|
+
conflicts = [self._extract_conflict(f) for f in conflicted_files]
|
|
231
|
+
current_patch = self._current_rebase_patch()
|
|
232
|
+
|
|
233
|
+
result: dict = {
|
|
234
|
+
"ok": False,
|
|
235
|
+
"conflict": True,
|
|
236
|
+
"current_patch": current_patch,
|
|
237
|
+
"conflicted_files": conflicted_files,
|
|
238
|
+
"conflicts": [c.to_dict() for c in conflicts],
|
|
239
|
+
"resolution_steps": [
|
|
240
|
+
"1. Read ours (upstream) and theirs (your patch) for each conflict",
|
|
241
|
+
"2. Write the merged file content (include both changes where possible)",
|
|
242
|
+
"3. Run: git add <conflicted-files>",
|
|
243
|
+
"4. Run: bingo-light conflict-resolve (or git rebase --continue)",
|
|
244
|
+
"5. If more conflicts appear, repeat from step 1",
|
|
245
|
+
"6. To abort instead: git rebase --abort",
|
|
246
|
+
],
|
|
247
|
+
"abort_cmd": "git rebase --abort",
|
|
248
|
+
}
|
|
249
|
+
result.update(extra)
|
|
250
|
+
return result
|
|
251
|
+
|
|
252
|
+
# Files larger than this are not parsed for conflict markers (1MB per file).
_CONFLICT_SIZE_LIMIT = 1024 * 1024  # 1MB per file

def _extract_conflict(self, filepath: str) -> ConflictInfo:
    """Parse conflict markers from a file and return ConflictInfo.

    Walks the file line by line with a small state machine driven by
    diff3-style markers: "<<<<<<< " opens the ours section, "||||||| "
    the base section (discarded), "=======" switches to theirs, and
    ">>>>>>> " closes the region. Missing, oversized, or unreadable
    files return a placeholder ConflictInfo with a manual-resolution hint.
    """
    full_path = os.path.join(self.path, filepath)
    ours_lines: List[str] = []
    theirs_lines: List[str] = []
    conflict_count = 0
    # state is one of: "normal", "ours", "base", "theirs".
    state = "normal"

    if not os.path.isfile(full_path):
        # Delete/modify conflict: the file exists on only one side.
        return ConflictInfo(
            file=filepath, conflict_count=1,
            ours="", theirs="",
            merge_hint="File deleted on one side. Decide: keep or remove.",
        )

    try:
        file_size = os.path.getsize(full_path)
        if file_size > self._CONFLICT_SIZE_LIMIT:
            return ConflictInfo(
                file=filepath, conflict_count=1,
                ours="(file too large to display)",
                theirs="(file too large to display)",
                merge_hint="Large or binary file conflict. Resolve manually.",
            )
        with open(full_path) as f:
            for line in f:
                if line.startswith("<<<<<<< "):
                    conflict_count += 1
                    state = "ours"
                elif line.startswith("||||||| "):
                    state = "base"
                elif line.startswith("======="):
                    state = "theirs"
                elif line.startswith(">>>>>>> "):
                    state = "normal"
                else:
                    # Collect content lines only inside ours/theirs;
                    # base-section and non-conflict lines are dropped.
                    if state == "ours":
                        ours_lines.append(line.rstrip("\n"))
                    elif state == "theirs":
                        theirs_lines.append(line.rstrip("\n"))
    except IOError:
        return ConflictInfo(
            file=filepath, conflict_count=1,
            ours="", theirs="",
            merge_hint="Cannot read file. Resolve manually.",
        )

    ours = "\n".join(ours_lines)
    theirs = "\n".join(theirs_lines)

    # Generate merge hint based on which side has content.
    if not ours and theirs:
        merge_hint = (
            "Upstream deleted content that your patch modifies. "
            "Decide: keep your version or accept upstream deletion."
        )
    elif ours and not theirs:
        merge_hint = (
            "Your patch deleted content that upstream modified. "
            "Decide: keep upstream changes or accept your deletion."
        )
    elif conflict_count > 1:
        merge_hint = (
            "Multiple conflict regions. Resolve each <<<<<<< ... >>>>>>> "
            "block independently. Usually: keep both additions, reconcile edits."
        )
    else:
        merge_hint = (
            "Merge both changes. Keep ours (upstream) and theirs (your patch)."
        )

    return ConflictInfo(
        file=filepath,
        ours=ours,
        theirs=theirs,
        conflict_count=conflict_count,
        merge_hint=merge_hint,
    )
|
|
332
|
+
|
|
333
|
+
def _record_sync(self, c: dict, behind: int, saved_tracking: str) -> None:
    """Record a sync event to history.

    Best effort: history recording must never fail a successful sync, so
    any error is reduced to a stderr warning.
    """
    try:
        upstream_after = (
            self.git.rev_parse(f"upstream/{c['upstream_branch']}") or ""
        )
        base = self._patches_base(c)
        patches_list: List[dict] = []
        if base:
            try:
                # Oldest-first patch commits above the merge base.
                commits = self.git.run(
                    "rev-list", "--reverse", f"{base}..{c['patches_branch']}"
                )
                for h in commits.splitlines():
                    if not h:
                        continue
                    subject = self.git.run("log", "-1", "--format=%s", h)
                    pname = ""
                    # Extract the name from the "[bl] <name>: ..." subject.
                    m = re.match(r"^\[bl\] ([^:]+):", subject)
                    if m:
                        pname = m.group(1)
                    patches_list.append(
                        {
                            "name": pname,
                            "hash": self.git.rev_parse_short(h),
                        }
                    )
            except GitError:
                pass

        # Abbreviate to 8 hex chars for readability in the history file.
        upstream_before_short = saved_tracking[:8] if saved_tracking else ""
        self.state.record_sync(
            behind=behind,
            upstream_before=upstream_before_short,
            upstream_after=upstream_after,
            patches=patches_list,
        )
    except Exception as e:
        # Deliberately broad: the sync itself already succeeded.
        import sys
        print(f"warning: failed to record sync history: {e}", file=sys.stderr)
|
|
373
|
+
|
|
374
|
+
def _get_patch_mapping(self, c: dict) -> List[dict]:
|
|
375
|
+
"""Get current patches as list of {name, hash} dicts."""
|
|
376
|
+
base = self._patches_base(c)
|
|
377
|
+
if not base:
|
|
378
|
+
return []
|
|
379
|
+
patches = self.git.log_patches(base, c["patches_branch"])
|
|
380
|
+
return [{"name": p.name, "hash": p.hash} for p in patches]
|
|
381
|
+
|
|
382
|
+
# -- Init --
|
|
383
|
+
|
|
384
|
+
def init(self, upstream_url: str, branch: str = "") -> dict:
    """Initialize bingo-light in a git repository.

    Args:
        upstream_url: URL of the original upstream repository
        branch: upstream branch to track (default: auto-detect)

    Returns:
        {"ok": True, "upstream": ..., "branch": ..., "tracking": ..., "patches": ...}

    Raises:
        NotGitRepoError: when not inside a git repository.
        BingoError: when the upstream fetch fails or the branch is missing.
    """
    self._ensure_git_repo()

    # Detect re-init: an existing "upstream" remote means we ran before.
    reinit = bool(self.git.run_ok("remote", "get-url", "upstream"))

    # Add/update upstream remote
    if reinit:
        self.git.run("remote", "set-url", "upstream", upstream_url)
    else:
        self.git.run("remote", "add", "upstream", upstream_url)

    # Fetch upstream
    if not self.git.fetch("upstream"):
        raise BingoError(f"Failed to fetch upstream. Check the URL: {upstream_url}")

    # Auto-detect upstream branch if not specified.
    # First ask the remote for its HEAD branch...
    if not branch:
        try:
            output = self.git.run("remote", "show", "upstream", check=False)
            for line in output.splitlines():
                if "HEAD branch" in line:
                    detected = line.split()[-1]
                    if detected and detected != "(unknown)":
                        branch = detected
                        break
        except Exception:
            pass

    # ...then fall back to common default-branch names.
    if not branch:
        for candidate in ["main", "master", "develop"]:
            if self.git.rev_parse(f"upstream/{candidate}"):
                branch = candidate
                break

    branch = branch or "main"

    # Verify upstream branch exists; list the available ones in the error.
    if not self.git.rev_parse(f"upstream/{branch}"):
        try:
            output = self.git.run(
                "branch", "-r", "--list", "upstream/*", check=False
            )
            avail = ", ".join(
                line.strip().replace("upstream/", "")
                for line in output.splitlines()
                if line.strip()
            )
        except Exception:
            avail = "none"
        raise BingoError(
            f"Branch 'upstream/{branch}' not found. Available: {avail or 'none'}"
        )

    patches_branch = DEFAULT_PATCHES
    tracking_branch = DEFAULT_TRACKING

    # Enable rerere + diff3 so repeated conflict resolutions are replayed
    # and conflict markers include the common-ancestor section.
    self.git.run("config", "rerere.enabled", "true")
    self.git.run("config", "rerere.autoupdate", "true")
    self.git.run("config", "merge.conflictstyle", "diff3")

    # Create tracking branch (forced: always mirrors the upstream tip).
    self.git.run(
        "branch", "-f", tracking_branch, f"upstream/{branch}"
    )

    # Create patches branch
    if not self.git.rev_parse(patches_branch):
        # Check if current branch has commits ahead of upstream
        ahead = 0
        merge_base_val = self.git.merge_base(f"upstream/{branch}", "HEAD")
        if merge_base_val:
            ahead = self.git.rev_list_count(f"{merge_base_val}..HEAD")

        # Seed patches from HEAD when local commits exist, otherwise
        # start the stack empty at the tracking position.
        if ahead > 0:
            self.git.run("branch", patches_branch, "HEAD")
        else:
            self.git.run("branch", patches_branch, tracking_branch)

    # Save config
    self.config.save(upstream_url, branch, patches_branch, tracking_branch)

    # Exclude config from git tracking via .git/info/exclude (not
    # .gitignore, which would be visible to / overridable by upstream).
    exclude_file = os.path.join(self.path, ".git", "info", "exclude")
    try:
        exclude_content = ""
        if os.path.isfile(exclude_file):
            with open(exclude_file) as f:
                exclude_content = f.read()
        if ".bingolight" not in exclude_content:
            with open(exclude_file, "a") as f:
                f.write("\n.bingolight\n")
    except IOError:
        pass

    # Switch to patches branch
    current = self.git.current_branch()
    if current != patches_branch:
        self.git.run_ok("checkout", patches_branch)

    result = {
        "ok": True,
        "upstream": upstream_url,
        "branch": branch,
        "tracking": tracking_branch,
        "patches": patches_branch,
    }
    if reinit:
        result["reinit"] = True
    return result
|
|
504
|
+
|
|
505
|
+
# -- Status & Diagnostics --
|
|
506
|
+
|
|
507
|
+
def status(self) -> dict:
    """Get structured status of the fork.

    Returns dict with recommended_action: up_to_date, sync_safe, sync_risky, resolve_conflict
    """
    c = self._load()

    # Fetch upstream silently (failures ignored; we report what we have).
    self.git.run_ok("fetch", "upstream")

    tracking_head = self.git.rev_parse(c["tracking_branch"]) or ""
    upstream_head = (
        self.git.rev_parse(f"upstream/{c['upstream_branch']}") or ""
    )

    # How many upstream commits the tracking branch is missing.
    behind = 0
    if tracking_head and upstream_head and tracking_head != upstream_head:
        behind = self.git.rev_list_count(
            f"{c['tracking_branch']}..upstream/{c['upstream_branch']}"
        )

    base = self._patches_base(c)
    patch_count = 0
    patches: List[dict] = []
    if base:
        patch_count = self.git.rev_list_count(f"{base}..{c['patches_branch']}")
        patch_infos = self.git.log_patches(base, c["patches_branch"])
        patches = [
            {
                "name": p.name,
                "hash": p.hash,
                "subject": p.subject,
                "files": p.files,
            }
            for p in patch_infos
        ]

    # Conflict risk: overlap between patch files and upstream files
    overlap: List[str] = []
    if behind > 0 and patch_count > 0 and base:
        patch_files = set(self.git.diff_names(f"{base}..{c['patches_branch']}"))
        upstream_files = set(
            self.git.diff_names(
                f"{c['tracking_branch']}..upstream/{c['upstream_branch']}"
            )
        )
        overlap = sorted(patch_files & upstream_files)

    in_rebase = self._in_rebase()

    # Detect stale patches: the stack sits on an older base than tracking.
    patches_stale = False
    if base and tracking_head and base != tracking_head:
        patches_stale = True

    up_to_date = behind == 0 and not patches_stale

    # Compute recommended action (ordered by priority: an in-progress
    # rebase always wins, then staleness, then behind/overlap heuristics).
    if in_rebase:
        action = "resolve_conflict"
        reason = (
            "Rebase in progress. Run conflict-analyze to see conflicts, "
            "resolve them, then git add + git rebase --continue."
        )
    elif patches_stale and behind == 0:
        action = "sync_safe"
        reason = (
            "Patches are on an older base than tracking branch. "
            "Run sync to rebase patches onto current upstream."
        )
    elif up_to_date:
        action = "up_to_date"
        reason = "Fork is in sync with upstream. No action needed."
    elif behind > 0 and not overlap:
        action = "sync_safe"
        reason = (
            f"{behind} commits behind. No file overlap detected. Safe to sync."
        )
    elif behind > 0 and overlap:
        action = "sync_risky"
        reason = (
            f"{behind} commits behind. {len(overlap)} file(s) overlap with "
            "your patches — conflicts likely. Run sync --dry-run first."
        )
    else:
        action = "unknown"
        reason = "Check status manually."

    return {
        "ok": True,
        "upstream_url": c["upstream_url"],
        "upstream_branch": c["upstream_branch"],
        "current_branch": self.git.current_branch(),
        "behind": behind,
        "patch_count": patch_count,
        "patches": patches,
        "conflict_risk": overlap,
        "in_rebase": in_rebase,
        "up_to_date": up_to_date,
        "recommended_action": action,
        "reason": reason,
    }
|
|
609
|
+
|
|
610
|
+
def doctor(self) -> dict:
    """Run health checks on the repository.

    Returns {"ok": True/False, "issues": N, "checks": [...]}

    Note: only "fail" statuses count toward *issues*; "warn" is advisory.
    """
    c = self._load()
    issues = 0
    checks: List[dict] = []

    # Small recorder: appends a check row and tallies hard failures.
    def _check(name: str, status: str, detail: str = "") -> None:
        nonlocal issues
        checks.append({"name": name, "status": status, "detail": detail})
        if status == "fail":
            issues += 1

    # Git version
    try:
        git_ver = self.git.run("--version").replace("git version ", "")
        _check("git", "pass", git_ver)
    except GitError:
        _check("git", "fail", "git not found")

    # Rerere
    try:
        rerere = self.git.run("config", "rerere.enabled", check=False)
        if rerere == "true":
            _check("rerere", "pass", "enabled")
        else:
            _check("rerere", "fail", "disabled")
    except Exception:
        _check("rerere", "fail", "disabled")

    # Upstream remote
    if self.git.run_ok("remote", "get-url", "upstream"):
        url = self.git.run("remote", "get-url", "upstream", check=False)
        _check("upstream remote", "pass", url)
    else:
        _check("upstream remote", "fail", "not found")

    # Tracking branch
    if self.git.rev_parse(c["tracking_branch"]):
        _check("tracking branch", "pass", c["tracking_branch"])
    else:
        _check("tracking branch", "fail", "missing")

    # Patches branch
    if self.git.rev_parse(c["patches_branch"]):
        _check("patches branch", "pass", c["patches_branch"])
    else:
        _check("patches branch", "fail", "missing")

    # Rebase in progress
    if self._in_rebase():
        _check("rebase", "fail", "rebase in progress — resolve or abort")
    else:
        _check("rebase", "pass", "none")

    # Stale tracking: is the tracking branch behind the upstream tip?
    tracking_head = self.git.rev_parse(c["tracking_branch"])
    upstream_head = self.git.rev_parse(f"upstream/{c['upstream_branch']}")
    if tracking_head and upstream_head and tracking_head != upstream_head:
        behind = self.git.rev_list_count(
            f"{c['tracking_branch']}..upstream/{c['upstream_branch']}"
        )
        if behind > 0:
            _check(
                "tracking_freshness", "warn",
                f"{behind} commit(s) behind upstream — run sync",
            )
        else:
            _check("tracking_freshness", "pass", "up to date")
    elif tracking_head and upstream_head:
        _check("tracking_freshness", "pass", "up to date")

    # .bingo state directory
    bingo_dir = os.path.join(self.path, ".bingo")
    if os.path.isdir(bingo_dir):
        _check("state_dir", "pass", ".bingo/")
    else:
        _check("state_dir", "warn", ".bingo/ missing — undo/history unavailable")

    # Patch stack integrity: dry-run the rebase on a throwaway branch.
    # Only attempted when everything else passed and no rebase is active,
    # because it checks out branches in the working tree.
    if issues == 0 and not self._in_rebase():
        base = self._patches_base(c)
        if not base:
            _check("patch_stack", "pass", "no patches")
        else:
            patch_count = self.git.rev_list_count(
                f"{base}..{c['patches_branch']}"
            )
            if patch_count == 0:
                _check("patch_stack", "pass", "no patches")
            else:
                # pid in the name avoids collisions between concurrent runs.
                tmp_branch = f"bl-doctor-{os.getpid()}"
                original_branch = self.git.current_branch()
                try:
                    self.git.run("branch", tmp_branch, c["patches_branch"])
                    current_tracking = self.git.rev_parse(c["tracking_branch"])
                    result = self.git.run_unchecked(
                        "rebase",
                        "--onto",
                        c["tracking_branch"],
                        current_tracking or c["tracking_branch"],
                        tmp_branch,
                    )
                    if result.returncode == 0:
                        _check(
                            "patch_stack",
                            "pass",
                            f"all {patch_count} patch(es) clean",
                        )
                    else:
                        _check("patch_stack", "fail", "conflicts detected")
                        self.git.run_ok("rebase", "--abort")
                finally:
                    # Restore original branch before deleting tmp
                    if original_branch:
                        self.git.run_ok("checkout", original_branch)
                    else:
                        self.git.run_ok("checkout", c["patches_branch"])
                    self.git.run_ok("branch", "-D", tmp_branch)

    # Config file
    if self.config.exists():
        _check("config", "pass", "present")
    else:
        _check("config", "fail", "missing")

    return {"ok": issues == 0, "issues": issues, "checks": checks}
|
|
739
|
+
|
|
740
|
+
def diff(self) -> dict:
    """Show all changes vs upstream (with 50K truncation).

    Returns {"ok": True, "stat": "...", "diff": "...", "truncated": bool}
    """
    c = self._load()
    base = self._patches_base(c)
    if not base:
        # No merge base means no patch stack — nothing to diff.
        return {"ok": True, "stat": "", "diff": "", "truncated": False}

    span = f"{base}..{c['patches_branch']}"
    try:
        stat = self.git.run("diff", span, "--stat", check=False)
        diff_content = self.git.run("diff", span, check=False)
    except Exception:
        stat = ""
        diff_content = ""

    if len(diff_content) <= MAX_DIFF_SIZE:
        return {
            "ok": True,
            "truncated": False,
            "stat": stat,
            "diff": diff_content,
        }

    # Oversized diff: keep output manageable with a short preview.
    size_kb = len(diff_content) // 1024
    return {
        "ok": True,
        "truncated": True,
        "stat": stat,
        "preview": diff_content[:2000],
        "full_size": len(diff_content),
        "message": (
            f"Diff too large ({size_kb}KB). Showing preview. "
            "Use bingo-light diff without --json for full output."
        ),
    }
|
|
782
|
+
|
|
783
|
+
def history(self) -> dict:
    """Get sync history from .bingo/sync-history.json.

    Returns {"ok": True, "syncs": [...]}
    """
    # Loading the config first guarantees the repo is initialized.
    self._load()
    payload = self.state.get_sync_history()
    payload["ok"] = True
    return payload
|
|
792
|
+
|
|
793
|
+
def session(self, update: bool = False) -> dict:
    """Get or update session notes.

    Args:
        update: When True, regenerate the notes from live repo state.

    Returns {"ok": True, "session": "...", ...}
    """
    c = self._load()

    if not update:
        # Read path: return stored notes, or a hint when none exist yet.
        existing = self.state.get_session()
        if existing:
            return {"ok": True, "session": existing}
        return {
            "ok": True,
            "session": "",
            "message": "No session notes yet. Run bingo-light session update to create.",
        }

    # Fetch upstream silently (best-effort; failures are ignored).
    self.git.run_ok("fetch", "upstream")

    tracking_head = self.git.rev_parse(c["tracking_branch"]) or ""
    upstream_head = self.git.rev_parse(f"upstream/{c['upstream_branch']}") or ""
    behind = 0
    if tracking_head and upstream_head and tracking_head != upstream_head:
        behind = self.git.rev_list_count(
            f"{c['tracking_branch']}..upstream/{c['upstream_branch']}"
        )

    base = self._patches_base(c)
    patch_count = 0
    patch_list_str = "(none)"
    if base:
        patch_count = self.git.rev_list_count(f"{base}..{c['patches_branch']}")
        if patch_count > 0:
            patches = self.git.log_patches(base, c["patches_branch"])
            patch_list_str = "\n".join(
                f"{i}. {p.subject} ({p.files} file(s))"
                for i, p in enumerate(patches, 1)
            )

    ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    content = (
        f"# bingo-light session notes\n"
        f"Updated: {ts}\n\n"
        f"## Upstream\n"
        f"- URL: {c['upstream_url']}\n"
        f"- Branch: {c['upstream_branch']}\n"
        f"- Behind: {behind} commits\n\n"
        f"## Patch Stack ({patch_count} patches)\n"
        f"{patch_list_str}\n\n"
        f"## Last Sync\n"
        f"(see history)\n"
    )
    self.state.update_session(content)
    return {"ok": True, "updated": True, "session": content}
|
857
|
+
def conflict_analyze(self) -> dict:
    """Analyze current rebase conflicts.

    Returns structured info about each conflicted file.
    """
    self._ensure_git_repo()

    if not self._in_rebase():
        return {"ok": True, "in_rebase": False, "conflicts": []}

    unmerged = self.git.ls_files_unmerged()
    if not unmerged:
        return {"ok": True, "in_rebase": True, "conflicts": []}

    details = [self._extract_conflict(path) for path in unmerged]
    return {
        "ok": True,
        "in_rebase": True,
        "current_patch": self._current_rebase_patch(),
        "conflicts": [d.to_dict() for d in details],
        "resolution_steps": [
            "1. Read ours (upstream) and theirs (your patch) for each conflict",
            "2. Write the merged file content (include both changes where possible)",
            "3. Run: git add <conflicted-files>",
            "4. Run: bingo-light conflict-resolve (or git rebase --continue)",
            "5. If more conflicts appear, repeat from step 1",
            "6. To abort instead: git rebase --abort",
        ],
    }
|
889
|
+
def conflict_resolve(self, file_path: str, content: str = "") -> dict:
    """Resolve a single conflicted file and continue rebase if possible.

    Args:
        file_path: Path to the conflicted file (relative to repo root)
        content: If non-empty, write this content to the file before staging

    Returns:
        Result dict with resolved file, remaining conflicts, and rebase state
    """
    import pathlib

    self._ensure_git_repo()

    if not self._in_rebase():
        raise BingoError("No rebase in progress. Nothing to resolve.")
    if not file_path:
        raise BingoError("No file specified. Usage: conflict-resolve <file>")

    # Normalize the path and refuse anything that escapes the repo root.
    repo_root = pathlib.Path(self.path).resolve()
    resolved = (repo_root / file_path).resolve()
    try:
        resolved.relative_to(repo_root)
    except ValueError:
        raise BingoError(
            f"Path escapes repository root: {file_path}"
        )
    rel_path = str(resolved.relative_to(repo_root))

    # The target must actually be in conflict right now.
    unmerged = self.git.ls_files_unmerged()
    if rel_path not in unmerged:
        raise BingoError(
            f"File is not in the unmerged list: {rel_path}\n"
            f"Unmerged files: {', '.join(unmerged) if unmerged else '(none)'}"
        )

    # Optionally overwrite the file with caller-supplied merged content.
    if content:
        full_path = str(resolved)
        parent = os.path.dirname(full_path)
        if parent:
            os.makedirs(parent, exist_ok=True)
        with open(full_path, "w") as fh:
            fh.write(content)

    # Stage the resolution.
    if not self.git.run_ok("add", rel_path):
        raise BingoError(f"Failed to stage file: {rel_path}")

    remaining = self.git.ls_files_unmerged()
    if remaining:
        # Other files in this patch are still conflicted.
        details = [self._extract_conflict(f) for f in remaining]
        return {
            "ok": True,
            "resolved": rel_path,
            "remaining": remaining,
            "conflicts": [d.to_dict() for d in details],
        }

    # Everything staged for this patch -- try to continue the rebase.
    # GIT_EDITOR=true keeps the original commit message untouched.
    env = os.environ.copy()
    env["GIT_EDITOR"] = "true"
    proc = subprocess.run(
        ["git", "rebase", "--continue"],
        cwd=self.path,
        capture_output=True,
        text=True,
        env=env,
    )

    if proc.returncode == 0:
        if self._in_rebase():
            # A later patch is being applied -- check it for conflicts.
            new_unmerged = self.git.ls_files_unmerged()
            if new_unmerged:
                result = self._build_conflict_result(
                    new_unmerged,
                    resolved=rel_path,
                    rebase_continued=True,
                )
                result["ok"] = True
                return result
            # Next patch applied cleanly; rebase still has more to do.
            return {
                "ok": True,
                "resolved": rel_path,
                "rebase_continued": True,
                "sync_complete": False,
            }
        # Rebase fully complete.
        return {
            "ok": True,
            "resolved": rel_path,
            "rebase_continued": True,
            "sync_complete": True,
        }

    # --continue failed; fresh conflicts are the expected cause.
    new_unmerged = self.git.ls_files_unmerged()
    if new_unmerged:
        result = self._build_conflict_result(
            new_unmerged,
            resolved=rel_path,
            rebase_continued=True,
        )
        result["ok"] = True
        return result

    # Failed for some other reason -- surface git's stderr.
    stderr = proc.stderr.strip()
    raise BingoError(
        f"git rebase --continue failed: {stderr or '(unknown error)'}"
    )
|
1011
|
+
# -- Sync --
|
|
1012
|
+
|
|
1013
|
+
def sync(
    self,
    dry_run: bool = False,
    force: bool = False,
    test: bool = False,
) -> dict:
    """Fetch upstream and rebase patches.

    Args:
        dry_run: Preview only, don't modify anything
        force: Skip confirmation
        test: Run tests after sync; auto-undo on failure

    Returns sync result dict
    """
    config = self._load()
    # Refuse to start while another rebase is mid-flight.
    if self._in_rebase():
        raise BingoError(
            "A rebase is already in progress. Resolve it first with "
            "'git rebase --continue' or 'git rebase --abort'."
        )
    # Serialize syncs via the state lock; always release it.
    self.state.acquire_lock()
    try:
        return self._sync_locked(config, dry_run, force, test)
    finally:
        self.state.release_lock()
|
1040
|
+
def _sync_locked(
    self, c: dict, dry_run: bool, force: bool, test: bool
) -> dict:
    """Perform the sync while holding the state lock (see sync())."""

    def _continue_rebase():
        # GIT_EDITOR=true preserves each patch's original commit message.
        env = os.environ.copy()
        env["GIT_EDITOR"] = "true"
        return subprocess.run(
            ["git", "rebase", "--continue"],
            cwd=self.path,
            capture_output=True,
            text=True,
            env=env,
        )

    self._ensure_clean()

    # 1. Fetch upstream
    if not self.git.fetch("upstream"):
        raise BingoError("Failed to fetch upstream.")

    # 2. Check how far behind we are
    tracking_head = self.git.rev_parse(c["tracking_branch"]) or ""
    upstream_head = self.git.rev_parse(f"upstream/{c['upstream_branch']}") or ""

    if tracking_head == upstream_head:
        return {
            "ok": True,
            "synced": True,
            "behind_before": 0,
            "patches_rebased": 0,
            "up_to_date": True,
        }

    behind = self.git.rev_list_count(
        f"{c['tracking_branch']}..upstream/{c['upstream_branch']}"
    )

    base = self._patches_base(c)
    patch_count = 0
    if base:
        patch_count = self.git.rev_list_count(f"{base}..{c['patches_branch']}")

    # 3. Dry run: rehearse the rebase on throwaway branches, then clean up.
    if dry_run:
        tmp_branch = f"bl-dryrun-{os.getpid()}"
        tmp_tracking = f"bl-dryrun-tracking-{os.getpid()}"
        try:
            self.git.run("branch", tmp_branch, c["patches_branch"])
            self.git.run(
                "branch", tmp_tracking, f"upstream/{c['upstream_branch']}"
            )
            rehearsal = self.git.run_unchecked(
                "rebase",
                "--onto",
                tmp_tracking,
                c["tracking_branch"],
                tmp_branch,
            )
            if rehearsal.returncode == 0:
                return {
                    "ok": True,
                    "dry_run": True,
                    "clean": True,
                    "behind": behind,
                    "patches": patch_count,
                }
            conflicted = self.git.ls_files_unmerged()
            self.git.run_ok("rebase", "--abort")
            return {
                "ok": True,
                "dry_run": True,
                "clean": False,
                "behind": behind,
                "patches": patch_count,
                "conflicted_files": conflicted,
            }
        finally:
            self.git.run_ok("checkout", c["patches_branch"])
            self.git.run_ok("branch", "-D", tmp_branch)
            self.git.run_ok("branch", "-D", tmp_tracking)

    # 4. Ensure we're on the patches branch
    if self.git.current_branch() != c["patches_branch"]:
        self.git.run("checkout", c["patches_branch"])

    # 5. Save current state for rollback
    saved_head = self.git.rev_parse("HEAD")
    if not saved_head:
        raise BingoError("Cannot determine current HEAD. Aborting sync.")
    saved_tracking = self.git.rev_parse(c["tracking_branch"])
    if not saved_tracking:
        raise BingoError(
            f"Cannot resolve tracking branch '{c['tracking_branch']}'. "
            "Run 'bingo-light doctor' to diagnose."
        )
    self.state.save_undo(saved_head, saved_tracking)

    # 6. Fast-forward the tracking branch to upstream
    self.git.run(
        "branch", "-f", c["tracking_branch"], f"upstream/{c['upstream_branch']}"
    )

    # 7. Replay the patch stack onto the new tracking point
    rebase_result = self.git.run_unchecked(
        "rebase",
        "--onto",
        c["tracking_branch"],
        saved_tracking,
        c["patches_branch"],
    )

    if rebase_result.returncode == 0:
        self._record_sync(c, behind, saved_tracking)
        self.state.run_hook(
            "on-sync-success",
            {"behind_before": behind, "patches_rebased": patch_count},
        )

        # Optionally run the configured test suite; auto-undo on failure.
        if test:
            try:
                test_result = self.test()
                if not test_result.get("ok"):
                    # Roll back both branches to their pre-sync state.
                    try:
                        self.git.run(
                            "branch", "-f", c["patches_branch"], saved_head
                        )
                        if self.git.current_branch() == c["patches_branch"]:
                            self.git.run("reset", "--hard", saved_head)
                        self.git.run(
                            "branch", "-f",
                            c["tracking_branch"], saved_tracking,
                        )
                    except GitError:
                        # Rollback failed -- abort any in-progress rebase.
                        self.git.run_ok("rebase", "--abort")
                    self.state.run_hook(
                        "on-test-fail", {"behind_before": behind}
                    )
                    return {
                        "ok": False,
                        "synced": False,
                        "test": "fail",
                        "auto_undone": True,
                    }
            except BingoError as e:
                # Test command not configured or failed to run --
                # sync succeeded but the test step was skipped.
                return {
                    "ok": True,
                    "synced": True,
                    "behind_before": behind,
                    "patches_rebased": patch_count,
                    "test": "skipped",
                    "test_error": str(e),
                }

        return {
            "ok": True,
            "synced": True,
            "behind_before": behind,
            "patches_rebased": patch_count,
        }

    # Rebase stopped -- check whether rerere auto-resolved everything.
    unresolved = self.git.ls_files_unmerged()
    if not unresolved:
        rerere_ok = True
        rerere_iter = 0
        while self._in_rebase():
            rerere_iter += 1
            if rerere_iter > RERERE_MAX_ITER:
                self.git.run_ok("rebase", "--abort")
                raise BingoError(
                    "rerere auto-continue exceeded 50 iterations, aborting."
                )
            proc = _continue_rebase()
            if proc.returncode == 0:
                break
            unresolved = self.git.ls_files_unmerged()
            if unresolved:
                rerere_ok = False
                break

        if rerere_ok:
            self._record_sync(c, behind, saved_tracking)
            self.state.run_hook(
                "on-sync-success",
                {
                    "behind_before": behind,
                    "patches_rebased": patch_count,
                    "rerere_resolved": True,
                },
            )
            return {
                "ok": True,
                "synced": True,
                "behind_before": behind,
                "patches_rebased": patch_count,
                "rerere_resolved": True,
            }

    # Genuine conflicts remain: roll the tracking branch back and report.
    self.git.run_ok("branch", "-f", c["tracking_branch"], saved_tracking)

    self.state.run_hook("on-conflict", {"patch_count": patch_count})

    conflicted_files = self.git.ls_files_unmerged()
    return self._build_conflict_result(
        conflicted_files,
        synced=False,
        next=(
            "Run bingo-light conflict-analyze --json to see conflict details, "
            "then resolve each file"
        ),
        tracking_restore=(
            f"git branch -f {c['tracking_branch']} {saved_tracking}"
        ),
    )
|
1262
|
+
def smart_sync(self) -> dict:
    """Smart sync: circuit breaker + auto-rerere + detailed conflict JSON.

    Returns sync result dict with detailed conflict information.
    """
    config = self._load()
    # Never start on top of an unfinished rebase.
    if self._in_rebase():
        raise BingoError(
            "A rebase is already in progress. Resolve it first with "
            "'git rebase --continue' or 'git rebase --abort'."
        )
    # Serialize syncs via the state lock; always release it.
    self.state.acquire_lock()
    try:
        return self._smart_sync_locked(config)
    finally:
        self.state.release_lock()
|
1279
|
+
def _smart_sync_locked(self, c: dict) -> dict:
    """Perform smart sync while holding the state lock (see smart_sync())."""

    def _continue_rebase():
        # GIT_EDITOR=true preserves each patch's original commit message.
        env = os.environ.copy()
        env["GIT_EDITOR"] = "true"
        return subprocess.run(
            ["git", "rebase", "--continue"],
            cwd=self.path,
            capture_output=True,
            text=True,
            env=env,
        )

    self._ensure_clean()

    # Circuit breaker check (pre-fetch): bail out after repeated failures
    # against the same upstream commit.
    self.git.run_ok("fetch", "upstream")
    upstream_target = (
        self.git.rev_parse(f"upstream/{c['upstream_branch']}") or ""
    )
    if self.state.check_circuit_breaker(upstream_target):
        raise BingoError(
            "Circuit breaker: 3 consecutive sync failures on the same "
            "upstream commit. Resolve conflicts manually or wait for "
            "upstream to advance."
        )

    tracking_head = self.git.rev_parse(c["tracking_branch"]) or ""
    upstream_head = upstream_target

    if tracking_head == upstream_head:
        # Nothing to do; still report the current patch count.
        base = self._patches_base(c)
        patch_count = 0
        if base:
            patch_count = self.git.rev_list_count(
                f"{base}..{c['patches_branch']}"
            )
        return {
            "ok": True,
            "action": "none",
            "message": "Already up to date.",
            "behind": 0,
            "patches": patch_count,
        }

    behind = self.git.rev_list_count(
        f"{c['tracking_branch']}..upstream/{c['upstream_branch']}"
    )
    base = self._patches_base(c)
    patch_count = 0
    if base:
        patch_count = self.git.rev_list_count(f"{base}..{c['patches_branch']}")

    # Save state for rollback
    saved_head = self.git.rev_parse("HEAD")
    if not saved_head:
        raise BingoError("Cannot determine current HEAD. Aborting smart-sync.")
    saved_tracking = self.git.rev_parse(c["tracking_branch"])
    if not saved_tracking:
        raise BingoError(
            f"Cannot resolve tracking branch '{c['tracking_branch']}'. "
            "Run 'bingo-light doctor' to diagnose."
        )
    self.state.save_undo(saved_head, saved_tracking)

    # Advance the tracking branch, then replay the patch stack onto it.
    self.git.run(
        "branch", "-f", c["tracking_branch"], f"upstream/{c['upstream_branch']}"
    )
    if self.git.current_branch() != c["patches_branch"]:
        self.git.run("checkout", c["patches_branch"])

    rebase_result = self.git.run_unchecked(
        "rebase",
        "--onto",
        c["tracking_branch"],
        saved_tracking,
        c["patches_branch"],
    )

    if rebase_result.returncode == 0:
        # Clean rebase -- no conflicts at all.
        self.state.clear_circuit_breaker()
        self._record_sync(c, behind, saved_tracking)
        return {
            "ok": True,
            "action": "synced",
            "behind_before": behind,
            "patches_rebased": patch_count,
            "conflicts_resolved": 0,
        }

    # Conflict loop: let rerere auto-resolve as much as it can.
    conflicts_resolved = 0
    resolve_iter = 0

    while self._in_rebase():
        resolve_iter += 1
        if resolve_iter > MAX_RESOLVE_ITER:
            self.git.run_ok("rebase", "--abort")
            self.git.run_ok("branch", "-f", c["tracking_branch"], saved_tracking)
            raise BingoError(
                f"Smart sync: exceeded {MAX_RESOLVE_ITER} resolution attempts. Aborting."
            )

        unresolved = self.git.ls_files_unmerged()

        if not unresolved:
            # rerere resolved everything for this step -- continue on.
            proc = _continue_rebase()
            if proc.returncode == 0:
                conflicts_resolved += 1
                continue
            # Continue failed -- re-check for new conflicts.
            unresolved = self.git.ls_files_unmerged()
            if not unresolved:
                continue

        # Real unresolved conflicts -- roll back tracking and report.
        self.git.run_ok("branch", "-f", c["tracking_branch"], saved_tracking)

        # Circuit breaker: count this failure against the upstream commit.
        self.state.record_circuit_breaker(upstream_target)

        result = self._build_conflict_result(
            unresolved,
            action="needs_human",
            behind_before=behind,
            conflicts_auto_resolved=conflicts_resolved,
            remaining_conflicts=[
                self._extract_conflict(f).to_dict() for f in unresolved
            ],
            next=(
                "For each conflict: read merge_hint, write merged file, "
                "git add, git rebase --continue"
            ),
        )
        return result

    # If we get here, all conflicts were auto-resolved by rerere.
    self.state.clear_circuit_breaker()
    self._record_sync(c, behind, saved_tracking)
    return {
        "ok": True,
        "action": "synced_with_rerere",
        "behind_before": behind,
        "patches_rebased": patch_count,
        "conflicts_auto_resolved": conflicts_resolved,
    }
|
1426
|
+
def undo(self) -> dict:
    """Undo the last sync operation.

    Returns {"ok": True, "restored_to": "..."}
    """
    c = self._load()
    self._ensure_clean()

    prev_head, prev_tracking = self.state.load_undo()

    if not prev_head:
        # No saved undo state -- fall back to the branch reflog.
        try:
            output = self.git.run(
                "reflog", c["patches_branch"], "--format=%H", "-2"
            )
            entries = output.splitlines()
            if len(entries) >= 2:
                prev_head = entries[1]
        except GitError:
            pass

    if not prev_head:
        raise BingoError("No previous state found to undo. Have you synced yet?")

    current_head = self.git.rev_parse(c["patches_branch"])
    if not current_head:
        raise BingoError(
            f"Patch branch '{c['patches_branch']}' not found. "
            "Cannot determine current state."
        )
    if prev_head == current_head:
        return {"ok": True, "message": "nothing to undo"}

    # Restore the patches branch (hard-reset when it is checked out,
    # force-move the ref otherwise).
    if self.git.current_branch() == c["patches_branch"]:
        self.git.run("reset", "--hard", prev_head)
    else:
        self.git.run("branch", "-f", c["patches_branch"], prev_head)

    # Restore the tracking branch when we recorded its previous position.
    if prev_tracking:
        self.git.run("branch", "-f", c["tracking_branch"], prev_tracking)
        self.state.clear_undo_tracking()

    # Flag that an undo is in effect.
    self.state.mark_undo_active()

    return {"ok": True, "restored_to": prev_head}
|
1476
|
+
# -- Patches --
|
|
1477
|
+
|
|
1478
|
+
def patch_new(self, name: str, description: str = "") -> dict:
    """Create a new patch from current changes.

    Args:
        name: Patch name (alphanumeric, hyphens, underscores)
        description: Brief description

    Returns {"ok": True, "patch": "...", "hash": "...", "description": "..."}
    """
    c = self._load()
    self._validate_patch_name(name)

    # Reject duplicate patch names anywhere in the current stack.
    base = self._patches_base(c)
    if base:
        try:
            subjects = self.git.run(
                "log", "--format=%s", f"{base}..{c['patches_branch']}"
            )
            for subject in subjects.splitlines():
                if f"{PATCH_PREFIX} {name}:" in subject:
                    raise BingoError(
                        f"A patch named '{name}' already exists. "
                        "Use a different name or drop the existing one."
                    )
        except BingoError:
            raise  # re-raise duplicate name errors
        except GitError:
            pass  # git log failed (empty stack, etc.) -- safe to continue

    # Ensure we're on the patches branch.
    if self.git.current_branch() != c["patches_branch"]:
        self.git.run("checkout", c["patches_branch"])

    # Detect staged, unstaged, and untracked changes.
    has_staged = not self.git.run_ok("diff", "--cached", "--quiet")
    has_unstaged = not self.git.run_ok("diff", "--quiet")
    try:
        untracked = self.git.run(
            "ls-files", "--others", "--exclude-standard", check=False
        )
        has_untracked = bool(untracked.strip())
    except Exception:
        has_untracked = False

    if not (has_staged or has_unstaged or has_untracked):
        raise BingoError("No changes to create a patch from. Make some changes first!")

    # Nothing staged yet: stage the whole working tree.
    if not has_staged and (has_unstaged or has_untracked):
        self.git.run("add", "-A")

    if not description:
        description = os.environ.get("BINGO_DESCRIPTION", "no description")

    self.git.run("commit", "-m", f"{PATCH_PREFIX} {name}: {description}")
    short_hash = self.git.rev_parse_short("HEAD")

    return {
        "ok": True,
        "patch": name,
        "hash": short_hash,
        "description": description,
    }
|
1545
|
+
def patch_list(self, verbose: bool = False) -> dict:
    """List all patches in the stack.

    Args:
        verbose: Include per-file change details for each patch.

    Returns {"ok": True, "patches": [...], "count": N}
    """
    c = self._load()
    base = self._patches_base(c)
    if not base:
        return {"ok": True, "patches": [], "count": 0}

    patches = self.git.log_patches(base, c["patches_branch"])
    if not patches:
        return {"ok": True, "patches": [], "count": 0}

    # Compact display prefixes for git name-status codes.
    status_prefix = {"M": "~", "A": "+", "D": "-"}

    entries: List[dict] = []
    for p in patches:
        entry: dict = {
            "name": p.name,
            "hash": p.hash,
            "subject": p.subject,
            "files": p.files,
            "stat": p.stat,
        }
        if verbose:
            # Per-file details are best-effort; failures yield an empty list.
            try:
                file_output = self.git.run(
                    "diff-tree", "--no-commit-id", "--name-status", "-r",
                    p.hash, check=False,
                )
                file_details: List[str] = []
                if file_output:
                    for line in file_output.splitlines():
                        parts = line.split("\t", 1)
                        if len(parts) == 2:
                            code = parts[0].strip()
                            fname = parts[1].strip()
                            file_details.append(
                                f"{status_prefix.get(code, '?')} {fname}"
                            )
                entry["file_details"] = file_details
            except Exception:
                entry["file_details"] = []
        entries.append(entry)

    return {
        "ok": True,
        "patches": entries,
        "count": len(entries),
    }
|
1597
|
+
def patch_show(self, target: str) -> dict:
    """Show full diff and stats for a specific patch.

    Args:
        target: Patch name, index, or hash accepted by _resolve_patch.

    Returns {"ok": True, "patch": {...}} with truncation support.
    """
    c = self._load()
    hash_val = self._resolve_patch(c, target)

    short_hash = self.git.rev_parse_short(hash_val)
    subject = self.git.run("log", "-1", "--format=%s", hash_val)

    # Parse the patch name out of the "<PATCH_PREFIX> <name>: ..." subject.
    # Built from PATCH_PREFIX (instead of a hard-coded "[bl]") so parsing
    # stays consistent with the commit messages written by patch_new.
    pname = ""
    m = re.match(rf"^{re.escape(PATCH_PREFIX)} ([^:]+):", subject)
    if m:
        pname = m.group(1)

    stat = self.git.run(
        "diff-tree", "--no-commit-id", "--shortstat", hash_val, check=False
    ).strip()

    diff_content = self.git.run(
        "diff-tree", "--no-commit-id", "-p", hash_val, check=False
    )

    if len(diff_content) > MAX_DIFF_SIZE:
        # Too large for JSON consumers: return a short preview instead.
        preview = diff_content[:2000]
        size_kb = len(diff_content) // 1024
        return {
            "ok": True,
            "truncated": True,
            "patch": {
                "name": pname,
                "hash": short_hash,
                "subject": subject,
                "stat": stat,
                "preview": preview,
                "full_size": len(diff_content),
                "message": (
                    f"Diff too large ({size_kb}KB). Showing preview."
                ),
            },
        }

    return {
        "ok": True,
        "truncated": False,
        "patch": {
            "name": pname,
            "hash": short_hash,
            "subject": subject,
            "stat": stat,
            "diff": diff_content,
        },
    }
|
1652
|
+
def patch_drop(self, target: str) -> dict:
    """Remove a patch from the stack.

    Returns {"ok": True, "dropped": "...", "hash": "..."}
    """
    cfg = self._load()
    self._ensure_clean()
    commit = self._resolve_patch(cfg, target)

    subj = self.git.run("log", "-1", "--format=%s", commit)
    # Recover the patch name from the "[bl] <name>: ..." subject format.
    match = re.match(r"^\[bl\] ([^:]+):", subj)
    patch_name = match.group(1) if match else ""

    branch = cfg["patches_branch"]
    if self.git.current_branch() != branch:
        self.git.run("checkout", branch)

    abbrev = self.git.rev_parse_short(commit)

    # Splice the commit out by replaying everything after it onto its parent.
    outcome = self.git.run_unchecked(
        "rebase", "--onto", f"{commit}^", commit, branch
    )
    if outcome.returncode != 0:
        self.git.run_ok("rebase", "--abort")
        raise BingoError(
            "Failed to drop patch. There may be dependencies between patches."
        )
    return {"ok": True, "dropped": patch_name, "hash": abbrev}
def patch_edit(self, target: str) -> dict:
    """Fold staged changes into an existing patch.

    Returns {"ok": True, "edited": "..."}
    """
    cfg = self._load()

    # Unstaged edits would be swept into the fixup commit by accident,
    # so refuse up front (staged changes are the expected input here).
    if not self.git.run_ok("diff", "--quiet"):
        raise DirtyTreeError(
            "Unstaged changes detected. Stage the changes you want to fold "
            "into the patch with 'git add', or stash unstaged changes first."
        )
    commit = self._resolve_patch(cfg, target)

    if self.git.run_ok("diff", "--cached", "--quiet"):
        raise BingoError(
            "No staged changes. Stage your fixes first with 'git add', "
            "then run this command."
        )

    subj = self.git.run("log", "-1", "--format=%s", commit)

    # Remember HEAD so the fixup commit can be rolled back on failure.
    rollback_point = self.git.rev_parse("HEAD")

    self.git.run("commit", f"--fixup={commit}")

    base = self._patches_base(cfg)
    if not base:
        # Undo the fixup commit before bailing out.
        if rollback_point:
            self.git.run_ok("reset", "--hard", rollback_point)
        raise BingoError("No patches base found.")

    # Run the autosquash rebase non-interactively via a no-op editor.
    rebase_env = dict(os.environ, GIT_SEQUENCE_EDITOR="true")
    proc = subprocess.run(
        ["git", "rebase", "--autosquash", base],
        cwd=self.path,
        capture_output=True,
        text=True,
        env=rebase_env,
    )
    if proc.returncode != 0:
        self.git.run_ok("rebase", "--abort")
        # Drop the now-orphaned fixup commit.
        if rollback_point:
            self.git.run_ok("reset", "--hard", rollback_point)
        return {
            "ok": False,
            "error": "Rebase conflict while editing patch. Rebase aborted.",
        }

    return {"ok": True, "edited": subj}
def patch_export(self, output_dir: str = ".bl-patches") -> dict:
    """Export all patches as .patch files + series file.

    Args:
        output_dir: Destination directory. A relative path is resolved
            against the repo root and must stay inside the repo; an
            absolute path may point anywhere.

    Returns {"ok": True, "count": N, "directory": "...", "files": [...]}
    (or {"ok": True, "count": 0, "patches": []} when there is nothing
    to export).

    Raises:
        BingoError: if a relative output_dir escapes the repository.
    """
    c = self._load()
    base = self._patches_base(c)
    if not base:
        return {"ok": True, "count": 0, "patches": []}

    count = self.git.rev_list_count(f"{base}..{c['patches_branch']}")
    if count == 0:
        return {"ok": True, "count": 0, "patches": []}

    if os.path.isabs(output_dir):
        # Absolute paths are explicitly allowed (see error message below),
        # so no containment check is applied to them.
        abs_output = os.path.realpath(output_dir)
    else:
        abs_output = os.path.realpath(os.path.join(self.path, output_dir))
        # Relative paths must stay within the repo. Compare on a directory
        # boundary: a bare startswith() would accept a sibling such as
        # "/repo-evil" for a repo at "/repo".
        repo_root = os.path.realpath(self.path)
        if abs_output != repo_root and not abs_output.startswith(
            repo_root + os.sep
        ):
            raise BingoError(
                f"Export path escapes repository: {output_dir}. "
                "Use a path within the repo or an absolute path."
            )
    os.makedirs(abs_output, exist_ok=True)

    self.git.run(
        "format-patch",
        "--numbered",
        "--output-directory",
        abs_output,
        f"{base}..{c['patches_branch']}",
    )

    # Write a quilt-style series file recording the application order.
    patch_files = sorted(
        f for f in os.listdir(abs_output) if f.endswith(".patch")
    )
    with open(os.path.join(abs_output, "series"), "w") as f:
        for pf in patch_files:
            f.write(pf + "\n")

    return {
        "ok": True,
        "count": count,
        "directory": output_dir,
        "files": patch_files,
    }
def _restore_bl_prefix(self) -> None:
    """Restore [bl] prefix on HEAD if git am stripped it."""
    try:
        head_subject = self.git.run("log", "-1", "--format=%s", "HEAD")
        if head_subject.startswith(PATCH_PREFIX + " "):
            return
        # Re-prefix the full message (subject + body) via amend.
        full_message = self.git.run("log", "-1", "--format=%B", "HEAD")
        subprocess.run(
            ["git", "commit", "--amend", "-m", PATCH_PREFIX + " " + full_message],
            cwd=self.path, capture_output=True, text=True,
        )
    except (GitError, OSError):
        # Best-effort: a missing prefix is cosmetic, never fatal.
        pass
def patch_import(self, path: str) -> dict:
    """Import .patch file(s) into the stack.

    Args:
        path: A .patch file, or a directory of patch files. A directory
            with a "series" file is applied in series order; otherwise
            all *.patch files are applied in sorted order.

    Returns {"ok": True, "imported": True, "patch_count": N}

    Raises:
        BingoError: if the path is missing, a series entry escapes the
            import directory, or "git am" fails.
    """
    c = self._load()
    self._ensure_clean()

    if self.git.current_branch() != c["patches_branch"]:
        self.git.run("checkout", c["patches_branch"])

    abs_path = os.path.realpath(
        os.path.join(self.path, path) if not os.path.isabs(path) else path
    )

    def _apply(patch_path: str, label: str) -> None:
        # Apply one patch with "git am" and restore the [bl] subject
        # prefix that "git am" may strip; failures surface uniformly.
        result = self.git.run_unchecked("am", patch_path)
        if result.returncode != 0:
            raise BingoError(
                f"Failed to apply {label}. Run git am --abort to undo."
            )
        self._restore_bl_prefix()

    if os.path.isdir(abs_path):
        series_file = os.path.join(abs_path, "series")
        if os.path.isfile(series_file):
            with open(series_file) as f:
                for line in f:
                    line = line.strip()
                    if not line or line.startswith("#"):
                        continue
                    # Confine each entry to the import directory,
                    # comparing on a directory boundary: a bare
                    # startswith() would accept a sibling such as
                    # "<dir>-evil" for an import dir "<dir>".
                    entry_path = os.path.realpath(
                        os.path.join(abs_path, line)
                    )
                    if not entry_path.startswith(abs_path + os.sep):
                        raise BingoError(
                            f"Series entry escapes import directory: {line}"
                        )
                    _apply(entry_path, line)
        else:
            for pf in sorted(os.listdir(abs_path)):
                if pf.endswith(".patch"):
                    _apply(os.path.join(abs_path, pf), pf)
    else:
        if not os.path.isfile(abs_path):
            raise BingoError(f"File not found: {path}")
        _apply(abs_path, "patch")

    # Recount the stack to report how many patches it now holds.
    base = self._patches_base(c)
    imported_count = 0
    if base:
        imported_count = self.git.rev_list_count(
            f"{base}..{c['patches_branch']}"
        )

    return {"ok": True, "imported": True, "patch_count": imported_count}
def patch_reorder(self, order: str = "") -> dict:
    """Reorder patches by specifying new order as comma-separated indices.

    Args:
        order: e.g. "2,1,3" to swap first two patches. Must be a
            permutation of 1..N, where N is the number of patches.

    Returns {"ok": True, "reordered": "..."}

    Raises:
        BingoError: on a missing or malformed order, or a failed rebase.
    """
    c = self._load()
    self._ensure_clean()
    base = self._patches_base(c)
    if not base:
        return {"ok": True, "message": "no patches to reorder"}

    patch_total = self.git.rev_list_count(f"{base}..{c['patches_branch']}")
    if patch_total <= 1:
        return {"ok": True, "message": "only one patch"}

    if not order:
        raise BingoError(
            "Reorder requires --order \"2,1,3\" in non-interactive mode."
        )

    # Parse and validate indices
    try:
        indices = [int(x.strip()) for x in order.split(",")]
    except ValueError:
        raise BingoError("Invalid order format. Use comma-separated integers.")

    if len(indices) != patch_total:
        raise BingoError(
            f"Reorder requires exactly {patch_total} indices "
            f"(one per patch), got {len(indices)}."
        )
    # Each index must appear exactly once and be in range; otherwise the
    # generated todo list would silently drop or duplicate commits
    # (sed prints nothing for an out-of-range line number).
    if sorted(indices) != list(range(1, patch_total + 1)):
        raise BingoError(
            f"Order must be a permutation of 1..{patch_total} "
            "with no duplicates."
        )

    if self.git.current_branch() != c["patches_branch"]:
        self.git.run("checkout", c["patches_branch"])

    # GIT_SEQUENCE_EDITOR script: pull each pick line out of the todo
    # file by its original position, then rewrite the file in the
    # requested order.
    script_content = "#!/bin/bash\n"
    for i, idx in enumerate(indices):
        script_content += f'line{i}=$(sed -n "{idx}p" "$1")\n'
    for i in range(len(indices)):
        op = ">" if i == 0 else ">>"
        script_content += f'echo "$line{i}" {op} "$1.tmp"\n'
    script_content += 'mv "$1.tmp" "$1"\n'

    fd, script_path = tempfile.mkstemp(suffix=".sh")
    try:
        with os.fdopen(fd, "w") as f:
            f.write(script_content)
        os.chmod(script_path, 0o755)

        env = os.environ.copy()
        env["GIT_SEQUENCE_EDITOR"] = f"bash {script_path}"
        result = subprocess.run(
            ["git", "rebase", "-i", base],
            cwd=self.path,
            capture_output=True,
            text=True,
            env=env,
        )
        if result.returncode != 0:
            self.git.run_ok("rebase", "--abort")
            raise BingoError("Failed to reorder patches.")
        return {"ok": True, "reordered": order}
    finally:
        try:
            os.unlink(script_path)
        except FileNotFoundError:
            pass
def patch_squash(self, idx1: int, idx2: int) -> dict:
    """Merge two adjacent patches into one.

    Args:
        idx1: 1-based index of first patch
        idx2: 1-based index of second patch (must be adjacent to idx1)

    Returns {"ok": True, "squashed": [idx1, idx2]}

    Raises:
        BingoError: on out-of-range or non-adjacent indices, or a
            failed rebase.
    """
    c = self._load()
    self._ensure_clean()

    base = self._patches_base(c)
    if not base:
        raise BingoError("No patches.")

    total = self.git.rev_list_count(f"{base}..{c['patches_branch']}")
    if not (1 <= idx1 <= total):
        raise BingoError(f"Index out of range: {idx1} (1-{total})")
    if not (1 <= idx2 <= total):
        raise BingoError(f"Index out of range: {idx2} (1-{total})")
    if idx1 == idx2:
        raise BingoError("Cannot squash a patch with itself.")
    # The rebase todo squashes line idx2 into the line directly above it,
    # so the documented "adjacent" contract means idx2 must follow idx1;
    # without this check non-adjacent input would merge the wrong pair.
    if idx2 != idx1 + 1:
        raise BingoError(
            f"Patches must be adjacent: expected idx2 == idx1 + 1, "
            f"got {idx1} and {idx2}."
        )

    if self.git.current_branch() != c["patches_branch"]:
        self.git.run("checkout", c["patches_branch"])

    # GIT_SEQUENCE_EDITOR that rewrites line idx2 from "pick" to "squash".
    script_content = (
        f'#!/bin/bash\n'
        f'awk \'NR=={idx2} && /^pick/{{sub(/^pick/,"squash")}} {{print}}\' '
        f'"$1" > "$1.tmp" && mv "$1.tmp" "$1"\n'
    )

    fd, script_path = tempfile.mkstemp(suffix=".sh")
    try:
        with os.fdopen(fd, "w") as f:
            f.write(script_content)
        os.chmod(script_path, 0o755)

        env = os.environ.copy()
        env["GIT_SEQUENCE_EDITOR"] = script_path
        result = subprocess.run(
            ["git", "rebase", "-i", base],
            cwd=self.path,
            capture_output=True,
            text=True,
            env=env,
        )
        if result.returncode != 0:
            self.git.run_ok("rebase", "--abort")
            raise BingoError("Failed to squash patches.")
        return {"ok": True, "squashed": [idx1, idx2]}
    finally:
        try:
            os.unlink(script_path)
        except FileNotFoundError:
            pass
def patch_meta(
    self, target: str, key: str = "", value: str = ""
) -> dict:
    """Get/set patch metadata.

    If key is empty, return all metadata.
    If key is set but value is empty, return that key's value.
    If both key and value are set, set the value.

    Returns {"ok": True, ...}
    """
    c = self._load()

    # Confirm the named patch actually exists in the current stack
    # before touching metadata for it.
    base = self._patches_base(c)
    if base:
        try:
            subjects = self.git.run(
                "log", "--format=%s", f"{base}..{c['patches_branch']}"
            ).splitlines()
            exists = any(
                f"{PATCH_PREFIX} {target}:" in subj for subj in subjects
            )
        except GitError:
            exists = False
        if not exists:
            raise BingoError(f"Patch '{target}' not found.")

    if key and value:
        # Set value
        self.state.patch_meta_set(target, key, value)
        return {"ok": True, "patch": target, "set": key, "value": value}

    meta = self.state.patch_meta_get(target)
    if not key:
        # No key: return the whole metadata mapping.
        return {"ok": True, "patch": target, "meta": meta}
    # Key without value: read a single entry.
    return {"ok": True, "patch": target, "key": key, "value": meta.get(key, "")}
def config_get(self, key: str) -> dict:
    """Get a config value.

    Returns {"ok": True, "key": "...", "value": "..."}
    """
    # Refuse to read config for an uninitialized repo.
    self._load()
    # Missing keys come back as an empty string, never None.
    return {"ok": True, "key": key, "value": self.config.get(key) or ""}
def config_set(self, key: str, value: str) -> dict:
    """Set a config value.

    Args:
        key: Config key to write.
        value: New value for the key.

    Returns {"ok": True, "key": "...", "value": "..."}
    """
    # _load() validates the repo is initialized before any config write.
    self._load()  # ensure initialized
    self.config.set(key, value)
    return {"ok": True, "key": key, "value": value}
def config_list(self) -> dict:
    """List all config values.

    Returns {"ok": True, "config": {...}}
    """
    # Refuse to read config for an uninitialized repo.
    self._load()
    everything = self.config.list_all()
    return {"ok": True, "config": everything}
def test(self) -> dict:
    """Run configured test command.

    Returns {"ok": True/False, "test": "pass"/"fail", "command": "..."}
    """
    self._load()  # validate repo is initialized
    cmd = self.config.get("test.command")
    if not cmd:
        raise BingoError(
            "No test command. Set one with: bingo-light config set test.command 'make test'"
        )

    try:
        proc = subprocess.run(
            ["bash", "-c", cmd],
            cwd=self.path,
            capture_output=True,
            text=True,
            timeout=600,  # 10 minute limit
        )
    except subprocess.TimeoutExpired:
        return {"ok": False, "test": "timeout", "command": cmd}

    if proc.returncode != 0:
        return {
            "ok": False,
            "test": "fail",
            "command": cmd,
            "output": proc.stdout + proc.stderr,
        }
    return {"ok": True, "test": "pass", "command": cmd}
def auto_sync(self, schedule: str = "daily") -> dict:
    """Generate GitHub Actions workflow YAML for automated sync.

    Returns {"ok": True, "workflow": "...", "schedule": "..."}
    """
    c = self._load()

    # Map the requested schedule to a cron expression; anything
    # unrecognized silently falls back to daily.
    cron_map = {
        "6h": ("0 */6 * * *", "every 6 hours"),
        "weekly": ("0 0 * * 1", "weekly (Monday)"),
        "daily": ("0 0 * * *", "daily"),
    }
    cron_schedule, schedule_desc = cron_map.get(schedule, cron_map["daily"])

    output_dir = os.path.join(self.path, ".github", "workflows")
    output_file = os.path.join(output_dir, "bingo-light-sync.yml")
    os.makedirs(output_dir, exist_ok=True)

    # Sanitize config values for shell safety in YAML
    url = shlex.quote(c["upstream_url"])
    tb = shlex.quote(c["tracking_branch"])
    ub = shlex.quote(c["upstream_branch"])
    pb = shlex.quote(c["patches_branch"])

    # Validate branch names (no shell metacharacters)
    _branch_re = re.compile(r"^[a-zA-Z0-9._/-]+$")
    for name in ("tracking_branch", "upstream_branch", "patches_branch"):
        val = c[name]
        if not _branch_re.match(val):
            raise BingoError(
                f"Unsafe characters in {name}: {val!r}. "
                "Branch names must be alphanumeric with . / _ - only."
            )

    workflow = f"""# Generated by bingo-light
# Automatically syncs your fork with upstream and rebases patches.
# On failure, creates a GitHub Issue to notify you.

name: Bingo Light Auto-Sync

on:
  schedule:
    - cron: '{cron_schedule}'
  workflow_dispatch:  # Manual trigger

jobs:
  sync:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      issues: write

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{{{ secrets.GITHUB_TOKEN }}}}

      - name: Configure Git
        run: |
          git config user.name "bingo-light[bot]"
          git config user.email "bingo-light[bot]@users.noreply.github.com"

      - name: Fetch upstream
        run: |
          git remote add upstream {url} || git remote set-url upstream {url}
          git fetch upstream

      - name: Rebase patches
        id: rebase
        run: |
          SAVED_TRACKING=$(git rev-parse {tb})
          git branch -f {tb} upstream/{ub}
          git checkout {pb}
          if git rebase --onto {tb} $SAVED_TRACKING {pb} 2>&1; then
            echo "result=success" >> $GITHUB_OUTPUT
          else
            git rebase --abort || true
            echo "result=conflict" >> $GITHUB_OUTPUT
          fi

      - name: Push if successful
        if: steps.rebase.outputs.result == 'success'
        run: |
          git push origin {pb} --force-with-lease
          git push origin {tb} --force-with-lease

      - name: Create issue on conflict
        if: steps.rebase.outputs.result == 'conflict'
        uses: actions/github-script@v7
        with:
          script: |
            const title = `[bingo-light] Sync conflict detected (${{new Date().toISOString().split('T')[0]}})`;
            const body = `## Auto-sync failed\\n\\nUpstream has changes that conflict with your patches.\\n\\n**Action required:** Run \\`bingo-light sync\\` locally to resolve conflicts.\\n\\n---\\n*This issue was created automatically by bingo-light.*`;
            await github.rest.issues.create({{
              owner: context.repo.owner,
              repo: context.repo.repo,
              title,
              body,
              labels: ['bingo-light', 'sync-conflict']
            }});
"""

    with open(output_file, "w") as f:
        f.write(workflow)

    return {
        "ok": True,
        "workflow": ".github/workflows/bingo-light-sync.yml",
        "schedule": schedule_desc,
    }
@staticmethod
def _workspace_config_path() -> str:
    """Return the path of the global workspace.json (honors XDG_CONFIG_HOME)."""
    default_dir = os.path.join(os.path.expanduser("~"), ".config")
    base = os.environ.get("XDG_CONFIG_HOME", default_dir)
    return os.path.join(base, "bingo-light", "workspace.json")
@staticmethod
def _load_workspace(config_path: str) -> dict:
    """Load workspace.json safely, handling corruption."""
    fallback = {"repos": []}
    try:
        with open(config_path) as fh:
            parsed = json.load(fh)
    except (json.JSONDecodeError, IOError):
        # Unreadable or corrupt file: start from an empty workspace.
        return fallback
    # Reject payloads that are not the expected {"repos": [...]} shape.
    if isinstance(parsed, dict) and "repos" in parsed:
        return parsed
    return fallback
def workspace_init(self) -> dict:
    """Initialize workspace config."""
    ws_path = self._workspace_config_path()
    os.makedirs(os.path.dirname(ws_path), exist_ok=True)
    # Never clobber an existing workspace file; init is idempotent.
    if not os.path.isfile(ws_path):
        with open(ws_path, "w") as fh:
            json.dump({"repos": []}, fh)
    return {"ok": True, "workspace": ws_path}
def workspace_add(
    self, repo_path: str = "", alias: str = ""
) -> dict:
    """Add a repo to workspace."""
    ws_path = self._workspace_config_path()
    if not os.path.isfile(ws_path):
        raise BingoError("Run 'bingo-light workspace init' first.")

    # Default to this repo; the alias defaults to the directory name.
    repo_path = os.path.realpath(repo_path or self.path)
    alias = alias or os.path.basename(repo_path)

    # Validate the path is a git repo
    if not os.path.isdir(repo_path):
        raise BingoError(f"Directory not found: {repo_path}")
    if not os.path.isdir(os.path.join(repo_path, ".git")):
        raise BingoError(f"Not a git repository: {repo_path}")

    data = self._load_workspace(ws_path)
    repos = data.setdefault("repos", [])

    # Reject duplicates on either identity: path or alias.
    for existing in repos:
        if existing.get("path") == repo_path:
            raise BingoError(f"Repo already in workspace: {repo_path}")
        if existing.get("alias") == alias:
            raise BingoError(
                f"Alias '{alias}' already in use. Use a different alias."
            )

    repos.append({"path": repo_path, "alias": alias})

    with open(ws_path, "w") as fh:
        json.dump(data, fh, indent=2)

    return {"ok": True, "added": alias, "path": repo_path}
def workspace_list(self) -> dict:
    """List workspace repos."""
    ws_path = self._workspace_config_path()
    if not os.path.isfile(ws_path):
        raise BingoError("No workspace. Run 'bingo-light workspace init'.")
    workspace = self._load_workspace(ws_path)
    return {"ok": True, "repos": workspace.get("repos", [])}
def workspace_remove(self, target: str) -> dict:
    """Remove a repo from the workspace by alias or path."""
    ws_path = self._workspace_config_path()
    if not os.path.isfile(ws_path):
        raise BingoError("No workspace. Run 'bingo-light workspace init'.")
    data = self._load_workspace(ws_path)
    current = data.get("repos", [])
    # Keep every entry that matches the target on neither identity.
    kept = [
        entry for entry in current
        if target not in (entry.get("alias"), entry.get("path"))
    ]
    if len(kept) == len(current):
        raise BingoError(f"Repo '{target}' not found in workspace.")
    data["repos"] = kept
    with open(ws_path, "w") as fh:
        json.dump(data, fh, indent=2)
    return {"ok": True, "removed": target}
def workspace_status(self) -> dict:
    """List workspace repos with per-repo sync status."""
    ws_path = self._workspace_config_path()
    if not os.path.isfile(ws_path):
        raise BingoError("No workspace. Run 'bingo-light workspace init'.")

    data = self._load_workspace(ws_path)
    report = []
    for item in data.get("repos", []):
        path = item.get("path", "")
        entry: dict = {
            "alias": item.get("alias", item.get("path", "unknown")),
            "path": path,
        }
        if not path or not os.path.isdir(path):
            entry["status"] = "missing"
        else:
            try:
                st = Repo(path).status()
                entry["behind"] = st.get("behind", 0)
                entry["patches"] = st.get("patch_count", 0)
                entry["status"] = "ok" if st.get("up_to_date") else "behind"
            except (BingoError, OSError) as exc:
                # A broken member repo is reported, not fatal.
                entry["status"] = "error"
                entry["error"] = str(exc)
        report.append(entry)
    return {"ok": True, "repos": report}
def workspace_sync(self) -> dict:
    """Sync all workspace repos."""
    ws_path = self._workspace_config_path()
    if not os.path.isfile(ws_path):
        raise BingoError("No workspace.")

    data = self._load_workspace(ws_path)

    results = []
    for item in data.get("repos", []):
        alias = item.get("alias", item.get("path", "unknown"))
        path = item.get("path", "")
        if not path or not os.path.isdir(path):
            results.append({
                "alias": alias, "status": "failed",
                "error": f"Directory not found: {path}",
            })
            continue
        try:
            Repo(path).sync(force=True)
        except (BingoError, OSError) as exc:
            # Distinguish rebase conflicts from other failures by the
            # error text, so callers can suggest manual resolution.
            reason = str(exc)
            kind = "conflict" if "conflict" in reason.lower() else "failed"
            results.append({
                "alias": alias, "status": kind, "error": reason,
            })
        else:
            results.append({"alias": alias, "status": "ok"})

    overall = all(r["status"] == "ok" for r in results)
    return {"ok": overall, "synced": results}
|