bingo-light 2.1.1 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.en.md +14 -7
- package/README.md +192 -126
- package/bingo-light +116 -0
- package/bingo_core/__init__.py +1 -1
- package/bingo_core/dep.py +652 -0
- package/bingo_core/dep_npm.py +113 -0
- package/bingo_core/dep_pip.py +178 -0
- package/bingo_core/setup.py +73 -17
- package/completions/bingo-light.bash +14 -1
- package/completions/bingo-light.fish +25 -1
- package/completions/bingo-light.zsh +20 -0
- package/mcp-server.py +106 -1
- package/package.json +1 -1
|
@@ -0,0 +1,652 @@
|
|
|
1
|
+
"""
|
|
2
|
+
bingo_core.dep — Dependency patching engine.
|
|
3
|
+
|
|
4
|
+
Maintains patches on top of npm/pip packages. Same mental model as git fork
|
|
5
|
+
patching: upstream is the published package, your patches sit on top.
|
|
6
|
+
|
|
7
|
+
Storage layout:
|
|
8
|
+
.bingo-deps/
|
|
9
|
+
config.json # { "packages": { "some-lib": { "version": "2.1.0", "manager": "npm" } } }
|
|
10
|
+
patches/
|
|
11
|
+
some-lib/
|
|
12
|
+
fix-auth.patch
|
|
13
|
+
add-logging.patch
|
|
14
|
+
another-lib/
|
|
15
|
+
typo-fix.patch
|
|
16
|
+
|
|
17
|
+
Python 3.8+ stdlib only. No pip dependencies.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
import difflib
|
|
23
|
+
import json
|
|
24
|
+
import os
|
|
25
|
+
import shutil
|
|
26
|
+
import subprocess
|
|
27
|
+
import tempfile
|
|
28
|
+
from dataclasses import dataclass, field
|
|
29
|
+
from typing import Any, Dict, List, Optional, Tuple
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# ─── Constants ───────────────────────────────────────────────────────────────

DEP_DIR = ".bingo-deps"  # project-local directory holding all patch state
DEP_CONFIG = "config.json"  # tracked packages/versions file, stored inside DEP_DIR
PATCHES_DIR = "patches"  # subdirectory of DEP_DIR with per-package .patch files
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# ─── Data Models ─────────────────────────────────────────────────────────────
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
class DepPatch:
    """A single patch on a dependency."""
    package: str  # name of the dependency this patch targets
    name: str  # patch file name (e.g. "fix-auth.patch")
    path: str  # full path to .patch file
    description: str = ""  # optional human-readable summary (may be empty)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@dataclass
class DepPackage:
    """A patched dependency."""
    name: str  # package name as known to the package manager
    version: str  # version the patches were generated against
    manager: str  # "npm" | "pip"
    patches: List[DepPatch] = field(default_factory=list)  # patches on top of upstream, in order
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass
class DepConflict:
    """Conflict when applying a patch after upstream update."""
    package: str  # package whose patch failed to apply
    patch_name: str  # name of the failing .patch file
    old_version: str  # version the patch was generated against
    new_version: str  # version installed after the update
    error: str  # error message describing the failure
    hint: str = ""  # suggested remediation, if any
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
# ─── Backend Interface ───────────────────────────────────────────────────────
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
class DepBackend:
    """Interface that every package-manager backend (npm, pip, ...) implements.

    Concrete backends override the abstract methods below;
    ``install_hook_command`` ships with a working default.
    """

    # Identifier recorded in config.json ("npm", "pip", ...); subclasses override.
    name: str = "unknown"

    def detect(self, cwd: str) -> bool:
        """True when the project rooted at *cwd* is managed by this backend."""
        raise NotImplementedError

    def get_installed_version(self, package: str, cwd: str) -> Optional[str]:
        """Version string of *package* as currently installed, or None."""
        raise NotImplementedError

    def get_install_path(self, package: str, cwd: str) -> Optional[str]:
        """Filesystem directory where *package* is installed, or None."""
        raise NotImplementedError

    def fetch_original(self, package: str, version: str, dest: str) -> bool:
        """Download the pristine (unpatched) package@version source into *dest*."""
        raise NotImplementedError

    def list_files(self, package: str, cwd: str) -> List[str]:
        """Relative paths of all files in the installed package."""
        raise NotImplementedError

    def install_hook_command(self) -> str:
        """Command string a postinstall hook runs to auto-apply patches."""
        return "bingo-light dep apply"
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
# ─── Core Engine ─────────────────────────────────────────────────────────────
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class DepManager:
    """Core dependency patching engine.

    Orchestrates the registered backends and the on-disk state under
    ``.bingo-deps/``: ``config.json`` tracks which packages carry patches
    (version + manager + patch list), and ``patches/<package>/*.patch``
    holds the unified diffs themselves.  All public methods return
    JSON-serializable dicts carrying an ``"ok"`` key.
    """

    def __init__(self, cwd: str = "."):
        # All state lives under <cwd>/.bingo-deps/
        self.cwd = os.path.abspath(cwd)
        self.dep_dir = os.path.join(self.cwd, DEP_DIR)
        self.config_path = os.path.join(self.dep_dir, DEP_CONFIG)
        self.patches_dir = os.path.join(self.dep_dir, PATCHES_DIR)
        self._backends: List[DepBackend] = []
        self._config: Optional[Dict] = None  # lazy-loaded config cache

        # Register backends (import lazily to avoid circular deps)
        from bingo_core.dep_npm import NpmBackend
        from bingo_core.dep_pip import PipBackend
        self._backends = [NpmBackend(), PipBackend()]

    # ─── Config ──────────────────────────────────────────────────────────

    def _load_config(self) -> Dict:
        """Load config.json once and cache it; default to an empty package map."""
        if self._config is not None:
            return self._config
        if os.path.isfile(self.config_path):
            with open(self.config_path) as f:
                self._config = json.load(f)
        else:
            self._config = {"packages": {}}
        return self._config

    def _save_config(self) -> None:
        """Write the cached config back to disk, creating .bingo-deps/ if needed."""
        os.makedirs(self.dep_dir, exist_ok=True)
        with open(self.config_path, "w") as f:
            json.dump(self._config or {}, f, indent=2)
            f.write("\n")  # trailing newline keeps the file diff-friendly

    # ─── Backend Detection ───────────────────────────────────────────────

    def _detect_backend(self, package: str) -> Optional[DepBackend]:
        """Auto-detect which backend manages a package.

        First backend that both applies to this project AND reports an
        installed version for the package wins; None if nobody claims it.
        """
        for b in self._backends:
            if b.detect(self.cwd) and b.get_installed_version(package, self.cwd):
                return b
        return None

    def _get_backend(self, manager: str) -> Optional[DepBackend]:
        """Get backend by name ("npm" / "pip"); None if unknown."""
        for b in self._backends:
            if b.name == manager:
                return b
        return None

    # ─── Postinstall Hook ─────────────────────────────────────────────────

    def _ensure_postinstall_hook(self, backend: DepBackend) -> Optional[str]:
        """Add 'bingo-light dep apply' to package.json postinstall if npm project.

        Returns a message if hook was added, None otherwise.
        """
        # Only npm has a postinstall mechanism we can hook into here.
        if backend.name != "npm":
            return None

        pkg_json_path = os.path.join(self.cwd, "package.json")
        if not os.path.isfile(pkg_json_path):
            return None

        try:
            with open(pkg_json_path) as f:
                data = json.load(f)
        except (json.JSONDecodeError, OSError):
            # Unreadable/invalid package.json: silently skip the hook.
            return None

        scripts = data.setdefault("scripts", {})
        # Find bingo-light binary — use absolute path for reliability
        bl_bin = shutil.which("bingo-light")
        if bl_bin:
            hook_cmd = f"{bl_bin} dep apply"
        else:
            # Fallback: npx always works if npm is available
            hook_cmd = "npx --yes bingo-light dep apply"

        existing = scripts.get("postinstall", "")
        if hook_cmd in existing:
            return None  # already present

        # Chain after any pre-existing postinstall rather than replacing it.
        if existing:
            scripts["postinstall"] = f"{existing} && {hook_cmd}"
        else:
            scripts["postinstall"] = hook_cmd

        try:
            with open(pkg_json_path, "w") as f:
                json.dump(data, f, indent=2)
                f.write("\n")
        except OSError:
            return None

        return "postinstall hook added to package.json"

    # ─── Patch Generation ────────────────────────────────────────────────

    def patch(self, package: str, patch_name: str = "",
              description: str = "") -> Dict[str, Any]:
        """Generate a patch for a modified dependency.

        1. Detect the package manager
        2. Find the installed (modified) version
        3. Download the original version
        4. Generate a unified diff
        5. Save as .patch file

        Returns an {"ok": ...} dict; on success includes the patch name,
        version, manager and number of files changed.
        """
        backend = self._detect_backend(package)
        if not backend:
            return {"ok": False, "error": f"Package '{package}' not found in any package manager"}

        version = backend.get_installed_version(package, self.cwd)
        if not version:
            return {"ok": False, "error": f"Cannot determine version for '{package}'"}

        install_path = backend.get_install_path(package, self.cwd)
        if not install_path or not os.path.isdir(install_path):
            return {"ok": False, "error": f"Install path not found for '{package}'"}

        # Download original
        tmpdir = tempfile.mkdtemp(prefix="bingo-dep-")
        try:
            original_dir = os.path.join(tmpdir, "original")
            os.makedirs(original_dir)
            if not backend.fetch_original(package, version, original_dir):
                return {"ok": False, "error": f"Failed to download original '{package}@{version}'"}

            # Generate unified diff
            diff_lines = _generate_diff(original_dir, install_path, package)
            if not diff_lines:
                return {"ok": False, "error": f"No modifications found in '{package}'"}

            # Determine patch name: default to a sequential patch-NNN name
            # based on how many patches this package already has.
            if not patch_name:
                config = self._load_config()
                pkg_conf = config.get("packages", {}).get(package, {})
                existing = pkg_conf.get("patches", [])
                idx = len(existing) + 1
                patch_name = f"patch-{idx:03d}"

            if not patch_name.endswith(".patch"):
                patch_name += ".patch"

            # Save patch
            pkg_patches_dir = os.path.join(self.patches_dir, package)
            os.makedirs(pkg_patches_dir, exist_ok=True)
            patch_path = os.path.join(pkg_patches_dir, patch_name)
            with open(patch_path, "w") as f:
                f.writelines(diff_lines)

            # Update config (setdefault keeps existing entry; version is
            # refreshed unconditionally to the currently installed one)
            config = self._load_config()
            pkgs = config.setdefault("packages", {})
            pkg = pkgs.setdefault(package, {
                "version": version,
                "manager": backend.name,
                "patches": [],
            })
            pkg["version"] = version
            if patch_name not in pkg["patches"]:
                pkg["patches"].append(patch_name)
            if description:
                pkg.setdefault("descriptions", {})[patch_name] = description
            self._save_config()

            # Auto-add postinstall hook on first patch
            hook_msg = self._ensure_postinstall_hook(backend)

            # One "--- " header per changed file in a unified diff.
            file_count = sum(1 for line in diff_lines if line.startswith("--- "))
            result = {
                "ok": True,
                "package": package,
                "version": version,
                "patch": patch_name,
                "files_changed": file_count,
                "manager": backend.name,
            }
            if hook_msg:
                result["hook"] = hook_msg
            return result
        finally:
            # Always clean up the downloaded original, even on early return.
            shutil.rmtree(tmpdir, ignore_errors=True)

    # ─── Patch Application ───────────────────────────────────────────────

    def apply(self, package: str = "") -> Dict[str, Any]:
        """Apply patches to installed dependencies.

        If package is empty, apply all patches for all tracked packages.
        Returns a summary dict with per-patch results; "ok" is True only
        when no patch failed.
        """
        config = self._load_config()
        packages = config.get("packages", {})

        if package:
            if package not in packages:
                return {"ok": False, "error": f"No patches tracked for '{package}'"}
            targets = {package: packages[package]}
        else:
            targets = packages

        results = []
        for pkg_name, pkg_conf in targets.items():
            backend = self._get_backend(pkg_conf.get("manager", ""))
            if not backend:
                results.append({"package": pkg_name, "ok": False, "error": "Unknown manager"})
                continue

            install_path = backend.get_install_path(pkg_name, self.cwd)
            if not install_path:
                results.append({"package": pkg_name, "ok": False, "error": "Not installed"})
                continue

            for patch_name in pkg_conf.get("patches", []):
                patch_path = os.path.join(self.patches_dir, pkg_name, patch_name)
                if not os.path.isfile(patch_path):
                    results.append({
                        "package": pkg_name, "patch": patch_name,
                        "ok": False, "error": "Patch file missing",
                    })
                    continue

                success, error = _apply_patch(patch_path, install_path)
                results.append({
                    "package": pkg_name, "patch": patch_name,
                    "ok": success, "error": error,
                })

        applied = [r for r in results if r["ok"]]
        failed = [r for r in results if not r["ok"]]
        return {
            "ok": len(failed) == 0,
            "applied": len(applied),
            "failed": len(failed),
            "results": results,
        }

    # ─── Status ──────────────────────────────────────────────────────────

    def status(self) -> Dict[str, Any]:
        """Show status of all tracked dependency patches.

        Per package, status is one of: "ok", "not_installed", or
        "version_mismatch" (installed version differs from the version
        the patches were generated against).
        """
        config = self._load_config()
        packages = config.get("packages", {})
        pkg_status = []

        for pkg_name, pkg_conf in packages.items():
            backend = self._get_backend(pkg_conf.get("manager", ""))
            current_ver = None
            installed = False
            if backend:
                current_ver = backend.get_installed_version(pkg_name, self.cwd)
                installed = current_ver is not None

            patched_ver = pkg_conf.get("version", "?")
            # None (unknown) when not installed; the `installed` guard below
            # runs first, so the mismatch branch only sees a real comparison.
            version_match = (current_ver == patched_ver) if current_ver else None
            patch_count = len(pkg_conf.get("patches", []))

            status = "ok"
            if not installed:
                status = "not_installed"
            elif not version_match:
                status = "version_mismatch"

            pkg_status.append({
                "package": pkg_name,
                "manager": pkg_conf.get("manager", "?"),
                "patched_version": patched_ver,
                "installed_version": current_ver or "?",
                "patches": patch_count,
                "status": status,
            })

        return {
            "ok": True,
            "packages": pkg_status,
            "total_packages": len(pkg_status),
            "total_patches": sum(p["patches"] for p in pkg_status),
        }

    # ─── List ────────────────────────────────────────────────────────────

    def list_patches(self, package: str = "") -> Dict[str, Any]:
        """List all patches, optionally for a specific package.

        An unknown package name falls back to listing everything (no error).
        """
        config = self._load_config()
        packages = config.get("packages", {})
        all_patches = []

        targets = {package: packages[package]} if package and package in packages else packages
        for pkg_name, pkg_conf in targets.items():
            descs = pkg_conf.get("descriptions", {})
            for patch_name in pkg_conf.get("patches", []):
                patch_path = os.path.join(self.patches_dir, pkg_name, patch_name)
                all_patches.append({
                    "package": pkg_name,
                    "patch": patch_name,
                    "description": descs.get(patch_name, ""),
                    "version": pkg_conf.get("version", "?"),
                    "exists": os.path.isfile(patch_path),  # patch file still on disk?
                })

        return {"ok": True, "patches": all_patches}

    # ─── Sync (after npm/pip update) ─────────────────────────────────────

    def sync(self) -> Dict[str, Any]:
        """After a package manager update, re-apply patches and detect conflicts.

        Per package, status is "not_installed", "reapplied" (same version),
        "synced" (new version, all patches applied) or "conflict".  The
        tracked version is bumped only when every patch applied cleanly.
        """
        config = self._load_config()
        packages = config.get("packages", {})
        results = []

        for pkg_name, pkg_conf in packages.items():
            backend = self._get_backend(pkg_conf.get("manager", ""))
            if not backend:
                continue

            current_ver = backend.get_installed_version(pkg_name, self.cwd)
            patched_ver = pkg_conf.get("version", "")

            if not current_ver:
                results.append({
                    "package": pkg_name, "status": "not_installed",
                    "old_version": patched_ver, "new_version": None,
                })
                continue

            if current_ver == patched_ver:
                # Same version — just re-apply
                apply_result = self.apply(pkg_name)
                results.append({
                    "package": pkg_name, "status": "reapplied",
                    "old_version": patched_ver, "new_version": current_ver,
                    "apply": apply_result,
                })
            else:
                # Version changed — try to apply, may conflict
                install_path = backend.get_install_path(pkg_name, self.cwd)
                conflicts = []
                applied = []

                for patch_name in pkg_conf.get("patches", []):
                    patch_path = os.path.join(self.patches_dir, pkg_name, patch_name)
                    if not os.path.isfile(patch_path):
                        conflicts.append({
                            "patch": patch_name,
                            "error": "Patch file missing",
                        })
                        continue

                    success, error = _apply_patch(patch_path, install_path)
                    if success:
                        applied.append(patch_name)
                    else:
                        conflicts.append({
                            "patch": patch_name,
                            "error": error,
                            "hint": f"Package updated {patched_ver} → {current_ver}. "
                                    f"Regenerate patch: bingo-light dep patch {pkg_name}",
                        })

                status = "synced" if not conflicts else "conflict"
                results.append({
                    "package": pkg_name,
                    "status": status,
                    "old_version": patched_ver,
                    "new_version": current_ver,
                    "applied": applied,
                    "conflicts": conflicts,
                })

                # Update tracked version if all patches applied
                if not conflicts:
                    pkg_conf["version"] = current_ver
                    self._save_config()

        total_conflicts = sum(
            len(r.get("conflicts", [])) for r in results
        )
        return {
            "ok": total_conflicts == 0,
            "results": results,
            "total_conflicts": total_conflicts,
            "recommended_action": "ok" if total_conflicts == 0 else "regenerate_patches",
        }

    # ─── Drop ────────────────────────────────────────────────────────────

    def drop(self, package: str, patch_name: str = "") -> Dict[str, Any]:
        """Remove a patch or all patches for a package.

        Deleting a package's last patch also removes its config entry and
        its patches/ subdirectory.  Note: this does not revert the patch
        in the installed package on disk.
        """
        config = self._load_config()
        packages = config.get("packages", {})

        if package not in packages:
            return {"ok": False, "error": f"No patches tracked for '{package}'"}

        if patch_name:
            # Remove specific patch
            if not patch_name.endswith(".patch"):
                patch_name += ".patch"
            patches = packages[package].get("patches", [])
            if patch_name not in patches:
                return {"ok": False, "error": f"Patch '{patch_name}' not found"}
            patches.remove(patch_name)
            patch_path = os.path.join(self.patches_dir, package, patch_name)
            if os.path.isfile(patch_path):
                os.remove(patch_path)
            # Last patch gone: forget the package entirely.
            if not patches:
                del packages[package]
                pkg_dir = os.path.join(self.patches_dir, package)
                if os.path.isdir(pkg_dir):
                    shutil.rmtree(pkg_dir)
        else:
            # Remove all patches for package
            del packages[package]
            pkg_dir = os.path.join(self.patches_dir, package)
            if os.path.isdir(pkg_dir):
                shutil.rmtree(pkg_dir)

        self._save_config()
        return {"ok": True, "package": package, "dropped": patch_name or "all"}
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
# ─── Diff Utilities ──────────────────────────────────────────────────────────
|
|
530
|
+
|
|
531
|
+
|
|
532
|
+
def _generate_diff(original_dir: str, modified_dir: str,
                   label: str) -> List[str]:
    """Generate a unified diff between two directory trees.

    Walks ``original_dir`` and diffs each text file against its counterpart
    under ``modified_dir``.  Files present only in ``modified_dir`` are
    emitted as additions; files missing from ``modified_dir`` are emitted as
    full deletions.  ``label`` becomes the path component in the
    ``a/<label>/…`` / ``b/<label>/…`` diff headers.  Binary files (NUL-byte
    heuristic) and unreadable files are skipped.

    Returns the diff as a list of lines; an empty list means no differences.
    """
    diff_lines: List[str] = []

    # Pass 1: files that exist in the original tree (modified or deleted).
    for root, _dirs, files in os.walk(original_dir):
        for fname in sorted(files):
            orig_path = os.path.join(root, fname)
            rel = os.path.relpath(orig_path, original_dir)
            mod_path = os.path.join(modified_dir, rel)

            # Skip non-text files
            if _is_binary(orig_path):
                continue

            try:
                with open(orig_path) as f:
                    orig_lines = f.readlines()
            except (UnicodeDecodeError, OSError):
                continue

            if os.path.isfile(mod_path):
                try:
                    with open(mod_path) as f:
                        mod_lines = f.readlines()
                except (UnicodeDecodeError, OSError):
                    continue
            else:
                # BUGFIX: a file deleted from the installed copy was previously
                # skipped and never recorded; represent it as a full deletion.
                mod_lines = []

            if orig_lines != mod_lines:
                diff = difflib.unified_diff(
                    orig_lines, mod_lines,
                    fromfile=f"a/{label}/{rel}",
                    tofile=f"b/{label}/{rel}",
                )
                diff_lines.extend(diff)

    # Pass 2: new files in modified that don't exist in original.
    for root, _dirs, files in os.walk(modified_dir):
        for fname in sorted(files):
            mod_path = os.path.join(root, fname)
            rel = os.path.relpath(mod_path, modified_dir)
            orig_path = os.path.join(original_dir, rel)

            if not os.path.isfile(orig_path) and not _is_binary(mod_path):
                try:
                    with open(mod_path) as f:
                        mod_lines = f.readlines()
                    diff = difflib.unified_diff(
                        [], mod_lines,
                        fromfile=f"a/{label}/{rel}",
                        tofile=f"b/{label}/{rel}",
                    )
                    diff_lines.extend(diff)
                except (UnicodeDecodeError, OSError):
                    continue

    return diff_lines
|
|
589
|
+
|
|
590
|
+
|
|
591
|
+
def _apply_patch(patch_path: str, target_dir: str) -> Tuple[bool, str]:
    """Apply a unified diff patch to a directory. Returns (success, error).

    Prefers the system ``patch`` command (most reliable); the limited
    pure-Python fallback is used only when ``patch`` is not on PATH.

    Patches use a/<pkg>/<file> headers, so ``-p2`` strips the ``a/<pkg>/``
    prefix; ``--forward`` skips already-applied hunks and ``--batch``
    suppresses interactive prompts.
    """
    if shutil.which("patch"):
        with open(patch_path) as pf:
            result = subprocess.run(
                ["patch", "-p2", "-d", target_dir, "--forward", "--batch"],
                stdin=pf,
                capture_output=True, text=True,
            )
        if result.returncode == 0:
            return (True, "")
        # BUGFIX: the old code followed the zero-returncode early return with
        # `if "FAILED" in stdout or returncode != 0:` — the second operand was
        # always true at that point, making the check dead logic.  Any
        # non-zero exit is a failure; report patch's own output.
        return (False, result.stdout.strip() or result.stderr.strip())

    # Fallback: Python-based patch application
    return _apply_patch_python(patch_path, target_dir)
|
|
610
|
+
|
|
611
|
+
|
|
612
|
+
def _apply_patch_python(patch_path: str, target_dir: str) -> Tuple[bool, str]:
|
|
613
|
+
"""Pure-Python patch application (basic unified diff support)."""
|
|
614
|
+
try:
|
|
615
|
+
with open(patch_path) as f:
|
|
616
|
+
patch_text = f.read()
|
|
617
|
+
except OSError as e:
|
|
618
|
+
return (False, str(e))
|
|
619
|
+
|
|
620
|
+
# Parse hunks
|
|
621
|
+
current_file = None
|
|
622
|
+
hunks: Dict[str, List[str]] = {}
|
|
623
|
+
|
|
624
|
+
for line in patch_text.splitlines(keepends=True):
|
|
625
|
+
if line.startswith("+++ b/"):
|
|
626
|
+
# Extract relative path after the package name prefix
|
|
627
|
+
parts = line[6:].strip().split("/", 1)
|
|
628
|
+
current_file = parts[1] if len(parts) > 1 else parts[0]
|
|
629
|
+
hunks.setdefault(current_file, [])
|
|
630
|
+
elif current_file and (line.startswith("+") or line.startswith("-")
|
|
631
|
+
or line.startswith(" ") or line.startswith("@@")):
|
|
632
|
+
hunks[current_file].append(line)
|
|
633
|
+
|
|
634
|
+
if not hunks:
|
|
635
|
+
return (False, "No hunks found in patch")
|
|
636
|
+
|
|
637
|
+
# For now, just verify the files exist — full Python patching is complex
|
|
638
|
+
missing = [f for f in hunks if not os.path.isfile(os.path.join(target_dir, f))]
|
|
639
|
+
if missing:
|
|
640
|
+
return (False, f"Files not found: {', '.join(missing[:3])}")
|
|
641
|
+
|
|
642
|
+
return (False, "Python-only patch application not fully implemented; install 'patch' command")
|
|
643
|
+
|
|
644
|
+
|
|
645
|
+
def _is_binary(path: str) -> bool:
|
|
646
|
+
"""Heuristic: check if a file is binary."""
|
|
647
|
+
try:
|
|
648
|
+
with open(path, "rb") as f:
|
|
649
|
+
chunk = f.read(8192)
|
|
650
|
+
return b"\x00" in chunk
|
|
651
|
+
except OSError:
|
|
652
|
+
return True
|