bingo-light 2.1.1 → 2.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1012 @@
+ """
+ bingo_core.dep — Dependency patching engine.
+
+ Maintains patches on top of npm/pip packages. Same mental model as patching
+ a git fork: upstream is the published package, your patches sit on top.
+
+ Storage layout:
+     .bingo-deps/
+       config.json        # { "packages": { "some-lib": { "version": "2.1.0", "manager": "npm" } } }
+       patches/
+         some-lib/
+           fix-auth.patch
+           add-logging.patch
+         another-lib/
+           typo-fix.patch
+
+ Python 3.8+ stdlib only. No pip dependencies.
+ """
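+
+ # Typical workflow (sketch; CLI verbs assumed from the method names below):
+ #   1. Edit files directly under node_modules/<pkg> (or the pip install dir).
+ #   2. `bingo-light dep patch <pkg>` snapshots the edits as a .patch file.
+ #   3. `bingo-light dep apply` re-applies all patches (also runs from the
+ #      postinstall hook this module installs).
+ #   4. `bingo-light dep sync` re-applies after an upgrade and reports conflicts.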
+
+ from __future__ import annotations
+
+ import difflib
+ import json
+ import os
+ import shutil
+ import subprocess
+ import tempfile
+ from dataclasses import dataclass, field
+ from datetime import datetime, timezone
+ from typing import Any, Dict, List, Optional, Tuple
+
+
+ # ─── Constants ───────────────────────────────────────────────────────────────
+
+ DEP_DIR = ".bingo-deps"
+ DEP_CONFIG = "config.json"
+ PATCHES_DIR = "patches"
+
+
+ # ─── Data Models ─────────────────────────────────────────────────────────────
+
+
+ @dataclass
+ class DepPatch:
+     """A single patch on a dependency."""
+     package: str
+     name: str              # patch file name (e.g. "fix-auth.patch")
+     path: str              # full path to .patch file
+     description: str = ""
+
+
+ @dataclass
+ class DepPackage:
+     """A patched dependency."""
+     name: str
+     version: str           # version the patches were generated against
+     manager: str           # "npm" | "pip"
+     patches: List[DepPatch] = field(default_factory=list)
+
+
+ @dataclass
+ class DepConflict:
+     """Conflict when applying a patch after upstream update."""
+     package: str
+     patch_name: str
+     old_version: str
+     new_version: str
+     error: str
+     hint: str = ""
+
+
+ # ─── Backend Interface ───────────────────────────────────────────────────────
+
+
+ class DepBackend:
+     """Abstract backend for a package manager (npm, pip, etc.)."""
+
+     name: str = "unknown"
+
+     def detect(self, cwd: str) -> bool:
+         """Return True if this backend applies to the project at cwd."""
+         raise NotImplementedError
+
+     def get_installed_version(self, package: str, cwd: str) -> Optional[str]:
+         """Get the currently installed version of a package."""
+         raise NotImplementedError
+
+     def get_install_path(self, package: str, cwd: str) -> Optional[str]:
+         """Get the filesystem path where the package is installed."""
+         raise NotImplementedError
+
+     def fetch_original(self, package: str, version: str, dest: str) -> bool:
+         """Download the original (unpatched) package source to dest dir."""
+         raise NotImplementedError
+
+     def list_files(self, package: str, cwd: str) -> List[str]:
+         """List all files in the installed package (relative paths)."""
+         raise NotImplementedError
+
+     def install_hook_command(self) -> str:
+         """Return the postinstall command string for auto-applying patches."""
+         return "bingo-light dep apply"
+
+
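+ # A concrete backend fills in the five methods above. Hypothetical sketch
+ # (illustrative only; the shipped backends live in dep_npm and dep_pip):
+ #
+ #     class CargoBackend(DepBackend):
+ #         name = "cargo"
+ #
+ #         def detect(self, cwd: str) -> bool:
+ #             return os.path.isfile(os.path.join(cwd, "Cargo.toml"))
+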
+ # ─── Core Engine ─────────────────────────────────────────────────────────────
+
+
+ class DepManager:
+     """Core dependency patching engine."""
+
+     def __init__(self, cwd: str = "."):
+         self.cwd = os.path.abspath(cwd)
+         self.dep_dir = os.path.join(self.cwd, DEP_DIR)
+         self.config_path = os.path.join(self.dep_dir, DEP_CONFIG)
+         self.patches_dir = os.path.join(self.dep_dir, PATCHES_DIR)
+         self._backends: List[DepBackend] = []
+         self._config: Optional[Dict] = None
+
+         # Register backends (import lazily to avoid circular deps)
+         from bingo_core.dep_npm import NpmBackend
+         from bingo_core.dep_pip import PipBackend
+         self._backends = [NpmBackend(), PipBackend()]
+
+     # ─── Config ──────────────────────────────────────────────────────────
+
+     def _load_config(self) -> Dict:
+         if self._config is not None:
+             return self._config
+         if os.path.isfile(self.config_path):
+             with open(self.config_path) as f:
+                 self._config = json.load(f)
+         else:
+             self._config = {"packages": {}}
+         return self._config
+
+     def _save_config(self) -> None:
+         os.makedirs(self.dep_dir, exist_ok=True)
+         with open(self.config_path, "w") as f:
+             json.dump(self._config or {}, f, indent=2)
+             f.write("\n")
+
+     # ─── Backend Detection ───────────────────────────────────────────────
+
+     def _detect_backend(self, package: str) -> Optional[DepBackend]:
+         """Auto-detect which backend manages a package."""
+         for b in self._backends:
+             if b.detect(self.cwd) and b.get_installed_version(package, self.cwd):
+                 return b
+         return None
+
+     def _get_backend(self, manager: str) -> Optional[DepBackend]:
+         """Get backend by name."""
+         for b in self._backends:
+             if b.name == manager:
+                 return b
+         return None
+
+     # ─── Postinstall Hook ─────────────────────────────────────────────────
+
+     def _ensure_postinstall_hook(self, backend: DepBackend) -> Optional[str]:
+         """Add 'bingo-light dep apply' to package.json postinstall if npm project.
+
+         Returns a message if hook was added, None otherwise.
+         """
+         if backend.name != "npm":
+             return None
+
+         pkg_json_path = os.path.join(self.cwd, "package.json")
+         if not os.path.isfile(pkg_json_path):
+             return None
+
+         try:
+             with open(pkg_json_path) as f:
+                 data = json.load(f)
+         except (json.JSONDecodeError, OSError):
+             return None
+
+         scripts = data.setdefault("scripts", {})
+         # Find bingo-light binary — use absolute path for reliability
+         bl_bin = shutil.which("bingo-light")
+         if bl_bin:
+             hook_cmd = f"{bl_bin} dep apply"
+         else:
+             # Fallback: resolve via npx (works wherever npm does; may fetch the package)
+             hook_cmd = "npx --yes bingo-light dep apply"
+
+         existing = scripts.get("postinstall", "")
+         if hook_cmd in existing:
+             return None  # already present
+
+         if existing:
+             scripts["postinstall"] = f"{existing} && {hook_cmd}"
+         else:
+             scripts["postinstall"] = hook_cmd
+
+         try:
+             with open(pkg_json_path, "w") as f:
+                 json.dump(data, f, indent=2)
+                 f.write("\n")
+         except OSError:
+             return None
+
+         return "postinstall hook added to package.json"
+
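+     # After the hook is added, package.json ends up with (sketch; an existing
+     # postinstall step is preserved and chained):
+     #
+     #     "scripts": {
+     #       "postinstall": "existing-step && /usr/local/bin/bingo-light dep apply"
+     #     }
+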
+     # ─── Patch Generation ────────────────────────────────────────────────
+
+     def patch(self, package: str, patch_name: str = "",
+               description: str = "") -> Dict[str, Any]:
+         """Generate a patch for a modified dependency.
+
+         1. Detect the package manager
+         2. Find the installed (modified) version
+         3. Download the original version
+         4. Generate a unified diff
+         5. Save as .patch file
+         """
+         backend = self._detect_backend(package)
+         if not backend:
+             return {"ok": False, "error": f"Package '{package}' not found in any package manager"}
+
+         version = backend.get_installed_version(package, self.cwd)
+         if not version:
+             return {"ok": False, "error": f"Cannot determine version for '{package}'"}
+
+         install_path = backend.get_install_path(package, self.cwd)
+         if not install_path or not os.path.isdir(install_path):
+             return {"ok": False, "error": f"Install path not found for '{package}'"}
+
+         # Download original
+         tmpdir = tempfile.mkdtemp(prefix="bingo-dep-")
+         try:
+             original_dir = os.path.join(tmpdir, "original")
+             os.makedirs(original_dir)
+             if not backend.fetch_original(package, version, original_dir):
+                 return {"ok": False, "error": f"Failed to download original '{package}@{version}'"}
+
+             # Generate unified diff
+             diff_lines = _generate_diff(original_dir, install_path, package)
+             if not diff_lines:
+                 return {"ok": False, "error": f"No modifications found in '{package}'"}
+
+             # Determine patch name
+             if not patch_name:
+                 config = self._load_config()
+                 pkg_conf = config.get("packages", {}).get(package, {})
+                 existing = pkg_conf.get("patches", [])
+                 idx = len(existing) + 1
+                 patch_name = f"patch-{idx:03d}"
+
+             if not patch_name.endswith(".patch"):
+                 patch_name += ".patch"
+
+             # Save patch
+             pkg_patches_dir = os.path.join(self.patches_dir, package)
+             os.makedirs(pkg_patches_dir, exist_ok=True)
+             patch_path = os.path.join(pkg_patches_dir, patch_name)
+             with open(patch_path, "w") as f:
+                 f.writelines(diff_lines)
+
+             # Update config
+             config = self._load_config()
+             pkgs = config.setdefault("packages", {})
+             pkg = pkgs.setdefault(package, {
+                 "version": version,
+                 "manager": backend.name,
+                 "patches": [],
+             })
+             pkg["version"] = version
+             if patch_name not in pkg["patches"]:
+                 pkg["patches"].append(patch_name)
+             if description:
+                 pkg.setdefault("descriptions", {})[patch_name] = description
+             self._save_config()
+
+             # Auto-add postinstall hook on first patch
+             hook_msg = self._ensure_postinstall_hook(backend)
+
+             # Count "+++ b/" headers: one per changed or added file
+             file_count = sum(1 for line in diff_lines if line.startswith("+++ b/"))
+             result = {
+                 "ok": True,
+                 "package": package,
+                 "version": version,
+                 "patch": patch_name,
+                 "files_changed": file_count,
+                 "manager": backend.name,
+             }
+             if hook_msg:
+                 result["hook"] = hook_msg
+             return result
+         finally:
+             shutil.rmtree(tmpdir, ignore_errors=True)
+
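+     # Example (sketch, illustrative names): after hand-editing the installed
+     # some-lib, DepManager().patch("some-lib", "fix-auth") returns roughly:
+     #     {"ok": True, "package": "some-lib", "version": "2.1.0",
+     #      "patch": "fix-auth.patch", "files_changed": 1, "manager": "npm"}
+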
+     # ─── Patch Application ───────────────────────────────────────────────
+
+     def apply(self, package: str = "") -> Dict[str, Any]:
+         """Apply patches to installed dependencies.
+
+         If package is empty, apply all patches for all tracked packages.
+         """
+         config = self._load_config()
+         packages = config.get("packages", {})
+
+         if package:
+             if package not in packages:
+                 return {"ok": False, "error": f"No patches tracked for '{package}'"}
+             targets = {package: packages[package]}
+         else:
+             targets = packages
+
+         results = []
+         for pkg_name, pkg_conf in targets.items():
+             backend = self._get_backend(pkg_conf.get("manager", ""))
+             if not backend:
+                 results.append({"package": pkg_name, "ok": False, "error": "Unknown manager"})
+                 continue
+
+             install_path = backend.get_install_path(pkg_name, self.cwd)
+             if not install_path:
+                 results.append({"package": pkg_name, "ok": False, "error": "Not installed"})
+                 continue
+
+             for patch_name in pkg_conf.get("patches", []):
+                 patch_path = os.path.join(self.patches_dir, pkg_name, patch_name)
+                 if not os.path.isfile(patch_path):
+                     results.append({
+                         "package": pkg_name, "patch": patch_name,
+                         "ok": False, "error": "Patch file missing",
+                     })
+                     continue
+
+                 success, error = _apply_patch(patch_path, install_path)
+                 results.append({
+                     "package": pkg_name, "patch": patch_name,
+                     "ok": success, "error": error,
+                 })
+
+         applied = [r for r in results if r["ok"]]
+         failed = [r for r in results if not r["ok"]]
+         return {
+             "ok": len(failed) == 0,
+             "applied": len(applied),
+             "failed": len(failed),
+             "results": results,
+         }
+
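+     # Return shape (illustrative values):
+     #     {"ok": False, "applied": 1, "failed": 1, "results": [
+     #         {"package": "some-lib", "patch": "fix-auth.patch", "ok": True, "error": ""},
+     #         {"package": "another-lib", "patch": "typo-fix.patch", "ok": False,
+     #          "error": "Hunk failed ..."}]}
+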
+     # ─── Status ──────────────────────────────────────────────────────────
+
+     def status(self) -> Dict[str, Any]:
+         """Show status of all tracked dependency patches."""
+         config = self._load_config()
+         packages = config.get("packages", {})
+         pkg_status = []
+
+         for pkg_name, pkg_conf in packages.items():
+             backend = self._get_backend(pkg_conf.get("manager", ""))
+             current_ver = None
+             installed = False
+             if backend:
+                 current_ver = backend.get_installed_version(pkg_name, self.cwd)
+                 installed = current_ver is not None
+
+             patched_ver = pkg_conf.get("version", "?")
+             version_match = (current_ver == patched_ver) if current_ver else None
+             patch_count = len(pkg_conf.get("patches", []))
+
+             status = "ok"
+             if not installed:
+                 status = "not_installed"
+             elif not version_match:
+                 status = "version_mismatch"
+
+             pkg_status.append({
+                 "package": pkg_name,
+                 "manager": pkg_conf.get("manager", "?"),
+                 "patched_version": patched_ver,
+                 "installed_version": current_ver or "?",
+                 "patches": patch_count,
+                 "status": status,
+             })
+
+         return {
+             "ok": True,
+             "packages": pkg_status,
+             "total_packages": len(pkg_status),
+             "total_patches": sum(p["patches"] for p in pkg_status),
+         }
+
+     # ─── List ────────────────────────────────────────────────────────────
+
+     def list_patches(self, package: str = "") -> Dict[str, Any]:
+         """List all patches, optionally for a specific package."""
+         config = self._load_config()
+         packages = config.get("packages", {})
+         all_patches = []
+
+         # An unknown package name yields an empty list rather than everything
+         if package:
+             targets = {package: packages[package]} if package in packages else {}
+         else:
+             targets = packages
+         for pkg_name, pkg_conf in targets.items():
+             descs = pkg_conf.get("descriptions", {})
+             for patch_name in pkg_conf.get("patches", []):
+                 patch_path = os.path.join(self.patches_dir, pkg_name, patch_name)
+                 all_patches.append({
+                     "package": pkg_name,
+                     "patch": patch_name,
+                     "description": descs.get(patch_name, ""),
+                     "version": pkg_conf.get("version", "?"),
+                     "exists": os.path.isfile(patch_path),
+                 })
+
+         return {"ok": True, "patches": all_patches}
+
+     # ─── Sync (after npm/pip update) ─────────────────────────────────────
+
+     def sync(self) -> Dict[str, Any]:
+         """After a package manager update, re-apply patches and detect conflicts."""
+         config = self._load_config()
+         packages = config.get("packages", {})
+         results = []
+
+         for pkg_name, pkg_conf in packages.items():
+             backend = self._get_backend(pkg_conf.get("manager", ""))
+             if not backend:
+                 continue
+
+             current_ver = backend.get_installed_version(pkg_name, self.cwd)
+             patched_ver = pkg_conf.get("version", "")
+             # Initialized before the branch below so the final version-update
+             # check is always defined (the re-apply branch never sets these)
+             conflicts: List[dict] = []
+             applied: List[str] = []
+
+             if not current_ver:
+                 results.append({
+                     "package": pkg_name, "status": "not_installed",
+                     "old_version": patched_ver, "new_version": None,
+                 })
+                 continue
+
+             if current_ver == patched_ver:
+                 # Same version — just re-apply
+                 apply_result = self.apply(pkg_name)
+                 results.append({
+                     "package": pkg_name, "status": "reapplied",
+                     "old_version": patched_ver, "new_version": current_ver,
+                     "apply": apply_result,
+                 })
+             else:
+                 # Version changed — try to apply, may conflict
+                 install_path = backend.get_install_path(pkg_name, self.cwd)
+
+                 for patch_name in pkg_conf.get("patches", []):
+                     patch_path = os.path.join(self.patches_dir, pkg_name, patch_name)
+                     if not os.path.isfile(patch_path):
+                         conflicts.append({
+                             "patch": patch_name,
+                             "error": "Patch file missing",
+                         })
+                         continue
+
+                     success, error = _apply_patch(patch_path, install_path)
+                     if success:
+                         applied.append(patch_name)
+                     else:
+                         conflicts.append({
+                             "patch": patch_name,
+                             "error": error,
+                             "hint": f"Package updated {patched_ver} → {current_ver}. "
+                                     f"Regenerate patch: bingo-light dep patch {pkg_name}",
+                         })
+
+                 status = "synced" if not conflicts else "conflict"
+                 results.append({
+                     "package": pkg_name,
+                     "status": status,
+                     "old_version": patched_ver,
+                     "new_version": current_ver,
+                     "applied": applied,
+                     "conflicts": conflicts,
+                 })
+
+             # Update tracked version if all patches applied
+             if not conflicts:
+                 pkg_conf["version"] = current_ver
+                 self._save_config()
+
+         total_conflicts = sum(
+             len(r.get("conflicts", [])) for r in results
+         )
+         return {
+             "ok": total_conflicts == 0,
+             "results": results,
+             "total_conflicts": total_conflicts,
+             "recommended_action": "ok" if total_conflicts == 0 else "regenerate_patches",
+         }
+
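+     # A conflicting entry in `results` looks roughly like (illustrative values):
+     #     {"package": "some-lib", "status": "conflict",
+     #      "old_version": "2.1.0", "new_version": "2.2.0",
+     #      "applied": [], "conflicts": [{"patch": "fix-auth.patch",
+     #          "error": "...", "hint": "Package updated 2.1.0 → 2.2.0. ..."}]}
+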
+     # ─── Drop ────────────────────────────────────────────────────────────
+
+     def drop(self, package: str, patch_name: str = "") -> Dict[str, Any]:
+         """Remove a patch or all patches for a package."""
+         config = self._load_config()
+         packages = config.get("packages", {})
+
+         if package not in packages:
+             return {"ok": False, "error": f"No patches tracked for '{package}'"}
+
+         if patch_name:
+             # Remove specific patch
+             if not patch_name.endswith(".patch"):
+                 patch_name += ".patch"
+             patches = packages[package].get("patches", [])
+             if patch_name not in patches:
+                 return {"ok": False, "error": f"Patch '{patch_name}' not found"}
+             patches.remove(patch_name)
+             patch_path = os.path.join(self.patches_dir, package, patch_name)
+             if os.path.isfile(patch_path):
+                 os.remove(patch_path)
+             if not patches:
+                 del packages[package]
+                 pkg_dir = os.path.join(self.patches_dir, package)
+                 if os.path.isdir(pkg_dir):
+                     shutil.rmtree(pkg_dir)
+         else:
+             # Remove all patches for package
+             del packages[package]
+             pkg_dir = os.path.join(self.patches_dir, package)
+             if os.path.isdir(pkg_dir):
+                 shutil.rmtree(pkg_dir)
+
+         self._save_config()
+         return {"ok": True, "package": package, "dropped": patch_name or "all"}
+
+     # ─── Override Management ─────────────────────────────────────────────
+
+     def _read_package_json(self) -> Optional[dict]:
+         """Read package.json from cwd. Returns None if not found."""
+         pj_path = os.path.join(self.cwd, "package.json")
+         if not os.path.isfile(pj_path):
+             return None
+         try:
+             with open(pj_path) as f:
+                 return json.load(f)
+         except (json.JSONDecodeError, IOError):
+             return None
+
+     def _write_package_json(self, data: dict) -> None:
+         """Atomically write package.json with 2-space indent."""
+         pj_path = os.path.join(self.cwd, "package.json")
+         fd, tmp = tempfile.mkstemp(suffix=".tmp", dir=self.cwd)
+         try:
+             with os.fdopen(fd, "w") as f:
+                 json.dump(data, f, indent=2)
+                 f.write("\n")
+             os.replace(tmp, pj_path)
+         except Exception:
+             try:
+                 os.unlink(tmp)
+             except FileNotFoundError:
+                 pass
+             raise
+
+     def _load_overrides_tracking(self) -> dict:
+         """Load .bingo-deps/overrides.json tracking data."""
+         path = os.path.join(self.cwd, DEP_DIR, "overrides.json")
+         if not os.path.isfile(path):
+             return {"overrides": {}}
+         try:
+             with open(path) as f:
+                 return json.load(f)
+         except (json.JSONDecodeError, IOError):
+             return {"overrides": {}}
+
+     def _save_overrides_tracking(self, data: dict) -> None:
+         """Write .bingo-deps/overrides.json."""
+         os.makedirs(os.path.join(self.cwd, DEP_DIR), exist_ok=True)
+         path = os.path.join(self.cwd, DEP_DIR, "overrides.json")
+         with open(path, "w") as f:
+             json.dump(data, f, indent=2)
+             f.write("\n")
+
+     def override_list(self) -> dict:
+         """List npm/yarn overrides with tracked reasons.
+
+         Returns {"ok": True, "overrides": [...], "count": N}
+         """
+         pj = self._read_package_json()
+         if pj is None:
+             return {"ok": True, "overrides": [], "count": 0, "note": "No package.json"}
+
+         # npm uses "overrides", yarn uses "resolutions"
+         overrides = pj.get("overrides", {})
+         resolutions = pj.get("resolutions", {})
+         all_ovs = {}
+         for pkg, ver in overrides.items():
+             all_ovs[pkg] = {"version": ver if isinstance(ver, str) else json.dumps(ver), "source": "overrides"}
+         for pkg, ver in resolutions.items():
+             if pkg not in all_ovs:
+                 all_ovs[pkg] = {"version": ver, "source": "resolutions"}
+
+         # Merge with tracking data
+         tracking = self._load_overrides_tracking()
+         result = []
+         for pkg, info in all_ovs.items():
+             tracked = tracking.get("overrides", {}).get(pkg, {})
+             result.append({
+                 "package": pkg,
+                 "version": info["version"],
+                 "source": info["source"],
+                 "reason": tracked.get("reason", ""),
+                 "created": tracked.get("created", ""),
+                 "tracked": bool(tracked),
+             })
+
+         return {"ok": True, "overrides": result, "count": len(result)}
+
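+     # The two package.json fields read above (values illustrative):
+     #     "overrides":   { "some-lib": "2.1.0" }    # npm (>= 8.3)
+     #     "resolutions": { "some-lib": "2.1.0" }    # yarn classic
+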
+     def override_check(self) -> dict:
+         """Check if npm overrides are still needed.
+
+         Reads package-lock.json to determine what version the tree resolves to.
+         Returns {"ok": True, "overrides": [{"package", "status", "reason"}]}
+         """
+         pj = self._read_package_json()
+         if pj is None:
+             return {"ok": True, "overrides": [], "count": 0}
+
+         overrides = pj.get("overrides", {})
+         resolutions = pj.get("resolutions", {})
+         all_ovs = dict(overrides)
+         all_ovs.update(resolutions)
+
+         if not all_ovs:
+             return {"ok": True, "overrides": [], "count": 0}
+
+         # Try reading package-lock.json for resolved versions
+         lock_path = os.path.join(self.cwd, "package-lock.json")
+         lock_data: Optional[dict] = None
+         if os.path.isfile(lock_path):
+             try:
+                 with open(lock_path) as f:
+                     lock_data = json.load(f)
+             except (json.JSONDecodeError, IOError):
+                 pass
+
+         results = []
+         for pkg, override_ver in all_ovs.items():
+             if not isinstance(override_ver, str):
+                 results.append({
+                     "package": pkg,
+                     "override_version": json.dumps(override_ver),
+                     "status": "complex",
+                     "reason": "Nested override — manual check required",
+                 })
+                 continue
+
+             # Look up in lock file
+             resolved_ver = None
+             if lock_data:
+                 # npm v2/v3 lock format: packages["node_modules/<pkg>"].version
+                 packages = lock_data.get("packages", {})
+                 lock_key = f"node_modules/{pkg}"
+                 if lock_key in packages:
+                     resolved_ver = packages[lock_key].get("version", "")
+
+             if resolved_ver is None:
+                 results.append({
+                     "package": pkg,
+                     "override_version": override_ver,
+                     "status": "unknown",
+                     "reason": "Cannot determine resolved version",
+                 })
+             elif resolved_ver == override_ver:
+                 # The lock resolves to the override version; without checking
+                 # what the parent packages originally required, we can only
+                 # flag the override as possibly redundant
+                 results.append({
+                     "package": pkg,
+                     "override_version": override_ver,
+                     "resolved_version": resolved_ver,
+                     "status": "redundant",
+                     "reason": "Lock resolves to override version — may no longer be needed",
+                 })
+             else:
+                 results.append({
+                     "package": pkg,
+                     "override_version": override_ver,
+                     "resolved_version": resolved_ver,
+                     "status": "active",
+                     "reason": f"Override forcing {override_ver} (tree wants {resolved_ver})",
+                 })
+
+         redundant = sum(1 for r in results if r["status"] == "redundant")
+         return {"ok": True, "overrides": results, "count": len(results), "redundant": redundant}
+
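+     # Lock entry consulted above (npm lockfileVersion 2/3; values illustrative):
+     #     "packages": {
+     #       "node_modules/some-lib": { "version": "2.1.0", ... }
+     #     }
+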
+     def override_add(self, package: str, version: str, reason: str = "") -> dict:
+         """Add an npm override with reason tracking.
+
+         Returns {"ok": True, "package": ..., "version": ...}
+         """
+         pj = self._read_package_json()
+         if pj is None:
+             return {"ok": False, "error": "No package.json found"}
+
+         # Detect yarn vs npm
+         yarn_lock = os.path.isfile(os.path.join(self.cwd, "yarn.lock"))
+         ov_field = "resolutions" if yarn_lock else "overrides"
+
+         if ov_field not in pj:
+             pj[ov_field] = {}
+         pj[ov_field][package] = version
+         self._write_package_json(pj)
+
+         # Track reason
+         tracking = self._load_overrides_tracking()
+         tracking.setdefault("overrides", {})[package] = {
+             "version": version,
+             "reason": reason,
+             "created": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+             "manager_field": ov_field,
+         }
+         self._save_overrides_tracking(tracking)
+
+         return {"ok": True, "package": package, "version": version, "field": ov_field}
+
+     def override_drop(self, package: str) -> dict:
+         """Remove an npm override.
+
+         Returns {"ok": True, "package": ..., "dropped": True}
+         """
+         pj = self._read_package_json()
+         if pj is None:
+             return {"ok": False, "error": "No package.json found"}
+
+         dropped = False
+         for pj_key in ("overrides", "resolutions"):
+             if pj_key in pj and package in pj[pj_key]:
+                 del pj[pj_key][package]
+                 if not pj[pj_key]:
+                     del pj[pj_key]
+                 dropped = True
+         if dropped:
+             self._write_package_json(pj)
+
+         # Remove tracking
+         tracking = self._load_overrides_tracking()
+         if package in tracking.get("overrides", {}):
+             del tracking["overrides"][package]
+             self._save_overrides_tracking(tracking)
+
+         return {"ok": True, "package": package, "dropped": dropped}
+
+
+ # ─── Diff Utilities ──────────────────────────────────────────────────────────
+
+
+ def _generate_diff(original_dir: str, modified_dir: str,
+                    label: str) -> List[str]:
+     """Generate unified diff between two directory trees.
+
+     Note: files deleted from the modified tree are not captured.
+     """
+     diff_lines: List[str] = []
+
+     for root, _dirs, files in os.walk(original_dir):
+         for fname in sorted(files):
+             orig_path = os.path.join(root, fname)
+             rel = os.path.relpath(orig_path, original_dir)
+             mod_path = os.path.join(modified_dir, rel)
+
+             # Skip non-text files
+             if _is_binary(orig_path):
+                 continue
+
+             try:
+                 with open(orig_path) as f:
+                     orig_lines = f.readlines()
+             except (UnicodeDecodeError, OSError):
+                 continue
+
+             if os.path.isfile(mod_path):
+                 try:
+                     with open(mod_path) as f:
+                         mod_lines = f.readlines()
+                 except (UnicodeDecodeError, OSError):
+                     continue
+
+                 if orig_lines != mod_lines:
+                     diff = difflib.unified_diff(
+                         orig_lines, mod_lines,
+                         fromfile=f"a/{label}/{rel}",
+                         tofile=f"b/{label}/{rel}",
+                     )
+                     diff_lines.extend(diff)
+
+     # Check for new files in modified that don't exist in original
+     for root, _dirs, files in os.walk(modified_dir):
+         for fname in sorted(files):
+             mod_path = os.path.join(root, fname)
+             rel = os.path.relpath(mod_path, modified_dir)
+             orig_path = os.path.join(original_dir, rel)
+
+             if not os.path.isfile(orig_path) and not _is_binary(mod_path):
+                 try:
+                     with open(mod_path) as f:
+                         mod_lines = f.readlines()
+                     diff = difflib.unified_diff(
+                         [], mod_lines,
+                         fromfile="/dev/null",  # marks the file as newly created
+                         tofile=f"b/{label}/{rel}",
+                     )
+                     diff_lines.extend(diff)
+                 except (UnicodeDecodeError, OSError):
+                     continue
+
+     return diff_lines
+
+
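+ # For a file lib/auth.js modified inside package some-lib, the emitted headers
+ # look like (illustrative):
+ #     --- a/some-lib/lib/auth.js
+ #     +++ b/some-lib/lib/auth.js
+ #     @@ -10,7 +10,8 @@
+ # which is why _apply_patch below strips two path components (-p2).
+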
+ def _apply_patch(patch_path: str, target_dir: str) -> Tuple[bool, str]:
+     """Apply a unified diff patch to a directory. Returns (success, error)."""
+     # Try system `patch` command first (most reliable)
+     # Patches use a/<pkg>/<file> format, so -p2 strips the a/<pkg>/ prefix
+     if shutil.which("patch"):
+         with open(patch_path) as pf:
+             result = subprocess.run(
+                 ["patch", "-p2", "-d", target_dir, "--forward", "--batch"],
+                 stdin=pf,
+                 capture_output=True, text=True,
+             )
+         if result.returncode == 0:
+             return (True, "")
+         # Nonzero exit: hunks failed or applied only partially
+         return (False, result.stdout.strip() or result.stderr.strip())
+
+     # Fallback: Python-based patch application
+     return _apply_patch_python(patch_path, target_dir)
+
+
+ def _apply_patch_python(patch_path: str, target_dir: str) -> Tuple[bool, str]:
+     """Pure-Python unified diff patch application.
+
+     Parses unified diff format and applies hunks to target files.
+     Supports: context matching, fuzzy offset (±3 lines), new/deleted files.
+     Processes hunks in reverse order to avoid line number cascading.
+     """
+     import re
+
+     try:
+         with open(patch_path) as f:
+             patch_lines = f.readlines()
+     except OSError as e:
+         return (False, str(e))
+
+     # Parse into file-level diffs
+     file_diffs: List[dict] = []
+     i = 0
+     while i < len(patch_lines):
+         line = patch_lines[i]
+
+         # Find --- a/... and +++ b/... pair
+         if line.startswith("--- "):
+             if i + 1 < len(patch_lines) and patch_lines[i + 1].startswith("+++ "):
+                 old_path = line[4:].strip()
+                 new_path_raw = patch_lines[i + 1][4:].strip()
+
+                 is_new = old_path.endswith("/dev/null")
+                 is_delete = new_path_raw.endswith("/dev/null")
+
+                 # Strip the p2-style prefix (a/<pkg>/<file> -> <file>). For a
+                 # delete the path must come from the old side, since the new
+                 # side is /dev/null; otherwise use the new side.
+                 path_raw = old_path if is_delete else new_path_raw
+                 parts = path_raw[2:].split("/", 1)  # drop "a/" or "b/"
+                 rel_path = parts[1] if len(parts) > 1 else parts[0]
+
+                 # Collect hunks for this file
+                 hunks: List[dict] = []
+                 i += 2
+                 while i < len(patch_lines):
+                     hunk_line = patch_lines[i]
+                     m = re.match(
+                         r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@',
+                         hunk_line,
+                     )
+                     if m:
+                         old_start = int(m.group(1))
+                         old_count = int(m.group(2)) if m.group(2) is not None else 1
+                         new_start = int(m.group(3))
+                         new_count = int(m.group(4)) if m.group(4) is not None else 1
+                         hunk_body: List[str] = []
+                         i += 1
+                         while i < len(patch_lines):
+                             hl = patch_lines[i]
+                             if hl.startswith((" ", "+", "-")):
+                                 hunk_body.append(hl)
+                                 i += 1
+                             elif hl.startswith("\\ No newline"):
+                                 i += 1  # skip no-newline marker
+                             else:
+                                 break
+                         hunks.append({
+                             "old_start": old_start,
+                             "old_count": old_count,
+                             "new_start": new_start,
+                             "new_count": new_count,
+                             "lines": hunk_body,
+                         })
+                     elif hunk_line.startswith("--- ") or hunk_line.startswith("diff "):
+                         break  # next file diff
+                     else:
+                         i += 1
+
+                 file_diffs.append({
+                     "path": rel_path,
+                     "is_new": is_new,
+                     "is_delete": is_delete,
+                     "hunks": hunks,
+                 })
+                 continue
+         i += 1
+
+     if not file_diffs:
+         return (False, "No file diffs found in patch")
+
+     # Apply each file diff
+     for fd in file_diffs:
+         target_file = os.path.join(target_dir, fd["path"])
+
+         if fd["is_delete"]:
+             try:
+                 os.remove(target_file)
+             except FileNotFoundError:
+                 pass
+             continue
+
+         if fd["is_new"]:
+             os.makedirs(os.path.dirname(target_file) or ".", exist_ok=True)
+             new_lines: List[str] = []
+             for hunk in fd["hunks"]:
+                 for hl in hunk["lines"]:
+                     # A new file's hunk holds only added and context lines
+                     if hl.startswith(("+", " ")):
+                         new_lines.append(hl[1:])
+             with open(target_file, "w") as f:
+                 f.writelines(new_lines)
+             continue
+
+         # Existing file — read, apply hunks in reverse, write
+         if not os.path.isfile(target_file):
+             return (False, f"File not found: {fd['path']}")
+
+         with open(target_file) as f:
+             file_lines = f.readlines()
+
+         # Process hunks in reverse order to preserve line numbers
+         for hunk in reversed(fd["hunks"]):
+             old_start = hunk["old_start"] - 1  # 0-indexed
+             hunk_lines = hunk["lines"]
+
+             # Build expected old lines and new lines
+             old_expected: List[str] = []
+             new_replacement: List[str] = []
+             for hl in hunk_lines:
+                 if hl.startswith(" "):
+                     old_expected.append(hl[1:])
+                     new_replacement.append(hl[1:])
+                 elif hl.startswith("-"):
+                     old_expected.append(hl[1:])
+                 elif hl.startswith("+"):
+                     new_replacement.append(hl[1:])
+
+             # Try an exact match first, then fuzzy offsets ±1..3
+             match_pos = -1
+             for offset in range(0, 4):
+                 for sign in ((0,) if offset == 0 else (-1, 1)):
+                     pos = old_start + offset * sign
+                     if pos < 0 or pos + len(old_expected) > len(file_lines):
+                         continue
+                     chunk = file_lines[pos:pos + len(old_expected)]
+                     if _lines_match(chunk, old_expected):
+                         match_pos = pos
+                         break
+                 if match_pos >= 0:
+                     break
+
+             if match_pos < 0:
+                 context = old_expected[0].rstrip() if old_expected else "(empty)"
+                 return (
+                     False,
+                     f"Hunk failed for {fd['path']} at line {hunk['old_start']}: "
+                     f"context mismatch near '{context}'",
+                 )
+
+             # Apply: replace old lines with new lines
+             file_lines[match_pos:match_pos + len(old_expected)] = new_replacement
+
+         with open(target_file, "w") as f:
+             f.writelines(file_lines)
+
+     return (True, "")
+
+
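+ # Note on _apply_patch_python above: hunks are applied bottom-up because
+ # replacing a span can change the file's length, which would shift the
+ # positions of every later hunk. Walking in reverse means each hunk's
+ # old_start still points at untouched text. For example, with hunks at lines
+ # 10 and 50, patching line 50 first leaves line 10 valid.
+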
+ def _lines_match(actual: List[str], expected: List[str]) -> bool:
+     """Compare lines, ignoring line-ending differences."""
+     if len(actual) != len(expected):
+         return False
+     for a, e in zip(actual, expected):
+         if a.rstrip("\n\r") != e.rstrip("\n\r"):
+             return False
+     return True
+
+
+ def _is_binary(path: str) -> bool:
+     """Heuristic: treat a file as binary if its first 8 KB contain a NUL byte."""
+     try:
+         with open(path, "rb") as f:
+             chunk = f.read(8192)
+         return b"\x00" in chunk
+     except OSError:
+         return True