bingo-light 2.1.2 → 2.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bingo_core/dep.py CHANGED
@@ -26,6 +26,7 @@ import shutil
 import subprocess
 import tempfile
 from dataclasses import dataclass, field
+from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Tuple
 
 
@@ -525,6 +526,226 @@ class DepManager:
         self._save_config()
         return {"ok": True, "package": package, "dropped": patch_name or "all"}
 
+    # ── Override Management ─────────────────────────────────────────────────
+
+    def _read_package_json(self) -> Optional[dict]:
+        """Read package.json from cwd. Returns None if not found."""
+        pj_path = os.path.join(self.cwd, "package.json")
+        if not os.path.isfile(pj_path):
+            return None
+        try:
+            with open(pj_path) as f:
+                return json.load(f)
+        except (json.JSONDecodeError, IOError):
+            return None
+
+    def _write_package_json(self, data: dict) -> None:
+        """Atomically write package.json preserving 2-space indent."""
+        pj_path = os.path.join(self.cwd, "package.json")
+        fd, tmp = tempfile.mkstemp(suffix=".tmp", dir=self.cwd)
+        try:
+            with os.fdopen(fd, "w") as f:
+                json.dump(data, f, indent=2)
+                f.write("\n")
+            os.replace(tmp, pj_path)
+        except Exception:
+            try:
+                os.unlink(tmp)
+            except FileNotFoundError:
+                pass
+            raise
+
+    def _load_overrides_tracking(self) -> dict:
+        """Load .bingo-deps/overrides.json tracking data."""
+        path = os.path.join(self.cwd, DEP_DIR, "overrides.json")
+        if not os.path.isfile(path):
+            return {"overrides": {}}
+        try:
+            with open(path) as f:
+                return json.load(f)
+        except (json.JSONDecodeError, IOError):
+            return {"overrides": {}}
+
+    def _save_overrides_tracking(self, data: dict) -> None:
+        """Write .bingo-deps/overrides.json."""
+        os.makedirs(os.path.join(self.cwd, DEP_DIR), exist_ok=True)
+        path = os.path.join(self.cwd, DEP_DIR, "overrides.json")
+        with open(path, "w") as f:
+            json.dump(data, f, indent=2)
+            f.write("\n")
+
+    def override_list(self) -> dict:
+        """List npm/yarn overrides with tracked reasons.
+
+        Returns {"ok": True, "overrides": [...], "count": N}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": True, "overrides": [], "count": 0, "note": "No package.json"}
+
+        # npm uses "overrides", yarn uses "resolutions"
+        overrides = pj.get("overrides", {})
+        resolutions = pj.get("resolutions", {})
+        all_ovs = {}
+        for pkg, ver in overrides.items():
+            all_ovs[pkg] = {"version": ver if isinstance(ver, str) else json.dumps(ver), "source": "overrides"}
+        for pkg, ver in resolutions.items():
+            if pkg not in all_ovs:
+                all_ovs[pkg] = {"version": ver, "source": "resolutions"}
+
+        # Merge with tracking data
+        tracking = self._load_overrides_tracking()
+        result = []
+        for pkg, info in all_ovs.items():
+            tracked = tracking.get("overrides", {}).get(pkg, {})
+            result.append({
+                "package": pkg,
+                "version": info["version"],
+                "source": info["source"],
+                "reason": tracked.get("reason", ""),
+                "created": tracked.get("created", ""),
+                "tracked": bool(tracked),
+            })
+
+        return {"ok": True, "overrides": result, "count": len(result)}
+
+    def override_check(self) -> dict:
+        """Check if npm overrides are still needed.
+
+        Reads package-lock.json to determine what version the tree resolves to.
+        Returns {"ok": True, "overrides": [{"package", "status", "reason"}]}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": True, "overrides": [], "count": 0}
+
+        overrides = pj.get("overrides", {})
+        resolutions = pj.get("resolutions", {})
+        all_ovs = dict(overrides)
+        all_ovs.update(resolutions)
+
+        if not all_ovs:
+            return {"ok": True, "overrides": [], "count": 0}
+
+        # Try reading package-lock.json for resolved versions
+        lock_path = os.path.join(self.cwd, "package-lock.json")
+        lock_data: Optional[dict] = None
+        if os.path.isfile(lock_path):
+            try:
+                with open(lock_path) as f:
+                    lock_data = json.load(f)
+            except (json.JSONDecodeError, IOError):
+                pass
+
+        results = []
+        for pkg, override_ver in all_ovs.items():
+            if not isinstance(override_ver, str):
+                results.append({
+                    "package": pkg,
+                    "override_version": json.dumps(override_ver),
+                    "status": "complex",
+                    "reason": "Nested override — manual check required",
+                })
+                continue
+
+            # Look up in lock file
+            resolved_ver = None
+            if lock_data:
+                # npm v2/v3 lock format: packages["node_modules/<pkg>"].version
+                packages = lock_data.get("packages", {})
+                lock_key = f"node_modules/{pkg}"
+                if lock_key in packages:
+                    resolved_ver = packages[lock_key].get("version", "")
+
+            if resolved_ver is None:
+                results.append({
+                    "package": pkg,
+                    "override_version": override_ver,
+                    "status": "unknown",
+                    "reason": "Cannot determine resolved version",
+                })
+            elif resolved_ver == override_ver:
+                # Lock resolved to override version — could be redundant
+                # Check if the package's parent requires a different version
+                # by looking at the dependency entry in lock file
+                # If the lock resolves to the same version, the override
+                # may no longer be needed
+                results.append({
+                    "package": pkg,
+                    "override_version": override_ver,
+                    "resolved_version": resolved_ver,
+                    "status": "redundant",
+                    "reason": "Lock resolves to override version — may no longer be needed",
+                })
+            else:
+                results.append({
+                    "package": pkg,
+                    "override_version": override_ver,
+                    "resolved_version": resolved_ver,
+                    "status": "active",
+                    "reason": f"Override forcing {override_ver} (tree wants {resolved_ver})",
+                })
+
+        redundant = sum(1 for r in results if r["status"] == "redundant")
+        return {"ok": True, "overrides": results, "count": len(results), "redundant": redundant}
+
+    def override_add(self, package: str, version: str, reason: str = "") -> dict:
+        """Add an npm override with reason tracking.
+
+        Returns {"ok": True, "package": ..., "version": ...}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": False, "error": "No package.json found"}
+
+        # Detect yarn vs npm
+        yarn_lock = os.path.isfile(os.path.join(self.cwd, "yarn.lock"))
+        field = "resolutions" if yarn_lock else "overrides"
+
+        if field not in pj:
+            pj[field] = {}
+        pj[field][package] = version
+        self._write_package_json(pj)
+
+        # Track reason
+        tracking = self._load_overrides_tracking()
+        tracking.setdefault("overrides", {})[package] = {
+            "version": version,
+            "reason": reason,
+            "created": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+            "manager_field": field,
+        }
+        self._save_overrides_tracking(tracking)
+
+        return {"ok": True, "package": package, "version": version, "field": field}
+
+    def override_drop(self, package: str) -> dict:
+        """Remove an npm override.
+
+        Returns {"ok": True, "package": ..., "dropped": True}
+        """
+        pj = self._read_package_json()
+        if pj is None:
+            return {"ok": False, "error": "No package.json found"}
+
+        dropped = False
+        for pj_key in ("overrides", "resolutions"):
+            if pj_key in pj and package in pj[pj_key]:
+                del pj[pj_key][package]
+                if not pj[pj_key]:
+                    del pj[pj_key]
+                dropped = True
+        if dropped:
+            self._write_package_json(pj)
+
+        # Remove tracking
+        tracking = self._load_overrides_tracking()
+        if package in tracking.get("overrides", {}):
+            del tracking["overrides"][package]
+            self._save_overrides_tracking(tracking)
+
+        return {"ok": True, "package": package, "dropped": dropped}
+
 
 # ─── Diff Utilities ──────────────────────────────────────────────────────────
 
@@ -610,36 +831,175 @@ def _apply_patch(patch_path: str, target_dir: str) -> Tuple[bool, str]:
 
 
 def _apply_patch_python(patch_path: str, target_dir: str) -> Tuple[bool, str]:
-    """Pure-Python patch application (basic unified diff support)."""
+    """Pure-Python unified diff patch application.
+
+    Parses unified diff format and applies hunks to target files.
+    Supports: context matching, fuzzy offset (±3 lines), new/deleted files.
+    Processes hunks in reverse order to avoid line number cascading.
+    """
+    import re
+
     try:
         with open(patch_path) as f:
-            patch_text = f.read()
+            patch_lines = f.readlines()
     except OSError as e:
         return (False, str(e))
 
-    # Parse hunks
-    current_file = None
-    hunks: Dict[str, List[str]] = {}
-
-    for line in patch_text.splitlines(keepends=True):
-        if line.startswith("+++ b/"):
-            # Extract relative path after the package name prefix
-            parts = line[6:].strip().split("/", 1)
-            current_file = parts[1] if len(parts) > 1 else parts[0]
-            hunks.setdefault(current_file, [])
-        elif current_file and (line.startswith("+") or line.startswith("-")
-                               or line.startswith(" ") or line.startswith("@@")):
-            hunks[current_file].append(line)
-
-    if not hunks:
-        return (False, "No hunks found in patch")
-
-    # For now, just verify the files exist — full Python patching is complex
-    missing = [f for f in hunks if not os.path.isfile(os.path.join(target_dir, f))]
-    if missing:
-        return (False, f"Files not found: {', '.join(missing[:3])}")
-
-    return (False, "Python-only patch application not fully implemented; install 'patch' command")
+    # Parse into file-level diffs
+    file_diffs: List[dict] = []
+    i = 0
+    while i < len(patch_lines):
+        line = patch_lines[i]
+
+        # Find --- a/... and +++ b/... pair
+        if line.startswith("--- "):
+            if i + 1 < len(patch_lines) and patch_lines[i + 1].startswith("+++ "):
+                old_path = line[4:].strip()
+                new_line = patch_lines[i + 1]
+                # Strip p2: +++ b/<pkg>/<file> -> <file>
+                new_path_raw = new_line[6:].strip()
+                parts = new_path_raw.split("/", 1)
+                rel_path = parts[1] if len(parts) > 1 else parts[0]
+
+                is_new = old_path == "/dev/null" or old_path.endswith("/dev/null")
+                is_delete = new_line.strip().endswith("/dev/null")
+
+                # Collect hunks for this file
+                hunks: List[dict] = []
+                i += 2
+                while i < len(patch_lines):
+                    hunk_line = patch_lines[i]
+                    m = re.match(
+                        r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@',
+                        hunk_line,
+                    )
+                    if m:
+                        old_start = int(m.group(1))
+                        old_count = int(m.group(2)) if m.group(2) is not None else 1
+                        new_start = int(m.group(3))
+                        new_count = int(m.group(4)) if m.group(4) is not None else 1
+                        hunk_body: List[str] = []
+                        i += 1
+                        while i < len(patch_lines):
+                            hl = patch_lines[i]
+                            if hl.startswith((" ", "+", "-")):
+                                hunk_body.append(hl)
+                                i += 1
+                            elif hl.startswith("\\ No newline"):
+                                i += 1  # skip no-newline marker
+                            else:
+                                break
+                        hunks.append({
+                            "old_start": old_start,
+                            "old_count": old_count,
+                            "new_start": new_start,
+                            "new_count": new_count,
+                            "lines": hunk_body,
+                        })
+                    elif hunk_line.startswith("--- ") or hunk_line.startswith("diff "):
+                        break  # next file diff
+                    else:
+                        i += 1
+
+                file_diffs.append({
+                    "path": rel_path,
+                    "is_new": is_new,
+                    "is_delete": is_delete,
+                    "hunks": hunks,
+                })
+                continue
+        i += 1
+
+    if not file_diffs:
+        return (False, "No file diffs found in patch")
+
+    # Apply each file diff
+    for fd in file_diffs:
+        target_file = os.path.join(target_dir, fd["path"])
+
+        if fd["is_delete"]:
+            try:
+                os.remove(target_file)
+            except FileNotFoundError:
+                pass
+            continue
+
+        if fd["is_new"]:
+            os.makedirs(os.path.dirname(target_file) or ".", exist_ok=True)
+            new_lines: List[str] = []
+            for hunk in fd["hunks"]:
+                for hl in hunk["lines"]:
+                    if hl.startswith("+"):
+                        new_lines.append(hl[1:])
+                    elif hl.startswith(" "):
+                        new_lines.append(hl[1:])
+            with open(target_file, "w") as f:
+                f.writelines(new_lines)
+            continue
+
+        # Existing file — read, apply hunks in reverse, write
+        if not os.path.isfile(target_file):
+            return (False, f"File not found: {fd['path']}")
+
+        with open(target_file) as f:
+            file_lines = f.readlines()
+
+        # Process hunks in reverse order to preserve line numbers
+        for hunk in reversed(fd["hunks"]):
+            old_start = hunk["old_start"] - 1  # 0-indexed
+            hunk_lines = hunk["lines"]
+
+            # Build expected old lines and new lines
+            old_expected: List[str] = []
+            new_replacement: List[str] = []
+            for hl in hunk_lines:
+                if hl.startswith(" "):
+                    old_expected.append(hl[1:])
+                    new_replacement.append(hl[1:])
+                elif hl.startswith("-"):
+                    old_expected.append(hl[1:])
+                elif hl.startswith("+"):
+                    new_replacement.append(hl[1:])
+
+            # Try exact match first, then fuzzy offset ±3
+            match_pos = -1
+            for offset in range(0, 4):
+                for sign in (0, -1, 1) if offset == 0 else (-1, 1):
+                    pos = old_start + offset * sign
+                    if pos < 0 or pos + len(old_expected) > len(file_lines):
+                        continue
+                    chunk = file_lines[pos:pos + len(old_expected)]
+                    if _lines_match(chunk, old_expected):
+                        match_pos = pos
+                        break
+                if match_pos >= 0:
+                    break
+
+            if match_pos < 0:
+                context = old_expected[0].rstrip() if old_expected else "(empty)"
+                return (
+                    False,
+                    f"Hunk failed for {fd['path']} at line {hunk['old_start']}: "
+                    f"context mismatch near '{context}'",
+                )
+
+            # Apply: replace old lines with new lines
+            file_lines[match_pos:match_pos + len(old_expected)] = new_replacement
+
+        with open(target_file, "w") as f:
+            f.writelines(file_lines)
+
+    return (True, "")
+
+
+def _lines_match(actual: List[str], expected: List[str]) -> bool:
+    """Compare lines ignoring trailing whitespace differences."""
+    if len(actual) != len(expected):
+        return False
+    for a, e in zip(actual, expected):
+        if a.rstrip("\n\r") != e.rstrip("\n\r"):
+            return False
+    return True
 
 
 def _is_binary(path: str) -> bool: