bingo-light 2.1.1 → 2.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.en.md +20 -7
- package/README.md +209 -126
- package/bingo-light +385 -11
- package/bingo_core/__init__.py +3 -1
- package/bingo_core/dep.py +1012 -0
- package/bingo_core/dep_fork.py +268 -0
- package/bingo_core/dep_npm.py +113 -0
- package/bingo_core/dep_pip.py +178 -0
- package/bingo_core/repo.py +795 -8
- package/bingo_core/setup.py +73 -17
- package/bingo_core/state.py +1 -1
- package/bingo_core/team.py +170 -0
- package/completions/bingo-light.bash +26 -3
- package/completions/bingo-light.fish +46 -1
- package/completions/bingo-light.zsh +38 -2
- package/mcp-server.py +346 -6
- package/package.json +1 -1
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
"""
|
|
2
|
+
bingo_core.dep_fork — Fork-as-dependency tracking for npm projects.
|
|
3
|
+
|
|
4
|
+
Scans package.json for git-based dependencies (github:user/repo, git+https://,
|
|
5
|
+
etc.), detects drift from upstream releases, and updates fork refs.
|
|
6
|
+
|
|
7
|
+
Python 3.8+ stdlib only.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import re
|
|
15
|
+
import tempfile
|
|
16
|
+
import urllib.request
|
|
17
|
+
import urllib.error
|
|
18
|
+
from typing import Any, Dict, List, Optional
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# Git dependency patterns in package.json
|
|
22
|
+
_GIT_DEP_PATTERNS = [
|
|
23
|
+
re.compile(r'^github:(.+)$'), # github:user/repo#ref
|
|
24
|
+
re.compile(r'^git\+https?://github\.com/(.+?)(?:\.git)?(?:#(.+))?$'), # git+https://
|
|
25
|
+
re.compile(r'^git\+ssh://git@github\.com[:/](.+?)(?:\.git)?(?:#(.+))?$'), # git+ssh://
|
|
26
|
+
re.compile(r'^([a-zA-Z0-9_-]+/[a-zA-Z0-9._-]+)(?:#(.+))?$'), # user/repo shorthand
|
|
27
|
+
]
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ForkTracker:
    """Inspect and update git-based (fork) dependencies of an npm project."""

    def __init__(self, cwd: str = "."):
        # Project root that contains (or should contain) package.json.
        self.cwd = os.path.abspath(cwd)

    # ---- package.json helpers ------------------------------------------

    def _read_package_json(self) -> Optional[dict]:
        """Load package.json from the project root; None if absent or invalid."""
        path = os.path.join(self.cwd, "package.json")
        if not os.path.isfile(path):
            return None
        try:
            with open(path) as fh:
                return json.load(fh)
        except (json.JSONDecodeError, IOError):
            return None

    def _write_package_json(self, data: dict) -> None:
        """Atomically rewrite package.json (temp file in-place, then rename)."""
        path = os.path.join(self.cwd, "package.json")
        fd, tmp_path = tempfile.mkstemp(suffix=".tmp", dir=self.cwd)
        try:
            with os.fdopen(fd, "w") as fh:
                json.dump(data, fh, indent=2)
                fh.write("\n")
            os.replace(tmp_path, path)
        except Exception:
            # Best-effort cleanup of the temp file before re-raising.
            try:
                os.unlink(tmp_path)
            except FileNotFoundError:
                pass
            raise

    def _parse_git_dep(self, value: str) -> Optional[dict]:
        """Parse a git-based dependency value. Returns {repo, ref, raw} or None."""
        for pattern in _GIT_DEP_PATTERNS:
            match = pattern.match(value)
            if match is None:
                continue
            captured = match.groups()
            repo = captured[0]
            ref = captured[1] if len(captured) > 1 and captured[1] else ""

            # github:user/repo#ref keeps the fragment inside the first group.
            if "#" in repo:
                repo, ref = repo.split("#", 1)

            return {"repo": repo.rstrip("/"), "ref": ref, "raw": value}
        return None

    def _fetch_json(self, url: str) -> Optional[dict]:
        """GET *url* and decode a JSON body; None on any network/parse error."""
        headers = {"Accept": "application/json", "User-Agent": "bingo-light"}
        # A GITHUB_TOKEN raises the GitHub API rate limit considerably.
        token = os.environ.get("GITHUB_TOKEN", "")
        if token and "github" in url:
            headers["Authorization"] = f"token {token}"

        request = urllib.request.Request(url, headers=headers)
        try:
            with urllib.request.urlopen(request, timeout=15) as resp:
                # Warn before the GitHub quota runs dry.
                remaining = resp.headers.get("X-RateLimit-Remaining", "")
                if remaining and int(remaining) <= 1:
                    import sys
                    print(
                        "warning: GitHub API rate limit nearly exhausted. "
                        "Set GITHUB_TOKEN env var for higher limits.",
                        file=sys.stderr,
                    )
                return json.loads(resp.read().decode())
        except (urllib.error.URLError, urllib.error.HTTPError,
                json.JSONDecodeError, OSError, ValueError):
            return None

    @staticmethod
    def _is_sha_like(ref: str) -> bool:
        """Check if a ref looks like a commit SHA (hex string, 7+ chars)."""
        if len(ref) < 7:
            return False
        return all(ch in "0123456789abcdef" for ch in ref.lower())

    # ---- public operations ---------------------------------------------

    def fork_list(self) -> dict:
        """List all git-based dependencies in package.json.

        Returns {"ok": True, "forks": [...], "count": N}
        """
        manifest = self._read_package_json()
        if manifest is None:
            return {"ok": True, "forks": [], "count": 0, "note": "No package.json"}

        forks: List[dict] = []
        for dep_type in ("dependencies", "devDependencies"):
            for name, value in manifest.get(dep_type, {}).items():
                if not isinstance(value, str):
                    continue
                parsed = self._parse_git_dep(value)
                if parsed is None:
                    continue
                forks.append({
                    "package": name,
                    "repo": parsed["repo"],
                    "ref": parsed["ref"],
                    "dep_type": dep_type,
                    "raw": parsed["raw"],
                })

        return {"ok": True, "forks": forks, "count": len(forks)}

    def fork_check(self) -> dict:
        """Check fork drift against upstream npm releases and GitHub commits.

        Returns {"ok": True, "forks": [...], "drifted": N}
        """
        forks = self.fork_list().get("forks", [])
        if not forks:
            return {"ok": True, "forks": [], "drifted": 0}

        results: List[dict] = []
        drifted = 0

        for fork in forks:
            entry: Dict[str, Any] = {
                "package": fork["package"],
                "repo": fork["repo"],
                "ref": fork["ref"],
            }

            # Latest version published to the npm registry (empty if unknown).
            npm_meta = self._fetch_json(
                f"https://registry.npmjs.org/{fork['package']}/latest"
            )
            entry["npm_latest"] = npm_meta.get("version", "") if npm_meta else ""

            # Newest commit on the repo's default branch.
            commits = self._fetch_json(
                f"https://api.github.com/repos/{fork['repo']}/commits?per_page=1"
            )
            if commits and isinstance(commits, list) and len(commits) > 0:
                latest_sha = commits[0].get("sha", "")[:12]
                entry["latest_commit"] = latest_sha
                entry["commit_date"] = commits[0].get("commit", {}).get(
                    "committer", {}
                ).get("date", "")

                ref = fork["ref"]
                if not ref:
                    entry["status"] = "no_ref_pinned"
                elif self._is_sha_like(ref):
                    # Pinned to a commit — prefix-compare the two SHAs.
                    if latest_sha.startswith(ref[:8]) or ref.startswith(latest_sha[:8]):
                        entry["status"] = "up_to_date"
                    else:
                        entry["status"] = "drifted"
                        drifted += 1
                else:
                    # Pinned to a tag or branch — resolve it to a SHA first.
                    ref_data = self._fetch_json(
                        f"https://api.github.com/repos/{fork['repo']}/git/ref/tags/{ref}"
                    )
                    if not ref_data:
                        # Not a tag; retry as a branch head.
                        ref_data = self._fetch_json(
                            f"https://api.github.com/repos/{fork['repo']}/git/ref/heads/{ref}"
                        )
                    if ref_data and isinstance(ref_data, dict):
                        ref_sha = ref_data.get("object", {}).get("sha", "")[:12]
                        if ref_sha and (latest_sha.startswith(ref_sha[:8])
                                        or ref_sha.startswith(latest_sha[:8])):
                            entry["status"] = "up_to_date"
                        else:
                            entry["status"] = "drifted"
                            entry["ref_resolved"] = ref_sha
                            drifted += 1
                    else:
                        # Could not resolve the ref at all.
                        entry["status"] = "unknown"
                        entry["note"] = f"Cannot resolve ref '{ref}'"
            else:
                entry["latest_commit"] = ""
                entry["status"] = "unknown"

            results.append(entry)

        return {"ok": True, "forks": results, "drifted": drifted}

    def fork_sync(self, package: str) -> dict:
        """Update a fork dependency ref to the latest commit.

        Returns {"ok": True, "package": ..., "old_ref": ..., "new_ref": ...}
        """
        manifest = self._read_package_json()
        if manifest is None:
            return {"ok": False, "error": "No package.json found"}

        # Locate the dependency entry in either section.
        found_type = None
        old_value = None
        for dep_type in ("dependencies", "devDependencies"):
            section = manifest.get(dep_type, {})
            if package in section:
                found_type = dep_type
                old_value = section[package]
                break

        if not found_type or not isinstance(old_value, str):
            return {"ok": False, "error": f"Package '{package}' not found in dependencies"}

        parsed = self._parse_git_dep(old_value)
        if not parsed:
            return {"ok": False, "error": f"'{package}' is not a git-based dependency"}

        old_ref = parsed["ref"]

        # Latest commit on the fork's default branch becomes the new pin.
        commits = self._fetch_json(
            f"https://api.github.com/repos/{parsed['repo']}/commits?per_page=1"
        )
        if not commits or not isinstance(commits, list) or len(commits) == 0:
            return {"ok": False, "error": f"Cannot fetch latest commit for {parsed['repo']}"}

        new_ref = commits[0].get("sha", "")[:12]
        if not new_ref:
            return {"ok": False, "error": "Empty commit SHA from GitHub"}

        # Swap (or append) the '#ref' fragment on the dependency spec.
        if "#" in old_value:
            new_value = old_value.rsplit("#", 1)[0] + "#" + new_ref
        else:
            new_value = old_value + "#" + new_ref

        manifest[found_type][package] = new_value
        self._write_package_json(manifest)

        return {
            "ok": True,
            "package": package,
            "old_ref": old_ref,
            "new_ref": new_ref,
            "old_value": old_value,
            "new_value": new_value,
        }
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""
|
|
2
|
+
bingo_core.dep_npm — npm/pnpm/yarn backend for dependency patching.
|
|
3
|
+
|
|
4
|
+
Detects npm projects, fetches original packages from registry,
|
|
5
|
+
resolves install paths in node_modules/.
|
|
6
|
+
|
|
7
|
+
Python 3.8+ stdlib only.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import tarfile
|
|
15
|
+
from typing import List, Optional
|
|
16
|
+
from urllib.request import urlopen
|
|
17
|
+
|
|
18
|
+
from bingo_core.dep import DepBackend
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class NpmBackend(DepBackend):
    """Backend for npm/pnpm/yarn packages.

    Resolves installed packages under node_modules/ and fetches pristine
    tarballs from the npm registry for comparison/patching.
    """

    name = "npm"

    def detect(self, cwd: str) -> bool:
        """Detect npm project by presence of package.json or node_modules/."""
        return (
            os.path.isfile(os.path.join(cwd, "package.json"))
            or os.path.isdir(os.path.join(cwd, "node_modules"))
        )

    def get_installed_version(self, package: str, cwd: str) -> Optional[str]:
        """Get installed version from node_modules/<package>/package.json.

        Returns None when the package is not installed or its manifest is
        unreadable. Scoped packages (@scope/name) need no special casing:
        os.path.join already maps the scope to a subdirectory.
        """
        pkg_json = os.path.join(cwd, "node_modules", package, "package.json")
        if not os.path.isfile(pkg_json):
            return None
        try:
            with open(pkg_json) as f:
                data = json.load(f)
            return data.get("version")
        except (json.JSONDecodeError, OSError):
            return None

    def get_install_path(self, package: str, cwd: str) -> Optional[str]:
        """Get the filesystem path of the installed package, or None."""
        path = os.path.join(cwd, "node_modules", package)
        if os.path.isdir(path):
            return path
        return None

    def fetch_original(self, package: str, version: str, dest: str) -> bool:
        """Download original package from npm registry and extract to dest.

        Uses the npm registry API to get the tarball URL, downloads and
        extracts it. The tarball contains a 'package/' prefix which we strip.
        Returns True on success, False on any failure (network, metadata, ...).
        """
        try:
            # Fetch package metadata from registry.
            # Handle scoped packages: @scope/name -> @scope%2Fname
            encoded = package.replace("/", "%2F")
            url = f"https://registry.npmjs.org/{encoded}/{version}"
            with urlopen(url, timeout=30) as resp:
                meta = json.loads(resp.read().decode())

            tarball_url = meta.get("dist", {}).get("tarball")
            if not tarball_url:
                return False

            # Download tarball (into dest so temp and target share a filesystem).
            tmptar = os.path.join(dest, "_pkg.tgz")
            with urlopen(tarball_url, timeout=60) as resp:
                with open(tmptar, "wb") as f:
                    f.write(resp.read())

            # Extract (npm tarballs have a 'package/' prefix).
            with tarfile.open(tmptar, "r:gz") as tar:
                for member in tar.getmembers():
                    # Strip the 'package/' prefix.
                    if member.name.startswith("package/"):
                        member.name = member.name[len("package/"):]
                    elif member.name == "package":
                        continue
                    # Security: skip absolute paths and ..
                    if member.name.startswith("/") or ".." in member.name:
                        continue
                    # The `filter` kwarg only exists where tarfile ships the
                    # extraction-filter API (3.12+, plus the 3.8.17/3.9.17/
                    # 3.10.12/3.11.4 backports). Passing filter=None on older
                    # interpreters raises TypeError, so only pass it when
                    # actually supported.
                    if hasattr(tarfile, "data_filter"):
                        tar.extract(member, dest, filter="data")
                    else:
                        tar.extract(member, dest)

            os.remove(tmptar)
            return True

        except Exception:
            # Best-effort fetch: report any failure as False.
            return False

    def list_files(self, package: str, cwd: str) -> List[str]:
        """List all files in the installed package (sorted relative paths)."""
        install_path = self.get_install_path(package, cwd)
        if not install_path:
            return []
        result = []
        for root, _dirs, files in os.walk(install_path):
            for f in files:
                full = os.path.join(root, f)
                rel = os.path.relpath(full, install_path)
                result.append(rel)
        return sorted(result)

    def install_hook_command(self) -> str:
        """Shell command a post-install hook should run to re-apply patches."""
        return "bingo-light dep apply"
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"""
|
|
2
|
+
bingo_core.dep_pip — pip/pipx backend for dependency patching.
|
|
3
|
+
|
|
4
|
+
Detects Python projects, fetches original packages from PyPI,
|
|
5
|
+
resolves install paths in site-packages/.
|
|
6
|
+
|
|
7
|
+
Python 3.8+ stdlib only.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import site
|
|
15
|
+
import zipfile
|
|
16
|
+
from typing import List, Optional
|
|
17
|
+
from urllib.request import urlopen
|
|
18
|
+
|
|
19
|
+
from bingo_core.dep import DepBackend
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class PipBackend(DepBackend):
    """Backend for pip/pipx Python packages.

    Resolves installed packages in site-packages/ (preferring a project
    venv) and fetches pristine distributions from PyPI.
    """

    name = "pip"

    def detect(self, cwd: str) -> bool:
        """Detect Python project by presence of requirements.txt, pyproject.toml, or venv."""
        indicators = [
            "requirements.txt",
            "pyproject.toml",
            "setup.py",
            "setup.cfg",
            "Pipfile",
            ".venv",
            "venv",
        ]
        return any(
            os.path.exists(os.path.join(cwd, f)) for f in indicators
        )

    def _find_site_packages(self, cwd: str) -> List[str]:
        """Find site-packages directories, preferring project venvs."""
        dirs = []

        # Project-local venvs take precedence over system site-packages.
        for venv_name in (".venv", "venv"):
            venv_path = os.path.join(cwd, venv_name)
            if os.path.isdir(venv_path):
                # Walk until the first site-packages directory appears.
                for root, dirnames, _files in os.walk(venv_path):
                    if "site-packages" in dirnames:
                        dirs.append(os.path.join(root, "site-packages"))
                        break

        # System/user site-packages as fallbacks.
        dirs.extend(site.getsitepackages())
        user_site = site.getusersitepackages()
        if isinstance(user_site, str):
            dirs.append(user_site)

        return [d for d in dirs if os.path.isdir(d)]

    def get_installed_version(self, package: str, cwd: str) -> Optional[str]:
        """Get installed version by scanning .dist-info metadata.

        Returns None when no matching distribution is found in any
        site-packages directory.
        """
        # Distribution names are case-insensitive with '-'/'_' interchangeable.
        normalized = package.replace("-", "_").lower()

        for sp_dir in self._find_site_packages(cwd):
            for entry in os.listdir(sp_dir):
                if not entry.endswith(".dist-info"):
                    continue
                # Directory is "<name>-<version>.dist-info". The suffix must
                # be stripped BEFORE splitting off the version: ".dist-info"
                # itself contains a hyphen, so rsplit("-", 1) on the full
                # name would yield "<name>-<version>.dist" and never match.
                stem = entry[: -len(".dist-info")]
                dist_name = stem.rsplit("-", 1)[0].replace("-", "_").lower()
                if dist_name != normalized:
                    continue
                metadata_path = os.path.join(sp_dir, entry, "METADATA")
                if os.path.isfile(metadata_path):
                    with open(metadata_path) as f:
                        for line in f:
                            if line.startswith("Version:"):
                                return line.split(":", 1)[1].strip()
        return None

    def get_install_path(self, package: str, cwd: str) -> Optional[str]:
        """Get the filesystem path of the installed package directory."""
        normalized = package.replace("-", "_").lower()

        for sp_dir in self._find_site_packages(cwd):
            # Direct package directory match (name-normalized).
            for entry in os.listdir(sp_dir):
                if entry.replace("-", "_").lower() == normalized:
                    full = os.path.join(sp_dir, entry)
                    if os.path.isdir(full):
                        return full
        return None

    def fetch_original(self, package: str, version: str, dest: str) -> bool:
        """Download original package from PyPI and extract to dest.

        Prefers wheel (.whl) for consistency, falls back to sdist.
        Returns True on success, False on any failure.
        """
        try:
            # Fetch package metadata from PyPI JSON API.
            url = f"https://pypi.org/pypi/{package}/{version}/json"
            with urlopen(url, timeout=30) as resp:
                meta = json.loads(resp.read().decode())

            # Find wheel URL (prefer), then sdist.
            download_url = None
            is_wheel = False
            for file_info in meta.get("urls", []):
                if file_info.get("packagetype") == "bdist_wheel":
                    download_url = file_info["url"]
                    is_wheel = True
                    break
            if not download_url:
                for file_info in meta.get("urls", []):
                    if file_info.get("packagetype") == "sdist":
                        download_url = file_info["url"]
                        break

            if not download_url:
                return False

            # Download the chosen distribution file.
            tmp_file = os.path.join(dest, "_pkg.tmp")
            with urlopen(download_url, timeout=60) as resp:
                with open(tmp_file, "wb") as f:
                    f.write(resp.read())

            if is_wheel:
                # Wheel is a zip file with the package at top level.
                normalized = package.replace("-", "_").lower()
                with zipfile.ZipFile(tmp_file) as zf:
                    for member in zf.namelist():
                        # Extract only the package directory.
                        parts = member.split("/")
                        if parts[0].replace("-", "_").lower() == normalized:
                            zf.extract(member, dest)
                        elif parts[0].endswith(".dist-info") or parts[0].endswith(".data"):
                            continue  # Skip metadata
                        else:
                            # Single-file module
                            if member.endswith(".py"):
                                zf.extract(member, dest)
                # Move extracted package dir to dest root
                extracted = os.path.join(dest, normalized)
                if os.path.isdir(extracted):
                    # Already in right place
                    pass
            else:
                # Sdist: tar.gz with a project-version/ directory at top level.
                import tarfile
                with tarfile.open(tmp_file, "r:gz") as tar:
                    # Security: the 'data' filter (where available) blocks
                    # absolute paths and path traversal in untrusted archives.
                    if hasattr(tarfile, "data_filter"):
                        tar.extractall(dest, filter="data")
                    else:
                        tar.extractall(dest)
                # NOTE(review): the package dir stays nested under
                # project-version/ here; callers appear to handle the
                # extracted tree themselves — confirm before flattening.

            os.remove(tmp_file)
            return True

        except Exception:
            # Best-effort fetch: report any failure as False.
            return False

    def list_files(self, package: str, cwd: str) -> List[str]:
        """List all files in the installed package (sorted relative paths)."""
        install_path = self.get_install_path(package, cwd)
        if not install_path:
            return []
        result = []
        for root, _dirs, files in os.walk(install_path):
            for f in files:
                full = os.path.join(root, f)
                rel = os.path.relpath(full, install_path)
                result.append(rel)
        return sorted(result)

    def install_hook_command(self) -> str:
        """Shell command a post-install hook should run to re-apply patches."""
        return "bingo-light dep apply"
|