gitinstall 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitinstall/__init__.py +61 -0
- gitinstall/_sdk.py +541 -0
- gitinstall/academic.py +831 -0
- gitinstall/admin.html +327 -0
- gitinstall/auto_update.py +384 -0
- gitinstall/autopilot.py +349 -0
- gitinstall/badge.py +476 -0
- gitinstall/checkpoint.py +330 -0
- gitinstall/cicd.py +499 -0
- gitinstall/clawhub.html +718 -0
- gitinstall/config_schema.py +353 -0
- gitinstall/db.py +984 -0
- gitinstall/db_backend.py +445 -0
- gitinstall/dep_chain.py +337 -0
- gitinstall/dependency_audit.py +1153 -0
- gitinstall/detector.py +542 -0
- gitinstall/doctor.py +493 -0
- gitinstall/education.py +869 -0
- gitinstall/enterprise.py +802 -0
- gitinstall/error_fixer.py +953 -0
- gitinstall/event_bus.py +251 -0
- gitinstall/executor.py +577 -0
- gitinstall/feature_flags.py +138 -0
- gitinstall/fetcher.py +921 -0
- gitinstall/huggingface.py +922 -0
- gitinstall/hw_detect.py +988 -0
- gitinstall/i18n.py +664 -0
- gitinstall/installer_registry.py +362 -0
- gitinstall/knowledge_base.py +379 -0
- gitinstall/license_check.py +605 -0
- gitinstall/llm.py +569 -0
- gitinstall/log.py +236 -0
- gitinstall/main.py +1408 -0
- gitinstall/mcp_agent.py +841 -0
- gitinstall/mcp_server.py +386 -0
- gitinstall/monorepo.py +810 -0
- gitinstall/multi_source.py +425 -0
- gitinstall/onboard.py +276 -0
- gitinstall/planner.py +222 -0
- gitinstall/planner_helpers.py +323 -0
- gitinstall/planner_known_projects.py +1010 -0
- gitinstall/planner_templates.py +996 -0
- gitinstall/remote_gpu.py +633 -0
- gitinstall/resilience.py +608 -0
- gitinstall/run_tests.py +572 -0
- gitinstall/skills.py +476 -0
- gitinstall/tool_schemas.py +324 -0
- gitinstall/trending.py +279 -0
- gitinstall/uninstaller.py +415 -0
- gitinstall/validate_top100.py +607 -0
- gitinstall/watchdog.py +180 -0
- gitinstall/web.py +1277 -0
- gitinstall/web_ui.html +2277 -0
- gitinstall-1.1.0.dist-info/METADATA +275 -0
- gitinstall-1.1.0.dist-info/RECORD +59 -0
- gitinstall-1.1.0.dist-info/WHEEL +5 -0
- gitinstall-1.1.0.dist-info/entry_points.txt +3 -0
- gitinstall-1.1.0.dist-info/licenses/LICENSE +21 -0
- gitinstall-1.1.0.dist-info/top_level.txt +1 -0
gitinstall/monorepo.py
ADDED
|
@@ -0,0 +1,810 @@
|
|
|
1
|
+
"""
|
|
2
|
+
monorepo.py — Monorepo 子项目安装引擎
|
|
3
|
+
=======================================
|
|
4
|
+
|
|
5
|
+
目标市场:Monorepo 子项目安装(Top 1000 的 30%,★★★☆☆)
|
|
6
|
+
|
|
7
|
+
功能:
|
|
8
|
+
1. Monorepo 检测(识别 workspace/lerna/turborepo/nx/cargo workspace/go workspace)
|
|
9
|
+
2. 子项目发现 & 依赖图谱
|
|
10
|
+
3. 选择性安装(只装需要的子项目)
|
|
11
|
+
4. 子项目间依赖解析
|
|
12
|
+
5. 增量安装(只安装变更的子项目)
|
|
13
|
+
|
|
14
|
+
零外部依赖,纯 Python 标准库。
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
from __future__ import annotations

import heapq
import json
import os
import re
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Optional
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
# ─────────────────────────────────────────────
|
|
28
|
+
# 数据结构
|
|
29
|
+
# ─────────────────────────────────────────────
|
|
30
|
+
|
|
31
|
+
@dataclass
class MonorepoInfo:
    """Metadata describing a detected monorepo."""
    # True when the scanned directory was recognized as a monorepo.
    is_monorepo: bool = False
    monorepo_type: str = ""  # npm_workspaces | lerna | turborepo | nx | pnpm | cargo | go | bazel | pants
    # Path of the monorepo root (filled in by detect_monorepo).
    root_dir: str = ""
    # Discovered sub-projects.
    packages: list["SubProject"] = field(default_factory=list)
    # Convenience count of `packages` (filled in by detect_monorepo).
    total_packages: int = 0
    # Dependencies declared at the workspace root (name -> version spec).
    shared_deps: dict[str, str] = field(default_factory=dict)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
class SubProject:
    """One sub-project inside a monorepo."""
    name: str = ""
    path: str = ""  # path relative to the monorepo root
    project_type: str = ""  # python | node | rust | go | java | ...
    version: str = ""
    dependencies: list[str] = field(default_factory=list)  # external dependencies
    internal_deps: list[str] = field(default_factory=list)  # internal deps (other sub-projects)
    scripts: dict[str, str] = field(default_factory=dict)  # npm scripts / Makefile targets
    # True when a conventional test directory or test script was found.
    has_tests: bool = False
    # File count — not populated by this module's parsers; presumably filled elsewhere.
    size_files: int = 0
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@dataclass
class DependencyEdge:
    """A dependency relation between two sub-projects (source depends on target)."""
    source: str = ""
    target: str = ""
    dep_type: str = "runtime"  # runtime | dev | peer | build
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# ─────────────────────────────────────────────
|
|
65
|
+
# Monorepo 检测
|
|
66
|
+
# ─────────────────────────────────────────────
|
|
67
|
+
|
|
68
|
+
def detect_monorepo(project_dir: str) -> MonorepoInfo:
    """
    Detect whether *project_dir* is a monorepo; identify its type and sub-projects.

    Supported monorepo types:
    - npm workspaces (package.json workspaces)
    - pnpm workspaces (pnpm-workspace.yaml)
    - Lerna (lerna.json)
    - Turborepo (turbo.json)
    - Nx (nx.json)
    - Cargo workspaces (Cargo.toml [workspace])
    - Go workspaces (go.work)
    - Bazel (BUILD / WORKSPACE)
    - Pants (pants.toml / BUILD)

    Returns:
        A MonorepoInfo; ``is_monorepo`` is False when nothing matched.
    """
    root = Path(project_dir)
    if not root.is_dir():
        return MonorepoInfo()

    # Tool-specific detectors, tried in priority order.
    detectors = [
        _detect_pnpm_workspaces,
        _detect_npm_workspaces,
        _detect_lerna,
        _detect_turborepo,
        _detect_nx,
        _detect_cargo_workspaces,
        _detect_go_workspaces,
        _detect_bazel,
        _detect_pants,
    ]

    for detector in detectors:
        info = detector(root)
        if info.is_monorepo:
            info.root_dir = str(root)
            info.total_packages = len(info.packages)
            return info

    # Generic fallback: several independent sub-projects side by side.
    info = _detect_generic_multi_project(root)
    if info.is_monorepo:
        info.root_dir = str(root)
        info.total_packages = len(info.packages)
    # BUG FIX: previously the function fell off the end and returned None
    # when nothing matched, violating the declared -> MonorepoInfo contract.
    return info
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _detect_npm_workspaces(root: Path) -> MonorepoInfo:
    """Detect npm workspaces declared in the root package.json."""
    manifest = root / "package.json"
    if not manifest.exists():
        return MonorepoInfo()

    try:
        data = json.loads(manifest.read_text(encoding="utf-8", errors="ignore"))
    except (json.JSONDecodeError, OSError):
        return MonorepoInfo()

    # "workspaces" is either a list of globs or {"packages": [...]}.
    workspaces = data.get("workspaces", [])
    if isinstance(workspaces, dict):
        workspaces = workspaces.get("packages", [])
    if not workspaces:
        return MonorepoInfo()

    return MonorepoInfo(
        is_monorepo=True,
        monorepo_type="npm_workspaces",
        packages=_resolve_workspace_globs(root, workspaces, "node"),
        shared_deps=_extract_shared_deps_node(data),
    )
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _detect_pnpm_workspaces(root: Path) -> MonorepoInfo:
    """Detect pnpm workspaces via pnpm-workspace.yaml."""
    ws_file = root / "pnpm-workspace.yaml"
    if not ws_file.exists():
        return MonorepoInfo()

    try:
        text = ws_file.read_text(encoding="utf-8", errors="ignore")
    except OSError:
        return MonorepoInfo()

    # Minimal YAML scan: collect list items under the "packages:" key,
    # stopping at the next non-comment, non-list line.
    globs: list[str] = []
    collecting = False
    for raw in text.splitlines():
        entry = raw.strip()
        if entry == "packages:":
            collecting = True
        elif collecting:
            if entry.startswith("- "):
                globs.append(entry[2:].strip().strip("'\""))
            elif entry and not entry.startswith("#") and not entry.startswith("-"):
                break

    if not globs:
        return MonorepoInfo()

    return MonorepoInfo(
        is_monorepo=True,
        monorepo_type="pnpm",
        packages=_resolve_workspace_globs(root, globs, "node"),
    )
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _detect_lerna(root: Path) -> MonorepoInfo:
    """Detect a Lerna monorepo via lerna.json."""
    config = root / "lerna.json"
    if not config.exists():
        return MonorepoInfo()

    try:
        data = json.loads(config.read_text(encoding="utf-8", errors="ignore"))
    except (json.JSONDecodeError, OSError):
        return MonorepoInfo()

    # Lerna defaults to packages/* when no explicit list is given.
    globs = data.get("packages", ["packages/*"])
    return MonorepoInfo(
        is_monorepo=True,
        monorepo_type="lerna",
        packages=_resolve_workspace_globs(root, globs, "node"),
    )
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def _detect_turborepo(root: Path) -> MonorepoInfo:
    """Detect Turborepo (turbo.json on top of npm/pnpm workspaces)."""
    if not (root / "turbo.json").exists():
        return MonorepoInfo()

    # Turborepo rides on an underlying workspace layout; reuse those
    # detectors (pnpm first, then npm) and just relabel the match.
    for probe in (_detect_pnpm_workspaces, _detect_npm_workspaces):
        found = probe(root)
        if found.is_monorepo:
            found.monorepo_type = "turborepo"
            return found
    # Neither layout matched: return the (negative) last probe result.
    return found
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _detect_nx(root: Path) -> MonorepoInfo:
    """Detect an Nx workspace (nx.json; projects under apps/, libs/, packages/)."""
    if not (root / "nx.json").exists():
        return MonorepoInfo()

    found: list[SubProject] = []
    for bucket in ("apps", "libs", "packages"):
        base = root / bucket
        if not base.is_dir():
            continue
        for entry in sorted(base.iterdir()):
            if not entry.is_dir():
                continue
            if (entry / "package.json").exists():
                # Regular node package inside the workspace.
                parsed = _parse_node_package(entry)
                if parsed:
                    found.append(parsed)
            elif (entry / "project.json").exists():
                # Nx-only project descriptor with no package.json.
                found.append(SubProject(
                    name=entry.name,
                    path=str(entry.relative_to(root)),
                    project_type="node",
                ))

    return MonorepoInfo(
        is_monorepo=bool(found),
        monorepo_type="nx",
        packages=found,
    )
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def _detect_cargo_workspaces(root: Path) -> MonorepoInfo:
    """Detect a Cargo workspace (``[workspace]`` + ``members`` in Cargo.toml).

    Uses a minimal hand-rolled TOML scan (no external deps): finds the
    ``members`` key and collects its array entries, whether the array is
    written inline (``members = ["a", "b"]``) or spread over lines.
    """
    cargo_toml = root / "Cargo.toml"
    if not cargo_toml.exists():
        return MonorepoInfo()

    try:
        content = cargo_toml.read_text(encoding="utf-8", errors="ignore")
    except OSError:
        return MonorepoInfo()

    if "[workspace]" not in content:
        return MonorepoInfo()

    members: list[str] = []
    in_members = False
    for line in content.splitlines():
        stripped = line.strip()
        if not in_members and stripped.startswith("members"):
            # Inline form: members = ["a", "b"].
            # BUG FIX: the old `\[(.+)\]` regex did not match an empty
            # array (`members = []`), leaving the scanner in "collect"
            # mode so unrelated following lines (e.g. `resolver = "2"`)
            # were swallowed as member paths.  `[^\]]*` matches both.
            m = re.search(r'\[([^\]]*)\]', stripped)
            if m:
                members = [
                    s.strip().strip('"\'')
                    for s in m.group(1).split(",")
                    if s.strip().strip('"\'')
                ]
                break
            # Multi-line form: members = [
            in_members = True
            continue
        if in_members:
            if stripped.startswith("]"):
                break
            cleaned = stripped.strip(',"\'')
            # Skip blank and comment lines inside the array.
            if cleaned and not cleaned.startswith("#"):
                members.append(cleaned)

    packages = []
    for pattern in members:
        # Cargo workspace members may use globs (e.g. "crates/*").
        if "*" in pattern:
            parent = root / pattern.split("*")[0]
            if parent.is_dir():
                for child in sorted(parent.iterdir()):
                    if child.is_dir() and (child / "Cargo.toml").exists():
                        packages.append(_parse_cargo_package(child, root))
        else:
            member_dir = root / pattern
            if member_dir.is_dir() and (member_dir / "Cargo.toml").exists():
                packages.append(_parse_cargo_package(member_dir, root))

    return MonorepoInfo(
        is_monorepo=bool(packages),
        monorepo_type="cargo",
        packages=packages,
    )
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def _detect_go_workspaces(root: Path) -> MonorepoInfo:
    """Detect a Go workspace via go.work ``use`` directives."""
    go_work = root / "go.work"
    if not go_work.exists():
        return MonorepoInfo()

    try:
        text = go_work.read_text(encoding="utf-8", errors="ignore")
    except OSError:
        return MonorepoInfo()

    def _module(rel: str) -> SubProject | None:
        # Only record module directories that actually exist on disk.
        candidate = root / rel
        if candidate.is_dir():
            return SubProject(name=candidate.name, path=rel, project_type="go")
        return None

    packages: list[SubProject] = []
    in_block = False
    for raw in text.splitlines():
        entry = raw.strip()
        if entry.startswith("use"):
            in_block = True
            if "(" not in entry:
                # Single-module form: `use ./cmd`
                rel = entry[3:].strip()
                if rel:
                    mod = _module(rel)
                    if mod:
                        packages.append(mod)
        elif in_block:
            if entry == ")":
                break
            if entry and not entry.startswith("//"):
                mod = _module(entry)
                if mod:
                    packages.append(mod)

    return MonorepoInfo(
        is_monorepo=bool(packages),
        monorepo_type="go",
        packages=packages,
    )
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
def _detect_bazel(root: Path) -> MonorepoInfo:
    """Detect a Bazel monorepo (WORKSPACE marker + per-package BUILD files)."""
    has_workspace = (root / "WORKSPACE").exists() or (root / "WORKSPACE.bazel").exists()
    if not has_workspace:
        return MonorepoInfo()

    packages: list[SubProject] = []

    def _collect(marker: str, dedupe: bool) -> None:
        # Sub-packages are identified by BUILD files; only the first two
        # directory levels are considered.
        for build_file in root.rglob(marker):
            if build_file.parent == root:
                continue
            rel = str(build_file.parent.relative_to(root))
            if rel.count(os.sep) > 2:
                continue
            name = build_file.parent.name
            if dedupe and any(p.name == name for p in packages):
                continue
            packages.append(SubProject(name=name, path=rel, project_type="bazel"))

    _collect("BUILD", dedupe=False)
    # BUILD.bazel pass skips names already collected above.
    _collect("BUILD.bazel", dedupe=True)

    return MonorepoInfo(
        is_monorepo=bool(packages),
        monorepo_type="bazel",
        packages=packages[:50],  # cap to keep the result manageable
    )
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def _detect_pants(root: Path) -> MonorepoInfo:
    """Detect the Pants build system (pants.toml + BUILD files)."""
    if not (root / "pants.toml").exists():
        return MonorepoInfo()

    # Non-root BUILD files within two directory levels mark sub-projects.
    packages = [
        SubProject(
            name=build_file.parent.name,
            path=str(build_file.parent.relative_to(root)),
            project_type="pants",
        )
        for build_file in root.rglob("BUILD")
        if build_file.parent != root
        and str(build_file.parent.relative_to(root)).count(os.sep) <= 2
    ]

    return MonorepoInfo(
        is_monorepo=bool(packages),
        monorepo_type="pants",
        packages=packages[:50],  # cap to keep the result manageable
    )
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
def _detect_generic_multi_project(root: Path) -> MonorepoInfo:
    """Fallback detection: a directory holding several independent projects."""
    markers = (
        ("package.json", "node"),
        ("setup.py", "python"), ("pyproject.toml", "python"),
        ("Cargo.toml", "rust"), ("go.mod", "go"),
        ("pom.xml", "java"), ("build.gradle", "java"),
        ("CMakeLists.txt", "cpp"), ("Makefile", "make"),
    )

    found: list[SubProject] = []
    for entry in sorted(root.iterdir()):
        if not entry.is_dir() or entry.name.startswith("."):
            continue
        # First matching marker file wins and decides the project type.
        for marker, kind in markers:
            if (entry / marker).exists():
                found.append(SubProject(
                    name=entry.name,
                    path=str(entry.relative_to(root)),
                    project_type=kind,
                ))
                break

    # A single project is just a project; two or more make a monorepo.
    return MonorepoInfo(
        is_monorepo=len(found) >= 2,
        monorepo_type="generic",
        packages=found,
    )
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
# ─────────────────────────────────────────────
|
|
451
|
+
# 辅助函数
|
|
452
|
+
# ─────────────────────────────────────────────
|
|
453
|
+
|
|
454
|
+
def _resolve_workspace_globs(root: Path, patterns: list[str], ptype: str) -> list[SubProject]:
    """Expand workspace glob patterns into a list of sub-projects."""
    found: list[SubProject] = []

    for raw in patterns:
        pattern = raw.strip().rstrip("/")

        if "*" not in pattern:
            # Literal path entry.
            target = root / pattern
            if target.is_dir():
                parsed = _parse_subproject(target, root, ptype)
                if parsed:
                    found.append(parsed)
            continue

        # Glob entry such as "packages/*": scan the prefix directory.
        prefix = pattern.split("*")[0].rstrip("/")
        base = root / prefix if prefix else root
        if not base.is_dir():
            continue
        for child in sorted(base.iterdir()):
            if child.is_dir() and not child.name.startswith("."):
                parsed = _parse_subproject(child, root, ptype)
                if parsed:
                    found.append(parsed)

    return found
|
|
481
|
+
|
|
482
|
+
|
|
483
|
+
def _parse_subproject(pkg_dir: Path, root: Path, ptype: str) -> SubProject | None:
    """Parse one sub-project directory according to its ecosystem."""
    # Ecosystems with dedicated manifest parsers.
    if ptype == "node":
        return _parse_node_package(pkg_dir, root)
    if ptype == "rust":
        return _parse_cargo_package(pkg_dir, root)
    # Everything else: record location only, no manifest parsing.
    return SubProject(
        name=pkg_dir.name,
        path=str(pkg_dir.relative_to(root)),
        project_type=ptype,
    )
|
|
494
|
+
|
|
495
|
+
|
|
496
|
+
def _parse_node_package(pkg_dir: Path, root: Path | None = None) -> SubProject | None:
    """Parse a Node.js package from its package.json; None if unreadable."""
    manifest = pkg_dir / "package.json"
    if not manifest.exists():
        return None

    try:
        meta = json.loads(manifest.read_text(encoding="utf-8", errors="ignore"))
    except (json.JSONDecodeError, OSError):
        return None

    run_scripts = meta.get("scripts", {})

    # A package "has tests" when a conventional test directory or a
    # "test" script entry exists.
    has_tests = bool(
        any((pkg_dir / d).is_dir() for d in ("tests", "test", "__tests__"))
        or run_scripts.get("test")
    )

    return SubProject(
        name=meta.get("name", pkg_dir.name),
        path=str(pkg_dir.relative_to(root)) if root else str(pkg_dir),
        project_type="node",
        version=meta.get("version", ""),
        # Regular and dev dependencies are flattened into one list.
        dependencies=list(meta.get("dependencies", {})) + list(meta.get("devDependencies", {})),
        scripts=run_scripts,
        has_tests=has_tests,
    )
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
def _parse_cargo_package(pkg_dir: Path, root: Path) -> SubProject:
    """Parse a Cargo crate; name/version scraped from Cargo.toml when readable."""
    crate_name = pkg_dir.name
    crate_version = ""

    manifest = pkg_dir / "Cargo.toml"
    if manifest.exists():
        try:
            text = manifest.read_text(encoding="utf-8", errors="ignore")
        except OSError:
            text = ""
        if text:
            # Lightweight scrape: first name/version assignment in the file
            # wins (normally the [package] section comes first).
            name_match = re.search(r'name\s*=\s*"([^"]+)"', text)
            if name_match:
                crate_name = name_match.group(1)
            ver_match = re.search(r'version\s*=\s*"([^"]+)"', text)
            if ver_match:
                crate_version = ver_match.group(1)

    return SubProject(
        name=crate_name,
        path=str(pkg_dir.relative_to(root)),
        project_type="rust",
        version=crate_version,
    )
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
def _extract_shared_deps_node(data: dict) -> dict[str, str]:
|
|
565
|
+
"""提取根 package.json 的共享依赖"""
|
|
566
|
+
shared = {}
|
|
567
|
+
for section in ("dependencies", "devDependencies"):
|
|
568
|
+
for name, version in data.get(section, {}).items():
|
|
569
|
+
shared[name] = str(version)
|
|
570
|
+
return shared
|
|
571
|
+
|
|
572
|
+
|
|
573
|
+
# ─────────────────────────────────────────────
|
|
574
|
+
# 依赖图谱
|
|
575
|
+
# ─────────────────────────────────────────────
|
|
576
|
+
|
|
577
|
+
def build_dependency_graph(info: MonorepoInfo) -> list[DependencyEdge]:
    """
    Build the inter-project dependency graph.

    Scans each sub-project's declared dependencies for names matching a
    sibling sub-project.  As a side effect, every match is also recorded
    (deduplicated) in that package's ``internal_deps``.
    """
    sibling_names = {p.name for p in info.packages}
    edges: list[DependencyEdge] = []

    for pkg in info.packages:
        # A dependency is "internal" when its name is a sibling package.
        for dep in (d for d in pkg.dependencies if d in sibling_names):
            edges.append(DependencyEdge(
                source=pkg.name,
                target=dep,
                dep_type="runtime",
            ))
            if dep not in pkg.internal_deps:
                pkg.internal_deps.append(dep)

    return edges
|
|
599
|
+
|
|
600
|
+
|
|
601
|
+
def topological_sort(info: MonorepoInfo) -> list[str]:
    """Topologically sort sub-project names to determine install order.

    Dependencies come before their dependents; ties break alphabetically
    (Kahn's algorithm with a min-heap, which yields the same order as the
    previous sort()+pop(0) queue but in O(log n) per step instead of
    re-sorting the whole queue each iteration).  Packages caught in a
    dependency cycle are appended at the end in declaration order.
    """
    edges = build_dependency_graph(info)

    # Adjacency list (target -> dependents) and in-degree per package.
    graph: dict[str, list[str]] = {}
    in_degree: dict[str, int] = {}

    for pkg in info.packages:
        graph[pkg.name] = []
        in_degree[pkg.name] = 0

    for edge in edges:
        graph[edge.target].append(edge.source)
        in_degree[edge.source] = in_degree.get(edge.source, 0) + 1

    # Kahn's algorithm, smallest name first.
    heap = [name for name, deg in in_degree.items() if deg == 0]
    heapq.heapify(heap)
    result: list[str] = []

    while heap:
        node = heapq.heappop(heap)
        result.append(node)
        for neighbor in graph.get(node, []):
            in_degree[neighbor] -= 1
            if in_degree[neighbor] == 0:
                heapq.heappush(heap, neighbor)

    # Cycle fallback: append nodes never released.  Set membership replaces
    # the previous O(n^2) `name not in result` list scan.
    emitted = set(result)
    result.extend(name for name in in_degree if name not in emitted)

    return result
|
|
636
|
+
|
|
637
|
+
|
|
638
|
+
# ─────────────────────────────────────────────
|
|
639
|
+
# 选择性安装
|
|
640
|
+
# ─────────────────────────────────────────────
|
|
641
|
+
|
|
642
|
+
def plan_selective_install(
    info: MonorepoInfo,
    targets: list[str],
    include_deps: bool = True,
) -> list[SubProject]:
    """
    Build an install plan for the selected sub-projects.

    When *include_deps* is true, internal dependencies of the selected
    projects are pulled in transitively and the result is ordered so
    dependencies come before their dependents.

    Args:
        info: monorepo metadata.
        targets: names of the sub-projects to install.
        include_deps: also include sub-projects the targets depend on.
    """
    if not include_deps:
        return [p for p in info.packages if p.name in targets]

    # Populates each package's internal_deps as a side effect.
    build_dependency_graph(info)

    by_name = {p.name: p for p in info.packages}

    # Transitive closure over internal dependencies (worklist instead of
    # the original repeat-until-stable scan; same resulting set).
    needed = set(targets)
    frontier = list(targets)
    while frontier:
        current = by_name.get(frontier.pop())
        if current is None:
            continue
        for dep in current.internal_deps:
            if dep not in needed:
                needed.add(dep)
                frontier.append(dep)

    # Emit in dependency-first (topological) order.
    return [
        by_name[name]
        for name in topological_sort(info)
        if name in needed and name in by_name
    ]
|
|
688
|
+
|
|
689
|
+
|
|
690
|
+
def generate_install_commands(
    info: MonorepoInfo,
    targets: list[str] | None = None,
) -> list[dict[str, Any]]:
    """
    Produce per-package install command lists.

    Returns:
        [{"package": name, "path": path, "project_type": type, "commands": [...]}]
    """
    if targets:
        selected = plan_selective_install(info, targets)
    else:
        # No explicit targets: install every package in topological order.
        by_name = {p.name: p for p in info.packages}
        selected = [by_name[n] for n in topological_sort(info) if n in by_name]

    return [
        {
            "package": pkg.name,
            "path": pkg.path,
            "project_type": pkg.project_type,
            "commands": _generate_pkg_commands(pkg, info.monorepo_type),
        }
        for pkg in selected
    ]
|
|
718
|
+
|
|
719
|
+
|
|
720
|
+
def _generate_pkg_commands(pkg: SubProject, mono_type: str) -> list[str]:
|
|
721
|
+
"""为子项目生成安装命令"""
|
|
722
|
+
cmds = [f"cd {pkg.path}"]
|
|
723
|
+
|
|
724
|
+
if pkg.project_type == "node":
|
|
725
|
+
if mono_type in ("pnpm", "turborepo"):
|
|
726
|
+
cmds.append(f"pnpm install --filter {pkg.name}")
|
|
727
|
+
elif mono_type == "npm_workspaces":
|
|
728
|
+
cmds.append(f"npm install -w {pkg.path}")
|
|
729
|
+
elif mono_type == "lerna":
|
|
730
|
+
cmds.append(f"npx lerna bootstrap --scope={pkg.name}")
|
|
731
|
+
else:
|
|
732
|
+
cmds.append("npm install")
|
|
733
|
+
|
|
734
|
+
if "build" in pkg.scripts:
|
|
735
|
+
cmds.append("npm run build")
|
|
736
|
+
|
|
737
|
+
elif pkg.project_type == "python":
|
|
738
|
+
cmds.append("pip install -e .")
|
|
739
|
+
|
|
740
|
+
elif pkg.project_type == "rust":
|
|
741
|
+
cmds.append(f"cargo build -p {pkg.name}")
|
|
742
|
+
|
|
743
|
+
elif pkg.project_type == "go":
|
|
744
|
+
cmds.append("go build ./...")
|
|
745
|
+
|
|
746
|
+
elif pkg.project_type == "java":
|
|
747
|
+
if (Path(pkg.path) / "build.gradle").exists():
|
|
748
|
+
cmds.append("gradle build")
|
|
749
|
+
else:
|
|
750
|
+
cmds.append("mvn install")
|
|
751
|
+
|
|
752
|
+
else:
|
|
753
|
+
if (Path(pkg.path) / "Makefile").exists():
|
|
754
|
+
cmds.append("make")
|
|
755
|
+
|
|
756
|
+
return cmds
|
|
757
|
+
|
|
758
|
+
|
|
759
|
+
# ─────────────────────────────────────────────
|
|
760
|
+
# 格式化输出
|
|
761
|
+
# ─────────────────────────────────────────────
|
|
762
|
+
|
|
763
|
+
def format_monorepo_info(info: MonorepoInfo) -> str:
    """Render a human-readable summary of a monorepo detection result."""
    if not info.is_monorepo:
        return "📁 不是 monorepo 项目"

    type_labels = {
        "npm_workspaces": "npm Workspaces",
        "pnpm": "pnpm Workspaces",
        "lerna": "Lerna",
        "turborepo": "Turborepo",
        "nx": "Nx",
        "cargo": "Cargo Workspace",
        "go": "Go Workspace",
        "bazel": "Bazel",
        "pants": "Pants",
        "generic": "通用多项目",
    }

    label = type_labels.get(info.monorepo_type, info.monorepo_type)
    out = [
        "📦 Monorepo 检测结果",
        f" 类型: {label}",
        f" 子项目数: {info.total_packages}",
        "",
        " 子项目列表:",
    ]

    # At most 20 entries are listed, each with up to 3 internal deps.
    for pkg in info.packages[:20]:
        deps_str = f" (依赖: {', '.join(pkg.internal_deps[:3])})" if pkg.internal_deps else ""
        out.append(f" 📄 {pkg.name} [{pkg.project_type}] — {pkg.path}{deps_str}")

    if info.total_packages > 20:
        out.append(f" ... 还有 {info.total_packages - 20} 个子项目")

    return "\n".join(out)
|
|
797
|
+
|
|
798
|
+
|
|
799
|
+
def format_install_plan(commands: list[dict]) -> str:
    """Render the install plan as numbered steps with their shell commands."""
    out = ["📋 Monorepo 安装计划", ""]

    for step, item in enumerate(commands, 1):
        out.append(f" {step}. {item['package']} [{item['project_type']}]")
        out.append(f" 路径: {item['path']}")
        out.extend(f" $ {cmd}" for cmd in item["commands"])
        out.append("")

    return "\n".join(out)