commiter-cli 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- commiter/__init__.py +3 -0
- commiter/adapters/__init__.py +0 -0
- commiter/adapters/base.py +96 -0
- commiter/adapters/django_rest.py +247 -0
- commiter/adapters/express.py +204 -0
- commiter/adapters/fastapi.py +170 -0
- commiter/adapters/flask.py +169 -0
- commiter/adapters/nextjs.py +180 -0
- commiter/adapters/prisma.py +76 -0
- commiter/adapters/raw_sql.py +191 -0
- commiter/adapters/react.py +129 -0
- commiter/adapters/sqlalchemy.py +99 -0
- commiter/adapters/supabase.py +68 -0
- commiter/auth.py +130 -0
- commiter/cli.py +667 -0
- commiter/correlator.py +208 -0
- commiter/extractors/__init__.py +0 -0
- commiter/extractors/api_calls.py +91 -0
- commiter/extractors/api_endpoints.py +354 -0
- commiter/extractors/backend_files.py +33 -0
- commiter/extractors/base.py +40 -0
- commiter/extractors/db_operations.py +69 -0
- commiter/extractors/dependencies.py +219 -0
- commiter/generic_resolver.py +204 -0
- commiter/handler_index.py +97 -0
- commiter/lib.py +63 -0
- commiter/middleware_index.py +350 -0
- commiter/models.py +117 -0
- commiter/parser.py +1283 -0
- commiter/prefix_index.py +211 -0
- commiter/report/__init__.py +0 -0
- commiter/report/ai.py +120 -0
- commiter/report/api_guide.py +217 -0
- commiter/report/architecture.py +930 -0
- commiter/report/console.py +254 -0
- commiter/report/json_output.py +122 -0
- commiter/report/markdown.py +163 -0
- commiter/scanner.py +383 -0
- commiter/type_index.py +304 -0
- commiter/uploader.py +46 -0
- commiter/utils/__init__.py +0 -0
- commiter/utils/env_reader.py +78 -0
- commiter/utils/file_classifier.py +187 -0
- commiter/utils/path_helpers.py +73 -0
- commiter/utils/tsconfig_resolver.py +281 -0
- commiter/wrapper_index.py +288 -0
- commiter_cli-0.3.0.dist-info/METADATA +14 -0
- commiter_cli-0.3.0.dist-info/RECORD +96 -0
- commiter_cli-0.3.0.dist-info/WHEEL +5 -0
- commiter_cli-0.3.0.dist-info/entry_points.txt +2 -0
- commiter_cli-0.3.0.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/fixtures/arch_backend/app.py +22 -0
- tests/fixtures/arch_backend/middleware/__init__.py +0 -0
- tests/fixtures/arch_backend/middleware/rate_limit.py +4 -0
- tests/fixtures/arch_backend/routes/__init__.py +0 -0
- tests/fixtures/arch_backend/routes/analytics.py +20 -0
- tests/fixtures/arch_backend/routes/auth.py +29 -0
- tests/fixtures/arch_backend/routes/projects.py +60 -0
- tests/fixtures/arch_backend/routes/users.py +55 -0
- tests/fixtures/arch_monorepo/apps/api/app.py +30 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/auth.py +17 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/rate_limit.py +10 -0
- tests/fixtures/arch_monorepo/apps/api/routes/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/routes/auth.py +46 -0
- tests/fixtures/arch_monorepo/apps/api/routes/invites.py +30 -0
- tests/fixtures/arch_monorepo/apps/api/routes/notifications.py +25 -0
- tests/fixtures/arch_monorepo/apps/api/routes/projects.py +80 -0
- tests/fixtures/arch_monorepo/apps/api/routes/tasks.py +91 -0
- tests/fixtures/arch_monorepo/apps/api/routes/users.py +48 -0
- tests/fixtures/arch_monorepo/apps/api/services/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/services/email.py +11 -0
- tests/fixtures/backend_b/app.py +17 -0
- tests/fixtures/fastapi_app/app.py +48 -0
- tests/fixtures/fastapi_crossfile/routes.py +18 -0
- tests/fixtures/fastapi_crossfile/schemas.py +21 -0
- tests/fixtures/flask_app/app.py +33 -0
- tests/fixtures/flask_blueprint/app.py +7 -0
- tests/fixtures/flask_blueprint/routes/items.py +13 -0
- tests/fixtures/flask_blueprint/routes/users.py +20 -0
- tests/fixtures/middleware_test_flask/routes/public.py +8 -0
- tests/fixtures/middleware_test_flask/routes/users.py +26 -0
- tests/fixtures/python_deep_imports/app/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/health.py +11 -0
- tests/fixtures/python_deep_imports/app/api/v1/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/v1/items.py +18 -0
- tests/fixtures/python_deep_imports/app/api/v1/users.py +27 -0
- tests/fixtures/python_deep_imports/app/schemas/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/schemas/item.py +13 -0
- tests/fixtures/python_deep_imports/app/schemas/user.py +15 -0
- tests/fixtures/python_deep_imports/app/shared/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/shared/models.py +7 -0
- tests/fixtures/raw_sql_test/app.py +54 -0
- tests/test_architecture.py +757 -0
commiter/uploader.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"""Upload architecture JSON to commiter.dev for AI enrichment."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import urllib.request
|
|
8
|
+
import urllib.error
|
|
9
|
+
|
|
10
|
+
UPLOAD_URL = "https://commiter-api.up.railway.app/v1/architecture/upload"
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def upload_architecture(json_str: str, api_token: str | None = None) -> dict:
    """POST architecture JSON to the Commiter API and return the response.

    Args:
        json_str: The raw architecture JSON string.
        api_token: Optional JWT for authenticated enrichment.

    Returns:
        Dict with keys: snapshot_id, url, status, tier
        Or dict with key: error
    """
    payload = json_str.encode("utf-8")

    # The hash header lets the server detect truncated or corrupted uploads.
    request_headers = {
        "Content-Type": "application/json",
        "X-Content-Hash": hashlib.sha256(payload).hexdigest(),
    }
    if api_token:
        request_headers["Authorization"] = f"Bearer {api_token}"

    request = urllib.request.Request(
        UPLOAD_URL, data=payload, headers=request_headers, method="POST"
    )

    try:
        with urllib.request.urlopen(request, timeout=30) as response:
            return json.loads(response.read().decode("utf-8"))
    except urllib.error.HTTPError as http_err:
        raw_body = http_err.read().decode("utf-8", errors="replace")
        try:
            parsed = json.loads(raw_body)
        except json.JSONDecodeError:
            # Non-JSON error page: surface a truncated snippet for debugging.
            return {"error": f"HTTP {http_err.code}: {raw_body[:200]}"}
        return {"error": parsed.get("error", f"HTTP {http_err.code}")}
    except urllib.error.URLError as url_err:
        return {"error": f"Connection failed: {url_err.reason}"}
    except Exception as exc:  # uploads are best-effort; never crash the CLI
        return {"error": str(exc)}
|
|
File without changes
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""Read .env files and resolve environment variable references."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
# .env files to check, in priority order (later overrides earlier)
|
|
9
|
+
ENV_FILES = [
|
|
10
|
+
".env",
|
|
11
|
+
".env.local",
|
|
12
|
+
".env.development",
|
|
13
|
+
".env.development.local",
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def load_env_files(repo_root: str) -> dict[str, str]:
    """Read all .env files in the repo root and return a merged dict.

    Later files override earlier ones. Only reads key=value pairs,
    ignores comments and empty lines.
    """
    merged: dict[str, str] = {}
    base = Path(repo_root)

    for filename in ENV_FILES:
        candidate = base / filename
        if not candidate.is_file():
            continue
        try:
            text = candidate.read_text(encoding="utf-8", errors="replace")
        except OSError:
            # Unreadable file (permissions, race with deletion) — skip it.
            continue
        merged.update(_parse_env_content(text))

    return merged
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _parse_env_content(content: str) -> dict[str, str]:
|
|
39
|
+
"""Parse .env file content into key-value pairs."""
|
|
40
|
+
result: dict[str, str] = {}
|
|
41
|
+
for line in content.splitlines():
|
|
42
|
+
line = line.strip()
|
|
43
|
+
# Skip empty lines and comments
|
|
44
|
+
if not line or line.startswith("#"):
|
|
45
|
+
continue
|
|
46
|
+
# Handle export prefix: export VAR=value
|
|
47
|
+
if line.startswith("export "):
|
|
48
|
+
line = line[7:].strip()
|
|
49
|
+
# Split on first =
|
|
50
|
+
if "=" not in line:
|
|
51
|
+
continue
|
|
52
|
+
key, _, value = line.partition("=")
|
|
53
|
+
key = key.strip()
|
|
54
|
+
value = value.strip()
|
|
55
|
+
# Strip surrounding quotes
|
|
56
|
+
if len(value) >= 2 and value[0] == value[-1] and value[0] in ("'", '"'):
|
|
57
|
+
value = value[1:-1]
|
|
58
|
+
if key:
|
|
59
|
+
result[key] = value
|
|
60
|
+
|
|
61
|
+
return result
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def resolve_env_var(var_name: str, env: dict[str, str]) -> str | None:
    """Resolve a variable name to its .env value.

    Handles common patterns:
        process.env.API_URL -> looks up "API_URL"
        process.env.NEXT_PUBLIC_API_URL -> looks up "NEXT_PUBLIC_API_URL"
        import.meta.env.VITE_API_URL -> looks up "VITE_API_URL"
    """
    # Strip the first matching access prefix, if any; a bare name passes through.
    known_prefixes = ("process.env.", "import.meta.env.")
    lookup_key = next(
        (var_name[len(prefix):] for prefix in known_prefixes if var_name.startswith(prefix)),
        var_name,
    )
    return env.get(lookup_key)
|
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
"""Detect language and framework from file contents and project structure."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from commiter.models import FileClassification, FileRole
|
|
9
|
+
|
|
10
|
+
# Extension to language mapping
|
|
11
|
+
EXTENSION_LANG: dict[str, str] = {
|
|
12
|
+
".py": "python",
|
|
13
|
+
".js": "javascript",
|
|
14
|
+
".mjs": "javascript",
|
|
15
|
+
".cjs": "javascript",
|
|
16
|
+
".jsx": "javascript",
|
|
17
|
+
".ts": "typescript",
|
|
18
|
+
".tsx": "typescript",
|
|
19
|
+
".go": "go",
|
|
20
|
+
".rb": "ruby",
|
|
21
|
+
".rs": "rust",
|
|
22
|
+
".java": "java",
|
|
23
|
+
".kt": "kotlin",
|
|
24
|
+
".php": "php",
|
|
25
|
+
".cs": "csharp",
|
|
26
|
+
".swift": "swift",
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
# Patterns that indicate test files
|
|
30
|
+
TEST_PATTERNS = {
|
|
31
|
+
"test_", "_test.", ".test.", ".spec.", "__tests__", "tests/", "test/",
|
|
32
|
+
"spec/", "specs/",
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
# Config file names
|
|
36
|
+
CONFIG_FILES = {
|
|
37
|
+
".env", ".env.local", ".env.production", ".env.development",
|
|
38
|
+
"docker-compose.yml", "docker-compose.yaml", "Dockerfile",
|
|
39
|
+
"tsconfig.json", "jsconfig.json", ".eslintrc", ".prettierrc",
|
|
40
|
+
"webpack.config.js", "vite.config.ts", "vite.config.js",
|
|
41
|
+
"next.config.js", "next.config.mjs", "next.config.ts",
|
|
42
|
+
"tailwind.config.js", "tailwind.config.ts",
|
|
43
|
+
"pyproject.toml", "setup.py", "setup.cfg", "tox.ini",
|
|
44
|
+
"Makefile", "Procfile", ".gitignore", ".dockerignore",
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
# Migration directory patterns
|
|
48
|
+
MIGRATION_PATTERNS = {"migrations/", "alembic/", "migrate/", "db/migrate/"}
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def classify_file(file_path: str, repo_root: str) -> FileClassification:
    """Classify a single file by role, language, and framework hints."""
    resolved = Path(file_path).resolve()
    resolved_root = Path(repo_root).resolve()
    try:
        relative = str(resolved.relative_to(resolved_root))
    except ValueError:
        # File lives outside the repo root — fall back to the absolute path.
        relative = str(resolved)

    filename = resolved.name
    extension = resolved.suffix.lower()

    # Unknown extensions fall back to the bare extension name (e.g. "scss");
    # files with no extension at all are "unknown".
    fallback = extension.lstrip(".") if extension else "unknown"
    detected_language = EXTENSION_LANG.get(extension, fallback)

    return FileClassification(
        file_path=file_path,
        role=_detect_role(relative, filename, extension),
        language=detected_language,
        framework_hints=_detect_framework_hints(relative, filename),
    )
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _detect_role(rel_path: str, name: str, ext: str) -> FileRole:
    """Determine the file's role based on path and name patterns."""
    normalized = rel_path.lower().replace("\\", "/")

    # Config files: exact well-known names, or config-ish extensions.
    if name in CONFIG_FILES or ext in {".yml", ".yaml", ".toml", ".ini", ".cfg"}:
        return FileRole.CONFIG

    # Tests and migrations are matched anywhere in the relative path.
    if any(marker in normalized for marker in TEST_PATTERNS):
        return FileRole.TEST
    if any(marker in normalized for marker in MIGRATION_PATTERNS):
        return FileRole.MIGRATION

    # Next.js conventions: pages/api and app-router route files are backend;
    # other pages / app-router page files are frontend.
    if "pages/api/" in normalized or ("app/" in normalized and "route." in name):
        return FileRole.BACKEND
    if "pages/" in normalized or ("app/" in normalized and "page." in name):
        return FileRole.FRONTEND

    # Generic directory conventions. Backend is checked first, so "views/"
    # resolves to BACKEND even though it also appears in the frontend list.
    backend_dirs = ("routes/", "api/", "controllers/", "handlers/", "views/", "endpoints/", "server.")
    if any(marker in normalized for marker in backend_dirs):
        return FileRole.BACKEND

    frontend_dirs = ("components/", "pages/", "src/app/", "views/", "layouts/", "hooks/")
    if any(marker in normalized for marker in frontend_dirs):
        return FileRole.FRONTEND

    # Last resort: guess from the language.
    if ext in (".py", ".go", ".rb", ".java", ".php", ".rs"):
        return FileRole.BACKEND
    if ext in (".jsx", ".tsx"):
        return FileRole.FRONTEND

    return FileRole.UNKNOWN
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def _detect_framework_hints(rel_path: str, name: str) -> list[str]:
|
|
120
|
+
"""Detect possible framework associations from path conventions."""
|
|
121
|
+
hints = []
|
|
122
|
+
rel_path = rel_path.replace("\\", "/")
|
|
123
|
+
if "pages/api/" in rel_path or "app/" in rel_path and "route." in name:
|
|
124
|
+
hints.append("nextjs")
|
|
125
|
+
if name in ("next.config.js", "next.config.mjs", "next.config.ts"):
|
|
126
|
+
hints.append("nextjs")
|
|
127
|
+
return hints
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def detect_repo_frameworks(repo_root: str) -> list[str]:
    """Detect which frameworks a repository uses by checking dependency manifests and imports."""
    root = Path(repo_root)
    detected: list[str] = []

    # --- JS/TS dependencies from package.json (runtime + dev) ---
    # (label, package names that indicate it), checked in a fixed order so
    # the returned list is deterministic.
    js_checks = [
        ("express", ("express",)),
        ("nextjs", ("next",)),
        ("react", ("react",)),
        ("fastify", ("fastify",)),
        ("hono", ("hono",)),
        ("prisma", ("@prisma/client", "prisma")),
        ("supabase-js", ("@supabase/supabase-js",)),
        ("drizzle", ("drizzle-orm",)),
        ("axios", ("axios",)),
    ]
    pkg_path = root / "package.json"
    if pkg_path.exists():
        try:
            pkg = json.loads(pkg_path.read_text(encoding="utf-8", errors="replace"))
        except (json.JSONDecodeError, OSError):
            pkg = None
        if pkg is not None:
            deps = set(pkg.get("dependencies", {})) | set(pkg.get("devDependencies", {}))
            for label, packages in js_checks:
                if any(p in deps for p in packages):
                    detected.append(label)

    # --- Python dependencies via substring search over manifests ---
    py_checks = [
        ("flask", "flask"),
        ("fastapi", "fastapi"),
        ("django", "django"),
        ("supabase-py", "supabase"),
        ("sqlalchemy", "sqlalchemy"),
    ]
    for manifest_name in ("requirements.txt", "pyproject.toml", "setup.cfg", "Pipfile"):
        manifest = root / manifest_name
        if not manifest.exists():
            continue
        try:
            text = manifest.read_text(encoding="utf-8", errors="replace").lower()
        except OSError:
            continue
        for label, needle in py_checks:
            if needle in text:
                detected.append(label)

    # --- Next.js fallback: detect by directory structure + config file ---
    if "nextjs" not in detected:
        has_router_dirs = (root / "pages").is_dir() or (root / "app").is_dir()
        has_next_config = any(
            (root / cfg).exists()
            for cfg in ("next.config.js", "next.config.mjs", "next.config.ts")
        )
        if has_router_dirs and has_next_config:
            detected.append("nextjs")

    # Deduplicate while preserving first-seen order.
    return list(dict.fromkeys(detected))
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"""Path utilities: repo root detection, ignore patterns, file walking."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
# Directories to always skip
|
|
8
|
+
# Directories to always skip
IGNORE_DIRS = {
    ".git", "node_modules", "__pycache__", ".next", ".nuxt",
    "dist", "build", ".tox", ".venv", "venv", "env",
    ".mypy_cache", ".pytest_cache", ".ruff_cache",
    "vendor", "target", ".idea", ".vscode",
    "coverage", ".nyc_output", ".turbo",
}

# File extensions to skip
IGNORE_EXTENSIONS = {
    ".pyc", ".pyo", ".class", ".o", ".so", ".dll",
    ".png", ".jpg", ".jpeg", ".gif", ".svg", ".ico",
    ".woff", ".woff2", ".ttf", ".eot",
    ".zip", ".tar", ".gz", ".bz2",
    ".lock", ".sum",
    ".map",
}

# Binary / non-code files to skip by name
IGNORE_FILES = {
    ".DS_Store", "Thumbs.db", "package-lock.json", "yarn.lock",
    "pnpm-lock.yaml", "poetry.lock", "Pipfile.lock",
    "Cargo.lock", "Gemfile.lock", "composer.lock",
}


def walk_repo(repo_root: str, extra_excludes: list[str] | None = None) -> list[str]:
    """Walk a repository and return all relevant file paths, sorted.

    Skips directories in IGNORE_DIRS (pruned so they are never descended
    into), files named in IGNORE_FILES, extensions in IGNORE_EXTENSIONS,
    and any file whose root-relative path contains one of *extra_excludes*
    as a substring.

    Args:
        repo_root: Directory to walk. Returns [] if it is not a directory.
        extra_excludes: Optional substrings matched against each file's
            path relative to repo_root.

    Returns:
        Sorted list of file path strings (same absolute/relative flavor
        as repo_root).
    """
    import os  # local import: the module otherwise only needs pathlib

    root = Path(repo_root)
    if not root.is_dir():
        return []

    exclude_set = set(extra_excludes) if extra_excludes else set()
    files: list[str] = []

    for dirpath, dirnames, filenames in os.walk(str(root)):
        # Prune ignored directories in place so the walk never enters them.
        # (The previous rglob-based approach traversed e.g. node_modules
        # entirely and only filtered afterwards, which is very slow on
        # real repositories.)
        dirnames[:] = [d for d in dirnames if d not in IGNORE_DIRS]

        for filename in filenames:
            if filename in IGNORE_FILES:
                continue
            candidate = Path(dirpath) / filename
            if candidate.suffix.lower() in IGNORE_EXTENSIONS:
                continue
            if not candidate.is_file():
                # e.g. a broken symlink reported in filenames
                continue
            rel = str(candidate.relative_to(root))
            if any(excl in rel for excl in exclude_set):
                continue
            files.append(str(candidate))

    return sorted(files)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def detect_repo_name(repo_root: str) -> str:
    """Detect a human-friendly repository name from the root path."""
    # The directory's basename doubles as the display name.
    root = Path(repo_root)
    return root.name
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
"""Parse tsconfig.json / jsconfig.json and resolve path aliases.
|
|
2
|
+
|
|
3
|
+
Supports:
|
|
4
|
+
- compilerOptions.baseUrl and compilerOptions.paths
|
|
5
|
+
- "extends" inheritance chains (recursive)
|
|
6
|
+
- Monorepo multi-tsconfig via TSConfigRegistry
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
import re
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
|
|
16
|
+
# Extensions to try when probing for a file
|
|
17
|
+
RESOLVE_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", "/index.ts", "/index.js", "/index.tsx"]
|
|
18
|
+
|
|
19
|
+
# Directories to skip when scanning for tsconfig files in monorepos
|
|
20
|
+
SKIP_DIRS = {
|
|
21
|
+
"node_modules", ".git", "dist", "build", ".next", ".nuxt",
|
|
22
|
+
"__pycache__", ".tox", ".venv", "venv", "coverage",
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
# Max depth for extends chains to prevent infinite loops
|
|
26
|
+
MAX_EXTENDS_DEPTH = 5
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _strip_jsonc_comments(text: str) -> str:
|
|
30
|
+
"""Strip // and /* */ comments from JSONC, preserving strings.
|
|
31
|
+
|
|
32
|
+
Uses a regex that matches strings first (to skip them), then comments.
|
|
33
|
+
This prevents stripping // inside strings like "@/*": ["src/*"].
|
|
34
|
+
"""
|
|
35
|
+
# Match either a double-quoted string, a single-line comment, or a block comment.
|
|
36
|
+
# Strings are captured and preserved; comments are replaced with empty string.
|
|
37
|
+
def _replacer(match: re.Match) -> str:
|
|
38
|
+
if match.group(1) is not None:
|
|
39
|
+
return match.group(1) # preserve string
|
|
40
|
+
return "" # remove comment
|
|
41
|
+
|
|
42
|
+
return re.sub(
|
|
43
|
+
r'("(?:[^"\\]|\\.)*")' # group 1: double-quoted string (preserved)
|
|
44
|
+
r'|//.*?$' # single-line comment
|
|
45
|
+
r'|/\*.*?\*/', # block comment
|
|
46
|
+
_replacer,
|
|
47
|
+
text,
|
|
48
|
+
flags=re.MULTILINE | re.DOTALL,
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _load_json(config_path: str) -> dict | None:
|
|
53
|
+
"""Load and parse a JSONC file (tsconfig/jsconfig). Returns None on failure."""
|
|
54
|
+
try:
|
|
55
|
+
raw = Path(config_path).read_text(encoding="utf-8", errors="replace")
|
|
56
|
+
raw = _strip_jsonc_comments(raw)
|
|
57
|
+
return json.loads(raw)
|
|
58
|
+
except (json.JSONDecodeError, OSError):
|
|
59
|
+
return None
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _resolve_extends(data: dict, config_dir: str, seen: set[str] | None = None, depth: int = 0) -> dict:
    """Recursively follow the 'extends' field and merge parent configs.

    Merge rules (matching TypeScript behavior):
    - compilerOptions: child overrides parent per-key (shallow merge)
    - paths: child completely replaces parent if present
    - include/exclude/files/references: child replaces parent

    Args:
        data: The parsed child tsconfig.
        config_dir: Directory containing the child config; relative
            "extends" values are resolved against it.
        seen: Absolute parent paths already visited (circular-extends guard).
        depth: Current recursion depth, capped at MAX_EXTENDS_DEPTH.

    Returns:
        The merged config dict, or the child's own dict when the extends
        target cannot be resolved.
    """
    if depth >= MAX_EXTENDS_DEPTH:
        return data

    extends_value = data.get("extends")
    if not extends_value:
        return data

    # Newer TypeScript allows "extends" to be an array of configs; we only
    # handle the common string form — bail out instead of crashing on
    # .startswith() below.
    if not isinstance(extends_value, str):
        return data

    if seen is None:
        seen = set()

    if not extends_value.startswith("."):
        # Package extends like "@tsconfig/next" — skip, can't resolve
        # without node_modules.
        return data

    # Relative path: "./tsconfig.base.json" or "../shared/tsconfig.json".
    parent_path = os.path.normpath(os.path.join(config_dir, extends_value))
    # TypeScript also tries appending .json if not present.
    if not parent_path.endswith(".json") and os.path.isfile(parent_path + ".json"):
        parent_path += ".json"

    parent_path = os.path.abspath(parent_path)

    # Guard against circular extends and dangling references.
    if parent_path in seen or not os.path.isfile(parent_path):
        return data

    seen.add(parent_path)

    parent_data = _load_json(parent_path)
    if parent_data is None:
        return data

    # Resolve the parent's own extends chain before merging.
    parent_data = _resolve_extends(parent_data, os.path.dirname(parent_path), seen, depth + 1)

    # Merge: parent as base, child overrides.
    merged = dict(parent_data)

    # compilerOptions merge shallowly: child keys win per-key.
    merged_opts = dict(merged.get("compilerOptions", {}))
    merged_opts.update(data.get("compilerOptions", {}))
    merged["compilerOptions"] = merged_opts

    # Top-level list fields: child replaces parent entirely.
    for key in ("include", "exclude", "files", "references"):
        if key in data:
            merged[key] = data[key]

    # The chain is fully resolved; drop the extends marker.
    merged.pop("extends", None)

    return merged
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class TSConfigResolver:
    """Resolves TypeScript/JavaScript path aliases from a single tsconfig.json.

    Handles compilerOptions.baseUrl, compilerOptions.paths, and "extends" chains.
    """

    def __init__(self, repo_root: str) -> None:
        self.repo_root = os.path.abspath(repo_root)
        # Absolute baseUrl directory, if the config declares one.
        self.base_url: str | None = None
        # Raw compilerOptions.paths mapping as found in the config.
        self.paths: dict[str, list[str]] = {}
        # Precomputed (alias prefix, [absolute target dirs]) pairs.
        self._alias_rules: list[tuple[str, list[str]]] = []
        self._load()

    def _load(self) -> None:
        """Find and parse tsconfig.json or jsconfig.json."""
        candidates = (
            os.path.join(self.repo_root, "tsconfig.json"),
            os.path.join(self.repo_root, "jsconfig.json"),
        )
        for candidate in candidates:
            if os.path.isfile(candidate):
                self._parse(candidate)
                break

    def _parse(self, config_path: str) -> None:
        """Parse a tsconfig/jsconfig file, following extends if present."""
        data = _load_json(config_path)
        if data is None:
            return

        config_dir = os.path.dirname(os.path.abspath(config_path))

        # Follow the extends chain before reading any options.
        if "extends" in data:
            data = _resolve_extends(data, config_dir)

        options = data.get("compilerOptions", {})

        # baseUrl is declared relative to the tsconfig's own directory.
        declared_base = options.get("baseUrl")
        if declared_base:
            self.base_url = os.path.normpath(os.path.join(config_dir, declared_base))

        self.paths = options.get("paths", {})

        # Turn each "paths" pattern into a (prefix, targets) rule; the "*"
        # wildcard is modeled by simple prefix stripping.
        anchor = self.base_url or config_dir
        for pattern, targets in self.paths.items():
            alias_prefix = pattern.replace("*", "")
            target_dirs = [
                os.path.normpath(os.path.join(anchor, target.replace("*", "")))
                for target in targets
            ]
            self._alias_rules.append((alias_prefix, target_dirs))

    def resolve(self, import_path: str, caller_file: str = "") -> str | None:
        """Resolve a non-relative import path using tsconfig aliases."""
        if import_path.startswith("."):
            return None  # relative imports are resolved elsewhere

        for alias_prefix, target_dirs in self._alias_rules:
            if not import_path.startswith(alias_prefix):
                continue
            remainder = import_path[len(alias_prefix):]
            for target_dir in target_dirs:
                found = self._probe_file(os.path.join(target_dir, remainder))
                if found:
                    return found

        # Fall back to plain baseUrl-relative resolution.
        if self.base_url:
            found = self._probe_file(os.path.join(self.base_url, import_path))
            if found:
                return found

        return None

    def _probe_file(self, base_path: str) -> str | None:
        """Try to find a file at base_path with various extensions."""
        if os.path.isfile(base_path):
            return os.path.abspath(base_path)

        for suffix in RESOLVE_EXTENSIONS:
            probe = base_path + suffix
            if os.path.isfile(probe):
                return os.path.abspath(probe)

        return None

    @property
    def has_config(self) -> bool:
        """Whether a tsconfig/jsconfig was found and parsed."""
        return self.base_url is not None or bool(self.paths)
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
class TSConfigRegistry:
    """Manages multiple TSConfigResolvers for monorepo support.

    Finds all tsconfig.json/jsconfig.json files in the repo, creates a resolver
    for each, and routes resolve() calls to the nearest resolver for a given file.

    Drop-in replacement for TSConfigResolver — same resolve() interface.
    """

    def __init__(self, repo_root: str) -> None:
        self.repo_root = os.path.abspath(repo_root)
        # Maps absolute config directory -> resolver rooted there.
        self._resolvers: dict[str, TSConfigResolver] = {}
        self._build()

    def _build(self) -> None:
        """Find all tsconfig/jsconfig files and create resolvers."""
        for root, dirs, files in os.walk(self.repo_root):
            # Skip irrelevant directories (pruned in place so os.walk never
            # descends into them).
            dirs[:] = [d for d in dirs if d not in SKIP_DIRS]

            for name in ("tsconfig.json", "jsconfig.json"):
                if name in files:
                    config_dir = os.path.abspath(root)
                    self._resolvers[config_dir] = TSConfigResolver(config_dir)
                    break  # one config per directory (tsconfig.json wins)

    def get_resolver_for_file(self, file_path: str) -> TSConfigResolver | None:
        """Find the nearest tsconfig resolver for a given file.

        Walks from the file's directory up to (and including) the repo root,
        returning the first directory that holds a config. Files outside the
        repo root get None immediately.
        """
        current = Path(os.path.abspath(file_path)).parent
        repo = Path(self.repo_root)

        # FIX: the previous loop condition `current >= repo` compared paths
        # lexicographically, not by ancestry (e.g. "/a/repo2" >= "/a/repo"
        # is True), so directories outside the repo could be probed and the
        # walk's termination depended on string ordering. Use an explicit
        # ancestry check instead.
        if current != repo and repo not in current.parents:
            return None

        while True:
            resolver = self._resolvers.get(str(current))
            if resolver is not None:
                return resolver
            if current == repo:
                return None
            current = current.parent

    def resolve(self, import_path: str, caller_file: str = "") -> str | None:
        """Resolve using the nearest tsconfig for the caller file.

        Same interface as TSConfigResolver.resolve() — drop-in replacement.
        """
        resolver = self.get_resolver_for_file(caller_file)
        if resolver is None:
            return None
        return resolver.resolve(import_path, caller_file)

    @property
    def has_config(self) -> bool:
        """Whether any tsconfig/jsconfig was found."""
        return bool(self._resolvers)

    @property
    def resolver_count(self) -> int:
        """Number of distinct config directories discovered."""
        return len(self._resolvers)
|