commiter-cli 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- commiter/__init__.py +3 -0
- commiter/adapters/__init__.py +0 -0
- commiter/adapters/base.py +96 -0
- commiter/adapters/django_rest.py +247 -0
- commiter/adapters/express.py +204 -0
- commiter/adapters/fastapi.py +170 -0
- commiter/adapters/flask.py +169 -0
- commiter/adapters/nextjs.py +180 -0
- commiter/adapters/prisma.py +76 -0
- commiter/adapters/raw_sql.py +191 -0
- commiter/adapters/react.py +129 -0
- commiter/adapters/sqlalchemy.py +99 -0
- commiter/adapters/supabase.py +68 -0
- commiter/auth.py +130 -0
- commiter/cli.py +667 -0
- commiter/correlator.py +208 -0
- commiter/extractors/__init__.py +0 -0
- commiter/extractors/api_calls.py +91 -0
- commiter/extractors/api_endpoints.py +354 -0
- commiter/extractors/backend_files.py +33 -0
- commiter/extractors/base.py +40 -0
- commiter/extractors/db_operations.py +69 -0
- commiter/extractors/dependencies.py +219 -0
- commiter/generic_resolver.py +204 -0
- commiter/handler_index.py +97 -0
- commiter/lib.py +63 -0
- commiter/middleware_index.py +350 -0
- commiter/models.py +117 -0
- commiter/parser.py +1283 -0
- commiter/prefix_index.py +211 -0
- commiter/report/__init__.py +0 -0
- commiter/report/ai.py +120 -0
- commiter/report/api_guide.py +217 -0
- commiter/report/architecture.py +930 -0
- commiter/report/console.py +254 -0
- commiter/report/json_output.py +122 -0
- commiter/report/markdown.py +163 -0
- commiter/scanner.py +383 -0
- commiter/type_index.py +304 -0
- commiter/uploader.py +46 -0
- commiter/utils/__init__.py +0 -0
- commiter/utils/env_reader.py +78 -0
- commiter/utils/file_classifier.py +187 -0
- commiter/utils/path_helpers.py +73 -0
- commiter/utils/tsconfig_resolver.py +281 -0
- commiter/wrapper_index.py +288 -0
- commiter_cli-0.3.0.dist-info/METADATA +14 -0
- commiter_cli-0.3.0.dist-info/RECORD +96 -0
- commiter_cli-0.3.0.dist-info/WHEEL +5 -0
- commiter_cli-0.3.0.dist-info/entry_points.txt +2 -0
- commiter_cli-0.3.0.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/fixtures/arch_backend/app.py +22 -0
- tests/fixtures/arch_backend/middleware/__init__.py +0 -0
- tests/fixtures/arch_backend/middleware/rate_limit.py +4 -0
- tests/fixtures/arch_backend/routes/__init__.py +0 -0
- tests/fixtures/arch_backend/routes/analytics.py +20 -0
- tests/fixtures/arch_backend/routes/auth.py +29 -0
- tests/fixtures/arch_backend/routes/projects.py +60 -0
- tests/fixtures/arch_backend/routes/users.py +55 -0
- tests/fixtures/arch_monorepo/apps/api/app.py +30 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/auth.py +17 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/rate_limit.py +10 -0
- tests/fixtures/arch_monorepo/apps/api/routes/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/routes/auth.py +46 -0
- tests/fixtures/arch_monorepo/apps/api/routes/invites.py +30 -0
- tests/fixtures/arch_monorepo/apps/api/routes/notifications.py +25 -0
- tests/fixtures/arch_monorepo/apps/api/routes/projects.py +80 -0
- tests/fixtures/arch_monorepo/apps/api/routes/tasks.py +91 -0
- tests/fixtures/arch_monorepo/apps/api/routes/users.py +48 -0
- tests/fixtures/arch_monorepo/apps/api/services/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/services/email.py +11 -0
- tests/fixtures/backend_b/app.py +17 -0
- tests/fixtures/fastapi_app/app.py +48 -0
- tests/fixtures/fastapi_crossfile/routes.py +18 -0
- tests/fixtures/fastapi_crossfile/schemas.py +21 -0
- tests/fixtures/flask_app/app.py +33 -0
- tests/fixtures/flask_blueprint/app.py +7 -0
- tests/fixtures/flask_blueprint/routes/items.py +13 -0
- tests/fixtures/flask_blueprint/routes/users.py +20 -0
- tests/fixtures/middleware_test_flask/routes/public.py +8 -0
- tests/fixtures/middleware_test_flask/routes/users.py +26 -0
- tests/fixtures/python_deep_imports/app/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/health.py +11 -0
- tests/fixtures/python_deep_imports/app/api/v1/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/v1/items.py +18 -0
- tests/fixtures/python_deep_imports/app/api/v1/users.py +27 -0
- tests/fixtures/python_deep_imports/app/schemas/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/schemas/item.py +13 -0
- tests/fixtures/python_deep_imports/app/schemas/user.py +15 -0
- tests/fixtures/python_deep_imports/app/shared/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/shared/models.py +7 -0
- tests/fixtures/raw_sql_test/app.py +54 -0
- tests/test_architecture.py +757 -0
commiter/cli.py
ADDED
|
@@ -0,0 +1,667 @@
|
|
|
1
|
+
"""Click CLI entry point for the commiter CLI."""
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
import os
|
|
10
|
+
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
import click
|
|
16
|
+
|
|
17
|
+
from rich.console import Console
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
from commiter.scanner import scan_repos
|
|
22
|
+
|
|
23
|
+
from commiter.models import RepoDocumentation, APIEndpoint, APICall, DBOperation, Dependency
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
# Markers that indicate a repository root directory.
# _detect_repo_root() walks upward from a file and stops at the first
# directory containing any one of these well-known project files.
_REPO_ROOT_MARKERS: set[str] = {
    ".git", "package.json", "pyproject.toml", "go.mod",
    "Cargo.toml", "Gemfile", "composer.json", "pom.xml",
}
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class OptionalQuery(click.ParamType):
    """Pass-through Click parameter type for flags carrying an optional query.

    The converted value encodes three states used by the section filters:

    * ``--endpoints``        -> ``""``      (show the whole section)
    * ``--endpoints users``  -> ``"users"`` (show the section, filtered)
    * flag not passed        -> ``None``    (hide the section)
    """

    name = "query"

    def convert(self, value, param, ctx):
        # No coercion needed: the raw string (or None) already is the value.
        return value
|
62
|
+
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
# Single-command CLI: all behavior is selected through options on `main`.
@click.command()
@click.argument("repo_paths", nargs=-1, required=False, type=click.Path(exists=True))
@click.option("--format", "output_format", type=click.Choice(["console", "markdown", "json", "ai", "architecture"]), default="console", help="Output format.")
@click.option("--output", "-o", "output_path", type=click.Path(), default=None, help="Output file or directory.")
@click.option("--file", "-f", "file_filter", type=str, default=None, help="Show results only for this file.")
@click.option("--endpoints", "-e", "endpoints_query", type=str, default=None, help="Show only endpoints. Pass 'all' or a search query (e.g. --endpoints users).")
@click.option("--calls", "-c", "calls_query", type=str, default=None, help="Show only API calls. Pass 'all' or a search query.")
@click.option("--db", "-d", "db_query", type=str, default=None, help="Show only DB operations. Pass 'all' or a search query.")
@click.option("--deps", "deps_query", type=str, default=None, help="Show only dependencies. Pass 'all' or a search query.")
@click.option("--api-guide", "api_guide_query", type=str, default=None, help="Generate curl commands for endpoints. Pass 'all' or a search query.")
@click.option("--exclude", multiple=True, help="Path patterns to exclude.")
@click.option("--verbose", "-v", is_flag=True, help="Show detailed progress.")
@click.option("--enrich", is_flag=True, default=False, help="Upload architecture to commiter.dev for AI-generated descriptions.")
@click.option("--token", "api_token", type=str, default=None, envvar="COMMITER_TOKEN", help="Auth token override (or set COMMITER_TOKEN env var).")
@click.option("--login", "do_login", is_flag=True, default=False, help="Log in to commiter.dev via browser.")
@click.option("--logout", "do_logout", is_flag=True, default=False, help="Log out and remove saved credentials.")
def main(repo_paths: tuple[str, ...], output_format: str, output_path: str | None,
         file_filter: str | None, endpoints_query: str | None, calls_query: str | None,
         db_query: str | None, deps_query: str | None, api_guide_query: str | None,
         exclude: tuple[str, ...], verbose: bool, enrich: bool, api_token: str | None,
         do_login: bool, do_logout: bool) -> None:
    """Scan repositories and generate documentation.

    Pass one or more REPO_PATHS to analyze, or use --file to focus on a single file.

    \b
    Category flags (show specific sections, optional query to filter):
      --endpoints [QUERY]   Show API endpoints
      --calls [QUERY]       Show frontend API calls
      --db [QUERY]          Show database operations
      --deps [QUERY]        Show dependencies
    """
    console = Console()

    # Handle --login / --logout before anything else.
    # Auth helpers are imported lazily so that scan-only runs never pay
    # (or fail on) the auth module's import cost.
    if do_login:
        from commiter.auth import login
        login()
        return

    if do_logout:
        from commiter.auth import logout
        logout()
        return

    # Auto-load saved token if no explicit --token given
    # (click already consulted the COMMITER_TOKEN env var via envvar=).
    if not api_token:
        from commiter.auth import get_saved_token
        api_token = get_saved_token()

    # Handle --file without repo paths: auto-detect repo root
    # by walking up from the file until a _REPO_ROOT_MARKERS entry is found.
    if file_filter and not repo_paths:
        file_abs = os.path.abspath(file_filter)
        if not os.path.isfile(file_abs):
            console.print(f"[red]File not found: {file_filter}[/red]")
            raise SystemExit(1)
        repo_root = _detect_repo_root(file_abs)
        if not repo_root:
            console.print(f"[red]Could not detect repository root for {file_filter}. Pass the repo path explicitly.[/red]")
            raise SystemExit(1)
        # Replace the (empty) argument tuple with the detected root.
        repo_paths = (repo_root,)
        if verbose:
            console.print(f"[dim]Auto-detected repo root: {repo_root}[/dim]")

    if not repo_paths:
        console.print("[red]Please provide at least one REPO_PATH or use --file.[/red]")
        raise SystemExit(1)

    if verbose:
        console.print(f"[dim]Scanning {len(repo_paths)} repo(s)...[/dim]")

    # scan_repos*/scan expect None (not an empty list) when nothing is excluded.
    extra_excludes = list(exclude) if exclude else None

    # Architecture format needs full scan results (with indexes)
    # and short-circuits the regular docs pipeline below (note the `return`).
    if output_format == "architecture":
        from commiter.scanner import scan_repos_full
        from commiter.report.architecture import generate_architecture

        scan_results = scan_repos_full(list(repo_paths), extra_excludes=extra_excludes)
        json_str = generate_architecture(scan_results)

        if output_path:
            with open(output_path, "w", encoding="utf-8") as f:
                f.write(json_str)
            console.print(f"[green]Architecture JSON written to {output_path}[/green]")

        if enrich:
            from commiter.uploader import upload_architecture
            console.print("[dim]Uploading to commiter.dev for AI enrichment...[/dim]")
            result = upload_architecture(json_str, api_token=api_token)
            if "error" in result:
                console.print(f"[red]Upload failed: {result['error']}[/red]")
            else:
                # `workspace` is only reported for repos linked to a
                # commiter.dev workspace; anonymous uploads omit it.
                workspace = result.get("workspace")
                if workspace and workspace.get("name") and workspace.get("version") is not None:
                    ws_name = workspace["name"]
                    ws_version = workspace["version"]
                    # `cached` means the server recognized an identical upload.
                    verb = "Cached" if result.get("cached") else "Uploaded"
                    console.print(
                        f"[green]{verb} to workspace \"{ws_name}\" as version {ws_version}[/green]"
                    )
                    console.print(f"[green]View at: {result['url']}[/green]")
                elif result.get("cached"):
                    console.print(f"[green]Cached! View at: {result['url']}[/green]")
                else:
                    console.print(f"[green]View at: {result['url']}[/green]")

                # Upsell hints depend on auth tier reported by the server.
                tier = result.get("tier", "anonymous")
                if tier == "anonymous" and not result.get("cached"):
                    console.print("[dim]Tip: run commiter enrich --login for full AI enrichment[/dim]")
                elif tier == "authenticated" and not workspace:
                    console.print(
                        "[dim]Tip: connect this repo to a workspace at commiter.dev "
                        "to enable versioning and roadmap navigation[/dim]"
                    )

                console.print(f"[dim]Status: {result.get('status', 'unknown')} - descriptions will be ready in ~30 seconds[/dim]")

        elif not output_path:
            # Neither written to a file nor uploaded: dump JSON to stdout.
            click.echo(json_str)
        return

    docs = scan_repos(list(repo_paths), extra_excludes=extra_excludes)

    # Apply file filter if specified
    if file_filter:
        docs = _filter_docs_by_file(docs, file_filter)

    # Determine which sections to show
    has_category_flags = any(x is not None for x in (endpoints_query, calls_query, db_query, deps_query))

    # Build sections config: which sections to show + query filters
    # None = don't show, "" = show all, "query" = show filtered
    if has_category_flags:
        # Normalize "all" to empty string (show everything in that category)
        sections = {
            "endpoints": ("" if endpoints_query and endpoints_query.lower() == "all" else endpoints_query),
            "calls": ("" if calls_query and calls_query.lower() == "all" else calls_query),
            "db": ("" if db_query and db_query.lower() == "all" else db_query),
            "deps": ("" if deps_query and deps_query.lower() == "all" else deps_query),
        }
    else:
        # No category flag passed: show every section, unfiltered.
        sections = {"endpoints": "", "calls": "", "db": "", "deps": ""}

    # Apply query filters to the data
    if has_category_flags:
        docs = _apply_query_filters(docs, sections)

    # --api-guide overrides format output: generates curl commands
    if api_guide_query is not None:
        from commiter.report.api_guide import generate_api_guide
        from commiter.utils.env_reader import load_env_files

        # Filter endpoints by query
        query = "" if api_guide_query.lower() == "all" else api_guide_query
        if query:
            for doc in docs:
                doc.endpoints = [ep for ep in doc.endpoints if _endpoint_matches(ep, query)]

        # Env vars (base URLs, tokens) are read from the first repo only.
        env_vars = load_env_files(list(repo_paths)[0]) if repo_paths else {}
        guide = generate_api_guide(docs, env_vars=env_vars)
        if output_path:
            with open(output_path, "w", encoding="utf-8") as f:
                f.write(guide)
            console.print(f"[green]API guide written to {output_path}[/green]")
        else:
            click.echo(guide)
        return

    # Dispatch on the requested output format. Report modules are imported
    # lazily so only the chosen renderer is loaded.
    if output_format == "console":
        from commiter.report.console import print_report
        print_report(docs, console, sections=sections)

    elif output_format == "markdown":
        from commiter.report.markdown import generate_markdown
        md = generate_markdown(docs, sections=sections)
        if output_path:
            with open(output_path, "w", encoding="utf-8") as f:
                f.write(md)
            console.print(f"[green]Documentation written to {output_path}[/green]")
        else:
            console.print(md)

    elif output_format == "json":
        from commiter.report.json_output import generate_json
        json_str = generate_json(docs, sections=sections)
        if output_path:
            with open(output_path, "w", encoding="utf-8") as f:
                f.write(json_str)
            console.print(f"[green]JSON written to {output_path}[/green]")
        else:
            console.print(json_str)

    elif output_format == "ai":
        from commiter.report.ai import generate_ai
        ai_output = generate_ai(docs, sections=sections)
        if output_path:
            with open(output_path, "w", encoding="utf-8") as f:
                f.write(ai_output)
            console.print(f"[green]AI output written to {output_path}[/green]")
        else:
            # click.echo (not console.print) so rich markup in the AI text
            # is not interpreted.
            click.echo(ai_output)
|
429
|
+
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
|
|
433
|
+
|
|
434
|
+
def _apply_query_filters(docs: list[RepoDocumentation], sections: dict[str, str | None]) -> list[RepoDocumentation]:
|
|
435
|
+
|
|
436
|
+
"""Apply query-based filtering to each section's data."""
|
|
437
|
+
|
|
438
|
+
for doc in docs:
|
|
439
|
+
|
|
440
|
+
# Filter endpoints
|
|
441
|
+
|
|
442
|
+
if sections.get("endpoints") is None:
|
|
443
|
+
|
|
444
|
+
doc.endpoints = []
|
|
445
|
+
|
|
446
|
+
elif sections["endpoints"]:
|
|
447
|
+
|
|
448
|
+
doc.endpoints = [ep for ep in doc.endpoints if _endpoint_matches(ep, sections["endpoints"])]
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
|
|
452
|
+
# Filter API calls
|
|
453
|
+
|
|
454
|
+
if sections.get("calls") is None:
|
|
455
|
+
|
|
456
|
+
doc.api_calls = []
|
|
457
|
+
|
|
458
|
+
elif sections["calls"]:
|
|
459
|
+
|
|
460
|
+
doc.api_calls = [c for c in doc.api_calls if _api_call_matches(c, sections["calls"])]
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
|
|
464
|
+
# Filter DB operations
|
|
465
|
+
|
|
466
|
+
if sections.get("db") is None:
|
|
467
|
+
|
|
468
|
+
doc.db_operations = []
|
|
469
|
+
|
|
470
|
+
elif sections["db"]:
|
|
471
|
+
|
|
472
|
+
doc.db_operations = [op for op in doc.db_operations if _db_op_matches(op, sections["db"])]
|
|
473
|
+
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
# Filter dependencies
|
|
477
|
+
|
|
478
|
+
if sections.get("deps") is None:
|
|
479
|
+
|
|
480
|
+
doc.dependencies = []
|
|
481
|
+
|
|
482
|
+
elif sections["deps"]:
|
|
483
|
+
|
|
484
|
+
doc.dependencies = [d for d in doc.dependencies if _dep_matches(d, sections["deps"])]
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
return docs
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
|
|
494
|
+
def _endpoint_matches(ep: APIEndpoint, query: str) -> bool:
|
|
495
|
+
|
|
496
|
+
"""Check if an endpoint matches a search query across all its fields."""
|
|
497
|
+
|
|
498
|
+
q = query.lower()
|
|
499
|
+
|
|
500
|
+
searchable = [
|
|
501
|
+
|
|
502
|
+
ep.http_method,
|
|
503
|
+
|
|
504
|
+
ep.route_pattern,
|
|
505
|
+
|
|
506
|
+
ep.handler_name,
|
|
507
|
+
|
|
508
|
+
ep.framework,
|
|
509
|
+
|
|
510
|
+
ep.request_body_type or "",
|
|
511
|
+
|
|
512
|
+
ep.response_type or "",
|
|
513
|
+
|
|
514
|
+
]
|
|
515
|
+
|
|
516
|
+
searchable.extend(ep.request_body_fields)
|
|
517
|
+
|
|
518
|
+
searchable.extend(ep.response_fields)
|
|
519
|
+
|
|
520
|
+
searchable.extend(ep.auth_decorators)
|
|
521
|
+
|
|
522
|
+
searchable.extend(ep.middleware)
|
|
523
|
+
|
|
524
|
+
searchable.extend(ep.db_tables)
|
|
525
|
+
|
|
526
|
+
searchable.extend(p.name for p in ep.parameters)
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
|
|
530
|
+
return any(q in field.lower() for field in searchable)
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
|
|
535
|
+
|
|
536
|
+
def _api_call_matches(call: APICall, query: str) -> bool:
|
|
537
|
+
|
|
538
|
+
"""Check if an API call matches a search query."""
|
|
539
|
+
|
|
540
|
+
q = query.lower()
|
|
541
|
+
|
|
542
|
+
searchable = [
|
|
543
|
+
|
|
544
|
+
call.http_method,
|
|
545
|
+
|
|
546
|
+
call.url_pattern,
|
|
547
|
+
|
|
548
|
+
call.component_or_page,
|
|
549
|
+
|
|
550
|
+
call.client_library,
|
|
551
|
+
|
|
552
|
+
call.traced_from or "",
|
|
553
|
+
|
|
554
|
+
call.response_type or "",
|
|
555
|
+
|
|
556
|
+
call.body_type or "",
|
|
557
|
+
|
|
558
|
+
]
|
|
559
|
+
|
|
560
|
+
return any(q in field.lower() for field in searchable)
|
|
561
|
+
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
|
|
565
|
+
|
|
566
|
+
def _db_op_matches(op: DBOperation, query: str) -> bool:
|
|
567
|
+
|
|
568
|
+
"""Check if a DB operation matches a search query."""
|
|
569
|
+
|
|
570
|
+
q = query.lower()
|
|
571
|
+
|
|
572
|
+
searchable = [
|
|
573
|
+
|
|
574
|
+
op.operation_type,
|
|
575
|
+
|
|
576
|
+
op.table_name,
|
|
577
|
+
|
|
578
|
+
op.orm_library,
|
|
579
|
+
|
|
580
|
+
]
|
|
581
|
+
|
|
582
|
+
searchable.extend(op.filters)
|
|
583
|
+
|
|
584
|
+
return any(q in field.lower() for field in searchable)
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
|
|
588
|
+
|
|
589
|
+
|
|
590
|
+
def _dep_matches(dep: Dependency, query: str) -> bool:
|
|
591
|
+
|
|
592
|
+
"""Check if a dependency matches a search query."""
|
|
593
|
+
|
|
594
|
+
q = query.lower()
|
|
595
|
+
|
|
596
|
+
return q in dep.name.lower() or q in dep.version_constraint.lower()
|
|
597
|
+
|
|
598
|
+
|
|
599
|
+
|
|
600
|
+
|
|
601
|
+
|
|
602
|
+
def _filter_docs_by_file(docs: list[RepoDocumentation], file_filter: str) -> list[RepoDocumentation]:
|
|
603
|
+
|
|
604
|
+
"""Filter scan results to only show artifacts from a specific file."""
|
|
605
|
+
|
|
606
|
+
filter_normalized = file_filter.replace("\\", "/").rstrip("/")
|
|
607
|
+
|
|
608
|
+
|
|
609
|
+
|
|
610
|
+
def _matches(file_path: str) -> bool:
|
|
611
|
+
|
|
612
|
+
normalized = file_path.replace("\\", "/")
|
|
613
|
+
|
|
614
|
+
return normalized.endswith(filter_normalized) or normalized.endswith("/" + filter_normalized)
|
|
615
|
+
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
for doc in docs:
|
|
619
|
+
|
|
620
|
+
doc.endpoints = [ep for ep in doc.endpoints if _matches(ep.file_path)]
|
|
621
|
+
|
|
622
|
+
doc.api_calls = [c for c in doc.api_calls if _matches(c.file_path)]
|
|
623
|
+
|
|
624
|
+
doc.db_operations = [op for op in doc.db_operations if _matches(op.file_path)]
|
|
625
|
+
|
|
626
|
+
doc.file_classifications = [fc for fc in doc.file_classifications if _matches(fc.file_path)]
|
|
627
|
+
|
|
628
|
+
|
|
629
|
+
|
|
630
|
+
return docs
|
|
631
|
+
|
|
632
|
+
|
|
633
|
+
|
|
634
|
+
|
|
635
|
+
|
|
636
|
+
def _detect_repo_root(file_path: str) -> str | None:
    """Walk up from *file_path* looking for a repository root.

    Starting at the file's containing directory, checks at most 20 ancestors
    for any of ``_REPO_ROOT_MARKERS`` and returns the first directory that
    contains one. Returns ``None`` when the filesystem root is reached (or
    the hop limit is exhausted) without a hit.
    """
    directory = Path(file_path).parent
    hops = 0
    while hops < 20:  # bounded ascent: never loop forever on odd mounts
        if any((directory / marker).exists() for marker in _REPO_ROOT_MARKERS):
            return str(directory)
        if directory.parent == directory:
            # Reached the filesystem root without finding a marker.
            return None
        directory = directory.parent
        hops += 1
    return None
|
|
659
|
+
|
|
660
|
+
|
|
661
|
+
|
|
662
|
+
|
|
663
|
+
|
|
664
|
+
# Script entry point: invoke the click command when this file is executed
# directly (the installed console script calls main() via entry_points).
if __name__ == "__main__":
    main()
|
|
667
|
+
|