aja-codeintel 0.2.0__tar.gz → 0.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/PKG-INFO +1 -1
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/aja_codeintel.egg-info/PKG-INFO +1 -1
- aja_codeintel-0.2.1/codeintel_cli/commands/project/entire_cmd.py +639 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/pyproject.toml +1 -1
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/tests/test_entire_cmd.py +1 -1
- aja_codeintel-0.2.0/codeintel_cli/commands/project/entire_cmd.py +0 -417
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/LICENSE +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/README.md +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/aja_codeintel.egg-info/SOURCES.txt +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/aja_codeintel.egg-info/dependency_links.txt +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/aja_codeintel.egg-info/entry_points.txt +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/aja_codeintel.egg-info/requires.txt +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/aja_codeintel.egg-info/top_level.txt +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/__main__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/cli.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/graph/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/graph/deps_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/graph/related_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/graph/relsymbols_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/graph/reverse_related_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/nav/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/nav/copy_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/nav/open_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/nav/where_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/context_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/debug_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/endpoints_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/errors_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/fastapi_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/folder_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/frontend_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/imports_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/models_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/modeltree_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/new.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/overview_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/resolve_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/scan_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/servicemap_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/trace_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/tree_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/types_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/commands/project/version_cmd.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/context/java_context.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/context/java_rel.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/context/java_service.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/context/python_context.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/context/python_rel.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/context/python_service.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/fuzzy.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/opener.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/project.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/resolve_folder.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/resolve_model_target.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/resolve_target.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/timing.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/core/where.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/db/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/db/cache.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/db/operations.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/db/schema.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/fastapi_scanner.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/java_spring.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/models.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/openapi_spec.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/python_web.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/endpoints/scan.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/errors.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/frontend/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/frontend/server.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/graph/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/graph/builder.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/graph/query.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/graph/traverse.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/ast_engine.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/auth.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/base_path.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/call_graph.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/engine.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/enums.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/method_index.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/models.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/pagination.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/resolve.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/types.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/java/validation.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/python/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/python/engine.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/python/models.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/router.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/lang/shared_models.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/parser/imports.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/parser/resolve.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/parser/symbols.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/scanner/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/scanner/scanner.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/terminal/__init__.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/terminal/error_parser.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/codeintel_cli/terminal/printer.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/setup.cfg +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/tests/test_fastapi_scanner.py +0 -0
- {aja_codeintel-0.2.0 → aja_codeintel-0.2.1}/tests/test_trace_cmd.py +0 -0
|
@@ -0,0 +1,639 @@
|
|
|
1
|
+
"""
|
|
2
|
+
aja entire [path]
|
|
3
|
+
|
|
4
|
+
Full project intelligence report — NOT just names, but actual shapes:
|
|
5
|
+
- Function SIGNATURES (params + return types)
|
|
6
|
+
- Model/Schema DEFINITIONS (all Pydantic fields, types, defaults)
|
|
7
|
+
- Cosmos DB / database QUERY PATTERNS extracted from source
|
|
8
|
+
- Route definitions with full request/response shapes
|
|
9
|
+
- Import-aware call chain traces for every endpoint
|
|
10
|
+
|
|
11
|
+
Designed to produce an actionable prompt without needing to read individual files.
|
|
12
|
+
"""
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import ast
|
|
16
|
+
import os
|
|
17
|
+
import re
|
|
18
|
+
import time
|
|
19
|
+
from collections import defaultdict
|
|
20
|
+
from dataclasses import dataclass, field
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from typing import Optional
|
|
23
|
+
|
|
24
|
+
import typer
|
|
25
|
+
|
|
26
|
+
from ...core.project import find_project_root
|
|
27
|
+
from ...endpoints.fastapi_scanner import scan_fastapi_project, _collect_python_files, PydanticDTO
|
|
28
|
+
from .trace_cmd import (
|
|
29
|
+
_build_func_index,
|
|
30
|
+
_trace,
|
|
31
|
+
_render,
|
|
32
|
+
_file_role,
|
|
33
|
+
_short_path,
|
|
34
|
+
_get_ast,
|
|
35
|
+
_find_func,
|
|
36
|
+
_ast_cache,
|
|
37
|
+
FuncDef,
|
|
38
|
+
TraceNode,
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
# --------------------------------------------------------------------------- #
|
|
42
|
+
# Ignore dirs
|
|
43
|
+
# --------------------------------------------------------------------------- #
|
|
44
|
+
|
|
45
|
+
# Directory names excluded from every project walk: VCS metadata, editor
# state, caches, virtualenvs, and build outputs. Shared by the file walker
# and the tree printer below.
_SKIP_DIRS = {
    ".git", ".idea", ".vscode", "__pycache__", ".pytest_cache",
    ".ruff_cache", ".tox", ".venv", "venv", "env",
    "node_modules", "dist", "build", "out", "target", ".gradle",
}
|
|
50
|
+
|
|
51
|
+
# --------------------------------------------------------------------------- #
|
|
52
|
+
# Source file walking
|
|
53
|
+
# --------------------------------------------------------------------------- #
|
|
54
|
+
|
|
55
|
+
def _walk_source_files(root: Path) -> list[Path]:
    """Walk the project and collect all .py files, .env files, and configs.

    Directories listed in ``_SKIP_DIRS`` are pruned. Unreadable or vanished
    directories are skipped silently (best-effort walk).
    """
    collected: list[Path] = []
    pending = [root.resolve()]
    # Files picked up either by extension or by well-known exact name.
    wanted_suffixes = (".py", ".env", ".toml", ".yaml", ".yml", ".cfg", ".ini", ".json")
    wanted_names = (".env", ".env.example", ".env.local", "Dockerfile", "docker-compose.yml")

    while pending:
        directory = pending.pop()
        try:
            children = sorted(directory.iterdir(), key=lambda p: (p.is_file(), p.name.lower()))
        except (PermissionError, FileNotFoundError):
            continue
        for entry in children:
            if entry.is_dir():
                if entry.name not in _SKIP_DIRS:
                    pending.append(entry)
            elif entry.suffix in wanted_suffixes or entry.name in wanted_names:
                collected.append(entry)
    return collected
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
# --------------------------------------------------------------------------- #
|
|
76
|
+
# Extract full function signatures (not just names)
|
|
77
|
+
# --------------------------------------------------------------------------- #
|
|
78
|
+
|
|
79
|
+
def _extract_file_symbols(path: Path) -> dict:
    """
    Extract classes (with methods+fields) and top-level functions with
    FULL SIGNATURES (params, types, return types, defaults).

    Returns ``{"classes": [...], "functions": [...]}``; unparseable or
    unreadable files yield both lists empty.
    """
    symbols = {"classes": [], "functions": []}
    try:
        tree = ast.parse(path.read_text(encoding="utf-8", errors="ignore"))
    except Exception:
        # Syntax error / IO problem: report the file as symbol-free.
        return symbols

    for top in tree.body:
        if isinstance(top, (ast.FunctionDef, ast.AsyncFunctionDef)):
            symbols["functions"].append(_extract_func_sig(top))
            continue
        if not isinstance(top, ast.ClassDef):
            continue

        entry = {
            "name": top.name,
            "bases": [ast.unparse(base) for base in top.bases],
            "methods": [],
            "fields": [],
            "line": top.lineno,
        }
        for member in top.body:
            if isinstance(member, (ast.FunctionDef, ast.AsyncFunctionDef)):
                entry["methods"].append(_extract_func_sig(member))
            elif isinstance(member, ast.AnnAssign) and isinstance(member.target, ast.Name):
                # Annotated class attribute, e.g. a Pydantic field.
                entry["fields"].append({
                    "name": member.target.id,
                    "type": ast.unparse(member.annotation) if member.annotation else "Any",
                    "default": ast.unparse(member.value) if member.value else None,
                })
        symbols["classes"].append(entry)

    return symbols
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def _extract_func_sig(node: ast.FunctionDef) -> dict:
|
|
119
|
+
"""Extract full function signature: name, params with types+defaults, return type."""
|
|
120
|
+
params = []
|
|
121
|
+
args = node.args
|
|
122
|
+
|
|
123
|
+
# Positional args
|
|
124
|
+
n_defaults = len(args.defaults)
|
|
125
|
+
n_args = len(args.args)
|
|
126
|
+
for i, arg in enumerate(args.args):
|
|
127
|
+
if arg.arg == "self" or arg.arg == "cls":
|
|
128
|
+
continue
|
|
129
|
+
ann = ast.unparse(arg.annotation) if arg.annotation else None
|
|
130
|
+
# Check if this arg has a default
|
|
131
|
+
default_idx = i - (n_args - n_defaults)
|
|
132
|
+
default_val = None
|
|
133
|
+
if default_idx >= 0:
|
|
134
|
+
default_val = ast.unparse(args.defaults[default_idx])
|
|
135
|
+
param_str = arg.arg
|
|
136
|
+
if ann:
|
|
137
|
+
param_str += f": {ann}"
|
|
138
|
+
if default_val:
|
|
139
|
+
param_str += f" = {default_val}"
|
|
140
|
+
params.append(param_str)
|
|
141
|
+
|
|
142
|
+
# Keyword-only args
|
|
143
|
+
for arg, default in zip(args.kwonlyargs, args.kw_defaults):
|
|
144
|
+
ann = ast.unparse(arg.annotation) if arg.annotation else None
|
|
145
|
+
default_val = ast.unparse(default) if default else None
|
|
146
|
+
param_str = arg.arg
|
|
147
|
+
if ann:
|
|
148
|
+
param_str += f": {ann}"
|
|
149
|
+
if default_val:
|
|
150
|
+
param_str += f" = {default_val}"
|
|
151
|
+
params.append(param_str)
|
|
152
|
+
|
|
153
|
+
ret = ast.unparse(node.returns) if node.returns else None
|
|
154
|
+
is_async = isinstance(node, ast.AsyncFunctionDef)
|
|
155
|
+
|
|
156
|
+
return {
|
|
157
|
+
"name": node.name,
|
|
158
|
+
"params": params,
|
|
159
|
+
"return": ret,
|
|
160
|
+
"line": node.lineno,
|
|
161
|
+
"async": is_async,
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
# --------------------------------------------------------------------------- #
|
|
166
|
+
# Cosmos DB / Database query pattern extraction
|
|
167
|
+
# --------------------------------------------------------------------------- #
|
|
168
|
+
|
|
169
|
+
# Cosmos DB SDK data-plane call names, matched as "name(" to flag query
# activity in raw source text.
# NOTE(review): neither pattern appears to be used within this module's
# visible code — _extract_query_patterns compiles its own copies. Confirm
# no external importers before removing.
_COSMOS_QUERY_RE = re.compile(
    r"""(?:query_items|query|read_all_items|read_item|create_item|upsert_item|"""
    r"""replace_item|delete_item|patch_item)\s*\(""",
    re.IGNORECASE,
)

# Bare "SELECT ... FROM table" text; DOTALL lets the query span lines.
_SQL_QUERY_RE = re.compile(
    r"""SELECT\s+.+?\s+FROM\s+\w+""",
    re.IGNORECASE | re.DOTALL,
)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
@dataclass
class QueryPattern:
    """One database access site found in source: a Cosmos SDK call or a raw SQL string."""

    file: str        # project-relative (shortened) path of the source file
    line: int        # 1-based line number of the match
    operation: str   # "query_items", "upsert_item", etc.
    query_text: str  # the SQL/query string if found
    container: str   # container name if detectable
    function: str    # enclosing function name
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _extract_query_patterns(all_py: list[Path], project_root: Path) -> list[QueryPattern]:
    """
    Extract actual database query patterns from source files.

    Looks for Cosmos DB SDK calls AND embedded SQL strings. Files under a
    test/tests directory are skipped. Each hit records the operation, the
    nearby query string (if any), the file's first detectable container
    name, and the INNERMOST enclosing function.

    Fixes vs. the previous version: the enclosing-function lookup now picks
    the smallest containing span (it previously returned the outer function
    for code inside nested defs), and SQL strings already attributed to a
    Cosmos call are no longer re-emitted as standalone SQL_QUERY entries
    (they were recorded at different line numbers, so the old exact-line
    dedupe missed them).
    """
    patterns: list[QueryPattern] = []

    # Cosmos SDK operations
    cosmos_ops_re = re.compile(
        r"""(query_items|read_all_items|read_item|create_item|"""
        r"""upsert_item|replace_item|delete_item|patch_item)\s*\(""",
    )
    # SQL query strings
    sql_str_re = re.compile(
        r"""['"](SELECT\s+.+?FROM\s+\w+[^'"]*?)['"]""",
        re.IGNORECASE | re.DOTALL,
    )
    # Container references
    container_re = re.compile(
        r"""get_container_client\s*\(\s*['"]([\w-]+)['"]\)"""
        r"""|container_name\s*=\s*['"]([\w-]+)['"]"""
        r"""|CONTAINER(?:_NAME)?\s*=\s*['"]([\w-]+)['"]""",
        re.IGNORECASE,
    )

    for path in all_py:
        # Skip anything inside a test/tests directory.
        parts_lower = {p.lower() for p in path.parts}
        if parts_lower & {"test", "tests"}:
            continue
        try:
            src = path.read_text(encoding="utf-8", errors="ignore")
        except Exception:
            continue

        # Cheap regex pre-filter before paying for ast.parse.
        if not (cosmos_ops_re.search(src) or sql_str_re.search(src)):
            continue

        rel = _short_path(path, project_root, parts=4)

        # First container reference wins for the whole file.
        container = ""
        cm = container_re.search(src)
        if cm:
            container = cm.group(1) or cm.group(2) or cm.group(3) or ""

        # Parse to find which function each query is in.
        try:
            tree = ast.parse(src)
        except Exception:
            continue

        lines = src.splitlines()
        # (start, end, name) spans for every function in the file.
        func_ranges: list[tuple[int, int, str]] = []
        for node in ast.walk(tree):
            if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                end = getattr(node, 'end_lineno', None) or node.lineno + 50
                func_ranges.append((node.lineno, end, node.name))

        def _enclosing_func(lineno: int) -> str:
            # Choose the INNERMOST (smallest) span containing the line so a
            # query inside a nested function is attributed correctly.
            best = None  # (span_size, name)
            for start, end, name in func_ranges:
                if start <= lineno <= end:
                    size = end - start
                    if best is None or size < best[0]:
                        best = (size, name)
            return best[1] if best is not None else "<module>"

        # Lines whose SQL text was already attributed to a Cosmos call, so
        # the standalone pass below does not duplicate them.
        captured_sql_lines: set[int] = set()

        # Extract Cosmos operations.
        for m in cosmos_ops_re.finditer(src):
            lineno = src[:m.start()].count('\n') + 1
            op = m.group(1)
            # Look for a SQL string near the call site (within 5 lines).
            start_line = max(0, lineno - 1)
            end_line = min(len(lines), lineno + 5)
            snippet = "\n".join(lines[start_line:end_line])
            query_str = ""
            sql_m = sql_str_re.search(snippet)
            if sql_m:
                query_str = sql_m.group(1).strip()
                # Remember the SQL string's own line so the standalone pass
                # can skip it.
                captured_sql_lines.add(lineno + snippet[:sql_m.start()].count('\n'))

            patterns.append(QueryPattern(
                file=rel, line=lineno, operation=op,
                query_text=query_str, container=container,
                function=_enclosing_func(lineno),
            ))

        # Extract standalone SQL strings not already captured above.
        seen_lines = {p.line for p in patterns if p.file == rel}
        for m in sql_str_re.finditer(src):
            lineno = src[:m.start()].count('\n') + 1
            if lineno in seen_lines or lineno in captured_sql_lines:
                continue
            patterns.append(QueryPattern(
                file=rel, line=lineno, operation="SQL_QUERY",
                query_text=m.group(1).strip(), container=container,
                function=_enclosing_func(lineno),
            ))

    return patterns
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
# --------------------------------------------------------------------------- #
|
|
293
|
+
# Tree: show signatures, not just names
|
|
294
|
+
# --------------------------------------------------------------------------- #
|
|
295
|
+
|
|
296
|
+
def _print_project_tree(project_root: Path) -> None:
    """Print project tree with full function signatures and model fields.

    Output sections: a banner with the root path, a masked listing of any
    .env files, then a recursive tree of .py files annotated with each
    file's classes (fields + method signatures) and top-level functions.
    Purely a rendering function — writes to stdout via typer.echo.
    """
    typer.echo("=" * 70)
    typer.echo(" PROJECT STRUCTURE (signatures + schemas)")
    typer.echo("=" * 70)
    typer.echo(f" ROOT: {project_root}")
    typer.echo("")

    # Show .env files first
    env_files = []
    for pattern in (".env", ".env.example", ".env.local", ".env.development", ".env.production"):
        f = project_root / pattern
        if f.exists():
            env_files.append(f)

    if env_files:
        typer.echo(" ╔══ ENVIRONMENT FILES")
        for ef in env_files:
            typer.echo(f" ║ {ef.name}")
            try:
                lines = ef.read_text(encoding="utf-8", errors="ignore").splitlines()
                for line in lines:
                    line = line.strip()
                    if not line or line.startswith("#"):
                        continue
                    if "=" in line:
                        # Only the KEY is printed; the value is masked so
                        # secrets never end up in the report.
                        key = line.split("=", 1)[0].strip()
                        typer.echo(f" ║ {key}=***")
                    else:
                        typer.echo(f" ║ {line}")
            except Exception:
                # Unreadable env file: name already printed, body skipped.
                pass
        typer.echo(" ╚══")
        typer.echo("")

    def _walk_tree(cur: Path, prefix: str = "", depth: int = 0, max_depth: int = 10) -> None:
        # Recursive renderer; `prefix` accumulates the tree-drawing
        # characters for the current nesting level.
        if depth > max_depth:
            return
        try:
            entries = sorted(cur.iterdir(), key=lambda p: (p.is_file(), p.name.lower()))
        except (PermissionError, FileNotFoundError):
            return

        # Keep only non-skipped directories, .py files, and .env.
        visible = []
        for e in entries:
            if e.name in _SKIP_DIRS:
                continue
            if e.is_dir():
                visible.append(e)
            elif e.suffix in (".py",) or e.name in (".env",):
                visible.append(e)

        for i, entry in enumerate(visible):
            is_last = i == len(visible) - 1
            branch = "└── " if is_last else "├── "
            child_prefix = prefix + (" " if is_last else "│ ")

            if entry.is_dir():
                typer.echo(f" {prefix}{branch}{entry.name}/")
                _walk_tree(entry, child_prefix, depth + 1, max_depth)
            else:
                typer.echo(f" {prefix}{branch}{entry.name}")
                if entry.suffix == ".py":
                    # Annotate the file with its classes, then functions.
                    symbols = _extract_file_symbols(entry)
                    inner_items = []
                    for cls in symbols["classes"]:
                        inner_items.append(("class", cls))
                    for fn in symbols["functions"]:
                        inner_items.append(("func", fn))

                    for j, (kind, info) in enumerate(inner_items):
                        is_inner_last = j == len(inner_items) - 1
                        inner_branch = "└─ " if is_inner_last else "├─ "
                        inner_prefix = child_prefix + (" " if is_inner_last else "│ ")

                        if kind == "class":
                            bases = f"({', '.join(info['bases'])})" if info['bases'] else ""
                            typer.echo(f" {child_prefix}{inner_branch}class {info['name']}{bases}")
                            # Show fields with types and defaults
                            members = []
                            for fld in info.get("fields", []):
                                default_str = f" = {fld['default']}" if fld.get('default') else ""
                                members.append(("field", f"{fld['name']}: {fld['type']}{default_str}"))
                            for meth in info.get("methods", []):
                                sig = _format_sig(meth)
                                members.append(("method", sig))
                            # Cap the member listing at 20 entries to keep
                            # the tree readable for very large classes.
                            for k, (mtype, mem) in enumerate(members[:20]):
                                mem_last = k == len(members[:20]) - 1
                                mem_branch = "└─ " if mem_last else "├─ "
                                typer.echo(f" {inner_prefix}{mem_branch}{mem}")
                            if len(members) > 20:
                                typer.echo(f" {inner_prefix} ... +{len(members)-20} more")
                        else:
                            sig = _format_sig(info)
                            typer.echo(f" {child_prefix}{inner_branch}{sig}")

    _walk_tree(project_root)
    typer.echo("")
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
def _format_sig(info: dict) -> str:
|
|
397
|
+
"""Format a function signature compactly: async def name(params) -> ReturnType"""
|
|
398
|
+
prefix = "async def " if info.get("async") else "def "
|
|
399
|
+
params_str = ", ".join(info["params"]) if info["params"] else ""
|
|
400
|
+
ret = f" -> {info['return']}" if info.get("return") else ""
|
|
401
|
+
return f"{prefix}{info['name']}({params_str}){ret}"
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
# --------------------------------------------------------------------------- #
|
|
405
|
+
# DTO / Model display helpers
|
|
406
|
+
# --------------------------------------------------------------------------- #
|
|
407
|
+
|
|
408
|
+
def _format_dto(dto: PydanticDTO, indent: str = " ") -> list[str]:
|
|
409
|
+
"""Format a Pydantic DTO with all fields."""
|
|
410
|
+
lines = []
|
|
411
|
+
bases = f"({', '.join(dto.bases)})" if dto.bases else ""
|
|
412
|
+
lines.append(f"{indent}{dto.name}{bases}")
|
|
413
|
+
for f in dto.fields:
|
|
414
|
+
req = "*" if f.required else " "
|
|
415
|
+
default = f" = {f.default}" if f.default else ""
|
|
416
|
+
lines.append(f"{indent} {req} {f.name}: {f.type}{default}")
|
|
417
|
+
return lines
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
# --------------------------------------------------------------------------- #
|
|
421
|
+
# ENTIRE command
|
|
422
|
+
# --------------------------------------------------------------------------- #
|
|
423
|
+
|
|
424
|
+
def register_entire(app: typer.Typer) -> None:
    """Register the ``entire`` command on *app*."""

    @app.command("entire", help="Full project report: tree + every endpoint traced with request/response.")
    def entire(
        path: str = typer.Argument(".", help="Project root folder"),
        depth: int = typer.Option(6, "--depth", "-d", help="Max trace depth per endpoint"),
        no_tree: bool = typer.Option(False, "--no-tree", help="Skip project tree section"),
    ) -> None:
        """Print a full project report to stdout.

        Sections, in order: project tree (unless ``--no-tree``), Pydantic
        schema definitions grouped by request/response usage, SQLAlchemy
        models, database query patterns, route definitions with their
        request/response shapes and dependencies, per-endpoint call
        traces (bounded by ``--depth``), and a summary.
        """
        # Fresh parse cache per invocation so repeated runs see file changes.
        _ast_cache.clear()
        t0 = time.perf_counter()

        folder = Path(path).resolve()
        if not folder.exists() or not folder.is_dir():
            typer.echo(f" Invalid folder: {folder}")
            raise typer.Exit(1)

        project_root = find_project_root(folder)

        # ── 1. Scan all endpoints and models ───────────────────────────
        all_py = _collect_python_files(project_root, _SKIP_DIRS)
        endpoints, pydantic_dtos, sqla_models, request_models, response_models, endpoint_deps = \
            scan_fastapi_project(project_root, files=all_py)

        # Build indexes: function lookup for tracing, DTO lookup by class name.
        func_index = _build_func_index(all_py)
        dto_by_name: dict[str, PydanticDTO] = {d.name: d for d in pydantic_dtos}

        # De-duplicate endpoints by (method, path), keeping first occurrence.
        seen_keys: set[tuple[str, str]] = set()
        unique_endpoints = []
        for e in endpoints:
            k = (e.method, e.path)
            if k not in seen_keys:
                seen_keys.add(k)
                unique_endpoints.append(e)
        unique_endpoints.sort(key=lambda e: (e.path, e.method))

        # ── 2. Project Tree (with full signatures) ─────────────────────
        if not no_tree:
            _print_project_tree(project_root)

        # ── 3. SCHEMA DEFINITIONS ─────────────────────────────────────
        typer.echo("=" * 70)
        typer.echo(" SCHEMA DEFINITIONS (Pydantic models — the data shapes)")
        typer.echo("=" * 70)
        typer.echo("")

        if pydantic_dtos:
            # Classify by usage: request-only, response-only, both, or neither.
            req_dtos = [d for d in pydantic_dtos if d.name in request_models and d.name not in response_models]
            res_dtos = [d for d in pydantic_dtos if d.name in response_models and d.name not in request_models]
            both_dtos = [d for d in pydantic_dtos if d.name in request_models and d.name in response_models]
            other_dtos = [d for d in pydantic_dtos if d.name not in request_models and d.name not in response_models]

            for label, group in [("REQUEST SCHEMAS", req_dtos), ("RESPONSE SCHEMAS", res_dtos),
                                 ("REQUEST+RESPONSE", both_dtos), ("SHARED/DOMAIN MODELS", other_dtos)]:
                if not group:
                    continue
                typer.echo(f" ── {label} ({len(group)}) ──")
                for dto in group:
                    for line in _format_dto(dto):
                        typer.echo(line)
                    typer.echo("")
        else:
            typer.echo(" No Pydantic models found.")
            typer.echo("")

        if sqla_models:
            typer.echo(" ── ORM / SQLAlchemy Models ──")
            for m in sqla_models:
                typer.echo(f" {m.name} (table: {m.table_name}) {m.file}")
                for col in m.columns:
                    pk = " [PK]" if col.primary_key else ""
                    fk = f" [FK→{col.foreign_key}]" if col.foreign_key else ""
                    typer.echo(f" {col.name}: {col.col_type}{pk}{fk}")
                for rel in m.relationships:
                    typer.echo(f" →{rel.name}: {rel.target}")
                typer.echo("")

        # ── 4. DATABASE QUERY PATTERNS ────────────────────────────────
        query_patterns = _extract_query_patterns(all_py, project_root)
        if query_patterns:
            typer.echo("=" * 70)
            typer.echo(" DATABASE QUERY PATTERNS")
            typer.echo("=" * 70)
            typer.echo("")

            # Group by container
            by_container: dict[str, list[QueryPattern]] = defaultdict(list)
            for qp in query_patterns:
                key = qp.container or "(unknown container)"
                by_container[key].append(qp)

            for container, qps in sorted(by_container.items()):
                typer.echo(f" Container: {container}")
                for qp in qps:
                    query_display = ""
                    if qp.query_text:
                        # Compact multiline queries to single line
                        q = re.sub(r'\s+', ' ', qp.query_text).strip()
                        query_display = f' → "{q}"'
                    typer.echo(f" {qp.operation} in {qp.function}() ({qp.file}:{qp.line}){query_display}")
                typer.echo("")

        # ── 5. ROUTE DEFINITIONS WITH SHAPES ──────────────────────────
        typer.echo("=" * 70)
        typer.echo(f" ROUTE DEFINITIONS ({len(unique_endpoints)} endpoints)")
        typer.echo("=" * 70)
        typer.echo("")

        for idx, ep in enumerate(unique_endpoints, 1):
            handler_name = ep.handler
            handler_file = Path(ep.file)

            # Get handler signature from the cached AST of its file.
            tree = _get_ast(handler_file)
            handler_sig = ""
            handler_params = []
            if tree:
                handler_node = _find_func(tree, handler_name)
                if handler_node:
                    sig_info = _extract_func_sig(handler_node)
                    handler_sig = _format_sig(sig_info)
                    # NOTE(review): handler_params is collected but never
                    # read below — confirm whether it is still needed.
                    handler_params = sig_info["params"]

            typer.echo(f" {'━' * 66}")
            typer.echo(f" [{idx:03d}] {ep.method} {ep.path}")
            typer.echo(f" {handler_sig}")
            typer.echo(f" file: {_short_path(handler_file, project_root)}")

            # Request shape: body model name, expanded field-by-field when
            # the model is one of the scanned Pydantic DTOs.
            req_model = getattr(ep, 'request_body', None) or None
            if req_model:
                typer.echo(f" ┌─ REQUEST: {req_model}")
                if req_model in dto_by_name:
                    dto = dto_by_name[req_model]
                    for f in dto.fields:
                        req_mark = "*" if f.required else "?"
                        default_str = f" = {f.default}" if f.default else ""
                        typer.echo(f" │ {req_mark} {f.name}: {f.type}{default_str}")
                typer.echo(f" └─")

            # Response shape: unwrap list[...]/List[...]/Optional[...] to
            # find the inner DTO name before expanding its fields.
            resp_model = getattr(ep, 'response_model', None) or None
            if resp_model:
                typer.echo(f" ┌─ RESPONSE: {resp_model}")
                inner = re.sub(r"^(?:list|List)\[(.+)\]$", r"\1", resp_model)
                inner = re.sub(r"^Optional\[(.+)\]$", r"\1", inner)
                is_list = inner != resp_model and ("list" in resp_model.lower() or "List" in resp_model)
                if is_list:
                    typer.echo(f" │ (returns a list of:)")
                if inner in dto_by_name:
                    dto = dto_by_name[inner]
                    for f in dto.fields:
                        typer.echo(f" │ {f.name}: {f.type}")
                typer.echo(f" └─")

            # Dependencies: linear scan of endpoint_deps for the matching
            # (method, path) entry.
            ep_deps = None
            for dep_item in endpoint_deps:
                if dep_item["method"] == ep.method and dep_item["path"] == ep.path:
                    ep_deps = dep_item["depends"]
                    break
            if ep_deps:
                typer.echo(f" depends: {', '.join(ep_deps)}")

            typer.echo("")

        # ── 6. ENDPOINT CALL TRACES ───────────────────────────────────
        typer.echo("=" * 70)
        typer.echo(f" ENDPOINT CALL TRACES ({len(unique_endpoints)} endpoints)")
        typer.echo("=" * 70)
        typer.echo("")

        for idx, ep in enumerate(unique_endpoints, 1):
            handler_name = ep.handler
            handler_file = Path(ep.file)

            tree = _get_ast(handler_file)
            if tree is None:
                typer.echo(f" [{idx:03d}] {ep.method} {ep.path} — ⚠ could not parse")
                continue

            handler_node = _find_func(tree, handler_name)
            if handler_node is None:
                typer.echo(f" [{idx:03d}] {ep.method} {ep.path} — ⚠ handler not found")
                continue

            # Fresh visited set per endpoint so traces don't suppress each other.
            visited: set[tuple[str, str]] = set()
            root_node = _trace(handler_node, handler_file, func_index, project_root,
                               visited, depth=0, max_depth=depth)

            typer.echo(f" [{idx:03d}] {ep.method} {ep.path}")
            typer.echo(f" {handler_name}()")
            child_prefix = " "
            for i, child in enumerate(root_node.calls):
                for line in _render(child, project_root, child_prefix, i == len(root_node.calls) - 1):
                    typer.echo(line)

            if root_node.unresolved:
                typer.echo(f" ⚠ unresolved: {', '.join(root_node.unresolved)}")
            typer.echo("")

        # ── 7. Summary ────────────────────────────────────────────────
        elapsed = time.perf_counter() - t0
        typer.echo("=" * 70)
        typer.echo(f" SUMMARY")
        typer.echo("=" * 70)
        typer.echo(f" Total endpoints : {len(unique_endpoints)}")
        typer.echo(f" Schema definitions : {len(pydantic_dtos)}")
        typer.echo(f" Request schemas : {len(request_models)}")
        typer.echo(f" Response schemas : {len(response_models)}")
        typer.echo(f" ORM models : {len(sqla_models)}")
        typer.echo(f" DB query patterns : {len(query_patterns)}")
        typer.echo(f" Python files : {len(all_py)}")
        typer.echo(f" Finished in : {elapsed:.2f}s")
        typer.echo("")
|
|
@@ -172,7 +172,7 @@ class TestEntireCommand:
|
|
|
172
172
|
result = runner.invoke(cli_app, ["entire", str(tmp_path), "--no-tree"])
|
|
173
173
|
# Should not crash
|
|
174
174
|
assert result.exit_code == 0
|
|
175
|
-
assert "ENDPOINT TRACES" in result.output
|
|
175
|
+
assert "ENDPOINT CALL TRACES" in result.output
|
|
176
176
|
assert "/health" in result.output
|
|
177
177
|
|
|
178
178
|
def test_shows_request_response_models(self, tmp_path):
|