commiter-cli 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- commiter/__init__.py +3 -0
- commiter/adapters/__init__.py +0 -0
- commiter/adapters/base.py +96 -0
- commiter/adapters/django_rest.py +247 -0
- commiter/adapters/express.py +204 -0
- commiter/adapters/fastapi.py +170 -0
- commiter/adapters/flask.py +169 -0
- commiter/adapters/nextjs.py +180 -0
- commiter/adapters/prisma.py +76 -0
- commiter/adapters/raw_sql.py +191 -0
- commiter/adapters/react.py +129 -0
- commiter/adapters/sqlalchemy.py +99 -0
- commiter/adapters/supabase.py +68 -0
- commiter/auth.py +130 -0
- commiter/cli.py +667 -0
- commiter/correlator.py +208 -0
- commiter/extractors/__init__.py +0 -0
- commiter/extractors/api_calls.py +91 -0
- commiter/extractors/api_endpoints.py +354 -0
- commiter/extractors/backend_files.py +33 -0
- commiter/extractors/base.py +40 -0
- commiter/extractors/db_operations.py +69 -0
- commiter/extractors/dependencies.py +219 -0
- commiter/generic_resolver.py +204 -0
- commiter/handler_index.py +97 -0
- commiter/lib.py +63 -0
- commiter/middleware_index.py +350 -0
- commiter/models.py +117 -0
- commiter/parser.py +1283 -0
- commiter/prefix_index.py +211 -0
- commiter/report/__init__.py +0 -0
- commiter/report/ai.py +120 -0
- commiter/report/api_guide.py +217 -0
- commiter/report/architecture.py +930 -0
- commiter/report/console.py +254 -0
- commiter/report/json_output.py +122 -0
- commiter/report/markdown.py +163 -0
- commiter/scanner.py +383 -0
- commiter/type_index.py +304 -0
- commiter/uploader.py +46 -0
- commiter/utils/__init__.py +0 -0
- commiter/utils/env_reader.py +78 -0
- commiter/utils/file_classifier.py +187 -0
- commiter/utils/path_helpers.py +73 -0
- commiter/utils/tsconfig_resolver.py +281 -0
- commiter/wrapper_index.py +288 -0
- commiter_cli-0.3.0.dist-info/METADATA +14 -0
- commiter_cli-0.3.0.dist-info/RECORD +96 -0
- commiter_cli-0.3.0.dist-info/WHEEL +5 -0
- commiter_cli-0.3.0.dist-info/entry_points.txt +2 -0
- commiter_cli-0.3.0.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/fixtures/arch_backend/app.py +22 -0
- tests/fixtures/arch_backend/middleware/__init__.py +0 -0
- tests/fixtures/arch_backend/middleware/rate_limit.py +4 -0
- tests/fixtures/arch_backend/routes/__init__.py +0 -0
- tests/fixtures/arch_backend/routes/analytics.py +20 -0
- tests/fixtures/arch_backend/routes/auth.py +29 -0
- tests/fixtures/arch_backend/routes/projects.py +60 -0
- tests/fixtures/arch_backend/routes/users.py +55 -0
- tests/fixtures/arch_monorepo/apps/api/app.py +30 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/auth.py +17 -0
- tests/fixtures/arch_monorepo/apps/api/middleware/rate_limit.py +10 -0
- tests/fixtures/arch_monorepo/apps/api/routes/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/routes/auth.py +46 -0
- tests/fixtures/arch_monorepo/apps/api/routes/invites.py +30 -0
- tests/fixtures/arch_monorepo/apps/api/routes/notifications.py +25 -0
- tests/fixtures/arch_monorepo/apps/api/routes/projects.py +80 -0
- tests/fixtures/arch_monorepo/apps/api/routes/tasks.py +91 -0
- tests/fixtures/arch_monorepo/apps/api/routes/users.py +48 -0
- tests/fixtures/arch_monorepo/apps/api/services/__init__.py +0 -0
- tests/fixtures/arch_monorepo/apps/api/services/email.py +11 -0
- tests/fixtures/backend_b/app.py +17 -0
- tests/fixtures/fastapi_app/app.py +48 -0
- tests/fixtures/fastapi_crossfile/routes.py +18 -0
- tests/fixtures/fastapi_crossfile/schemas.py +21 -0
- tests/fixtures/flask_app/app.py +33 -0
- tests/fixtures/flask_blueprint/app.py +7 -0
- tests/fixtures/flask_blueprint/routes/items.py +13 -0
- tests/fixtures/flask_blueprint/routes/users.py +20 -0
- tests/fixtures/middleware_test_flask/routes/public.py +8 -0
- tests/fixtures/middleware_test_flask/routes/users.py +26 -0
- tests/fixtures/python_deep_imports/app/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/health.py +11 -0
- tests/fixtures/python_deep_imports/app/api/v1/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/api/v1/items.py +18 -0
- tests/fixtures/python_deep_imports/app/api/v1/users.py +27 -0
- tests/fixtures/python_deep_imports/app/schemas/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/schemas/item.py +13 -0
- tests/fixtures/python_deep_imports/app/schemas/user.py +15 -0
- tests/fixtures/python_deep_imports/app/shared/__init__.py +0 -0
- tests/fixtures/python_deep_imports/app/shared/models.py +7 -0
- tests/fixtures/raw_sql_test/app.py +54 -0
- tests/test_architecture.py +757 -0
|
@@ -0,0 +1,757 @@
|
|
|
1
|
+
"""Tests for the architecture output format."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
|
|
8
|
+
import pytest
|
|
9
|
+
|
|
10
|
+
from commiter.report.architecture import (
|
|
11
|
+
_build_file_tree,
|
|
12
|
+
_build_nodes,
|
|
13
|
+
_compute_layout,
|
|
14
|
+
_build_edges,
|
|
15
|
+
_build_node_analysis,
|
|
16
|
+
generate_architecture,
|
|
17
|
+
)
|
|
18
|
+
from commiter.models import (
|
|
19
|
+
APICall,
|
|
20
|
+
APIEndpoint,
|
|
21
|
+
DBOperation,
|
|
22
|
+
Dependency,
|
|
23
|
+
FileClassification,
|
|
24
|
+
FileRole,
|
|
25
|
+
RepoDocumentation,
|
|
26
|
+
)
|
|
27
|
+
from commiter.scanner import ScanResult, _scan_repo_full, scan_repos_full
|
|
28
|
+
from commiter.type_index import TypeIndex
|
|
29
|
+
from commiter.middleware_index import MiddlewareIndex
|
|
30
|
+
from commiter.utils.tsconfig_resolver import TSConfigRegistry
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
FIXTURES = os.path.join(os.path.dirname(__file__), "fixtures")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# ---------------------------------------------------------------------------
|
|
37
|
+
# Helpers
|
|
38
|
+
# ---------------------------------------------------------------------------
|
|
39
|
+
|
|
40
|
+
def _make_doc(
    repo_name: str = "test-repo",
    repo_path: str = "/tmp/test-repo",
    endpoints: list | None = None,
    api_calls: list | None = None,
    db_operations: list | None = None,
    file_classifications: list | None = None,
) -> RepoDocumentation:
    """Build a RepoDocumentation for tests, setting only the fields provided.

    Each optional field is assigned onto the document only when a truthy
    value was passed, leaving the model's own defaults in place otherwise.
    """
    doc = RepoDocumentation(repo_name=repo_name, repo_path=repo_path)
    optional_fields = {
        "endpoints": endpoints,
        "api_calls": api_calls,
        "db_operations": db_operations,
        "file_classifications": file_classifications,
    }
    for attr, value in optional_fields.items():
        if value:
            setattr(doc, attr, value)
    return doc
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _make_scan_result(doc: RepoDocumentation, file_list: list[str] | None = None) -> ScanResult:
    """Wrap *doc* in a ScanResult with freshly built, empty index structures."""
    resolver = TSConfigRegistry("/tmp")
    files = file_list if file_list else []
    return ScanResult(
        doc=doc,
        type_index=TypeIndex(alias_resolver=resolver),
        middleware_index=MiddlewareIndex(),
        file_list=files,
    )
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
# ---------------------------------------------------------------------------
|
|
70
|
+
# _build_file_tree
|
|
71
|
+
# ---------------------------------------------------------------------------
|
|
72
|
+
|
|
73
|
+
class TestBuildFileTree:
    """Tests for turning absolute file paths into a nested name/type/path tree."""

    def test_flat_files(self):
        tree = _build_file_tree(["/repo/a.py", "/repo/b.py"], "/repo")
        assert len(tree) == 2
        # Flat files become leaf entries with repo-relative paths.
        assert tree[0] == {"name": "a.py", "type": "file", "path": "a.py"}
        assert tree[1] == {"name": "b.py", "type": "file", "path": "b.py"}

    def test_nested_folders(self):
        tree = _build_file_tree(["/repo/src/app/page.tsx", "/repo/src/lib/api.ts"], "/repo")
        # Both files share one top-level "src" folder.
        assert len(tree) == 1
        src = tree[0]
        assert src["name"] == "src"
        assert src["type"] == "folder"
        assert len(src["children"]) == 2  # app + lib

    def test_paths_are_relative(self):
        tree = _build_file_tree(["/repo/components/Button.tsx"], "/repo")
        leaf = tree[0]["children"][0]
        assert leaf["path"] == "components/Button.tsx"
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
# ---------------------------------------------------------------------------
|
|
96
|
+
# _build_nodes
|
|
97
|
+
# ---------------------------------------------------------------------------
|
|
98
|
+
|
|
99
|
+
class TestBuildNodes:
    """Unit tests for ``_build_nodes``: converting scan data into graph nodes.

    All file paths are synthetic (``/tmp/test-repo``); node construction is
    driven entirely by the in-memory doc, so no filesystem access happens.
    """

    def test_frontend_page_gets_individual_node(self):
        # A FRONTEND-classified page file should produce its own "page" node.
        doc = _make_doc(
            file_classifications=[
                FileClassification(file_path="/tmp/test-repo/app/page.tsx", role=FileRole.FRONTEND, language="tsx"),
            ],
        )
        files = ["/tmp/test-repo/app/page.tsx"]
        nodes = _build_nodes(doc, files, "/tmp/test-repo", "")
        page_nodes = [n for n in nodes if n["type"] == "page"]
        assert len(page_nodes) == 1
        assert page_nodes[0]["category"] == "frontend"

    def test_backend_routes_grouped(self):
        # Multiple backend route files with endpoints should collapse into a
        # single "service" node labeled after the framework.
        doc = _make_doc(
            endpoints=[
                APIEndpoint(repo="test", file_path="/tmp/test-repo/routes/users.py", line=1,
                            http_method="GET", route_pattern="/users", handler_name="list_users", framework="flask"),
                APIEndpoint(repo="test", file_path="/tmp/test-repo/routes/auth.py", line=1,
                            http_method="POST", route_pattern="/login", handler_name="login", framework="flask"),
            ],
            file_classifications=[
                FileClassification(file_path="/tmp/test-repo/routes/users.py", role=FileRole.BACKEND, language="python"),
                FileClassification(file_path="/tmp/test-repo/routes/auth.py", role=FileRole.BACKEND, language="python"),
            ],
        )
        doc.frameworks = ["flask"]
        files = ["/tmp/test-repo/routes/users.py", "/tmp/test-repo/routes/auth.py"]
        nodes = _build_nodes(doc, files, "/tmp/test-repo", "")
        service_nodes = [n for n in nodes if n["type"] == "service"]
        assert len(service_nodes) == 1  # grouped into one
        assert len(service_nodes[0]["files"]) == 2
        assert "Flask" in service_nodes[0]["label"]

    def test_provider_gets_own_node(self):
        # Files under a providers/ directory get a dedicated "provider" node.
        doc = _make_doc(
            file_classifications=[
                FileClassification(file_path="/tmp/test-repo/components/providers/auth-provider.tsx",
                                   role=FileRole.FRONTEND, language="tsx"),
            ],
        )
        files = ["/tmp/test-repo/components/providers/auth-provider.tsx"]
        nodes = _build_nodes(doc, files, "/tmp/test-repo", "")
        assert any(n["type"] == "provider" for n in nodes)

    def test_db_node_created_from_operations(self):
        # DB operations alone (no classifications) still yield a "database"
        # node labeled with the ORM/library name.
        doc = _make_doc(
            db_operations=[
                DBOperation(repo="test", file_path="/tmp/test-repo/app.py", line=10,
                            operation_type="select", table_name="users", orm_library="supabase"),
            ],
            file_classifications=[],
        )
        nodes = _build_nodes(doc, ["/tmp/test-repo/app.py"], "/tmp/test-repo", "")
        db_nodes = [n for n in nodes if n["type"] == "database"]
        assert len(db_nodes) == 1
        assert "supabase" in db_nodes[0]["label"].lower()

    def test_multi_repo_prefix(self):
        # The fourth argument is a repo prefix used to namespace node ids in
        # multi-repo scans.
        doc = _make_doc(
            file_classifications=[
                FileClassification(file_path="/tmp/test-repo/app/page.tsx", role=FileRole.FRONTEND, language="tsx"),
            ],
        )
        files = ["/tmp/test-repo/app/page.tsx"]
        nodes = _build_nodes(doc, files, "/tmp/test-repo", "my-frontend")
        assert nodes[0]["id"].startswith("my-frontend-")

    def test_files_with_endpoints_become_backend(self):
        """Files classified as UNKNOWN but containing endpoints should become service nodes."""
        doc = _make_doc(
            endpoints=[
                APIEndpoint(repo="test", file_path="/tmp/test-repo/api/routes.ts", line=5,
                            http_method="GET", route_pattern="/health", handler_name="health", framework="express"),
            ],
            file_classifications=[
                FileClassification(file_path="/tmp/test-repo/api/routes.ts", role=FileRole.UNKNOWN, language="typescript"),
            ],
        )
        doc.frameworks = ["express"]
        files = ["/tmp/test-repo/api/routes.ts"]
        nodes = _build_nodes(doc, files, "/tmp/test-repo", "")
        service_nodes = [n for n in nodes if n["type"] == "service"]
        assert len(service_nodes) == 1
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
# ---------------------------------------------------------------------------
|
|
186
|
+
# _compute_layout
|
|
187
|
+
# ---------------------------------------------------------------------------
|
|
188
|
+
|
|
189
|
+
class TestComputeLayout:
    """Tests for the in-place coordinate assignment done by ``_compute_layout``."""

    @staticmethod
    def _node(node_id, node_type, category):
        # All nodes start at the origin; the layout pass must move them.
        return {"id": node_id, "type": node_type, "category": category, "x": 0, "y": 0}

    def _three_layer_nodes(self):
        return [
            self._node("n1", "page", "frontend"),
            self._node("n2", "service", "backend"),
            self._node("n3", "database", "data"),
        ]

    def test_all_nodes_get_coordinates(self):
        layered = self._three_layer_nodes()
        _compute_layout(layered)
        for node in layered:
            assert node["x"] > 0
            assert node["y"] > 0

    def test_layers_have_different_y(self):
        layered = self._three_layer_nodes()
        _compute_layout(layered)
        distinct_ys = {node["y"] for node in layered}
        assert len(distinct_ys) == 3  # each on a different layer

    def test_no_x_overlap_in_same_layer(self):
        same_layer = [self._node(f"p{i}", "page", "frontend") for i in range(5)]
        _compute_layout(same_layer)
        distinct_xs = {node["x"] for node in same_layer}
        assert len(distinct_xs) == 5  # all unique
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
# ---------------------------------------------------------------------------
|
|
222
|
+
# Integration: generate_architecture with real fixtures
|
|
223
|
+
# ---------------------------------------------------------------------------
|
|
224
|
+
|
|
225
|
+
class TestGenerateArchitecture:
    """Integration tests: scan real fixture repos and validate the JSON output.

    Each test skips itself when a fixture directory it needs is absent, so
    the suite still runs on a partial checkout. The fixture-resolve /
    skip / scan / parse boilerplate, previously duplicated in every test,
    lives in ``_scan_fixtures``.
    """

    def _scan_fixtures(self, *names):
        """Scan fixture dirs under FIXTURES and return parsed architecture JSON.

        Skips the calling test when any requested fixture dir is missing.
        """
        paths = []
        for name in names:
            path = os.path.join(FIXTURES, name)
            if not os.path.isdir(path):
                pytest.skip(f"{name} fixture not found")
            paths.append(path)
        results = scan_repos_full(paths)
        return json.loads(generate_architecture(results))

    def test_flask_app_valid_json(self):
        data = self._scan_fixtures("flask_app")
        # Top-level keys and their container types.
        assert "nodes" in data
        assert "edges" in data
        assert "fileTree" in data
        assert "nodeAnalysis" in data
        assert isinstance(data["nodes"], list)
        assert isinstance(data["edges"], list)
        assert isinstance(data["fileTree"], list)
        assert isinstance(data["nodeAnalysis"], dict)

    def test_flask_app_has_service_node(self):
        data = self._scan_fixtures("flask_app")
        types = [n["type"] for n in data["nodes"]]
        assert "service" in types

    def test_flask_app_has_data_edge(self):
        data = self._scan_fixtures("flask_app")
        edge_types = [e["type"] for e in data["edges"]]
        assert "data" in edge_types

    def test_express_real_has_endpoints_in_analysis(self):
        data = self._scan_fixtures("express_real")
        # Find the first node analysis that carries endpoints.
        for analysis in data["nodeAnalysis"].values():
            if "endpoints" in analysis and analysis["endpoints"]:
                # Verify endpoint shape
                ep = analysis["endpoints"][0]
                assert "method" in ep
                assert "path" in ep
                assert "category" in ep
                assert "usedBy" in ep
                return
        pytest.fail("No endpoints found in any node analysis")

    def test_multi_repo_cross_edges(self):
        data = self._scan_fixtures("nextjs_app", "flask_app")

        # Should have nodes from both repos
        node_ids = [n["id"] for n in data["nodes"]]
        assert any("nextjs" in nid for nid in node_ids)
        assert any("flask" in nid for nid in node_ids)

        # Should have at least one cross-repo api edge
        api_edges = [e for e in data["edges"] if e["type"] == "api"]
        assert len(api_edges) > 0

    def test_node_analysis_keys_match_node_ids(self):
        data = self._scan_fixtures("flask_app")
        node_ids = {n["id"] for n in data["nodes"]}
        analysis_ids = set(data["nodeAnalysis"].keys())
        assert analysis_ids == node_ids
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
# ---------------------------------------------------------------------------
|
|
308
|
+
# Comprehensive dual-repo integration test
|
|
309
|
+
# ---------------------------------------------------------------------------
|
|
310
|
+
|
|
311
|
+
class TestDualRepoArchitecture:
    """End-to-end test with a realistic frontend + backend split repo.

    Frontend (arch_frontend): Next.js with pages, providers, components,
    API client layer, and Supabase client.
    Backend (arch_backend): Flask with blueprints, auth, 4 route modules,
    Supabase DB operations, and login_required decorator.
    """

    @pytest.fixture(autouse=True)
    def setup(self):
        # Scans both fixture repos once per test and caches the parsed
        # architecture JSON on the instance for all assertions below.
        self.frontend = os.path.join(FIXTURES, "arch_frontend")
        self.backend = os.path.join(FIXTURES, "arch_backend")
        if not os.path.isdir(self.frontend) or not os.path.isdir(self.backend):
            pytest.skip("arch_frontend / arch_backend fixtures not found")
        results = scan_repos_full([self.frontend, self.backend])
        self.data = json.loads(generate_architecture(results))
        self.nodes = self.data["nodes"]
        self.edges = self.data["edges"]
        self.tree = self.data["fileTree"]
        self.analysis = self.data["nodeAnalysis"]

    # --- Structural validity ---

    def test_output_has_all_top_level_keys(self):
        assert set(self.data.keys()) == {"nodes", "edges", "fileTree", "nodeAnalysis", "nodeHashes"}

    def test_every_node_has_required_fields(self):
        required = {"id", "label", "type", "category", "x", "y", "description", "files"}
        for node in self.nodes:
            assert required.issubset(node.keys()), f"Node {node['id']} missing keys"

    def test_every_edge_has_required_fields(self):
        required = {"id", "from", "to", "type"}
        for edge in self.edges:
            assert required.issubset(edge.keys()), f"Edge {edge['id']} missing keys"

    def test_edge_refs_point_to_existing_nodes(self):
        # Every edge endpoint must reference a node id that actually exists.
        node_ids = {n["id"] for n in self.nodes}
        for edge in self.edges:
            assert edge["from"] in node_ids, f"Edge {edge['id']} 'from' refs unknown node {edge['from']}"
            assert edge["to"] in node_ids, f"Edge {edge['id']} 'to' refs unknown node {edge['to']}"

    def test_analysis_keys_match_nodes(self):
        node_ids = {n["id"] for n in self.nodes}
        assert set(self.analysis.keys()) == node_ids

    # --- Node detection ---

    def test_has_frontend_pages(self):
        pages = [n for n in self.nodes if n["type"] == "page"]
        page_labels = {n["label"] for n in pages}
        assert len(pages) >= 4
        # Should detect Dashboard, Login, Settings, and the root App page
        assert any("dashboard" in l.lower() for l in page_labels)
        assert any("login" in l.lower() for l in page_labels)
        assert any("settings" in l.lower() for l in page_labels)

    def test_has_providers(self):
        providers = [n for n in self.nodes if n["type"] == "provider"]
        assert len(providers) >= 2
        labels = {n["label"].lower() for n in providers}
        assert any("auth" in l for l in labels)
        assert any("project" in l for l in labels)

    def test_has_components(self):
        components = [n for n in self.nodes if n["type"] == "component"]
        assert len(components) >= 2
        labels = {n["label"].lower() for n in components}
        assert any("sidebar" in l for l in labels)
        assert any("project" in l for l in labels)

    def test_has_api_layer_nodes(self):
        api_nodes = [n for n in self.nodes if n["type"] == "api"]
        assert len(api_nodes) >= 2
        labels = {n["label"].lower() for n in api_nodes}
        assert any("client" in l for l in labels)
        assert any("auth" in l for l in labels)

    def test_backend_routes_grouped_into_one_service(self):
        services = [n for n in self.nodes if n["type"] == "service" and n["category"] == "backend"]
        assert len(services) == 1
        svc = services[0]
        assert "flask" in svc["label"].lower()
        # Should contain all 4 route files (plus app.py, middleware, __init__.py)
        route_files = {os.path.basename(f) for f in svc["files"] if "routes/" in f.replace("\\", "/")}
        assert {"auth.py", "users.py", "projects.py", "analytics.py"}.issubset(route_files)

    def test_has_database_nodes(self):
        db_nodes = [n for n in self.nodes if n["type"] == "database"]
        assert len(db_nodes) >= 1

    def test_all_nodes_have_repo_prefix(self):
        # Multi-repo scans namespace every node id with the repo slug.
        for node in self.nodes:
            assert node["id"].startswith("arch-frontend-") or node["id"].startswith("arch-backend-")

    # --- Edge detection ---

    def test_has_cross_repo_api_edges(self):
        """Frontend nodes should have API edges to the backend service."""
        api_edges = [e for e in self.edges if e["type"] == "api"]
        assert len(api_edges) >= 3
        # At least some should cross from frontend to backend
        cross_repo = [
            e for e in api_edges
            if e["from"].startswith("arch-frontend-") and e["to"].startswith("arch-backend-")
        ]
        assert len(cross_repo) >= 3

    def test_has_data_edges_to_database(self):
        # Every "data" edge must terminate at a database node.
        data_edges = [e for e in self.edges if e["type"] == "data"]
        assert len(data_edges) >= 1
        for e in data_edges:
            target = next(n for n in self.nodes if n["id"] == e["to"])
            assert target["type"] == "database"

    def test_has_auth_edge(self):
        auth_edges = [e for e in self.edges if e["type"] == "auth"]
        assert len(auth_edges) >= 1

    def test_has_dependency_edges(self):
        dep_edges = [e for e in self.edges if e["type"] == "dependency"]
        assert len(dep_edges) >= 2

    def test_no_self_loops(self):
        for edge in self.edges:
            assert edge["from"] != edge["to"], f"Edge {edge['id']} is a self-loop"

    # --- Layout ---

    def test_pages_on_top_layer(self):
        # Pages must not sit below any other node type (smaller y = higher).
        pages = [n for n in self.nodes if n["type"] == "page"]
        other = [n for n in self.nodes if n["type"] != "page"]
        if pages and other:
            max_page_y = max(n["y"] for n in pages)
            min_other_y = min(n["y"] for n in other)
            assert max_page_y <= min_other_y

    def test_database_on_bottom_layer(self):
        db = [n for n in self.nodes if n["type"] == "database"]
        other = [n for n in self.nodes if n["type"] != "database"]
        if db and other:
            min_db_y = min(n["y"] for n in db)
            max_other_y = max(n["y"] for n in other)
            assert min_db_y >= max_other_y

    # --- Node analysis content ---

    def test_backend_service_has_all_endpoints(self):
        svc = next(n for n in self.nodes if n["type"] == "service" and n["category"] == "backend")
        endpoints = self.analysis[svc["id"]].get("endpoints", [])
        assert len(endpoints) >= 15  # 3 auth + 5 users + 5 projects + 2 analytics
        methods = {ep["method"] for ep in endpoints}
        assert {"GET", "POST", "PUT", "DELETE"}.issubset(methods)

    def test_endpoints_have_categories(self):
        svc = next(n for n in self.nodes if n["type"] == "service")
        endpoints = self.analysis[svc["id"]].get("endpoints", [])
        categories = {ep["category"] for ep in endpoints}
        assert "Auth" in categories
        assert "Users" in categories
        assert "Projects" in categories
        assert "Analytics" in categories

    def test_endpoints_have_db_operations(self):
        svc = next(n for n in self.nodes if n["type"] == "service")
        endpoints = self.analysis[svc["id"]].get("endpoints", [])
        eps_with_db = [ep for ep in endpoints if ep.get("dbOperations")]
        assert len(eps_with_db) >= 10  # most endpoints touch the DB
        # Check DB op shape
        op = eps_with_db[0]["dbOperations"][0]
        assert "type" in op
        assert "table" in op

    def test_endpoints_have_request_shapes(self):
        svc = next(n for n in self.nodes if n["type"] == "service")
        endpoints = self.analysis[svc["id"]].get("endpoints", [])
        post_eps = [ep for ep in endpoints if ep["method"] == "POST"]
        eps_with_body = [ep for ep in post_eps if ep.get("requestShape")]
        assert len(eps_with_body) >= 2  # login, create_project, etc.

    def test_backend_has_login_required_rule(self):
        svc = next(n for n in self.nodes if n["type"] == "service")
        rules = self.analysis[svc["id"]].get("rules", [])
        assert len(rules) >= 1
        rule_names = {r["name"] for r in rules}
        assert "login_required" in rule_names
        lr = next(r for r in rules if r["name"] == "login_required")
        assert lr["type"] == "guard"
        assert len(lr["appliedTo"]) >= 10  # most routes require auth

    def test_pages_have_data_sources(self):
        pages = [n for n in self.nodes if n["type"] == "page"]
        pages_with_data = [
            n for n in pages
            if self.analysis[n["id"]].get("dataUsed")
        ]
        assert len(pages_with_data) >= 3  # dashboard, settings, home all fetch data

    def test_analysis_has_empty_commits_and_issues(self):
        """Commits and issues require user accounts — should be empty."""
        for node_id, a in self.analysis.items():
            assert a["commits"] == []
            assert a["issues"] == []

    # --- File tree ---

    def test_file_tree_has_both_repos(self):
        top_names = {n["name"] for n in self.tree}
        # Frontend has src/, backend has routes/
        all_names = set()
        def collect(nodes):
            # Recursively gather every name in the tree.
            for n in nodes:
                all_names.add(n["name"])
                if "children" in n:
                    collect(n["children"])
        collect(self.tree)
        assert "src" in all_names  # frontend
        assert "routes" in all_names  # backend

    def test_file_tree_structure_is_valid(self):
        def validate(nodes, prefix=""):
            # Every entry needs name/type/path; folders also carry children.
            for n in nodes:
                assert "name" in n
                assert "type" in n
                assert "path" in n
                assert n["type"] in ("file", "folder")
                if n["type"] == "folder":
                    assert "children" in n
                    validate(n["children"], n["path"])
        validate(self.tree)

    # --- DB tables ---

    def test_backend_touches_expected_tables(self):
        svc = next(n for n in self.nodes if n["type"] == "service")
        endpoints = self.analysis[svc["id"]].get("endpoints", [])
        all_tables = set()
        for ep in endpoints:
            for op in ep.get("dbOperations", []):
                all_tables.add(op["table"])
        assert "users" in all_tables
        assert "sessions" in all_tables
        assert "projects" in all_tables
        assert "connections" in all_tables
|
|
556
|
+
|
|
557
|
+
|
|
558
|
+
# ---------------------------------------------------------------------------
|
|
559
|
+
# Monorepo integration test (single scan, frontend + backend in one repo)
|
|
560
|
+
# ---------------------------------------------------------------------------
|
|
561
|
+
|
|
562
|
+
class TestMonorepoArchitecture:
    """End-to-end test with a realistic monorepo simulating a SaaS project management app.

    Structure:
        apps/web/ - Next.js frontend (6 pages, 2 providers, 5 components, 5 API modules)
        apps/api/ - Flask backend (6 route modules, middleware, services)
        packages/shared-types/ - Shared TypeScript types

    This tests single-repo monorepo scanning (not multi-repo).
    """

    @pytest.fixture(autouse=True)
    def setup(self):
        # Runs before every test in this class: scans the on-disk fixture
        # repo and exposes the parsed architecture document through instance
        # attributes that all assertions below read.
        self.fixture = os.path.join(FIXTURES, "arch_monorepo")
        if not os.path.isdir(self.fixture):
            # Skip (rather than fail) the whole class when the fixture tree
            # is absent from this checkout.
            pytest.skip("arch_monorepo fixture not found")
        results = scan_repos_full([self.fixture])
        # generate_architecture returns a JSON string; decode it once here.
        self.data = json.loads(generate_architecture(results))
        self.nodes = self.data["nodes"]
        self.edges = self.data["edges"]
        self.tree = self.data["fileTree"]
        self.analysis = self.data["nodeAnalysis"]
|
|
584
|
+
|
|
585
|
+
# --- Structural validity ---
|
|
586
|
+
|
|
587
|
+
def test_valid_json_structure(self):
    """The top-level document has exactly the expected keys, correctly typed."""
    expected_keys = {"nodes", "edges", "fileTree", "nodeAnalysis", "nodeHashes"}
    assert set(self.data.keys()) == expected_keys
    # Spot-check the container type of each analyzed section.
    for key, container in (
        ("nodes", list),
        ("edges", list),
        ("fileTree", list),
        ("nodeAnalysis", dict),
    ):
        assert isinstance(self.data[key], container)
|
|
593
|
+
|
|
594
|
+
def test_all_nodes_have_coordinates(self):
    """Layout must assign every node strictly positive x/y coordinates.

    Fix: the assertion messages previously claimed the coordinate "has x=0"
    / "has y=0", but the guard is ``> 0`` — a negative coordinate would be
    mis-reported. Report the actual offending value instead.
    """
    for node in self.nodes:
        assert node["x"] > 0, f"Node {node['id']} has x={node['x']}"
        assert node["y"] > 0, f"Node {node['id']} has y={node['y']}"
|
|
598
|
+
|
|
599
|
+
def test_no_self_loop_edges(self):
    """Every edge must connect two distinct nodes."""
    for edge_record in self.edges:
        source, target = edge_record["from"], edge_record["to"]
        assert source != target, f"Self-loop: {edge_record['id']}"
|
|
602
|
+
|
|
603
|
+
def test_edge_refs_valid(self):
    """Both endpoints of every edge must reference a known node id."""
    known = {node["id"] for node in self.nodes}
    for e in self.edges:
        assert e["from"] in known, f"Edge {e['id']} from unknown node"
        assert e["to"] in known, f"Edge {e['id']} to unknown node"
|
|
608
|
+
|
|
609
|
+
def test_analysis_covers_all_nodes(self):
    """nodeAnalysis must contain exactly one entry per graph node."""
    node_ids = {node["id"] for node in self.nodes}
    # Iterating a dict yields its keys, so set(...) compares key sets.
    assert set(self.analysis) == node_ids
|
|
611
|
+
|
|
612
|
+
# --- Node detection across monorepo ---
|
|
613
|
+
|
|
614
|
+
def test_detects_frontend_pages(self):
    """At least five pages are detected, including all core app screens."""
    page_nodes = [node for node in self.nodes if node["type"] == "page"]
    page_labels = {node["label"].lower() for node in page_nodes}
    assert len(page_nodes) >= 5
    for screen in ("dashboard", "login", "projects", "settings", "invite"):
        assert screen in page_labels
|
|
623
|
+
|
|
624
|
+
def test_detects_providers(self):
    """Both the auth and workspace context providers are detected."""
    provider_nodes = [node for node in self.nodes if node["type"] == "provider"]
    # Count nodes (not distinct labels) so duplicates still satisfy >= 2.
    assert len(provider_nodes) >= 2
    lowered = [node["label"].lower() for node in provider_nodes]
    assert any("auth" in label for label in lowered)
    assert any("workspace" in label for label in lowered)
|
|
630
|
+
|
|
631
|
+
def test_detects_feature_components(self):
    """Key feature components (kanban board, sidebar) are detected."""
    lowered = [
        node["label"].lower()
        for node in self.nodes
        if node["type"] == "component"
    ]
    assert any("kanban" in label for label in lowered)
    assert any("sidebar" in label for label in lowered)
|
|
636
|
+
|
|
637
|
+
def test_detects_api_layer(self):
    """Frontend API wrapper modules are detected, including the shared client."""
    api_labels = [
        node["label"].lower()
        for node in self.nodes
        if node["type"] == "api"
    ]
    assert len(api_labels) >= 4
    assert any("client" in label for label in api_labels)
|
|
642
|
+
|
|
643
|
+
def test_backend_grouped_into_one_service(self):
    """All Flask route modules collapse into a single backend service node."""
    backend_services = [
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    ]
    assert len(backend_services) == 1
    # Normalize Windows separators before looking for the routes/ directory.
    normalized = [path.replace("\\", "/") for path in backend_services[0]["files"]]
    route_count = sum(1 for path in normalized if "routes/" in path)
    assert route_count >= 6
|
|
649
|
+
|
|
650
|
+
def test_detects_database_node(self):
    """At least one database node appears in the graph."""
    assert any(node["type"] == "database" for node in self.nodes)
|
|
653
|
+
|
|
654
|
+
# --- Edge detection ---
|
|
655
|
+
|
|
656
|
+
def test_has_api_edges(self):
    """Frontend-to-backend API call edges are discovered."""
    api_edge_count = sum(1 for edge in self.edges if edge["type"] == "api")
    assert api_edge_count >= 5
|
|
659
|
+
|
|
660
|
+
def test_has_data_edges(self):
    """At least one service-to-database data edge exists."""
    assert any(edge["type"] == "data" for edge in self.edges)
|
|
663
|
+
|
|
664
|
+
def test_has_auth_edge(self):
    """At least one auth-flow edge exists."""
    assert any(edge["type"] == "auth" for edge in self.edges)
|
|
667
|
+
|
|
668
|
+
def test_has_dependency_edges(self):
    """Component/provider dependency edges are discovered."""
    dependency_count = sum(1 for edge in self.edges if edge["type"] == "dependency")
    assert dependency_count >= 3
|
|
671
|
+
|
|
672
|
+
# --- Backend analysis ---
|
|
673
|
+
|
|
674
|
+
def test_backend_has_many_endpoints(self):
    """The backend service exposes a substantial endpoint inventory."""
    backend = next(
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    )
    endpoint_list = self.analysis[backend["id"]].get("endpoints", [])
    assert len(endpoint_list) >= 20
|
|
678
|
+
|
|
679
|
+
def test_endpoints_span_multiple_categories(self):
    """Backend endpoints are classified into at least four distinct categories."""
    backend = next(
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    )
    endpoint_list = self.analysis[backend["id"]].get("endpoints", [])
    distinct_categories = {endpoint["category"] for endpoint in endpoint_list}
    assert len(distinct_categories) >= 4
|
|
684
|
+
|
|
685
|
+
def test_endpoints_cover_all_http_methods(self):
    """GET, POST, PUT and DELETE all appear among the backend endpoints."""
    backend = next(
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    )
    endpoint_list = self.analysis[backend["id"]].get("endpoints", [])
    seen_methods = {endpoint["method"] for endpoint in endpoint_list}
    # Superset comparison is equivalent to issubset on the required set.
    assert seen_methods >= {"GET", "POST", "PUT", "DELETE"}
|
|
690
|
+
|
|
691
|
+
def test_endpoints_have_db_operations(self):
    """Most backend endpoints carry extracted database operations."""
    backend = next(
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    )
    endpoint_list = self.analysis[backend["id"]].get("endpoints", [])
    db_backed = sum(1 for endpoint in endpoint_list if endpoint.get("dbOperations"))
    assert db_backed >= 15
|
|
696
|
+
|
|
697
|
+
def test_db_tables_cover_domain(self):
    """DB operations across endpoints cover the whole domain schema."""
    backend = next(
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    )
    endpoint_list = self.analysis[backend["id"]].get("endpoints", [])
    touched_tables = {
        op["table"]
        for endpoint in endpoint_list
        for op in endpoint.get("dbOperations", [])
    }
    for table in ("users", "projects", "tasks", "sessions", "notifications", "columns"):
        assert table in touched_tables
|
|
710
|
+
|
|
711
|
+
def test_has_login_required_guard(self):
    """The login_required guard rule is extracted from the backend."""
    backend = next(
        node for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    )
    rule_list = self.analysis[backend["id"]].get("rules", [])
    guards = [
        rule for rule in rule_list
        if rule["name"] == "login_required" and rule["type"] == "guard"
    ]
    assert guards
|
|
715
|
+
|
|
716
|
+
# --- Frontend analysis ---
|
|
717
|
+
|
|
718
|
+
def test_pages_have_data_sources(self):
    """Most pages consume at least one data source."""
    page_nodes = [node for node in self.nodes if node["type"] == "page"]
    consuming = [
        page for page in page_nodes
        if self.analysis[page["id"]].get("dataUsed")
    ]
    assert len(consuming) >= 4
|
|
722
|
+
|
|
723
|
+
def test_api_modules_have_resolved_urls(self):
    """API wrapper modules should have resolved call-site URLs, not :path placeholders."""
    for node in self.nodes:
        if node["type"] != "api":
            continue
        for entry in self.analysis[node["id"]].get("dataUsed", []):
            assert ":path" not in entry["source"], (
                f"Unresolved :path in {node['id']}: {entry['source']}"
            )
|
|
729
|
+
|
|
730
|
+
# --- File tree ---
|
|
731
|
+
|
|
732
|
+
def test_file_tree_has_monorepo_structure(self):
    """The tree root contains the monorepo's apps/ or packages/ directory."""
    root_names = {entry["name"] for entry in self.tree}
    # Non-disjoint means at least one of the two names is present.
    assert not root_names.isdisjoint({"apps", "packages"})
|
|
735
|
+
|
|
736
|
+
def test_file_tree_valid_structure(self):
    """Every tree node carries name/type/path; folders recurse via children."""
    # Iterative worklist traversal instead of recursion; checks the same
    # invariants on every node in the tree.
    pending = list(self.tree)
    while pending:
        node = pending.pop()
        assert "name" in node and "type" in node and "path" in node
        if node["type"] == "folder":
            assert "children" in node
            pending.extend(node["children"])
|
|
744
|
+
|
|
745
|
+
# --- Layout sanity ---
|
|
746
|
+
|
|
747
|
+
def test_pages_above_backend(self):
    """In the layout, every page sits above (smaller y than) the backend service."""
    page_ys = [node["y"] for node in self.nodes if node["type"] == "page"]
    backend_ys = [
        node["y"] for node in self.nodes
        if node["type"] == "service" and node["category"] == "backend"
    ]
    # Only meaningful when both kinds of node exist (other tests assert presence).
    if page_ys and backend_ys:
        assert max(page_ys) < min(backend_ys)
|
|
752
|
+
|
|
753
|
+
def test_database_at_bottom(self):
    """Database nodes are laid out at or below every other node."""
    db_ys = [node["y"] for node in self.nodes if node["type"] == "database"]
    other_ys = [node["y"] for node in self.nodes if node["type"] != "database"]
    # Only meaningful when both groups are non-empty.
    if db_ys and other_ys:
        assert min(db_ys) >= max(other_ys)
|