frontone 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- frontone/__init__.py +1 -0
- frontone/analyzer.py +620 -0
- frontone/git_utils.py +59 -0
- frontone/planner.py +159 -0
- frontone/prompts.py +549 -0
- frontone/scaffold.py +91 -0
- frontone/server.py +90 -0
- frontone-0.1.0.dist-info/METADATA +262 -0
- frontone-0.1.0.dist-info/RECORD +12 -0
- frontone-0.1.0.dist-info/WHEEL +5 -0
- frontone-0.1.0.dist-info/entry_points.txt +2 -0
- frontone-0.1.0.dist-info/top_level.txt +1 -0
frontone/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.0"
|
frontone/analyzer.py
ADDED
|
@@ -0,0 +1,620 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
import urllib.request
|
|
5
|
+
from collections import Counter, defaultdict
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from codeintel_cli.core.project import find_project_root
|
|
9
|
+
from codeintel_cli.scanner.scanner import find_all_supported_files
|
|
10
|
+
from codeintel_cli.endpoints.scan import EndpointScanOptions, iter_supported_source_files
|
|
11
|
+
from codeintel_cli.endpoints.java_spring import extract_spring_endpoints
|
|
12
|
+
from codeintel_cli.endpoints.openapi_spec import extract_openapi_endpoints
|
|
13
|
+
from codeintel_cli.context.java_rel import java_fields_and_rels, top_type_name
|
|
14
|
+
from codeintel_cli.lang.java.types import scan_java_types
|
|
15
|
+
from codeintel_cli.lang.java.models import extract_java_models, resolve_inherited_fields
|
|
16
|
+
from codeintel_cli.commands.project.endpoints_cmd import (
|
|
17
|
+
_detect_spring_security_policy,
|
|
18
|
+
_norm_path,
|
|
19
|
+
_display_role_str,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
# ─────────────────────────────────────────────────────────────────────────────
# Constants
# ─────────────────────────────────────────────────────────────────────────────

# Directory names that are never descended into when walking a project tree.
IGNORE_DIRS = {
    ".git", ".idea", ".vscode", ".mvn",
    "__pycache__", ".pytest_cache", ".ruff_cache", ".tox",
    ".venv", "venv", "env",
    "node_modules", "dist", "build", "out", "target", ".gradle",
}

# Path segments that suggest a directory holds domain models / entities.
MODEL_DIRS = {
    "model", "models", "entity", "entities", "domain", "domains",
    "core", "aggregate", "aggregates", "persistence", "db", "data",
    "schema", "pojo", "bean", "beans", "orm", "schemas",
    "datamodel", "datamodels", "table", "tables", "record", "records",
    "document", "documents", "dao",
    # NOTE(review): the entries below look Spring PetClinic-specific —
    # confirm they are intentionally hard-coded here.
    "owner", "vet", "pet", "visit",
}

# Path segments that disqualify a file from being treated as a model.
SKIP_MODEL_DIRS = {
    "test", "tests", "controller", "controllers",
    "service", "services", "repository", "repositories",
    "config", "configuration", "util", "utils",
    "exception", "exceptions", "security",
    "mapper", "mappers", "migration", "migrations", "dto",
}

# Class-name suffixes that mark infrastructure classes, not domain models.
SKIP_MODEL_SUFFIXES = (
    "Repository", "Mapper", "Dao", "Service", "Controller",
    "Config", "Configuration", "Util", "Utils", "Helper",
    "Exception", "Handler", "Interceptor", "Filter",
    "Listener", "Scheduler", "Validator", "Converter",
    "Serializer", "Deserializer", "Factory", "Builder", "Specification",
)

# Annotations (JPA / Spring Data Mongo) whose presence identifies an entity class.
_ENTITY_ANNOTATIONS = {"Entity", "MappedSuperclass", "Embeddable", "Table", "Document"}

# Primitive / JDK value types — never treated as relationship targets.
_BASIC_TYPES = {
    "String", "Long", "Integer", "int", "long", "double", "Double",
    "float", "Float", "boolean", "Boolean", "UUID", "BigDecimal",
    "Date", "LocalDate", "LocalDateTime", "Instant", "Object",
}

# Fake endpoint patterns — constructor injections misread as routes
_FAKE_ENDPOINT_RE = re.compile(
    r'/api/[^/]+/api/'  # doubled prefix like /api/bank-accounts/api/bank-accounts
    r'|^/api/api$'      # /api/api
)
|
|
71
|
+
|
|
72
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
73
|
+
# Helpers
|
|
74
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
75
|
+
|
|
76
|
+
def _is_basic_type(name: str) -> bool:
    """Return True when *name* (trimmed, None-safe) is a primitive/JDK value type."""
    cleaned = "" if not name else name.strip()
    return cleaned in _BASIC_TYPES
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _walk_java(root: Path):
    """Yield every ``.java`` file under *root*, pruning IGNORE_DIRS subtrees.

    Unreadable or vanished directories are silently skipped.
    """
    pending = [root]
    while pending:
        directory = pending.pop()
        try:
            entries = list(directory.iterdir())
        except (PermissionError, FileNotFoundError):
            continue
        for entry in entries:
            if entry.is_dir():
                if entry.name not in IGNORE_DIRS:
                    pending.append(entry)
            elif entry.suffix == ".java":
                yield entry
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _is_model_path(rel: Path) -> bool:
    """Heuristic: does *rel* (project-relative) look like a model/entity source file?"""
    segments = {part.lower() for part in rel.parts}
    # Anything under src/test is never a production model.
    if {"src", "test"} <= segments:
        return False
    # Infrastructure directories disqualify the file outright.
    if segments & SKIP_MODEL_DIRS:
        return False
    # So do infrastructure class-name suffixes (Repository, Mapper, ...).
    if rel.stem.endswith(SKIP_MODEL_SUFFIXES):
        return False
    return bool(segments & MODEL_DIRS)
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def _has_entity_annotation(path: Path) -> bool:
    """True when the file text contains any entity annotation; False on read errors."""
    try:
        source = path.read_text(encoding="utf-8", errors="ignore")
    except Exception:
        return False
    return any(f"@{annotation}" in source for annotation in _ENTITY_ANNOTATIONS)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
115
|
+
# Models
|
|
116
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
117
|
+
|
|
118
|
+
def _scan_models(project_root: Path) -> list[dict]:
    """Collect Java model classes as plain dicts (name, kind, bases, file, fields).

    A file qualifies when its path looks model-like OR it carries an entity
    annotation. Falls back to a regex-only scan when nothing is found.
    """
    pr = project_root.resolve()
    candidates: list[Path] = []
    for path in find_all_supported_files(project_root):
        if path.suffix.lower() != ".java":
            continue
        resolved = path.resolve()
        try:
            relative = resolved.relative_to(pr)
        except Exception:
            continue
        if _is_model_path(relative) or _has_entity_annotation(resolved):
            candidates.append(resolved)

    models = []
    for path in sorted(candidates, key=lambda p: str(p).lower()):
        models.extend(extract_java_models(path, pr))
    models = resolve_inherited_fields(models)

    out: list[dict] = []
    seen: set[str] = set()
    for model in models:
        # Keep only the first occurrence of each class name.
        if model.name in seen:
            continue
        seen.add(model.name)
        out.append({
            "name": model.name,
            "kind": model.kind,
            "bases": list(model.bases) if model.bases else [],
            "file": model.file,
            "fields": [{"name": fld.name, "type": fld.type} for fld in model.fields],
        })

    # Fallback: if models still empty, scan all java files for @Entity.
    return out or _scan_models_fallback(project_root)
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _scan_models_fallback(project_root: Path) -> list[dict]:
    """Regex-only fallback: locate entity-annotated classes and their fields."""
    field_re = re.compile(
        r'(?:private|protected|public)\s+([\w<>, \[\]]+)\s+(\w+)\s*;'
    )
    results: list[dict] = []
    seen: set[str] = set()

    for path in project_root.rglob("*.java"):
        if {"test", "target", "build"} & set(path.parts):
            continue
        try:
            source = path.read_text(encoding="utf-8", errors="ignore")
        except Exception:
            continue
        if not any(f"@{annotation}" in source for annotation in _ENTITY_ANNOTATIONS):
            continue
        class_match = re.search(r'(?:public\s+)?class\s+(\w+)', source)
        if class_match is None:
            continue
        class_name = class_match.group(1)
        if class_name in seen:
            continue
        seen.add(class_name)

        fields = []
        for fm in field_re.finditer(source):
            # Skip injected collaborators (e.g. fooRepository) declared as fields.
            if fm.group(2).endswith(SKIP_MODEL_SUFFIXES):
                continue
            fields.append({"name": fm.group(2), "type": fm.group(1).strip()})

        results.append({
            "name": class_name,
            "kind": "class",
            "bases": [],
            "file": str(path),
            "fields": fields,
        })
    return results
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
199
|
+
# Relationships
|
|
200
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
201
|
+
|
|
202
|
+
def _scan_relationships(project_root: Path) -> dict:
    """Extract entity-to-entity relationship edges from model source files.

    Returns counts per relationship kind, a flat edge list, and edges grouped
    by source type. Basic/JDK target types are excluded.
    """
    pr = project_root.resolve()
    candidates: list[Path] = []
    for path in find_all_supported_files(project_root):
        if path.suffix.lower() != ".java":
            continue
        resolved = path.resolve()
        try:
            relative = resolved.relative_to(pr)
        except Exception:
            continue
        if _is_model_path(relative):
            candidates.append(resolved)

    edges: list[dict] = []
    counts: dict[str, int] = defaultdict(int)
    per_src: dict[str, list[dict]] = defaultdict(list)

    for path in sorted(candidates, key=lambda p: str(p).lower()):
        try:
            raw = path.read_text(encoding="utf-8", errors="ignore")
        except Exception:
            continue
        source_type = top_type_name(raw, fallback=path.stem)
        _unused_fields, relations = java_fields_and_rels(path)
        for relation in relations:
            target = (relation.target or "").strip()
            if not target or _is_basic_type(target):
                continue
            edge = {"src": source_type, "kind": relation.kind,
                    "field": relation.field, "target": target}
            edges.append(edge)
            counts[relation.kind] += 1
            per_src[source_type].append(edge)

    return {
        "counts": dict(counts),
        "total": sum(counts.values()),
        "edges": edges,
        "per_src": dict(per_src),
    }
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
246
|
+
# DTOs / Types
|
|
247
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
248
|
+
|
|
249
|
+
def _scan_types(project_root: Path) -> list[dict]:
    """List DTO/type definitions, regex-filling fields the parser left empty."""
    java_files = sorted(_walk_java(project_root), key=lambda p: str(p).lower())
    # Cap at 500 files to keep very large trees bounded.
    type_defs = scan_java_types(java_files[:500], project_root)
    if not type_defs:
        return []

    # Regex used only when scan_java_types yields a type with no fields.
    field_re = re.compile(
        r'(?:private|protected|public)\s+([\w<>, \[\]]+)\s+(\w+)\s*;'
    )

    out: list[dict] = []
    for type_def in type_defs:
        fields = [{"name": fld.name, "type": fld.type} for fld in type_def.fields]

        if not fields:
            # Parser found nothing — grep the matching source file directly.
            for candidate in project_root.rglob(f"{type_def.name}.java"):
                if {"test", "target", "build"} & set(candidate.parts):
                    continue
                try:
                    source = candidate.read_text(encoding="utf-8", errors="ignore")
                except Exception:
                    continue
                fields = [
                    {"name": fm.group(2), "type": fm.group(1).strip()}
                    for fm in field_re.finditer(source)
                ]
                if fields:
                    break

        out.append({
            "name": type_def.name,
            "kind": type_def.kind,
            "category": type_def.category,
            "fields": fields,
        })
    return out
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
291
|
+
# Endpoints
|
|
292
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
293
|
+
|
|
294
|
+
def _scan_endpoints(project_root: Path) -> list[dict]:
    """Collect deduplicated Spring (or OpenAPI-spec) endpoints with access info."""
    opts = EndpointScanOptions(show_hidden=False, use_gitignore=True)
    files = list(iter_supported_source_files(project_root, opts))

    def _is_prod(path) -> bool:
        # Production sources only — exclude tests and build output.
        lowered = _norm_path(str(path)).lower()
        return not any(frag in lowered for frag in ("/src/test/", "/target/", "/build/"))

    default_auth, permit_all = _detect_spring_security_policy(
        [f for f in files if _is_prod(f)]
    )

    endpoints = []
    for path in files:
        if path.suffix.lower() != ".java":
            continue
        try:
            source = path.read_text(encoding="utf-8", errors="ignore")
        except Exception:
            continue
        endpoints.extend(
            extract_spring_endpoints(
                source, str(path), default_auth=default_auth, permit_all=permit_all
            )
        )

    # No Spring routes at all? Fall back to a checked-in OpenAPI document.
    if not any(ep.framework == "spring" for ep in endpoints):
        endpoints.extend(extract_openapi_endpoints(project_root))

    def _keep(ep) -> bool:
        norm = _norm_path(ep.file)
        if "/src/test/" in norm:
            return False
        if "/src/main/" not in norm and "/main/" not in norm:
            return False
        # Drop constructor-injection strings misparsed as routes.
        return not _FAKE_ENDPOINT_RE.search(ep.path)

    seen: set[tuple[str, str]] = set()
    unique = []
    for ep in endpoints:
        if not _keep(ep):
            continue
        key = (ep.method, ep.path)
        if key in seen:
            continue
        seen.add(key)
        unique.append(ep)
    unique.sort(key=lambda ep: (ep.path, ep.method))

    return [
        {
            "method": ep.method,
            "path": ep.path,
            "access": _display_role_str(
                ep.path, set(ep.roles or []),
                default_auth=default_auth, permit_all=permit_all,
            ),
            "file": ep.file,
            "handler": ep.handler,
        }
        for ep in unique
    ]
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
345
|
+
# Swagger
|
|
346
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
347
|
+
|
|
348
|
+
def detect_swagger(project_root: Path) -> dict:
    """Look for a checked-in swagger/OpenAPI file, then probe a locally running app.

    Checks well-known relative paths first; if none exists, tries the usual
    localhost spec URLs with a short timeout. Returns a status dict either way.
    """
    local_candidates = (
        "src/main/resources/static/swagger-ui/index.html",
        "src/main/resources/swagger/openapi.yml",
        "src/main/resources/swagger/openapi.yaml",
        "openapi.yml",
        "openapi.yaml",
    )
    for rel_path in local_candidates:
        if (project_root / rel_path).exists():
            return {"found": True, "source": "local", "local_path": rel_path}

    # Try fetching live OpenAPI spec
    probe_urls = [
        "http://localhost:8080/v3/api-docs",
        "http://localhost:8080/v2/api-docs",
        "http://localhost:8080/swagger.json",
    ]
    for url in probe_urls:
        try:
            with urllib.request.urlopen(url, timeout=2) as resp:
                if resp.status != 200:
                    continue
                import json as _json
                spec = _json.loads(resp.read().decode())
                info = spec.get("info", {})
                return {
                    "found": True,
                    "source": "live",
                    "url": url,
                    "title": info.get("title", ""),
                    "version": info.get("version", ""),
                    "paths_count": len(spec.get("paths", {})),
                }
        except Exception:
            continue

    return {
        "found": False,
        "source": "none",
        "likely_runtime_urls": probe_urls,
        "note": "Start the app and one of these URLs will return the live OpenAPI spec",
    }
|
|
390
|
+
|
|
391
|
+
|
|
392
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
393
|
+
# Auth
|
|
394
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
395
|
+
|
|
396
|
+
def _scan_auth(project_root: Path) -> dict:
|
|
397
|
+
_JWT = [
|
|
398
|
+
r"JwtAuthenticationFilter", r"JwtTokenProvider", r"TokenProvider",
|
|
399
|
+
r"Jwts\.", r"JWTVerifier", r"BearerTokenAuthenticationFilter",
|
|
400
|
+
r"import\s+io\.jsonwebtoken", r"import\s+com\.auth0\.jwt",
|
|
401
|
+
r"import\s+org\.springframework\.security\.oauth2\.jwt",
|
|
402
|
+
r"NimbusJwtDecoder", r"NimbusJwtEncoder", r"JwtDecoder",
|
|
403
|
+
r"JwtEncoder", r"JwtClaimsSet",
|
|
404
|
+
]
|
|
405
|
+
_OAUTH2 = [
|
|
406
|
+
r"oauth2Login\s*\(", r"@EnableOAuth2Sso", r"@EnableResourceServer",
|
|
407
|
+
r"@EnableAuthorizationServer",
|
|
408
|
+
r"import\s+org\.springframework\.security\.oauth2\.client",
|
|
409
|
+
]
|
|
410
|
+
jwt_hits = oauth2_hits = stateless = session_hits = 0
|
|
411
|
+
for f in project_root.rglob("*.java"):
|
|
412
|
+
if any(x in f.parts for x in ("test", "target", "build")):
|
|
413
|
+
continue
|
|
414
|
+
try:
|
|
415
|
+
src = f.read_text(encoding="utf-8", errors="ignore")
|
|
416
|
+
except Exception:
|
|
417
|
+
continue
|
|
418
|
+
for p in _JWT:
|
|
419
|
+
if re.search(p, src):
|
|
420
|
+
jwt_hits += 1
|
|
421
|
+
for p in _OAUTH2:
|
|
422
|
+
if re.search(p, src):
|
|
423
|
+
oauth2_hits += 1
|
|
424
|
+
if re.search(r"SessionCreationPolicy\.STATELESS", src):
|
|
425
|
+
stateless = 1
|
|
426
|
+
if re.search(r"\.sessionManagement\(|HttpSession", src):
|
|
427
|
+
session_hits += 1
|
|
428
|
+
|
|
429
|
+
if jwt_hits >= 1:
|
|
430
|
+
return {"type": "JWT", "token_transport": "Bearer header",
|
|
431
|
+
"usage": "Authorization: Bearer <token>", "signals": jwt_hits}
|
|
432
|
+
if oauth2_hits >= 1:
|
|
433
|
+
return {"type": "OAuth2", "token_transport": "Bearer header",
|
|
434
|
+
"usage": "Authorization: Bearer <access_token>", "signals": oauth2_hits}
|
|
435
|
+
if stateless:
|
|
436
|
+
return {"type": "Stateless", "token_transport": "Bearer header (assumed)",
|
|
437
|
+
"usage": "SessionCreationPolicy.STATELESS set but no JWT/OAuth2 class found", "signals": 0}
|
|
438
|
+
if session_hits:
|
|
439
|
+
return {"type": "Session", "token_transport": "Cookie (JSESSIONID)",
|
|
440
|
+
"usage": "Browser manages cookie automatically", "signals": session_hits}
|
|
441
|
+
return {"type": "None", "token_transport": "N/A",
|
|
442
|
+
"usage": "No Spring Security config detected", "signals": 0}
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
446
|
+
# Base Path
|
|
447
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
448
|
+
|
|
449
|
+
def _scan_base_path(project_root: Path) -> dict:
|
|
450
|
+
pattern = re.compile(r'@RequestMapping\s*\(\s*["\']([^"\']+)["\']')
|
|
451
|
+
counter: Counter = Counter()
|
|
452
|
+
for f in project_root.rglob("*.java"):
|
|
453
|
+
if any(x in f.parts for x in ("test", "target", "build")):
|
|
454
|
+
continue
|
|
455
|
+
try:
|
|
456
|
+
src = f.read_text(encoding="utf-8", errors="ignore")
|
|
457
|
+
except Exception:
|
|
458
|
+
continue
|
|
459
|
+
if "@Controller" not in src and "@RestController" not in src:
|
|
460
|
+
continue
|
|
461
|
+
for m in pattern.finditer(src):
|
|
462
|
+
val = m.group(1).strip()
|
|
463
|
+
if not val or "{" in val:
|
|
464
|
+
continue
|
|
465
|
+
val = val if val.startswith("/") else "/" + val
|
|
466
|
+
parts = [p for p in val.split("/") if p]
|
|
467
|
+
if parts:
|
|
468
|
+
counter["/" + parts[0]] += 1
|
|
469
|
+
if len(parts) >= 2:
|
|
470
|
+
counter["/" + parts[0] + "/" + parts[1]] += 1
|
|
471
|
+
if not counter:
|
|
472
|
+
return {"prefix": "/", "axios_base_url": "http://localhost:8080"}
|
|
473
|
+
top = counter.most_common(1)[0][0]
|
|
474
|
+
return {"prefix": top, "axios_base_url": f"http://localhost:8080{top}"}
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
478
|
+
# Pagination
|
|
479
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
480
|
+
|
|
481
|
+
def _scan_pagination(project_root: Path) -> dict:
|
|
482
|
+
pageable_re = re.compile(r"\bPageable\b")
|
|
483
|
+
page_re = re.compile(r"\bPage<")
|
|
484
|
+
class_map_re = re.compile(r'@RequestMapping\s*\(\s*["\']([^"\']+)["\']')
|
|
485
|
+
method_map_re = re.compile(
|
|
486
|
+
r'@(?:Get|Post|Put|Delete|Patch)Mapping\s*(?:\(\s*(?:value\s*=\s*)?["\']([^"\']*)["\'])?'
|
|
487
|
+
)
|
|
488
|
+
paths: set[str] = set()
|
|
489
|
+
for f in project_root.rglob("*.java"):
|
|
490
|
+
if any(x in f.parts for x in ("test", "target", "build")):
|
|
491
|
+
continue
|
|
492
|
+
try:
|
|
493
|
+
src = f.read_text(encoding="utf-8", errors="ignore")
|
|
494
|
+
except Exception:
|
|
495
|
+
continue
|
|
496
|
+
if not (pageable_re.search(src) or page_re.search(src)):
|
|
497
|
+
continue
|
|
498
|
+
if "@Controller" not in src and "@RestController" not in src:
|
|
499
|
+
continue
|
|
500
|
+
base = ""
|
|
501
|
+
cm = class_map_re.search(src)
|
|
502
|
+
if cm:
|
|
503
|
+
base = cm.group(1).strip()
|
|
504
|
+
base = base if base.startswith("/") else "/" + base
|
|
505
|
+
base = base.rstrip("/")
|
|
506
|
+
for m in method_map_re.finditer(src):
|
|
507
|
+
sub = (m.group(1) or "").strip()
|
|
508
|
+
if sub and not sub.startswith("/"):
|
|
509
|
+
sub = "/" + sub
|
|
510
|
+
full = (base + sub) or "/"
|
|
511
|
+
snippet = src[m.start(): m.start() + 600]
|
|
512
|
+
if pageable_re.search(snippet) or page_re.search(snippet):
|
|
513
|
+
paths.add(full)
|
|
514
|
+
if paths:
|
|
515
|
+
return {
|
|
516
|
+
"used": True,
|
|
517
|
+
"style": "Spring Data Page",
|
|
518
|
+
"response_shape": {
|
|
519
|
+
"content": "T[]",
|
|
520
|
+
"totalElements": "number",
|
|
521
|
+
"totalPages": "number",
|
|
522
|
+
"number": "number (0-based)",
|
|
523
|
+
"size": "number",
|
|
524
|
+
},
|
|
525
|
+
"query_params": "?page=0&size=20&sort=fieldName,asc",
|
|
526
|
+
"paginated_paths": sorted(paths),
|
|
527
|
+
}
|
|
528
|
+
return {"used": False, "style": "None", "paginated_paths": []}
|
|
529
|
+
|
|
530
|
+
|
|
531
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
532
|
+
# Validation
|
|
533
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
534
|
+
|
|
535
|
+
# Bean Validation (JSR-380 style) annotations reported on DTO/entity fields.
_ANN_BLOCK_RE = re.compile(
    r'(@(?:NotNull|NotBlank|NotEmpty|Email|Positive|PositiveOrZero|Negative|NegativeOrZero)'
    r'|@Size\([^)]*\)|@Min\([^)]*\)|@Max\([^)]*\)|@Pattern\([^)]*\))'
)
# A field declaration preceded by one or more annotations:
# group(1) = the annotation run, group(2) = the field name.
_FIELD_LINE_RE = re.compile(
    r'((?:@\w+(?:\([^)]*\))?\s*)+)'
    r'(?:private|protected|public)\s+[\w<>, \[\]]+\s+(\w+)\s*;'
)
|
|
543
|
+
|
|
544
|
+
|
|
545
|
+
def _scan_validation(project_root: Path) -> dict:
    """Map class name -> {field name: [validation annotations]} across the project."""
    result: dict = {}
    for path in project_root.rglob("*.java"):
        if {"test", "target", "build"} & set(path.parts):
            continue
        try:
            source = path.read_text(encoding="utf-8", errors="ignore")
        except Exception:
            continue
        # Cheap pre-filter: skip files with no validation annotations at all.
        if _ANN_BLOCK_RE.search(source) is None:
            continue
        class_match = re.search(r'class\s+(\w+)', source)
        class_name = class_match.group(1) if class_match else path.stem
        annotated_fields: dict = {}
        for field_match in _FIELD_LINE_RE.finditer(source):
            constraints = _ANN_BLOCK_RE.findall(field_match.group(1))
            if constraints:
                annotated_fields[field_match.group(2)] = constraints
        if annotated_fields:
            result[class_name] = annotated_fields
    return result
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
569
|
+
# Enums
|
|
570
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
571
|
+
|
|
572
|
+
def _scan_enums(project_root: Path) -> dict:
|
|
573
|
+
enum_re = re.compile(
|
|
574
|
+
r'(?:public\s+)?enum\s+(\w+)\s*(?:implements[^{]*)?\{([^}]*)\}', re.DOTALL
|
|
575
|
+
)
|
|
576
|
+
result: dict = {}
|
|
577
|
+
for f in project_root.rglob("*.java"):
|
|
578
|
+
if any(x in f.parts for x in ("test", "target", "build")):
|
|
579
|
+
continue
|
|
580
|
+
try:
|
|
581
|
+
src = f.read_text(encoding="utf-8", errors="ignore")
|
|
582
|
+
except Exception:
|
|
583
|
+
continue
|
|
584
|
+
if "enum " not in src:
|
|
585
|
+
continue
|
|
586
|
+
for m in enum_re.finditer(src):
|
|
587
|
+
constants = [
|
|
588
|
+
c.strip().split("(")[0].strip()
|
|
589
|
+
for c in m.group(2).split(";")[0].split(",")
|
|
590
|
+
]
|
|
591
|
+
constants = [c for c in constants if re.match(r'^[A-Z][A-Z0-9_]*$', c)]
|
|
592
|
+
if constants:
|
|
593
|
+
result[m.group(1)] = constants
|
|
594
|
+
return result
|
|
595
|
+
|
|
596
|
+
|
|
597
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
598
|
+
# Main entry point
|
|
599
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
600
|
+
|
|
601
|
+
def analyze(repo_path: str) -> dict:
    """
    Main entry point. Call this from server.py.

    Resolves the project root under *repo_path*, runs every scanner, and
    bundles the results into one JSON-serializable dict for any MCP client.
    """
    project_root = find_project_root(Path(repo_path))

    return {
        "project_root": str(project_root),
        "models": _scan_models(project_root),                # entity classes + fields
        "relationships": _scan_relationships(project_root),  # entity-to-entity edges
        "dtos": _scan_types(project_root),                   # DTO / type definitions
        "endpoints": _scan_endpoints(project_root),          # HTTP routes + access rules
        "swagger": detect_swagger(project_root),             # local/live OpenAPI availability
        "auth": _scan_auth(project_root),                    # JWT / OAuth2 / session detection
        "base_path": _scan_base_path(project_root),          # common URL prefix for axios
        "pagination": _scan_pagination(project_root),        # Spring Data Page usage
        "validation": _scan_validation(project_root),        # bean-validation constraints
        "enums": _scan_enums(project_root),                  # enum constants
    }
|
frontone/git_utils.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
import shutil
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from urllib.parse import urlparse
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _repo_name(repo_url: str) -> str:
|
|
11
|
+
path = urlparse(repo_url.rstrip("/")).path
|
|
12
|
+
name = path.rstrip("/").split("/")[-1]
|
|
13
|
+
name = re.sub(r"\.git$", "", name, flags=re.IGNORECASE)
|
|
14
|
+
return name or "repo"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _build_clone_url(repo_url: str) -> str:
|
|
18
|
+
token = os.environ.get("GITHUB_TOKEN", "").strip()
|
|
19
|
+
if not token:
|
|
20
|
+
return repo_url
|
|
21
|
+
parsed = urlparse(repo_url)
|
|
22
|
+
if parsed.scheme in ("http", "https") and "github.com" in (parsed.netloc or ""):
|
|
23
|
+
authed = parsed._replace(netloc=f"{token}@{parsed.netloc}")
|
|
24
|
+
return authed.geturl()
|
|
25
|
+
return repo_url
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def clone_repo(repo_url: str, base_dir: str | Path = "C:/") -> Path:
    """
    Shallow-clone *repo_url* into <base_dir>/<repo-name>-frontend.

    If the destination folder already exists it is reused as a cache (no
    freshness or validity check is performed). Returns the destination path.

    Args:
        repo_url: HTTP(S) or SSH URL of the repository.
        base_dir: Parent directory for the clone. Defaults to "C:/" to keep
            the original (Windows-specific) behavior; pass e.g.
            ``tempfile.gettempdir()`` on other platforms.

    Raises:
        RuntimeError: when gitpython is missing or the clone fails.
    """
    try:
        import git
    except ImportError as exc:
        raise RuntimeError("gitpython is required: pip install gitpython") from exc

    repo_name = _repo_name(repo_url)
    dest = Path(base_dir) / f"{repo_name}-frontend"

    # Cache hit: reuse whatever is already there.
    if dest.exists():
        return dest

    # NOTE: a token-bearing URL ends up in the clone's .git/config;
    # acceptable for a local working copy, but worth knowing.
    clone_url = _build_clone_url(repo_url)
    dest.mkdir(parents=True, exist_ok=True)

    try:
        git.Repo.clone_from(
            clone_url,
            str(dest),
            depth=1,            # shallow: only the latest commit
            single_branch=True,
        )
    except Exception as exc:
        # Remove the partial clone so a retry starts from a clean slate.
        shutil.rmtree(dest, ignore_errors=True)
        # Deliberately report repo_url (not clone_url) to avoid leaking the token.
        raise RuntimeError(f"Failed to clone {repo_url}: {exc}") from exc

    return dest
|