devrel-origin 0.2.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- devrel_origin/__init__.py +15 -0
- devrel_origin/cli/__init__.py +92 -0
- devrel_origin/cli/_common.py +243 -0
- devrel_origin/cli/analytics.py +28 -0
- devrel_origin/cli/argus.py +497 -0
- devrel_origin/cli/auth.py +227 -0
- devrel_origin/cli/config.py +108 -0
- devrel_origin/cli/content.py +259 -0
- devrel_origin/cli/cost.py +108 -0
- devrel_origin/cli/cro.py +298 -0
- devrel_origin/cli/deliverables.py +65 -0
- devrel_origin/cli/docs.py +91 -0
- devrel_origin/cli/doctor.py +178 -0
- devrel_origin/cli/experiment.py +29 -0
- devrel_origin/cli/growth.py +97 -0
- devrel_origin/cli/init.py +472 -0
- devrel_origin/cli/intel.py +27 -0
- devrel_origin/cli/kb.py +96 -0
- devrel_origin/cli/listen.py +31 -0
- devrel_origin/cli/marketing.py +66 -0
- devrel_origin/cli/migrate.py +45 -0
- devrel_origin/cli/run.py +46 -0
- devrel_origin/cli/sales.py +57 -0
- devrel_origin/cli/schedule.py +62 -0
- devrel_origin/cli/synthesize.py +28 -0
- devrel_origin/cli/triage.py +29 -0
- devrel_origin/cli/video.py +35 -0
- devrel_origin/core/__init__.py +58 -0
- devrel_origin/core/agent_config.py +75 -0
- devrel_origin/core/argus.py +964 -0
- devrel_origin/core/atlas.py +1450 -0
- devrel_origin/core/base.py +372 -0
- devrel_origin/core/cyra.py +563 -0
- devrel_origin/core/dex.py +708 -0
- devrel_origin/core/echo.py +614 -0
- devrel_origin/core/growth/__init__.py +27 -0
- devrel_origin/core/growth/recommendations.py +219 -0
- devrel_origin/core/growth/target_kinds.py +51 -0
- devrel_origin/core/iris.py +513 -0
- devrel_origin/core/kai.py +1367 -0
- devrel_origin/core/llm.py +542 -0
- devrel_origin/core/llm_backends.py +274 -0
- devrel_origin/core/mox.py +514 -0
- devrel_origin/core/nova.py +349 -0
- devrel_origin/core/pax.py +1205 -0
- devrel_origin/core/rex.py +532 -0
- devrel_origin/core/sage.py +486 -0
- devrel_origin/core/sentinel.py +385 -0
- devrel_origin/core/types.py +98 -0
- devrel_origin/core/video/__init__.py +22 -0
- devrel_origin/core/video/assembler.py +131 -0
- devrel_origin/core/video/browser_recorder.py +118 -0
- devrel_origin/core/video/desktop_recorder.py +254 -0
- devrel_origin/core/video/overlay_renderer.py +143 -0
- devrel_origin/core/video/script_parser.py +147 -0
- devrel_origin/core/video/tts_engine.py +82 -0
- devrel_origin/core/vox.py +268 -0
- devrel_origin/core/watchdog.py +321 -0
- devrel_origin/project/__init__.py +1 -0
- devrel_origin/project/config.py +75 -0
- devrel_origin/project/cost_sink.py +61 -0
- devrel_origin/project/init.py +104 -0
- devrel_origin/project/paths.py +75 -0
- devrel_origin/project/state.py +241 -0
- devrel_origin/project/templates/__init__.py +4 -0
- devrel_origin/project/templates/config.toml +24 -0
- devrel_origin/project/templates/devrel.gitignore +10 -0
- devrel_origin/project/templates/slop-blocklist.md +45 -0
- devrel_origin/project/templates/style.md +24 -0
- devrel_origin/project/templates/voice.md +29 -0
- devrel_origin/quality/__init__.py +66 -0
- devrel_origin/quality/editorial.py +357 -0
- devrel_origin/quality/persona.py +84 -0
- devrel_origin/quality/readability.py +148 -0
- devrel_origin/quality/slop.py +167 -0
- devrel_origin/quality/style.py +110 -0
- devrel_origin/quality/voice.py +15 -0
- devrel_origin/tools/__init__.py +9 -0
- devrel_origin/tools/analytics.py +304 -0
- devrel_origin/tools/api_client.py +393 -0
- devrel_origin/tools/apollo_client.py +305 -0
- devrel_origin/tools/code_validator.py +428 -0
- devrel_origin/tools/github_tools.py +297 -0
- devrel_origin/tools/instantly_client.py +412 -0
- devrel_origin/tools/kb_harvester.py +340 -0
- devrel_origin/tools/mcp_server.py +578 -0
- devrel_origin/tools/notifications.py +245 -0
- devrel_origin/tools/run_report.py +193 -0
- devrel_origin/tools/scheduler.py +231 -0
- devrel_origin/tools/search_tools.py +321 -0
- devrel_origin/tools/self_improve.py +168 -0
- devrel_origin/tools/sheets.py +236 -0
- devrel_origin-0.2.14.dist-info/METADATA +354 -0
- devrel_origin-0.2.14.dist-info/RECORD +98 -0
- devrel_origin-0.2.14.dist-info/WHEEL +5 -0
- devrel_origin-0.2.14.dist-info/entry_points.txt +2 -0
- devrel_origin-0.2.14.dist-info/licenses/LICENSE +21 -0
- devrel_origin-0.2.14.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,708 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Dex — Documentation Generator Agent
|
|
3
|
+
|
|
4
|
+
Reads source code from repositories and generates technical documentation:
|
|
5
|
+
architecture overviews, API references, module guides, and README content.
|
|
6
|
+
Uses AST parsing for Python and heuristic analysis for other languages.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import ast
|
|
10
|
+
import logging
|
|
11
|
+
import re
|
|
12
|
+
from dataclasses import dataclass, field
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any, Optional
|
|
15
|
+
|
|
16
|
+
from devrel_origin.core.llm import LLMClient
|
|
17
|
+
from devrel_origin.tools.api_client import PostHogClient
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
# File extensions Dex knows how to analyse, mapped to the canonical
# language name used throughout the analysis results.  .jsx/.tsx are
# folded into their base languages.
SUPPORTED_EXTENSIONS = {
    ".py": "python",
    ".js": "javascript",
    ".ts": "typescript",
    ".jsx": "javascript",
    ".tsx": "typescript",
}

# Directories to always skip while walking a repository tree:
# VCS metadata, virtualenvs, caches, and build artifacts.  Matched
# against every path component, so nested occurrences are skipped too.
SKIP_DIRS = {
    "__pycache__",
    ".git",
    "node_modules",
    ".venv",
    "venv",
    "dist",
    "build",
    ".mypy_cache",
    ".pytest_cache",
    ".ruff_cache",
    ".tox",
    "egg-info",
}

# Max file size to analyse (256 KB); larger files are skipped entirely
# to keep scans fast and avoid generated or vendored monoliths.
MAX_FILE_SIZE = 256 * 1024
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class ParsedSymbol:
    """A single extracted symbol (class, function, variable)."""

    name: str  # qualified for methods, e.g. "ClassName.method"
    kind: str  # 'class', 'function', 'method', 'constant'
    signature: str  # e.g. "def foo(x: int, y: str) -> bool"
    docstring: str  # symbol docstring, "" when absent
    line_number: int  # 1-based line number in the source file
    decorators: list[str] = field(default_factory=list)  # decorator names, e.g. "staticmethod"
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
@dataclass
class ParsedModule:
    """Analysis of a single source file."""

    path: str  # relative to repo root
    language: str  # canonical language name from SUPPORTED_EXTENSIONS
    imports: list[str]  # imported module/symbol names (best effort)
    symbols: list[ParsedSymbol]  # top-level symbols plus class methods
    line_count: int  # total number of lines in the file
    docstring: str  # module-level docstring, "" when absent
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@dataclass
class RepoAnalysis:
    """Full analysis of a repository."""

    root: str  # repository root path as scanned
    modules: list[ParsedModule]  # one entry per parsed source file
    total_files: int  # number of parsed modules
    total_lines: int  # sum of line_count across modules
    languages: dict[str, int]  # language → file count
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class Dex:
    """
    Documentation Generator agent that reads source code and produces
    technical documentation.

    Capabilities:
    - Scan repository file trees and identify source modules
    - Parse Python files via AST for classes, functions, signatures, docstrings
    - Parse JavaScript/TypeScript files via heuristics for exports and functions
    - Generate architecture overviews, API references, and module guides
    - Optionally use an LLM to produce natural-language summaries
    """

    # System prompt passed verbatim to the LLM when generating summaries.
    SYSTEM_PROMPT = """You are Dex, a technical documentation generator for developer tools.
Your role is to produce clear, accurate documentation from source code analysis.

Guidelines:
1. ACCURACY FIRST — Every function signature, parameter type, and return type must
match the source code exactly. Never invent APIs that don't exist.
2. STRUCTURE — Use consistent heading hierarchy: H1 for the project, H2 for modules,
H3 for classes/functions. Include a table of contents for documents > 500 words.
3. DEVELOPER AUDIENCE — Write for engineers who will use this code. Lead with what
it does and how to use it, then cover internals.
4. CODE EXAMPLES — Include usage examples for public APIs. Show import paths.
5. CROSS-REFERENCES — Link related modules and classes to each other.

Output formats:
- Architecture overview: high-level module map, data flow, key patterns
- API reference: every public class/function with signature, params, return type, example
- Module guide: purpose, dependencies, key abstractions, usage patterns
- README: quick start, installation, project structure, contributing"""

    def __init__(
        self,
        api_client: PostHogClient,
        knowledge_base_path: Path,
        llm_client: Optional[LLMClient] = None,
    ):
        """Initialise Dex.

        Args:
            api_client: Analytics client.  Stored but not referenced by the
                methods in this class (NOTE: presumably used by the wider
                agent framework — confirm against callers).
            knowledge_base_path: Knowledge-base root directory.  Stored but
                not referenced by the methods in this class.
            llm_client: Optional LLM client; when present, execute() adds a
                natural-language summary to its result.
        """
        self.api_client = api_client
        self.knowledge_base_path = knowledge_base_path
        self.llm_client = llm_client
|
|
126
|
+
|
|
127
|
+
# ------------------------------------------------------------------
|
|
128
|
+
# Repository scanning
|
|
129
|
+
# ------------------------------------------------------------------
|
|
130
|
+
|
|
131
|
+
def scan_repo(self, repo_path: Path) -> RepoAnalysis:
    """Scan a repository and parse all supported source files.

    Walks *repo_path* recursively, skipping anything under SKIP_DIRS and
    files larger than MAX_FILE_SIZE, then parses each supported file
    (Python via AST, JS/TS heuristically).

    Args:
        repo_path: Repository root directory (str or Path).

    Returns:
        RepoAnalysis with one ParsedModule per successfully read file.
        ``languages`` counts only files that made it into ``modules``, so
        it always agrees with ``total_files``.
    """
    repo_path = Path(repo_path)
    modules: list[ParsedModule] = []
    languages: dict[str, int] = {}

    for filepath in sorted(repo_path.rglob("*")):
        if not filepath.is_file():
            continue
        if any(skip in filepath.parts for skip in SKIP_DIRS):
            continue

        ext = filepath.suffix
        if ext not in SUPPORTED_EXTENSIONS:
            continue

        try:
            # stat() can raise on broken symlinks or files deleted
            # mid-scan; treat those like unreadable files and skip.
            if filepath.stat().st_size > MAX_FILE_SIZE:
                continue
            source = filepath.read_text(encoding="utf-8", errors="replace")
        except (OSError, UnicodeDecodeError):
            continue

        # Count the language only after a successful read so the
        # breakdown stays consistent with total_files/modules.
        language = SUPPORTED_EXTENSIONS[ext]
        languages[language] = languages.get(language, 0) + 1

        rel_path = str(filepath.relative_to(repo_path))

        if language == "python":
            module = self._parse_python(rel_path, source)
        else:
            module = self._parse_js_ts(rel_path, source, language)

        modules.append(module)

    return RepoAnalysis(
        root=str(repo_path),
        modules=modules,
        total_files=len(modules),
        total_lines=sum(m.line_count for m in modules),
        languages=languages,
    )
|
|
175
|
+
|
|
176
|
+
# ------------------------------------------------------------------
|
|
177
|
+
# Python parser (AST-based)
|
|
178
|
+
# ------------------------------------------------------------------
|
|
179
|
+
|
|
180
|
+
def _parse_python(self, rel_path: str, source: str) -> ParsedModule:
    """Parse a Python file using the ast module.

    Extracts imports, top-level functions/classes (with their methods),
    and ALL_CAPS module-level constants.  Files that fail to parse are
    returned as an empty ParsedModule rather than raising.
    """
    line_count = source.count("\n") + 1
    imports: list[str] = []
    symbols: list[ParsedSymbol] = []
    module_doc = ""

    try:
        tree = ast.parse(source)
    except SyntaxError:
        # Unparseable file: keep the module entry (path + line count)
        # but with no imports/symbols, so scans never crash on bad code.
        return ParsedModule(
            path=rel_path,
            language="python",
            imports=[],
            symbols=[],
            line_count=line_count,
            docstring="",
        )

    module_doc = ast.get_docstring(tree) or ""

    # Only direct children of the module are dispatched here; symbols
    # nested inside `if`/`try` blocks at module level are not captured.
    for node in ast.iter_child_nodes(tree):
        # Imports: record dotted names; `from m import x` becomes "m.x".
        if isinstance(node, ast.Import):
            for alias in node.names:
                imports.append(alias.name)
        elif isinstance(node, ast.ImportFrom):
            module_name = node.module or ""
            for alias in node.names:
                imports.append(f"{module_name}.{alias.name}")

        # Top-level functions (sync and async)
        elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            symbols.append(self._parse_python_func(node))

        # Classes: record the class itself, then its methods.
        elif isinstance(node, ast.ClassDef):
            class_doc = ast.get_docstring(node) or ""
            decorators = [self._decorator_name(d) for d in node.decorator_list]
            bases = [self._node_name(b) for b in node.bases]
            sig = f"class {node.name}"
            if bases:
                sig += f"({', '.join(bases)})"

            symbols.append(
                ParsedSymbol(
                    name=node.name,
                    kind="class",
                    signature=sig,
                    docstring=class_doc,
                    line_number=node.lineno,
                    decorators=decorators,
                )
            )

            # Methods inside class — use ast.walk to capture nested
            # classes and decorated/conditionally-defined methods.
            # NOTE: walk also visits functions nested inside method
            # bodies (and methods of nested classes), all attributed to
            # this outer class's name.
            for item in ast.walk(node):
                if item is node:
                    continue
                if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
                    method = self._parse_python_func(item, class_name=node.name)
                    symbols.append(method)

        # Module-level constants (ALL_CAPS assignments).  str.isupper()
        # is the heuristic: digits/underscores are allowed, but at least
        # one cased character must be upper-case.
        elif isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id.isupper():
                    symbols.append(
                        ParsedSymbol(
                            name=target.id,
                            kind="constant",
                            signature=f"{target.id} = ...",
                            docstring="",
                            line_number=node.lineno,
                        )
                    )

        # Annotated module-level constants (e.g. `MAX_RETRIES: int = 5`)
        # — `ast.AnnAssign` has a single `target` (not `targets`), and we
        # only capture ALL_CAPS names so lowercase typed module vars
        # don't pollute the parsed symbol list.
        elif isinstance(node, ast.AnnAssign):
            if isinstance(node.target, ast.Name) and node.target.id.isupper():
                symbols.append(
                    ParsedSymbol(
                        name=node.target.id,
                        kind="constant",
                        signature=f"{node.target.id} = ...",
                        docstring="",
                        line_number=node.lineno,
                    )
                )

    return ParsedModule(
        path=rel_path,
        language="python",
        imports=imports,
        symbols=symbols,
        line_count=line_count,
        docstring=module_doc,
    )
|
|
282
|
+
|
|
283
|
+
def _parse_python_func(
    self, node: ast.FunctionDef | ast.AsyncFunctionDef, class_name: str = ""
) -> ParsedSymbol:
    """Build a ParsedSymbol from a function or method AST node.

    When *class_name* is given the symbol is a "method" and its name is
    qualified as "ClassName.method"; otherwise it is a "function".
    """
    keyword = "def"
    if isinstance(node, ast.AsyncFunctionDef):
        keyword = "async def"

    suffix = ""
    if node.returns:
        suffix = f" -> {self._node_name(node.returns)}"
    signature = f"{keyword} {node.name}({self._format_params(node.args)}){suffix}"

    if class_name:
        symbol_kind, symbol_name = "method", f"{class_name}.{node.name}"
    else:
        symbol_kind, symbol_name = "function", node.name

    return ParsedSymbol(
        name=symbol_name,
        kind=symbol_kind,
        signature=signature,
        docstring=ast.get_docstring(node) or "",
        line_number=node.lineno,
        decorators=[self._decorator_name(d) for d in node.decorator_list],
    )
|
|
308
|
+
|
|
309
|
+
@staticmethod
def _format_params(args: ast.arguments) -> str:
    """Format function parameters into a readable signature string.

    Renders positional-only, positional-or-keyword, *args, keyword-only,
    and **kwargs parameters.  Annotations are rendered via _node_name;
    default values are elided as `= ...`.
    """
    parts: list[str] = []

    # Bug fix: defaults apply to the tail of (posonlyargs + args)
    # combined, per the ast docs.  The previous code ignored
    # posonlyargs entirely, dropping positional-only parameters from
    # signatures and mis-aligning defaults for functions using `/`.
    positional = list(args.posonlyargs) + list(args.args)
    defaults_offset = len(positional) - len(args.defaults)

    for i, arg in enumerate(positional):
        param = arg.arg
        if arg.annotation:
            param += f": {Dex._node_name(arg.annotation)}"
        default_idx = i - defaults_offset
        if default_idx >= 0 and default_idx < len(args.defaults):
            param += " = ..."
        parts.append(param)

    if args.vararg:
        va = f"*{args.vararg.arg}"
        if args.vararg.annotation:
            va += f": {Dex._node_name(args.vararg.annotation)}"
        parts.append(va)

    # kw_defaults is parallel to kwonlyargs, with None for "no default".
    for i, arg in enumerate(args.kwonlyargs):
        param = arg.arg
        if arg.annotation:
            param += f": {Dex._node_name(arg.annotation)}"
        if i < len(args.kw_defaults) and args.kw_defaults[i] is not None:
            param += " = ..."
        parts.append(param)

    if args.kwarg:
        kw = f"**{args.kwarg.arg}"
        if args.kwarg.annotation:
            kw += f": {Dex._node_name(args.kwarg.annotation)}"
        parts.append(kw)

    return ", ".join(parts)
|
|
345
|
+
|
|
346
|
+
@staticmethod
def _node_name(node: ast.expr) -> str:
    """Best-effort extraction of a readable name from an AST expression.

    Supports the node shapes common in annotations: plain names, dotted
    attributes, constants, subscripts (generics), tuples, list literals
    (e.g. the parameter list of Callable), and PEP 604 `X | Y` unions.
    Anything else renders as "...".
    """
    if isinstance(node, ast.Name):
        return node.id
    elif isinstance(node, ast.Attribute):
        return f"{Dex._node_name(node.value)}.{node.attr}"
    elif isinstance(node, ast.Constant):
        return repr(node.value)
    elif isinstance(node, ast.Subscript):
        return f"{Dex._node_name(node.value)}[{Dex._node_name(node.slice)}]"
    elif isinstance(node, ast.Tuple):
        return ", ".join(Dex._node_name(e) for e in node.elts)
    elif isinstance(node, ast.List):
        # Generalization: previously rendered "...", degrading e.g.
        # Callable[[int], str] to Callable[..., str].
        return f"[{', '.join(Dex._node_name(e) for e in node.elts)}]"
    elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.BitOr):
        return f"{Dex._node_name(node.left)} | {Dex._node_name(node.right)}"
    return "..."
|
|
362
|
+
|
|
363
|
+
@staticmethod
def _decorator_name(node: ast.expr) -> str:
    """Return the name of a decorator expression.

    Handles bare names, dotted names, and called decorators (the name
    of the callable is used); anything else renders as "...".
    """
    if isinstance(node, ast.Call):
        # @deco(...) — describe the callable itself.
        return Dex._decorator_name(node.func)
    if isinstance(node, ast.Attribute):
        return f"{Dex._node_name(node.value)}.{node.attr}"
    if isinstance(node, ast.Name):
        return node.id
    return "..."
|
|
373
|
+
|
|
374
|
+
# ------------------------------------------------------------------
|
|
375
|
+
# JavaScript / TypeScript parser (heuristic)
|
|
376
|
+
# ------------------------------------------------------------------
|
|
377
|
+
|
|
378
|
+
# Regex patterns for JS/TS symbol extraction (heuristic, line-anchored).

# `function name(params)` declarations, optionally exported/async.
# Group 1 = name, group 2 = raw parameter list.
_JS_FUNC_RE = re.compile(
    r"^(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)",
    re.MULTILINE,
)
# `class Name extends Base` declarations, optionally exported.
# Group 1 = class name, group 2 = base class (may be absent).
_JS_CLASS_RE = re.compile(
    r"^(?:export\s+)?class\s+(\w+)(?:\s+extends\s+(\w+))?",
    re.MULTILINE,
)
# Arrow functions bound to const/let/var.  Only parenthesized parameter
# lists match; single-param `x => ...` forms are not captured.
_JS_CONST_FUNC_RE = re.compile(
    r"^(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\([^)]*\)\s*=>",
    re.MULTILINE,
)
# Named exports of any declaration kind.  NOTE(review): not referenced
# by the methods in this class as written — confirm before removing.
_JS_EXPORT_RE = re.compile(
    r"^export\s+(?:default\s+)?(?:const|let|var|function|class)\s+(\w+)",
    re.MULTILINE,
)
|
|
395
|
+
|
|
396
|
+
def _parse_js_ts(self, rel_path: str, source: str, language: str) -> ParsedModule:
    """Heuristic parser for JavaScript/TypeScript files.

    Regex-based, not a real parser: extracts ES/CommonJS imports,
    `function` declarations, `class` declarations, and arrow functions
    bound with const/let/var.  JSDoc blocks immediately preceding a
    symbol become its docstring.
    """
    line_count = source.count("\n") + 1
    symbols: list[ParsedSymbol] = []
    imports: list[str] = []

    # Extract imports: `import ... from 'x'` or `const y = require('x')`.
    for match in re.finditer(
        r"(?:import\s+.*?from\s+['\"]([^'\"]+)['\"]|"
        r"(?:const|let|var)\s+.*?=\s*require\(['\"]([^'\"]+)['\"]\))",
        source,
    ):
        imp = match.group(1) or match.group(2)
        if imp:
            imports.append(imp)

    # Extract `function name(params)` declarations.
    for match in self._JS_FUNC_RE.finditer(source):
        name = match.group(1)
        params = match.group(2).strip()
        # 1-based line number derived from newline count before the match.
        line = source[: match.start()].count("\n") + 1
        symbols.append(
            ParsedSymbol(
                name=name,
                kind="function",
                signature=f"function {name}({params})",
                docstring=self._extract_jsdoc(source, match.start()),
                line_number=line,
            )
        )

    # Extract class declarations (with optional `extends` base).
    for match in self._JS_CLASS_RE.finditer(source):
        name = match.group(1)
        extends = match.group(2)
        line = source[: match.start()].count("\n") + 1
        sig = f"class {name}"
        if extends:
            sig += f" extends {extends}"
        symbols.append(
            ParsedSymbol(
                name=name,
                kind="class",
                signature=sig,
                docstring=self._extract_jsdoc(source, match.start()),
                line_number=line,
            )
        )

    # Extract arrow functions; parameters are not recovered, so the
    # signature is rendered with a placeholder.
    for match in self._JS_CONST_FUNC_RE.finditer(source):
        name = match.group(1)
        line = source[: match.start()].count("\n") + 1
        symbols.append(
            ParsedSymbol(
                name=name,
                kind="function",
                signature=f"const {name} = (...) => ...",
                docstring=self._extract_jsdoc(source, match.start()),
                line_number=line,
            )
        )

    # Module docstring: first `/** ... */` block comment at file start.
    first_comment = re.match(r"\s*/\*\*(.*?)\*/", source, re.DOTALL)
    module_doc = first_comment.group(1).strip() if first_comment else ""

    return ParsedModule(
        path=rel_path,
        language=language,
        imports=imports,
        symbols=symbols,
        line_count=line_count,
        docstring=module_doc,
    )
|
|
471
|
+
|
|
472
|
+
@staticmethod
def _extract_jsdoc(source: str, pos: int) -> str:
    """Return the JSDoc block (`/** ... */`) directly before *pos*, cleaned.

    Leading `*` gutters and blank lines are stripped.  Returns "" when
    no JSDoc comment immediately precedes the position.
    """
    preceding = source[:pos].rstrip()
    found = re.search(r"/\*\*(.*?)\*/\s*$", preceding, re.DOTALL)
    if not found:
        return ""
    # Drop the leading "* " gutter from each line, then discard blanks.
    gutterless = (re.sub(r"^\s*\*\s?", "", ln) for ln in found.group(1).splitlines())
    return "\n".join(ln for ln in gutterless if ln.strip()).strip()
|
|
482
|
+
|
|
483
|
+
# ------------------------------------------------------------------
|
|
484
|
+
# Documentation generation
|
|
485
|
+
# ------------------------------------------------------------------
|
|
486
|
+
|
|
487
|
+
def generate_architecture_doc(self, analysis: RepoAnalysis) -> str:
    """Generate a markdown architecture overview from repo analysis.

    Produces a header with file/line totals, a language-breakdown
    table, and a per-directory module map listing each module's
    first-line summary, classes, and functions.

    Args:
        analysis: Result of scan_repo().

    Returns:
        The overview as a single markdown string.
    """
    lines: list[str] = []
    lines.append("# Architecture Overview\n")
    lines.append(f"**Root:** `{analysis.root}`\n")
    lines.append(f"**Files:** {analysis.total_files} | **Lines:** {analysis.total_lines}\n")

    # Language breakdown, most files first
    if analysis.languages:
        lines.append("## Languages\n")
        lines.append("| Language | Files |")
        lines.append("|----------|-------|")
        for lang, count in sorted(analysis.languages.items(), key=lambda x: -x[1]):
            lines.append(f"| {lang} | {count} |")
        lines.append("")

    # Module map, grouped by containing directory ("." for root files)
    lines.append("## Module Map\n")
    dirs: dict[str, list[ParsedModule]] = {}
    for mod in analysis.modules:
        parts = mod.path.split("/")
        dir_name = "/".join(parts[:-1]) if len(parts) > 1 else "."
        dirs.setdefault(dir_name, []).append(mod)

    for dir_name in sorted(dirs):
        lines.append(f"### `{dir_name}/`\n")
        for mod in sorted(dirs[dir_name], key=lambda m: m.path):
            filename = mod.path.split("/")[-1]
            summary = mod.docstring.split("\n")[0] if mod.docstring else ""
            classes = [s for s in mod.symbols if s.kind == "class"]
            funcs = [s for s in mod.symbols if s.kind == "function"]
            # Bug fix: this entry previously printed the literal string
            # "(unknown)" instead of the computed filename.
            line = f"- **`{filename}`** ({mod.line_count} lines)"
            if summary:
                line += f" — {summary}"
            if classes:
                line += f" | Classes: {', '.join(c.name for c in classes)}"
            if funcs:
                line += f" | Functions: {', '.join(f.name for f in funcs)}"
            lines.append(line)
        lines.append("")

    return "\n".join(lines)
|
|
529
|
+
|
|
530
|
+
def generate_api_reference(self, analysis: RepoAnalysis) -> str:
    """Generate a markdown API reference covering every public symbol.

    Symbols whose unqualified name starts with an underscore are
    treated as private and omitted; modules with no public symbols are
    skipped entirely.
    """
    out: list[str] = []
    out.append("# API Reference\n")

    for mod in sorted(analysis.modules, key=lambda m: m.path):
        # Public = the last dotted component does not start with "_".
        exported = [s for s in mod.symbols if not s.name.split(".")[-1].startswith("_")]
        if not exported:
            continue

        out.append(f"## `{mod.path}`\n")
        if mod.docstring:
            first_doc_line = mod.docstring.split("\n")[0]
            out.append(f"{first_doc_line}\n")

        for symbol in exported:
            fence = f"```{mod.language}\n{symbol.signature}\n```\n"
            if symbol.kind == "constant":
                out.append(f"### `{symbol.name}`\n")
                out.append(fence)
            elif symbol.kind == "class":
                out.append(f"### `{symbol.name}`\n")
                out.append(fence)
                if symbol.docstring:
                    out.append(f"{symbol.docstring}\n")
            elif symbol.kind in ("function", "method"):
                out.append(f"#### `{symbol.name}()`\n")
                out.append(fence)
                if symbol.docstring:
                    out.append(f"{symbol.docstring}\n")
                if symbol.decorators:
                    decorated = ", ".join(f"`@{d}`" for d in symbol.decorators)
                    out.append(f"Decorators: {decorated}\n")

    return "\n".join(out)
|
|
564
|
+
|
|
565
|
+
def generate_module_guide(self, module: ParsedModule) -> str:
    """Generate a detailed markdown guide for a single module.

    Sections, each emitted only when non-empty: overview, dependencies,
    constants, classes (with their public methods plus __init__), and
    module-level functions.
    """
    doc: list[str] = [
        f"# Module: `{module.path}`\n",
        f"**Language:** {module.language} | **Lines:** {module.line_count}\n",
    ]

    if module.docstring:
        doc.append(f"## Overview\n\n{module.docstring}\n")

    if module.imports:
        doc.append("## Dependencies\n")
        doc.extend(f"- `{dep}`" for dep in module.imports)
        doc.append("")

    # Bucket symbols by kind, preserving source order within each bucket.
    by_kind: dict[str, list[ParsedSymbol]] = {"class": [], "function": [], "constant": []}
    for sym in module.symbols:
        if sym.kind in by_kind:
            by_kind[sym.kind].append(sym)

    if by_kind["constant"]:
        doc.append("## Constants\n")
        doc.extend(f"- `{const.signature}`" for const in by_kind["constant"])
        doc.append("")

    if by_kind["class"]:
        doc.append("## Classes\n")
        for cls in by_kind["class"]:
            doc.append(f"### `{cls.signature}`\n")
            if cls.docstring:
                doc.append(f"{cls.docstring}\n")
            prefix = f"{cls.name}."
            methods = [
                s for s in module.symbols if s.kind == "method" and s.name.startswith(prefix)
            ]
            if methods:
                doc.append("**Methods:**\n")
                for meth in methods:
                    tail = meth.name.split(".")[-1]
                    # Hide private methods, but keep the constructor.
                    if tail.startswith("_") and tail != "__init__":
                        continue
                    doc.append(f"- `{meth.signature}`")
                    if meth.docstring:
                        summary = meth.docstring.split("\n")[0]
                        doc.append(f"  {summary}")
                doc.append("")

    if by_kind["function"]:
        doc.append("## Functions\n")
        for fn in by_kind["function"]:
            doc.append(f"### `{fn.signature}`\n")
            if fn.docstring:
                doc.append(f"{fn.docstring}\n")

    return "\n".join(doc)
|
|
621
|
+
|
|
622
|
+
# ------------------------------------------------------------------
|
|
623
|
+
# Agent interface (matches other agents)
|
|
624
|
+
# ------------------------------------------------------------------
|
|
625
|
+
|
|
626
|
+
async def execute(
    self,
    task: str,
    context: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """
    Execute a documentation generation task.

    Scans the target repo (defaults to this project's root), generates
    docs, and optionally uses an LLM for natural-language summaries.

    Args:
        task: Free-text task description; included in the result and the
            LLM prompt.
        context: Optional settings; only the "repo_path" key is read.

    Returns:
        Dict with scan stats, per-module summaries, the generated
        "architecture_doc" and "api_reference" markdown, and — when an
        LLM client is configured and the call succeeds — "llm_summary".
    """
    logger.info(f"Dex executing: {task[:80]}...")

    # Determine repo path: from context, or default to .devrel project
    # root, falling back to cwd if no .devrel/config.toml is reachable.
    if context and "repo_path" in context:
        repo_path = Path(context["repo_path"])
    else:
        try:
            # Imported lazily so a missing/broken project layout only
            # degrades the default path instead of breaking the module.
            from devrel_origin.project.paths import (
                ProjectNotFoundError,
                find_devrel_root,
            )

            try:
                repo_path = find_devrel_root()
            except ProjectNotFoundError:
                repo_path = Path(".")
        except Exception:
            # Best-effort fallback: any import/lookup failure means cwd.
            repo_path = Path(".")

    # Scan and analyse
    analysis = self.scan_repo(repo_path)

    # Generate documentation artifacts
    architecture = self.generate_architecture_doc(analysis)
    api_reference = self.generate_api_reference(analysis)

    base_result: dict[str, Any] = {
        "agent": "dex",
        "task": task,
        "repo": str(repo_path),
        "total_files": analysis.total_files,
        "total_lines": analysis.total_lines,
        "languages": analysis.languages,
        "modules": [
            {
                "path": m.path,
                "language": m.language,
                "line_count": m.line_count,
                "symbols": len(m.symbols),
                # Truncated to keep the result payload small.
                "docstring": m.docstring[:200] if m.docstring else "",
            }
            for m in analysis.modules
        ],
        "architecture_doc": architecture,
        "api_reference": api_reference,
        "status": "generated",
    }

    # Optionally use LLM for a high-level summary; failures are logged
    # and swallowed so the deterministic docs above are still returned.
    if self.llm_client:
        try:
            summary_prompt = (
                f"Task: {task}\n\n"
                f"Below is the architecture overview of a codebase. "
                f"Write a concise technical summary (3-5 paragraphs) covering:\n"
                f"- What this project does\n"
                f"- Key architectural patterns\n"
                f"- Main entry points and public APIs\n"
                f"- Notable dependencies\n\n"
                f"Architecture:\n{architecture[:4000]}"
            )
            summary = await self.llm_client.generate(
                system_prompt=self.SYSTEM_PROMPT,
                user_prompt=summary_prompt,
                temperature=0.3,
            )
            base_result["llm_summary"] = summary
        except Exception as exc:
            logger.warning(f"LLM summary generation failed: {exc}")

    return base_result
|