pdd-cli 0.0.45__py3-none-any.whl → 0.0.90__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pdd/__init__.py +4 -4
- pdd/agentic_common.py +863 -0
- pdd/agentic_crash.py +534 -0
- pdd/agentic_fix.py +1179 -0
- pdd/agentic_langtest.py +162 -0
- pdd/agentic_update.py +370 -0
- pdd/agentic_verify.py +183 -0
- pdd/auto_deps_main.py +15 -5
- pdd/auto_include.py +63 -5
- pdd/bug_main.py +3 -2
- pdd/bug_to_unit_test.py +2 -0
- pdd/change_main.py +11 -4
- pdd/cli.py +22 -1181
- pdd/cmd_test_main.py +73 -21
- pdd/code_generator.py +58 -18
- pdd/code_generator_main.py +672 -25
- pdd/commands/__init__.py +42 -0
- pdd/commands/analysis.py +248 -0
- pdd/commands/fix.py +140 -0
- pdd/commands/generate.py +257 -0
- pdd/commands/maintenance.py +174 -0
- pdd/commands/misc.py +79 -0
- pdd/commands/modify.py +230 -0
- pdd/commands/report.py +144 -0
- pdd/commands/templates.py +215 -0
- pdd/commands/utility.py +110 -0
- pdd/config_resolution.py +58 -0
- pdd/conflicts_main.py +8 -3
- pdd/construct_paths.py +258 -82
- pdd/context_generator.py +10 -2
- pdd/context_generator_main.py +113 -11
- pdd/continue_generation.py +47 -7
- pdd/core/__init__.py +0 -0
- pdd/core/cli.py +503 -0
- pdd/core/dump.py +554 -0
- pdd/core/errors.py +63 -0
- pdd/core/utils.py +90 -0
- pdd/crash_main.py +44 -11
- pdd/data/language_format.csv +71 -63
- pdd/data/llm_model.csv +20 -18
- pdd/detect_change_main.py +5 -4
- pdd/fix_code_loop.py +330 -76
- pdd/fix_error_loop.py +207 -61
- pdd/fix_errors_from_unit_tests.py +4 -3
- pdd/fix_main.py +75 -18
- pdd/fix_verification_errors.py +12 -100
- pdd/fix_verification_errors_loop.py +306 -272
- pdd/fix_verification_main.py +28 -9
- pdd/generate_output_paths.py +93 -10
- pdd/generate_test.py +16 -5
- pdd/get_jwt_token.py +9 -2
- pdd/get_run_command.py +73 -0
- pdd/get_test_command.py +68 -0
- pdd/git_update.py +70 -19
- pdd/incremental_code_generator.py +2 -2
- pdd/insert_includes.py +11 -3
- pdd/llm_invoke.py +1269 -103
- pdd/load_prompt_template.py +36 -10
- pdd/pdd_completion.fish +25 -2
- pdd/pdd_completion.sh +30 -4
- pdd/pdd_completion.zsh +79 -4
- pdd/postprocess.py +10 -3
- pdd/preprocess.py +228 -15
- pdd/preprocess_main.py +8 -5
- pdd/prompts/agentic_crash_explore_LLM.prompt +49 -0
- pdd/prompts/agentic_fix_explore_LLM.prompt +45 -0
- pdd/prompts/agentic_fix_harvest_only_LLM.prompt +48 -0
- pdd/prompts/agentic_fix_primary_LLM.prompt +85 -0
- pdd/prompts/agentic_update_LLM.prompt +1071 -0
- pdd/prompts/agentic_verify_explore_LLM.prompt +45 -0
- pdd/prompts/auto_include_LLM.prompt +100 -905
- pdd/prompts/detect_change_LLM.prompt +122 -20
- pdd/prompts/example_generator_LLM.prompt +22 -1
- pdd/prompts/extract_code_LLM.prompt +5 -1
- pdd/prompts/extract_program_code_fix_LLM.prompt +7 -1
- pdd/prompts/extract_prompt_update_LLM.prompt +7 -8
- pdd/prompts/extract_promptline_LLM.prompt +17 -11
- pdd/prompts/find_verification_errors_LLM.prompt +6 -0
- pdd/prompts/fix_code_module_errors_LLM.prompt +4 -2
- pdd/prompts/fix_errors_from_unit_tests_LLM.prompt +8 -0
- pdd/prompts/fix_verification_errors_LLM.prompt +22 -0
- pdd/prompts/generate_test_LLM.prompt +21 -6
- pdd/prompts/increase_tests_LLM.prompt +1 -5
- pdd/prompts/insert_includes_LLM.prompt +228 -108
- pdd/prompts/trace_LLM.prompt +25 -22
- pdd/prompts/unfinished_prompt_LLM.prompt +85 -1
- pdd/prompts/update_prompt_LLM.prompt +22 -1
- pdd/pytest_output.py +127 -12
- pdd/render_mermaid.py +236 -0
- pdd/setup_tool.py +648 -0
- pdd/simple_math.py +2 -0
- pdd/split_main.py +3 -2
- pdd/summarize_directory.py +49 -6
- pdd/sync_determine_operation.py +543 -98
- pdd/sync_main.py +81 -31
- pdd/sync_orchestration.py +1334 -751
- pdd/sync_tui.py +848 -0
- pdd/template_registry.py +264 -0
- pdd/templates/architecture/architecture_json.prompt +242 -0
- pdd/templates/generic/generate_prompt.prompt +174 -0
- pdd/trace.py +168 -12
- pdd/trace_main.py +4 -3
- pdd/track_cost.py +151 -61
- pdd/unfinished_prompt.py +49 -3
- pdd/update_main.py +549 -67
- pdd/update_model_costs.py +2 -2
- pdd/update_prompt.py +19 -4
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.90.dist-info}/METADATA +19 -6
- pdd_cli-0.0.90.dist-info/RECORD +153 -0
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.90.dist-info}/licenses/LICENSE +1 -1
- pdd_cli-0.0.45.dist-info/RECORD +0 -116
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.90.dist-info}/WHEEL +0 -0
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.90.dist-info}/entry_points.txt +0 -0
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.90.dist-info}/top_level.txt +0 -0
pdd/pytest_output.py
CHANGED
@@ -1,9 +1,11 @@
 import argparse
 import json
 import io
+import re
 import sys
 import pytest
 import subprocess
+from pathlib import Path
 from rich.console import Console
 from rich.pretty import pprint
 import os
@@ -11,6 +13,81 @@ from .python_env_detector import detect_host_python_executable
 
 console = Console()
 
+
+def _find_project_root(test_file: Path) -> Path | None:
+    """
+    Find the project root directory by looking for .pddrc (definitive PDD marker).
+
+    Only .pddrc is used as the project marker to ensure we don't incorrectly
+    identify project roots for non-PDD projects. This is a conservative approach
+    that maintains backward compatibility.
+
+    Args:
+        test_file: Path to the test file
+
+    Returns:
+        The project root directory if .pddrc is found, None otherwise.
+        When None is returned, the caller should use original behavior.
+    """
+    current = test_file.resolve().parent
+
+    # Walk up the directory tree looking for .pddrc only
+    while current != current.parent:
+        if (current / ".pddrc").exists():
+            return current
+        current = current.parent
+
+    # No .pddrc found - return None to signal original behavior should be used
+    return None
+
+
+_ANSI_ESCAPE_RE = re.compile(r"\x1b\[[0-?]*[ -/]*[@-~]")
+
+
+def _strip_ansi(text: str) -> str:
+    """Remove ANSI escape sequences from text for reliable parsing."""
+    return _ANSI_ESCAPE_RE.sub("", text)
+
+
+def extract_failing_files_from_output(pytest_output: str) -> list[str]:
+    """
+    Extract unique file paths from pytest FAILED output lines.
+
+    Parses patterns like:
+    - FAILED tests/test_foo.py::test_name - error message
+    - tests/test_foo.py::test_name FAILED
+
+    Args:
+        pytest_output: The combined stdout/stderr from a pytest run
+
+    Returns:
+        List of unique file paths (without ::test_name suffix) that had failures,
+        in the order they were first encountered.
+    """
+    cleaned_output = _strip_ansi(pytest_output)
+
+    failing_files = []
+    seen = set()
+
+    # Pattern 1: FAILED path/file.py::test_name (with optional error)
+    pattern1 = r'FAILED\s+([^\s:]+\.py)::'
+    for match in re.finditer(pattern1, cleaned_output):
+        file_path = match.group(1)
+        if file_path not in seen:
+            failing_files.append(file_path)
+            seen.add(file_path)
+
+    # Pattern 2: path/file.py::test_name FAILED (verbose output)
+    pattern2 = r'([^\s:]+\.py)::\S+\s+FAILED'
+    for match in re.finditer(pattern2, cleaned_output):
+        file_path = match.group(1)
+        if file_path not in seen:
+            failing_files.append(file_path)
+            seen.add(file_path)
+
+    return failing_files
+
+
 class TestResultCollector:
     __test__ = False  # Prevent pytest from collecting this plugin as a test
 
@@ -84,31 +161,69 @@ def run_pytest_and_capture_output(test_file: str) -> dict:
 
     # Use environment-aware Python executable for pytest execution
    python_executable = detect_host_python_executable()
-
+
+    # Find the project root directory for proper pytest execution (PDD projects only)
+    test_path = Path(test_file).resolve()
+    project_root = _find_project_root(test_path)
+
+    # Build subprocess kwargs - only modify cwd/env for PDD projects (.pddrc found)
+    subprocess_kwargs = {
+        "capture_output": True,
+        "text": True,
+        "timeout": 300,
+        "stdin": subprocess.DEVNULL,
+    }
+
+    pytest_args = [python_executable, "-B", "-m", "pytest", str(test_path), "-v"]
+
+    if project_root is not None:
+        # PDD project detected - set up proper environment
+        subprocess_kwargs["cwd"] = str(project_root)
+
+        # Build PYTHONPATH with both project root and src/ if it exists
+        paths_to_add = [str(project_root)]
+        src_dir = project_root / "src"
+        if src_dir.is_dir():
+            paths_to_add.insert(0, str(src_dir))  # src/ takes priority
+
+        env = os.environ.copy()
+        existing_pythonpath = env.get("PYTHONPATH", "")
+        if existing_pythonpath:
+            paths_to_add.append(existing_pythonpath)
+        env["PYTHONPATH"] = os.pathsep.join(paths_to_add)
+        subprocess_kwargs["env"] = env
+
+        # Add --rootdir to ensure pytest uses project's config
+        pytest_args.append(f"--rootdir={project_root}")
+
     try:
         # Run pytest using subprocess with the detected Python executable
-
-
-            capture_output=True,
-            text=True,
-            timeout=300
-        )
+        # Use -B flag to disable bytecode caching, ensuring fresh imports
+        result = subprocess.run(pytest_args, **subprocess_kwargs)
 
         stdout = result.stdout
         stderr = result.stderr
         return_code = result.returncode
+        parse_stdout = _strip_ansi(stdout or "")
 
         # Parse the output to extract test results
         # Count passed, failed, and skipped tests from the output
-        passed =
-        failures =
+        passed = parse_stdout.count(" PASSED")
+        failures = parse_stdout.count(" FAILED") + parse_stdout.count(" ERROR")
         errors = 0  # Will be included in failures for subprocess execution
-        warnings =
+        warnings = parse_stdout.lower().count("warning")
 
         # If return code is 2, it indicates a pytest error
         if return_code == 2:
             errors = 1
-
+        # Safety net: if parsing missed failures due to formatting (e.g., ANSI colors),
+        # never report a passing result on a non-zero return code.
+        if return_code != 0 and failures == 0 and errors == 0:
+            if return_code == 1:
+                failures = 1
+            else:
+                errors = 1
+
         return {
             "test_file": test_file,
             "test_results": [
@@ -199,4 +314,4 @@ def main():
     save_output_to_json(pytest_output)
 
 if __name__ == "__main__":
-    main()
+    main()
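
The new text-parsing helper is self-contained, so its behavior is easy to illustrate in isolation. The following is a minimal usage sketch, not part of the package, that assumes the function is importable from pdd.pytest_output as shown in the diff; the sample pytest output string is invented for illustration only.

from pdd.pytest_output import extract_failing_files_from_output

# Hypothetical pytest output, for illustration only.
sample_output = (
    "tests/test_api.py::test_create FAILED\n"
    "FAILED tests/test_api.py::test_delete - AssertionError\n"
    "FAILED tests/test_cli.py::test_help - SystemExit\n"
)

# Deduplicates by file and preserves first-seen order:
# ['tests/test_api.py', 'tests/test_cli.py']
print(extract_failing_files_from_output(sample_output))
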
pdd/render_mermaid.py
ADDED
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+"""
+Render architecture.json as an interactive HTML Mermaid diagram.
+Usage:
+    python render_mermaid.py architecture.json "App Name" [output.html]
+Features:
+- Direct browser rendering (no external tools)
+- Beautiful modern UI with statistics
+- Color-coded module categories
+- Interactive Mermaid diagram
+- Self-contained HTML (works offline)
+"""
+import json
+import sys
+import html
+from pathlib import Path
+
+# Indentation constants for better maintainability
+INDENT = '    '  # 4 spaces per level
+LEVELS = {
+    'root': 0,
+    'subgraph': 1,
+    'node': 2,
+    'connection': 1,
+    'style': 1
+}
+
+def write_pretty_architecture_json(arch_file, architecture):
+    """Rewrite architecture JSON with consistent formatting so diffs stay stable."""
+    path = Path(arch_file)
+    formatted = json.dumps(architecture, indent=2)
+    if not formatted.endswith("\n"):
+        formatted += "\n"
+    path.write_text(formatted, encoding="utf-8")
+
+
+def generate_mermaid_code(architecture, app_name="System"):
+    """Generate Mermaid flowchart code from architecture JSON."""
+    # Escape quotes for Mermaid label, which uses HTML entities
+    escaped_app_name = app_name.replace('"', '&quot;')
+    # Match test expectation: add a trailing space only if quotes were present
+    prd_label = f'{escaped_app_name} ' if '&quot;' in escaped_app_name else escaped_app_name
+
+    lines = ["flowchart TB", f'{INDENT * LEVELS["node"]}PRD["{prd_label}"]', INDENT]
+
+    if not architecture:
+        lines.append(INDENT)
+
+    # Categorize modules by tags (frontend takes priority over backend)
+    frontend = [
+        m
+        for m in architecture
+        if any(t in m.get('tags', []) for t in ['frontend', 'react', 'nextjs', 'ui', 'page', 'component'])
+    ]
+    backend = [
+        m
+        for m in architecture
+        if m not in frontend
+        and any(t in m.get('tags', []) for t in ['backend', 'api', 'database', 'sqlalchemy', 'fastapi'])
+    ]
+    shared = [m for m in architecture if m not in frontend and m not in backend]
+
+    # Generate subgraphs
+    for group_name, modules in [("Frontend", frontend), ("Backend", backend), ("Shared", shared)]:
+        if modules:
+            lines.append(f"{INDENT * LEVELS['subgraph']}subgraph {group_name}")
+            for m in modules:
+                name = Path(m['filename']).stem
+                pri = m.get('priority', 0)
+                lines.append(f'{INDENT * LEVELS["node"]}{name}["{name} ({pri})"]')
+            lines.append(f"{INDENT * LEVELS['subgraph']}end")
+            lines.append(INDENT)
+
+    # PRD connections
+    if frontend:
+        lines.append(f"{INDENT * LEVELS['connection']}PRD --> Frontend")
+    if backend:
+        lines.append(f"{INDENT * LEVELS['connection']}PRD --> Backend")
+
+    # Add newline between PRD connections and dependencies
+    if frontend or backend:
+        lines.append("")
+
+    # Dependencies
+    for m in architecture:
+        src = Path(m['filename']).stem
+        for dep in m.get('dependencies', []):
+            dst = Path(dep).stem
+            lines.append(f'{INDENT * LEVELS["connection"]}{src} -->|uses| {dst}')
+
+    # Add newline after dependencies
+    if any(m.get('dependencies', []) for m in architecture):
+        lines.append(INDENT)
+
+    # Styles
+    lines.extend([f"{INDENT * LEVELS['style']}classDef frontend fill:#FFF3E0,stroke:#F57C00,stroke-width:2px",
+                  f"{INDENT * LEVELS['style']}classDef backend fill:#E3F2FD,stroke:#1976D2,stroke-width:2px",
+                  f"{INDENT * LEVELS['style']}classDef shared fill:#E8F5E9,stroke:#388E3C,stroke-width:2px",
+                  f"{INDENT * LEVELS['style']}classDef system fill:#E0E0E0,stroke:#616161,stroke-width:3px", INDENT])
+
+    # Apply classes
+    if frontend:
+        lines.append(f"{INDENT * LEVELS['style']}class {','.join([Path(m['filename']).stem for m in frontend])} frontend")
+    if backend:
+        lines.append(f"{INDENT * LEVELS['style']}class {','.join([Path(m['filename']).stem for m in backend])} backend")
+    if shared:
+        lines.append(f"{INDENT * LEVELS['style']}class {','.join([Path(m['filename']).stem for m in shared])} shared")
+    lines.append(f"{INDENT * LEVELS['style']}class PRD system")
+
+    return "\n".join(lines)
+
+
+def generate_html(mermaid_code, architecture, app_name):
+    """Generate interactive HTML with hover tooltips."""
+
+    # Create module data as JSON for tooltips
+    module_data = {}
+    for m in architecture:
+        module_id = Path(m['filename']).stem
+        module_data[module_id] = {
+            'filename': m['filename'],
+            'priority': m.get('priority', 'N/A'),
+            'description': m.get('description', 'No description'),
+            'dependencies': m.get('dependencies', []),
+            'tags': m.get('tags', []),
+            'filepath': m.get('filepath', ''),
+        }
+
+    module_json = json.dumps(module_data)
+    escaped_app_name = html.escape(app_name)
+
+    return f"""<!DOCTYPE html>
+<html><head><meta charset="UTF-8"><title>{escaped_app_name}</title>
+<script type=\"module\">
+import mermaid from 'https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.esm.min.mjs';
+mermaid.initialize({{startOnLoad:true,theme:'default'}});
+window.addEventListener('load', () => {{
+    const moduleData = {module_json};
+
+    // Add hover listeners to all nodes
+    setTimeout(() => {{
+        const nodes = document.querySelectorAll('.node');
+        nodes.forEach(node => {{
+            const text = node.querySelector('.nodeLabel');
+            if (!text) return;
+
+            const nodeText = text.textContent.trim();
+            const moduleId = nodeText.split(' ')[0];
+            const data = moduleData[moduleId];
+
+            if (data) {{
+                node.style.cursor = 'pointer';
+
+                node.addEventListener('mouseenter', (e) => {{
+                    showTooltip(e, data);
+                }});
+
+                node.addEventListener('mouseleave', () => {{
+                    hideTooltip();
+                }});
+            }}
+        }});
+    }}, 500);
+}});
+function showTooltip(e, data) {{
+    hideTooltip();
+
+    const tooltip = document.createElement('div');
+    tooltip.id = 'module-tooltip';
+    tooltip.innerHTML = `
+        <div style="font-weight:600;margin-bottom:8px;font-size:1.1em;">${{data.filename}}</div>
+        <div style="margin-bottom:6px;"><strong>Priority:</strong> ${{data.priority}}</div>
+        <div style="margin-bottom:6px;"><strong>Path:</strong> ${{data.filepath}}</div>
+        <div style="margin-bottom:6px;"><strong>Tags:</strong> ${{data.tags.join(', ')}}</div>
+        <div style="margin-bottom:6px;"><strong>Dependencies:</strong> ${{data.dependencies.length > 0 ? data.dependencies.join(', ') : 'None'}}</div>
+        <div style="margin-top:8px;padding-top:8px;border-top:1px solid #ddd;font-size:0.9em;color:#444;">${{data.description}}</div>
+    `;
+
+    document.body.appendChild(tooltip);
+
+    const rect = e.target.closest('.node').getBoundingClientRect();
+    tooltip.style.left = rect.right + 10 + 'px';
+    tooltip.style.top = rect.top + window.scrollY + 'px';
+}}
+function hideTooltip() {{
+    const existing = document.getElementById('module-tooltip');
+    if (existing) existing.remove();
+}}
+</script>
+<style>
+*{{margin:0;padding:0;box-sizing:border-box}}
+body{{font-family:system-ui,sans-serif;background:#fff;color:#000;padding:2rem;max-width:1400px;margin:0 auto}}
+h1{{font-size:2rem;font-weight:600;margin-bottom:2rem;padding-bottom:1rem;border-bottom:2px solid #000}}
+.diagram{{border:1px solid #000;padding:2rem;margin:2rem 0;overflow-x:auto;position:relative}}
+.mermaid{{display:flex;justify-content:center}}
+#module-tooltip{{
+    position:absolute;
+    background:#fff;
+    border:2px solid #000;
+    padding:1rem;
+    max-width:400px;
+    z-index:1000;
+    box-shadow:4px 4px 0 rgba(0,0,0,0.1);
+    font-size:0.9rem;
+    line-height:1.5;
+}}
+.node{{transition:opacity 0.2s}}
+.node:hover{{opacity:0.8}}
+</style></head><body>
+<h1>{escaped_app_name}</h1>
+<div class="diagram"><pre class="mermaid">{mermaid_code}</pre></div>
+</body></html>"""
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        print("Usage: python render_mermaid.py <architecture.json> [app_name] [output.html]")
+        sys.exit(1)
+
+    arch_file = sys.argv[1]
+    app_name = sys.argv[2] if len(sys.argv) > 2 else "System Architecture"
+    output_file = sys.argv[3] if len(sys.argv) > 3 else f"{Path(arch_file).stem}_diagram.html"
+
+    with open(arch_file) as f:
+        architecture = json.load(f)
+    write_pretty_architecture_json(arch_file, architecture)
+
+    mermaid_code = generate_mermaid_code(architecture, app_name)
+    html_content = generate_html(mermaid_code, architecture, app_name)
+
+    with open(output_file, 'w', encoding='utf-8') as f:
+        f.write(html_content)
+
+    print(f"✅ Generated: {output_file}")
+    print(f"📊 Modules: {len(architecture)}")
+    print(f"🌐 Open {output_file} in your browser!")
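
The two top-level helpers can also be driven programmatically rather than through the command-line entry point. The sketch below is illustrative only: it assumes the module is importable as pdd.render_mermaid, and the architecture entries are invented to show the dict shape the functions read (filename, tags, priority, dependencies, description).

from pdd.render_mermaid import generate_mermaid_code, generate_html

# Hypothetical architecture entries, for illustration only.
architecture = [
    {"filename": "app.py", "tags": ["frontend", "ui"], "priority": 1,
     "dependencies": ["api.py"], "description": "UI entry point"},
    {"filename": "api.py", "tags": ["backend", "fastapi"], "priority": 2,
     "dependencies": [], "description": "HTTP API layer"},
]

mermaid_code = generate_mermaid_code(architecture, app_name="Demo App")
html_page = generate_html(mermaid_code, architecture, "Demo App")
# mermaid_code is Mermaid flowchart text; html_page is a self-contained HTML page
# that renders the diagram and shows per-module tooltips on hover.
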