doit-toolkit-cli 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- doit_cli/__init__.py +1356 -0
- doit_cli/cli/__init__.py +26 -0
- doit_cli/cli/analytics_command.py +616 -0
- doit_cli/cli/context_command.py +213 -0
- doit_cli/cli/diagram_command.py +304 -0
- doit_cli/cli/fixit_command.py +641 -0
- doit_cli/cli/hooks_command.py +211 -0
- doit_cli/cli/init_command.py +613 -0
- doit_cli/cli/memory_command.py +293 -0
- doit_cli/cli/status_command.py +117 -0
- doit_cli/cli/sync_prompts_command.py +248 -0
- doit_cli/cli/validate_command.py +196 -0
- doit_cli/cli/verify_command.py +204 -0
- doit_cli/cli/workflow_mixin.py +224 -0
- doit_cli/cli/xref_command.py +555 -0
- doit_cli/formatters/__init__.py +8 -0
- doit_cli/formatters/base.py +38 -0
- doit_cli/formatters/json_formatter.py +126 -0
- doit_cli/formatters/markdown_formatter.py +97 -0
- doit_cli/formatters/rich_formatter.py +257 -0
- doit_cli/main.py +49 -0
- doit_cli/models/__init__.py +139 -0
- doit_cli/models/agent.py +74 -0
- doit_cli/models/analytics_models.py +384 -0
- doit_cli/models/context_config.py +464 -0
- doit_cli/models/crossref_models.py +182 -0
- doit_cli/models/diagram_models.py +363 -0
- doit_cli/models/fixit_models.py +355 -0
- doit_cli/models/hook_config.py +125 -0
- doit_cli/models/project.py +91 -0
- doit_cli/models/results.py +121 -0
- doit_cli/models/search_models.py +228 -0
- doit_cli/models/status_models.py +195 -0
- doit_cli/models/sync_models.py +146 -0
- doit_cli/models/template.py +77 -0
- doit_cli/models/validation_models.py +175 -0
- doit_cli/models/workflow_models.py +319 -0
- doit_cli/prompts/__init__.py +5 -0
- doit_cli/prompts/fixit_prompts.py +344 -0
- doit_cli/prompts/interactive.py +390 -0
- doit_cli/rules/__init__.py +5 -0
- doit_cli/rules/builtin_rules.py +160 -0
- doit_cli/services/__init__.py +79 -0
- doit_cli/services/agent_detector.py +168 -0
- doit_cli/services/analytics_service.py +218 -0
- doit_cli/services/architecture_generator.py +290 -0
- doit_cli/services/backup_service.py +204 -0
- doit_cli/services/config_loader.py +113 -0
- doit_cli/services/context_loader.py +1121 -0
- doit_cli/services/coverage_calculator.py +142 -0
- doit_cli/services/crossref_service.py +237 -0
- doit_cli/services/cycle_time_calculator.py +134 -0
- doit_cli/services/date_inferrer.py +349 -0
- doit_cli/services/diagram_service.py +337 -0
- doit_cli/services/drift_detector.py +109 -0
- doit_cli/services/entity_parser.py +301 -0
- doit_cli/services/er_diagram_generator.py +197 -0
- doit_cli/services/fixit_service.py +699 -0
- doit_cli/services/github_service.py +192 -0
- doit_cli/services/hook_manager.py +258 -0
- doit_cli/services/hook_validator.py +528 -0
- doit_cli/services/input_validator.py +322 -0
- doit_cli/services/memory_search.py +527 -0
- doit_cli/services/mermaid_validator.py +334 -0
- doit_cli/services/prompt_transformer.py +91 -0
- doit_cli/services/prompt_writer.py +133 -0
- doit_cli/services/query_interpreter.py +428 -0
- doit_cli/services/report_exporter.py +219 -0
- doit_cli/services/report_generator.py +256 -0
- doit_cli/services/requirement_parser.py +112 -0
- doit_cli/services/roadmap_summarizer.py +209 -0
- doit_cli/services/rule_engine.py +443 -0
- doit_cli/services/scaffolder.py +215 -0
- doit_cli/services/score_calculator.py +172 -0
- doit_cli/services/section_parser.py +204 -0
- doit_cli/services/spec_scanner.py +327 -0
- doit_cli/services/state_manager.py +355 -0
- doit_cli/services/status_reporter.py +143 -0
- doit_cli/services/task_parser.py +347 -0
- doit_cli/services/template_manager.py +710 -0
- doit_cli/services/template_reader.py +158 -0
- doit_cli/services/user_journey_generator.py +214 -0
- doit_cli/services/user_story_parser.py +232 -0
- doit_cli/services/validation_service.py +188 -0
- doit_cli/services/validator.py +232 -0
- doit_cli/services/velocity_tracker.py +173 -0
- doit_cli/services/workflow_engine.py +405 -0
- doit_cli/templates/agent-file-template.md +28 -0
- doit_cli/templates/checklist-template.md +39 -0
- doit_cli/templates/commands/doit.checkin.md +363 -0
- doit_cli/templates/commands/doit.constitution.md +187 -0
- doit_cli/templates/commands/doit.documentit.md +485 -0
- doit_cli/templates/commands/doit.fixit.md +181 -0
- doit_cli/templates/commands/doit.implementit.md +265 -0
- doit_cli/templates/commands/doit.planit.md +262 -0
- doit_cli/templates/commands/doit.reviewit.md +355 -0
- doit_cli/templates/commands/doit.roadmapit.md +368 -0
- doit_cli/templates/commands/doit.scaffoldit.md +458 -0
- doit_cli/templates/commands/doit.specit.md +521 -0
- doit_cli/templates/commands/doit.taskit.md +304 -0
- doit_cli/templates/commands/doit.testit.md +277 -0
- doit_cli/templates/config/context.yaml +134 -0
- doit_cli/templates/config/hooks.yaml +93 -0
- doit_cli/templates/config/validation-rules.yaml +64 -0
- doit_cli/templates/github-issue-templates/epic.yml +78 -0
- doit_cli/templates/github-issue-templates/feature.yml +116 -0
- doit_cli/templates/github-issue-templates/task.yml +129 -0
- doit_cli/templates/hooks/.gitkeep +0 -0
- doit_cli/templates/hooks/post-commit.sh +25 -0
- doit_cli/templates/hooks/post-merge.sh +75 -0
- doit_cli/templates/hooks/pre-commit.sh +17 -0
- doit_cli/templates/hooks/pre-push.sh +18 -0
- doit_cli/templates/memory/completed_roadmap.md +50 -0
- doit_cli/templates/memory/constitution.md +125 -0
- doit_cli/templates/memory/roadmap.md +61 -0
- doit_cli/templates/plan-template.md +146 -0
- doit_cli/templates/scripts/bash/check-prerequisites.sh +166 -0
- doit_cli/templates/scripts/bash/common.sh +156 -0
- doit_cli/templates/scripts/bash/create-new-feature.sh +297 -0
- doit_cli/templates/scripts/bash/setup-plan.sh +61 -0
- doit_cli/templates/scripts/bash/update-agent-context.sh +675 -0
- doit_cli/templates/scripts/powershell/check-prerequisites.ps1 +148 -0
- doit_cli/templates/scripts/powershell/common.ps1 +137 -0
- doit_cli/templates/scripts/powershell/create-new-feature.ps1 +283 -0
- doit_cli/templates/scripts/powershell/setup-plan.ps1 +61 -0
- doit_cli/templates/scripts/powershell/update-agent-context.ps1 +406 -0
- doit_cli/templates/spec-template.md +159 -0
- doit_cli/templates/tasks-template.md +313 -0
- doit_cli/templates/vscode-settings.json +14 -0
- doit_toolkit_cli-0.1.9.dist-info/METADATA +324 -0
- doit_toolkit_cli-0.1.9.dist-info/RECORD +134 -0
- doit_toolkit_cli-0.1.9.dist-info/WHEEL +4 -0
- doit_toolkit_cli-0.1.9.dist-info/entry_points.txt +2 -0
- doit_toolkit_cli-0.1.9.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
"""Date inference service for spec analytics.
|
|
2
|
+
|
|
3
|
+
This module extracts creation and completion dates for specifications using
|
|
4
|
+
a multi-tier fallback strategy:
|
|
5
|
+
1. Parse dates from spec.md metadata (e.g., **Created**: YYYY-MM-DD)
|
|
6
|
+
2. Extract dates from git history
|
|
7
|
+
3. Fall back to file system timestamps
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import re
|
|
11
|
+
import subprocess
|
|
12
|
+
from datetime import date, datetime
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Optional
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DateInferrer:
    """Service for inferring spec lifecycle dates.

    Uses a multi-tier fallback strategy to extract creation and completion
    dates: spec.md metadata first, then git history, then file system
    timestamps.
    """

    # Patterns for extracting dates from spec metadata lines such as
    # "**Created**: 2024-01-15" or "**Status**: Approved".
    CREATED_PATTERN = re.compile(
        r"\*\*Created\*\*:\s*(\d{4}-\d{2}-\d{2})", re.IGNORECASE
    )
    DATE_PATTERN = re.compile(
        r"\*\*Date\*\*:\s*(\d{4}-\d{2}-\d{2})", re.IGNORECASE
    )
    STATUS_COMPLETE_PATTERN = re.compile(
        r"\*\*Status\*\*:\s*(Complete|Completed|Approved)", re.IGNORECASE
    )

    # POSIX ERE equivalent of STATUS_COMPLETE_PATTERN, for git's -G pickaxe
    # (git regexes do not reliably support \s across platforms, so use
    # [[:space:]] instead).
    _GIT_STATUS_REGEX = r"\*\*Status\*\*:[[:space:]]*(Complete|Completed|Approved)"

    def __init__(self, project_root: Path):
        """Initialize the date inferrer.

        Args:
            project_root: Root directory of the project
        """
        self.project_root = project_root
        # Cached result of the git availability probe (None = not probed yet).
        self._git_available: Optional[bool] = None

    def _is_git_available(self) -> bool:
        """Check if git is available and project is a git repo.

        The result is cached so repeated date inferences spawn the probe
        subprocess at most once.
        """
        if self._git_available is not None:
            return self._git_available

        try:
            result = subprocess.run(
                ["git", "rev-parse", "--git-dir"],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=5,
            )
            self._git_available = result.returncode == 0
        except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
            self._git_available = False

        return self._git_available

    def infer_created_date(self, spec_path: Path) -> Optional[date]:
        """Infer the creation date for a spec.

        Tries sources in order:
        1. **Created**: in spec.md metadata
        2. **Date**: in spec.md metadata (fallback)
        3. Git first commit date for the file
        4. File system creation time

        Args:
            spec_path: Path to the spec.md file

        Returns:
            Inferred creation date or None if cannot determine
        """
        # Tier 1: Parse from metadata
        if spec_path.exists():
            metadata_date = self._parse_created_from_metadata(spec_path)
            if metadata_date:
                return metadata_date

        # Tier 2: Git first commit
        if self._is_git_available():
            git_date = self._get_git_first_commit_date(spec_path)
            if git_date:
                return git_date

        # Tier 3: File system
        return self._get_file_creation_date(spec_path)

    def infer_completed_date(self, spec_path: Path) -> Optional[date]:
        """Infer the completion date for a spec.

        Tries sources in order:
        1. Git commit that changed status to Complete/Approved
        2. Git last modification date (if status is Complete/Approved)
        3. File modification time (if status is Complete/Approved)

        Args:
            spec_path: Path to the spec.md file

        Returns:
            Inferred completion date or None if not completed
        """
        if not spec_path.exists():
            return None

        # Only completed specs have a completion date at all.
        if not self._is_spec_completed(spec_path):
            return None

        # Tier 1: Git commit that changed status to Complete/Approved
        if self._is_git_available():
            status_change_date = self._get_git_status_change_date(spec_path)
            if status_change_date:
                return status_change_date

            # Tier 2: Git last modification date
            last_mod_date = self._get_git_last_modified_date(spec_path)
            if last_mod_date:
                return last_mod_date

        # Tier 3: File modification time
        return self._get_file_modification_date(spec_path)

    def _parse_created_from_metadata(self, spec_path: Path) -> Optional[date]:
        """Parse creation date from spec metadata.

        Checks **Created**: first, then falls back to **Date**:.

        Args:
            spec_path: Path to spec.md file

        Returns:
            Parsed date or None
        """
        try:
            content = spec_path.read_text(encoding="utf-8")
        except (OSError, UnicodeDecodeError):
            return None

        for pattern in (self.CREATED_PATTERN, self.DATE_PATTERN):
            match = pattern.search(content)
            if match:
                return self._parse_date_string(match.group(1))

        return None

    def _is_spec_completed(self, spec_path: Path) -> bool:
        """Check if spec has Complete, Completed, or Approved status.

        Args:
            spec_path: Path to spec.md file

        Returns:
            True if completed, False otherwise
        """
        try:
            content = spec_path.read_text(encoding="utf-8")
            return bool(self.STATUS_COMPLETE_PATTERN.search(content))
        except (OSError, UnicodeDecodeError):
            return False

    def _get_git_first_commit_date(self, spec_path: Path) -> Optional[date]:
        """Get the date of the first git commit for a file.

        Args:
            spec_path: Path to the file

        Returns:
            Date of first commit or None
        """
        try:
            result = subprocess.run(
                [
                    "git",
                    "log",
                    "--follow",
                    "--diff-filter=A",
                    "--format=%aI",
                    "--",
                    str(spec_path),
                ],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=10,
            )

            if result.returncode == 0 and result.stdout.strip():
                # git log lists newest first; the last line is the original
                # "A" (added) commit when the file was renamed and re-added.
                lines = result.stdout.strip().split("\n")
                first_commit_date = lines[-1] if lines else None
                if first_commit_date:
                    return self._parse_iso_datetime(first_commit_date)

        except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
            pass

        return None

    def _get_git_last_modified_date(self, spec_path: Path) -> Optional[date]:
        """Get the date of the last git modification for a file.

        Args:
            spec_path: Path to the file

        Returns:
            Date of last modification or None
        """
        try:
            result = subprocess.run(
                [
                    "git",
                    "log",
                    "-1",
                    "--format=%aI",
                    "--",
                    str(spec_path),
                ],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=10,
            )

            if result.returncode == 0 and result.stdout.strip():
                return self._parse_iso_datetime(result.stdout.strip())

        except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
            pass

        return None

    def _get_git_status_change_date(self, spec_path: Path) -> Optional[date]:
        """Get the date when status changed to Complete/Approved.

        Uses git's ``-G`` pickaxe with a regex matching all completion
        status values (Complete, Completed, Approved), consistent with
        STATUS_COMPLETE_PATTERN.  The previous implementation searched
        only for the literal "Status**: Complete" and therefore missed
        specs whose status was set to "Approved".

        Args:
            spec_path: Path to the file

        Returns:
            Date of status change or None
        """
        try:
            result = subprocess.run(
                [
                    "git",
                    "log",
                    "--format=%aI",
                    "-G",
                    self._GIT_STATUS_REGEX,
                    "--",
                    str(spec_path),
                ],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=15,
            )

            if result.returncode == 0 and result.stdout.strip():
                # Output is one ISO date per matching commit, newest first;
                # take the most recent status change.
                first_line = result.stdout.strip().splitlines()[0]
                return self._parse_iso_datetime(first_line)

        except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
            pass

        return None

    def _get_file_creation_date(self, spec_path: Path) -> Optional[date]:
        """Get file system creation date.

        Args:
            spec_path: Path to the file

        Returns:
            Creation date or None
        """
        try:
            if spec_path.exists():
                stat = spec_path.stat()
                # Try birth time (macOS/BSD); fall back to ctime, which on
                # Linux is the inode change time (best available proxy).
                timestamp = getattr(stat, "st_birthtime", stat.st_ctime)
                return date.fromtimestamp(timestamp)
        except (OSError, ValueError):
            pass

        return None

    def _get_file_modification_date(self, spec_path: Path) -> Optional[date]:
        """Get file system modification date.

        Args:
            spec_path: Path to the file

        Returns:
            Modification date or None
        """
        try:
            if spec_path.exists():
                stat = spec_path.stat()
                return date.fromtimestamp(stat.st_mtime)
        except (OSError, ValueError):
            pass

        return None

    @staticmethod
    def _parse_date_string(date_str: str) -> Optional[date]:
        """Parse a YYYY-MM-DD date string.

        Args:
            date_str: Date string in YYYY-MM-DD format

        Returns:
            Parsed date or None
        """
        try:
            return datetime.strptime(date_str.strip(), "%Y-%m-%d").date()
        except ValueError:
            return None

    @staticmethod
    def _parse_iso_datetime(iso_str: str) -> Optional[date]:
        """Parse an ISO 8601 datetime string to date.

        Args:
            iso_str: ISO datetime string (e.g., 2026-01-16T14:30:00-08:00)

        Returns:
            Parsed date or None
        """
        try:
            # Truncate at 'T' so the timezone offset never interferes.
            date_part = iso_str.split("T")[0]
            return datetime.strptime(date_part, "%Y-%m-%d").date()
        except (ValueError, IndexError):
            return None
|
@@ -0,0 +1,337 @@
|
|
|
1
|
+
"""Main orchestration service for diagram generation."""
|
|
2
|
+
|
|
3
|
+
import shutil
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
from ..models.diagram_models import (
|
|
9
|
+
DiagramResult,
|
|
10
|
+
DiagramSection,
|
|
11
|
+
DiagramType,
|
|
12
|
+
GeneratedDiagram,
|
|
13
|
+
ValidationResult,
|
|
14
|
+
)
|
|
15
|
+
from .entity_parser import EntityParser
|
|
16
|
+
from .er_diagram_generator import ERDiagramGenerator
|
|
17
|
+
from .mermaid_validator import MermaidValidator
|
|
18
|
+
from .section_parser import SectionParser
|
|
19
|
+
from .user_journey_generator import UserJourneyGenerator
|
|
20
|
+
from .user_story_parser import UserStoryParser
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class DiagramService:
    """Service for generating Mermaid diagrams from specifications.

    Orchestrates the full workflow:
    1. Parse spec content (user stories, entities)
    2. Generate diagrams (flowcharts, ER diagrams)
    3. Validate Mermaid syntax
    4. Insert/replace in AUTO-GENERATED sections
    """

    # Maps each diagram type to its canonical AUTO-GENERATED section name.
    SECTION_NAMES = {
        DiagramType.USER_JOURNEY: "user-journey",
        DiagramType.ER_DIAGRAM: "entity-relationships",
        DiagramType.ARCHITECTURE: "architecture",
    }

    def __init__(self, strict: bool = False, backup: bool = True):
        """Initialize diagram service.

        Args:
            strict: If True, fail on validation errors
            backup: If True, create backup before modifying files
        """
        self.strict = strict
        self.backup = backup

        # Collaborators for parsing, generation, and validation.
        self.section_parser = SectionParser()
        self.user_story_parser = UserStoryParser()
        self.entity_parser = EntityParser()
        self.user_journey_generator = UserJourneyGenerator()
        self.er_diagram_generator = ERDiagramGenerator()
        self.validator = MermaidValidator()

    def generate(
        self,
        file_path: Path,
        diagram_types: Optional[list[DiagramType]] = None,
        insert: bool = True,
    ) -> DiagramResult:
        """Generate diagrams for a specification file.

        Args:
            file_path: Path to spec.md or plan.md file
            diagram_types: Types to generate (default: auto-detect applicable)
            insert: If True, insert diagrams into file

        Returns:
            DiagramResult with generated diagrams and status
        """
        result = DiagramResult(file_path=file_path)

        # Validate file exists
        if not file_path.exists():
            result.success = False
            result.error = f"File not found: {file_path}"
            return result

        # Read file content; narrowed from `except Exception` so only
        # genuine read failures become an error result.
        try:
            content = file_path.read_text(encoding="utf-8")
        except (OSError, UnicodeDecodeError) as e:
            result.success = False
            result.error = f"Error reading file: {e}"
            return result

        # Record existing AUTO-GENERATED sections for reporting.
        result.sections_found = self.section_parser.find_sections(content)

        # Auto-detect diagram types if not specified
        if diagram_types is None:
            diagram_types = self._detect_applicable_types(content)

        # Generate and validate each requested diagram type.
        for diagram_type in diagram_types:
            diagram = self._generate_diagram(content, diagram_type)
            if diagram:
                validation = self.validator.validate(
                    diagram.mermaid_content, diagram_type
                )
                diagram.validation = validation
                diagram.is_valid = validation.passed

                # In strict mode a single invalid diagram aborts the run.
                if self.strict and not validation.passed:
                    result.success = False
                    result.error = f"Validation failed for {diagram_type.value}: {validation.errors}"
                    return result

                result.diagrams.append(diagram)

        # Insert diagrams into file if requested
        if insert and result.diagrams:
            updated_content, sections_updated = self._insert_diagrams(
                content, result.diagrams
            )
            result.sections_updated = sections_updated

            if sections_updated:
                # Create backup and write file atomically.
                try:
                    self._write_file(file_path, updated_content)
                except OSError as e:
                    result.success = False
                    result.error = f"Error writing file: {e}"
                    return result

        return result

    def validate(
        self, content: str, diagram_type: Optional[DiagramType] = None
    ) -> ValidationResult:
        """Validate Mermaid diagram syntax.

        Args:
            content: Mermaid diagram content
            diagram_type: Type of diagram (auto-detected if None)

        Returns:
            ValidationResult with pass/fail and errors
        """
        return self.validator.validate(content, diagram_type)

    def insert_diagram(
        self, file_path: Path, section_name: str, diagram_content: str
    ) -> bool:
        """Insert or replace diagram in AUTO-GENERATED section.

        Args:
            file_path: Path to target file
            section_name: Section identifier (e.g., "user-journey")
            diagram_content: Mermaid diagram to insert (wrapped in code fence)

        Returns:
            True if successful, False if markers not found
        """
        if not file_path.exists():
            return False

        content = file_path.read_text(encoding="utf-8")

        # Bail early if the section markers are absent.
        if not self.section_parser.has_section(content, section_name):
            return False

        updated_content, success = self.section_parser.replace_section_content(
            content, section_name, diagram_content
        )

        if success:
            self._write_file(file_path, updated_content)

        return success

    def _detect_applicable_types(self, content: str) -> list[DiagramType]:
        """Detect which diagram types are applicable for content.

        Args:
            content: File content

        Returns:
            List of applicable DiagramType values
        """
        types = []

        # User stories present -> a user-journey flowchart makes sense.
        if self.user_story_parser.count_stories(content) > 0:
            types.append(DiagramType.USER_JOURNEY)

        # Entities present -> an ER diagram makes sense.
        if self.entity_parser.count_entities(content) > 0:
            types.append(DiagramType.ER_DIAGRAM)

        return types

    def _generate_diagram(
        self, content: str, diagram_type: DiagramType
    ) -> Optional[GeneratedDiagram]:
        """Generate a single diagram type.

        Args:
            content: File content
            diagram_type: Type of diagram to generate

        Returns:
            GeneratedDiagram or None if content not found
        """
        if diagram_type == DiagramType.USER_JOURNEY:
            stories = self.user_story_parser.parse(content)
            if not stories:
                return None
            return self.user_journey_generator.generate_diagram(stories)

        elif diagram_type == DiagramType.ER_DIAGRAM:
            entities = self.entity_parser.parse(content)
            if not entities:
                return None
            return self.er_diagram_generator.generate_diagram(entities)

        elif diagram_type == DiagramType.ARCHITECTURE:
            # Architecture generation would be handled by ArchitectureGenerator
            # For now, return None (to be implemented in Phase 8)
            return None

        return None

    def _insert_diagrams(
        self, content: str, diagrams: list[GeneratedDiagram]
    ) -> tuple[str, list[str]]:
        """Insert diagrams into AUTO-GENERATED sections.

        Args:
            content: Original file content
            diagrams: List of generated diagrams

        Returns:
            Tuple of (updated content, list of section names updated)
        """
        updated_content = content
        sections_updated = []

        for diagram in diagrams:
            section_name = self.SECTION_NAMES.get(diagram.diagram_type)
            if not section_name:
                continue

            # Fall back to alternate section names when the canonical one
            # is missing from the document.
            if not self.section_parser.has_section(updated_content, section_name):
                alt_names = self._get_alternate_section_names(diagram.diagram_type)
                for alt_name in alt_names:
                    if self.section_parser.has_section(updated_content, alt_name):
                        section_name = alt_name
                        break
                else:
                    # No matching section at all: skip this diagram.
                    continue

            wrapped_content = diagram.wrapped_content
            updated_content, success = self.section_parser.replace_section_content(
                updated_content, section_name, wrapped_content
            )

            if success:
                sections_updated.append(section_name)

        return updated_content, sections_updated

    def _get_alternate_section_names(self, diagram_type: DiagramType) -> list[str]:
        """Get alternate section names for a diagram type.

        Args:
            diagram_type: Type of diagram

        Returns:
            List of alternate section name possibilities
        """
        alternates = {
            DiagramType.USER_JOURNEY: ["user-journey", "userjourney", "user_journey", "flowchart"],
            DiagramType.ER_DIAGRAM: ["entity-relationships", "er-diagram", "erdiagram", "entities"],
            DiagramType.ARCHITECTURE: ["architecture", "arch", "system-architecture"],
        }
        return alternates.get(diagram_type, [])

    def _write_file(self, file_path: Path, content: str) -> None:
        """Write content to file with optional backup.

        Backup and temp names are built with ``with_name`` so the original
        extension is preserved (spec.md -> spec.md.<ts>.bak) and files that
        merely share a stem (spec.md vs spec.txt) cannot collide on the same
        temp/backup path — ``with_suffix``, used previously, replaced the
        real extension and allowed both problems.

        Args:
            file_path: Path to write to
            content: Content to write
        """
        if self.backup and file_path.exists():
            # Create backup with timestamp, keeping the full original name.
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = file_path.with_name(f"{file_path.name}.{timestamp}.bak")
            shutil.copy2(file_path, backup_path)

        # Write atomically: write a sibling temp file, then replace.
        temp_path = file_path.with_name(file_path.name + ".tmp")
        try:
            temp_path.write_text(content, encoding="utf-8")
            temp_path.replace(file_path)
        finally:
            # Clean up the temp file if the replace never happened.
            if temp_path.exists():
                temp_path.unlink()

    def get_diagram_content(
        self, file_path: Path, diagram_type: DiagramType
    ) -> Optional[str]:
        """Get existing diagram content from a file.

        Args:
            file_path: Path to file
            diagram_type: Type of diagram to find

        Returns:
            Diagram content if found, None otherwise
        """
        if not file_path.exists():
            return None

        content = file_path.read_text(encoding="utf-8")
        section_name = self.SECTION_NAMES.get(diagram_type)

        if not section_name:
            return None

        section = self.section_parser.find_section(content, section_name)
        if not section:
            return None

        return self.section_parser.extract_mermaid_from_section(section)