doit-toolkit-cli 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- doit_cli/__init__.py +1356 -0
- doit_cli/cli/__init__.py +26 -0
- doit_cli/cli/analytics_command.py +616 -0
- doit_cli/cli/context_command.py +213 -0
- doit_cli/cli/diagram_command.py +304 -0
- doit_cli/cli/fixit_command.py +641 -0
- doit_cli/cli/hooks_command.py +211 -0
- doit_cli/cli/init_command.py +613 -0
- doit_cli/cli/memory_command.py +293 -0
- doit_cli/cli/status_command.py +117 -0
- doit_cli/cli/sync_prompts_command.py +248 -0
- doit_cli/cli/validate_command.py +196 -0
- doit_cli/cli/verify_command.py +204 -0
- doit_cli/cli/workflow_mixin.py +224 -0
- doit_cli/cli/xref_command.py +555 -0
- doit_cli/formatters/__init__.py +8 -0
- doit_cli/formatters/base.py +38 -0
- doit_cli/formatters/json_formatter.py +126 -0
- doit_cli/formatters/markdown_formatter.py +97 -0
- doit_cli/formatters/rich_formatter.py +257 -0
- doit_cli/main.py +49 -0
- doit_cli/models/__init__.py +139 -0
- doit_cli/models/agent.py +74 -0
- doit_cli/models/analytics_models.py +384 -0
- doit_cli/models/context_config.py +464 -0
- doit_cli/models/crossref_models.py +182 -0
- doit_cli/models/diagram_models.py +363 -0
- doit_cli/models/fixit_models.py +355 -0
- doit_cli/models/hook_config.py +125 -0
- doit_cli/models/project.py +91 -0
- doit_cli/models/results.py +121 -0
- doit_cli/models/search_models.py +228 -0
- doit_cli/models/status_models.py +195 -0
- doit_cli/models/sync_models.py +146 -0
- doit_cli/models/template.py +77 -0
- doit_cli/models/validation_models.py +175 -0
- doit_cli/models/workflow_models.py +319 -0
- doit_cli/prompts/__init__.py +5 -0
- doit_cli/prompts/fixit_prompts.py +344 -0
- doit_cli/prompts/interactive.py +390 -0
- doit_cli/rules/__init__.py +5 -0
- doit_cli/rules/builtin_rules.py +160 -0
- doit_cli/services/__init__.py +79 -0
- doit_cli/services/agent_detector.py +168 -0
- doit_cli/services/analytics_service.py +218 -0
- doit_cli/services/architecture_generator.py +290 -0
- doit_cli/services/backup_service.py +204 -0
- doit_cli/services/config_loader.py +113 -0
- doit_cli/services/context_loader.py +1121 -0
- doit_cli/services/coverage_calculator.py +142 -0
- doit_cli/services/crossref_service.py +237 -0
- doit_cli/services/cycle_time_calculator.py +134 -0
- doit_cli/services/date_inferrer.py +349 -0
- doit_cli/services/diagram_service.py +337 -0
- doit_cli/services/drift_detector.py +109 -0
- doit_cli/services/entity_parser.py +301 -0
- doit_cli/services/er_diagram_generator.py +197 -0
- doit_cli/services/fixit_service.py +699 -0
- doit_cli/services/github_service.py +192 -0
- doit_cli/services/hook_manager.py +258 -0
- doit_cli/services/hook_validator.py +528 -0
- doit_cli/services/input_validator.py +322 -0
- doit_cli/services/memory_search.py +527 -0
- doit_cli/services/mermaid_validator.py +334 -0
- doit_cli/services/prompt_transformer.py +91 -0
- doit_cli/services/prompt_writer.py +133 -0
- doit_cli/services/query_interpreter.py +428 -0
- doit_cli/services/report_exporter.py +219 -0
- doit_cli/services/report_generator.py +256 -0
- doit_cli/services/requirement_parser.py +112 -0
- doit_cli/services/roadmap_summarizer.py +209 -0
- doit_cli/services/rule_engine.py +443 -0
- doit_cli/services/scaffolder.py +215 -0
- doit_cli/services/score_calculator.py +172 -0
- doit_cli/services/section_parser.py +204 -0
- doit_cli/services/spec_scanner.py +327 -0
- doit_cli/services/state_manager.py +355 -0
- doit_cli/services/status_reporter.py +143 -0
- doit_cli/services/task_parser.py +347 -0
- doit_cli/services/template_manager.py +710 -0
- doit_cli/services/template_reader.py +158 -0
- doit_cli/services/user_journey_generator.py +214 -0
- doit_cli/services/user_story_parser.py +232 -0
- doit_cli/services/validation_service.py +188 -0
- doit_cli/services/validator.py +232 -0
- doit_cli/services/velocity_tracker.py +173 -0
- doit_cli/services/workflow_engine.py +405 -0
- doit_cli/templates/agent-file-template.md +28 -0
- doit_cli/templates/checklist-template.md +39 -0
- doit_cli/templates/commands/doit.checkin.md +363 -0
- doit_cli/templates/commands/doit.constitution.md +187 -0
- doit_cli/templates/commands/doit.documentit.md +485 -0
- doit_cli/templates/commands/doit.fixit.md +181 -0
- doit_cli/templates/commands/doit.implementit.md +265 -0
- doit_cli/templates/commands/doit.planit.md +262 -0
- doit_cli/templates/commands/doit.reviewit.md +355 -0
- doit_cli/templates/commands/doit.roadmapit.md +368 -0
- doit_cli/templates/commands/doit.scaffoldit.md +458 -0
- doit_cli/templates/commands/doit.specit.md +521 -0
- doit_cli/templates/commands/doit.taskit.md +304 -0
- doit_cli/templates/commands/doit.testit.md +277 -0
- doit_cli/templates/config/context.yaml +134 -0
- doit_cli/templates/config/hooks.yaml +93 -0
- doit_cli/templates/config/validation-rules.yaml +64 -0
- doit_cli/templates/github-issue-templates/epic.yml +78 -0
- doit_cli/templates/github-issue-templates/feature.yml +116 -0
- doit_cli/templates/github-issue-templates/task.yml +129 -0
- doit_cli/templates/hooks/.gitkeep +0 -0
- doit_cli/templates/hooks/post-commit.sh +25 -0
- doit_cli/templates/hooks/post-merge.sh +75 -0
- doit_cli/templates/hooks/pre-commit.sh +17 -0
- doit_cli/templates/hooks/pre-push.sh +18 -0
- doit_cli/templates/memory/completed_roadmap.md +50 -0
- doit_cli/templates/memory/constitution.md +125 -0
- doit_cli/templates/memory/roadmap.md +61 -0
- doit_cli/templates/plan-template.md +146 -0
- doit_cli/templates/scripts/bash/check-prerequisites.sh +166 -0
- doit_cli/templates/scripts/bash/common.sh +156 -0
- doit_cli/templates/scripts/bash/create-new-feature.sh +297 -0
- doit_cli/templates/scripts/bash/setup-plan.sh +61 -0
- doit_cli/templates/scripts/bash/update-agent-context.sh +675 -0
- doit_cli/templates/scripts/powershell/check-prerequisites.ps1 +148 -0
- doit_cli/templates/scripts/powershell/common.ps1 +137 -0
- doit_cli/templates/scripts/powershell/create-new-feature.ps1 +283 -0
- doit_cli/templates/scripts/powershell/setup-plan.ps1 +61 -0
- doit_cli/templates/scripts/powershell/update-agent-context.ps1 +406 -0
- doit_cli/templates/spec-template.md +159 -0
- doit_cli/templates/tasks-template.md +313 -0
- doit_cli/templates/vscode-settings.json +14 -0
- doit_toolkit_cli-0.1.9.dist-info/METADATA +324 -0
- doit_toolkit_cli-0.1.9.dist-info/RECORD +134 -0
- doit_toolkit_cli-0.1.9.dist-info/WHEEL +4 -0
- doit_toolkit_cli-0.1.9.dist-info/entry_points.txt +2 -0
- doit_toolkit_cli-0.1.9.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,355 @@
|
|
|
1
|
+
"""State manager for workflow persistence.
|
|
2
|
+
|
|
3
|
+
This module handles saving and loading workflow state for recovery
|
|
4
|
+
after interruptions.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
from datetime import datetime, timedelta
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Protocol, runtime_checkable
|
|
12
|
+
|
|
13
|
+
from ..models.workflow_models import (
|
|
14
|
+
WorkflowState,
|
|
15
|
+
WorkflowStatus,
|
|
16
|
+
StateCorruptionError,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# =============================================================================
|
|
21
|
+
# StateManager Protocol
|
|
22
|
+
# =============================================================================
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@runtime_checkable
class StateManagerProtocol(Protocol):
    """Structural interface for workflow state persistence backends."""

    def save(self, state: WorkflowState) -> Path:
        """Persist *state* and return the path it was written to."""
        ...

    def load(self, command_name: str) -> WorkflowState | None:
        """Return the newest saved state for *command_name*, if any."""
        ...

    def delete(self, state: WorkflowState) -> None:
        """Remove the persisted file backing *state*."""
        ...

    def list_interrupted(self) -> list[WorkflowState]:
        """Return every state still marked as interrupted."""
        ...

    def cleanup_stale(self, max_age_days: int = 7) -> int:
        """Delete state files older than *max_age_days*; return the count removed."""
        ...
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# =============================================================================
|
|
51
|
+
# StateManager Implementation
|
|
52
|
+
# =============================================================================
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class StateManager:
    """Manages workflow state persistence.

    Handles saving workflow state to JSON files for recovery after
    interruptions, loading previous state for resume, and cleanup
    of stale state files.

    State files are stored in `.doit/state/` by default.
    """

    # Relative directory (under the CWD) used when no state_dir is given.
    DEFAULT_STATE_DIR = ".doit/state"

    def __init__(self, state_dir: Path | str | None = None):
        """Initialize the state manager.

        Args:
            state_dir: Directory to store state files. Defaults to .doit/state/
        """
        if state_dir is None:
            state_dir = Path.cwd() / self.DEFAULT_STATE_DIR
        self.state_dir = Path(state_dir)

    def save(self, state: WorkflowState) -> Path:
        """Save workflow state to file.

        Args:
            state: State to persist

        Returns:
            Path to saved state file
        """
        self._ensure_state_dir()

        # The state ID doubles as the filename, so each workflow run maps
        # to exactly one file (repeated saves overwrite in place).
        filepath = self.state_dir / f"{state.id}.json"

        with open(filepath, "w", encoding="utf-8") as f:
            json.dump(state.to_dict(), f, indent=2)

        return filepath

    def load(self, command_name: str) -> WorkflowState | None:
        """Load most recent interrupted state for a command.

        Args:
            command_name: Command to find state for

        Returns:
            WorkflowState if found, None otherwise
        """
        if not self.state_dir.exists():
            return None

        # Keep the parsed states rather than (path, timestamp) pairs: the
        # previous implementation re-read the winning file from disk, which
        # cost a second parse and raced with concurrent delete/cleanup.
        candidates: list[WorkflowState] = []

        for filepath in self.state_dir.glob("*.json"):
            if not filepath.name.startswith(f"{command_name}_"):
                continue
            try:
                state = self._load_file(filepath)
            except StateCorruptionError:
                # _load_file normalizes every parse failure to this type;
                # corrupted files are skipped.
                continue
            if state and state.status == WorkflowStatus.INTERRUPTED:
                candidates.append(state)

        if not candidates:
            return None

        # Most recently updated state wins.
        return max(candidates, key=lambda s: s.updated_at)

    def delete(self, state: WorkflowState) -> None:
        """Delete state file after completion.

        Args:
            state: State to delete
        """
        # missing_ok avoids the exists()/unlink() race; deleting an
        # already-gone file is a no-op either way.
        (self.state_dir / f"{state.id}.json").unlink(missing_ok=True)

    def list_interrupted(self) -> list[WorkflowState]:
        """List all interrupted workflow states.

        Returns:
            List of interrupted states, most recently updated first
        """
        if not self.state_dir.exists():
            return []

        interrupted: list[WorkflowState] = []

        for filepath in self.state_dir.glob("*.json"):
            try:
                state = self._load_file(filepath)
            except StateCorruptionError:
                continue
            if state and state.status == WorkflowStatus.INTERRUPTED:
                interrupted.append(state)

        # Sort by updated_at descending (newest first).
        interrupted.sort(key=lambda s: s.updated_at, reverse=True)
        return interrupted

    def cleanup_stale(self, max_age_days: int = 7) -> int:
        """Remove state files older than threshold.

        Corrupted state files are removed regardless of age.

        Args:
            max_age_days: Maximum age before cleanup

        Returns:
            Number of files removed
        """
        if not self.state_dir.exists():
            return 0

        threshold = datetime.now() - timedelta(days=max_age_days)
        removed = 0

        for filepath in self.state_dir.glob("*.json"):
            try:
                state = self._load_file(filepath)
                stale = bool(state and state.updated_at < threshold)
            except StateCorruptionError:
                # Unreadable state is useless for recovery; remove it too.
                stale = True
            if stale:
                try:
                    filepath.unlink()
                except OSError:
                    # File vanished (or is locked) between glob and unlink;
                    # an unguarded unlink here would abort the whole sweep.
                    continue
                removed += 1

        return removed

    def get_state_path(self, state: WorkflowState) -> Path:
        """Get the file path for a state.

        Args:
            state: Workflow state

        Returns:
            Path to state file
        """
        return self.state_dir / f"{state.id}.json"

    # =========================================================================
    # Internal Methods
    # =========================================================================

    def _ensure_state_dir(self) -> None:
        """Ensure the state directory exists."""
        self.state_dir.mkdir(parents=True, exist_ok=True)

    def _load_file(self, filepath: Path) -> WorkflowState | None:
        """Load a state file.

        Args:
            filepath: Path to state file

        Returns:
            WorkflowState parsed from the file

        Raises:
            StateCorruptionError: If the file cannot be read or parsed.
                Every underlying failure is normalized to this type so
                callers only need a single except clause.
        """
        try:
            with open(filepath, "r", encoding="utf-8") as f:
                data = json.load(f)
            return WorkflowState.from_dict(data)
        except json.JSONDecodeError as e:
            raise StateCorruptionError(filepath, f"Invalid JSON: {e}") from e
        except KeyError as e:
            raise StateCorruptionError(filepath, f"Missing required field: {e}") from e
        except Exception as e:
            raise StateCorruptionError(filepath, f"Failed to load: {e}") from e

    # =========================================================================
    # Fixit Workflow State Methods (T013)
    # =========================================================================

    def _fixit_path(self, issue_id: int) -> Path:
        """Return the state-file path for a fixit issue (single naming site)."""
        return self.state_dir / f"fixit-{issue_id}.json"

    def save_fixit_state(self, state_data: dict, issue_id: int) -> Path:
        """Save fixit workflow state to file.

        Args:
            state_data: Dictionary containing workflow state
            issue_id: GitHub issue number

        Returns:
            Path to saved state file
        """
        self._ensure_state_dir()

        filepath = self._fixit_path(issue_id)
        with open(filepath, "w", encoding="utf-8") as f:
            json.dump(state_data, f, indent=2)

        return filepath

    def load_fixit_state(self, issue_id: int) -> dict | None:
        """Load fixit workflow state for an issue.

        Args:
            issue_id: GitHub issue number

        Returns:
            State dictionary if found, None otherwise
        """
        filepath = self._fixit_path(issue_id)
        if not filepath.exists():
            return None

        try:
            with open(filepath, "r", encoding="utf-8") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError):
            # Corrupted or unreadable fixit state is treated as absent.
            return None

    def delete_fixit_state(self, issue_id: int) -> bool:
        """Delete fixit workflow state for an issue.

        Args:
            issue_id: GitHub issue number

        Returns:
            True if deleted, False if not found
        """
        filepath = self._fixit_path(issue_id)
        if filepath.exists():
            filepath.unlink()
            return True
        return False

    def list_fixit_states(self) -> list[tuple[int, dict]]:
        """List all fixit workflow states.

        Returns:
            List of (issue_id, state_data) tuples
        """
        if not self.state_dir.exists():
            return []

        states: list[tuple[int, dict]] = []

        for filepath in self.state_dir.glob("fixit-*.json"):
            try:
                # removeprefix (not replace) so only the leading "fixit-" is
                # stripped; a stray "fixit-" later in the stem then fails the
                # int() conversion and the file is skipped as malformed.
                issue_id = int(filepath.stem.removeprefix("fixit-"))
                with open(filepath, "r", encoding="utf-8") as f:
                    states.append((issue_id, json.load(f)))
            except (ValueError, json.JSONDecodeError, OSError):
                continue

        return states

    def get_active_fixit_workflow(self) -> tuple[int, dict] | None:
        """Get the currently active fixit workflow (most recent non-completed).

        Returns:
            Tuple of (issue_id, state_data) for active workflow, or None
        """
        # Filter out completed/cancelled workflows.
        active = [
            (issue_id, data)
            for issue_id, data in self.list_fixit_states()
            if data.get("workflow", {}).get("phase") not in ("completed", "cancelled")
        ]

        if not active:
            return None

        # max() returns the first maximum, matching the previous stable
        # descending sort + [0] on ties.
        return max(
            active,
            key=lambda item: item[1].get("workflow", {}).get("updated_at", ""),
        )
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
"""StatusReporter service for aggregating and filtering spec statuses."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime, timedelta
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
from ..models.status_models import SpecState, SpecStatus, StatusReport
|
|
8
|
+
from .spec_scanner import SpecScanner
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class StatusReporter:
    """Aggregates spec statuses into reports with statistics and filtering.

    Combines SpecScanner results with optional status/blocking/recency
    filters to produce StatusReport objects.
    """

    def __init__(
        self,
        project_root: Optional[Path] = None,
        validate: bool = True,
    ) -> None:
        """Initialize reporter with project root.

        Args:
            project_root: Root directory of the doit project.
                Defaults to current working directory.
            validate: Whether to run validation on specs.
        """
        self.scanner = SpecScanner(project_root, validate=validate)
        # Adopt the root the scanner resolved (it handles the None default).
        self.project_root = self.scanner.project_root

    def generate_report(
        self,
        status_filter: Optional[SpecState] = None,
        blocking_only: bool = False,
        recent_days: Optional[int] = None,
    ) -> StatusReport:
        """Generate a status report with optional filtering.

        Args:
            status_filter: Only include specs with this status.
            blocking_only: Only include specs blocking commits.
            recent_days: Only include specs modified in last N days.

        Returns:
            StatusReport with filtered specs and computed statistics.
        """
        scanned = self.scanner.scan(include_validation=True)

        selected = self._apply_filters(
            scanned,
            status_filter=status_filter,
            blocking_only=blocking_only,
            recent_days=recent_days,
        )

        return StatusReport(
            specs=selected,
            generated_at=datetime.now(),
            project_root=self.project_root,
        )

    def _apply_filters(
        self,
        specs: list[SpecStatus],
        status_filter: Optional[SpecState] = None,
        blocking_only: bool = False,
        recent_days: Optional[int] = None,
    ) -> list[SpecStatus]:
        """Apply filters to spec list.

        Delegates to the public filter_* helpers so that each criterion
        has exactly one implementation.

        Args:
            specs: List of SpecStatus to filter.
            status_filter: Only include specs with this status.
            blocking_only: Only include blocking specs.
            recent_days: Only include specs modified in last N days.

        Returns:
            Filtered list of SpecStatus objects.
        """
        selected = specs
        if status_filter is not None:
            selected = self.filter_by_status(selected, status_filter)
        if blocking_only:
            selected = self.filter_blocking(selected)
        if recent_days is not None:
            selected = self.filter_recent(selected, recent_days)
        return selected

    def filter_by_status(
        self,
        specs: list[SpecStatus],
        status: SpecState,
    ) -> list[SpecStatus]:
        """Filter specs by status.

        Args:
            specs: List of SpecStatus to filter.
            status: Status to filter by.

        Returns:
            Specs with matching status.
        """
        return [spec for spec in specs if spec.status == status]

    def filter_blocking(self, specs: list[SpecStatus]) -> list[SpecStatus]:
        """Filter to only blocking specs.

        Args:
            specs: List of SpecStatus to filter.

        Returns:
            Only specs that are blocking.
        """
        return [spec for spec in specs if spec.is_blocking]

    def filter_recent(
        self,
        specs: list[SpecStatus],
        days: int,
    ) -> list[SpecStatus]:
        """Filter to specs modified within N days.

        Args:
            specs: List of SpecStatus to filter.
            days: Number of days to look back.

        Returns:
            Specs modified within the time period.
        """
        cutoff = datetime.now() - timedelta(days=days)
        return [spec for spec in specs if spec.last_modified >= cutoff]
|