doit-toolkit-cli 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of doit-toolkit-cli might be problematic. Click here for more details.
- doit_cli/__init__.py +1356 -0
- doit_cli/cli/__init__.py +26 -0
- doit_cli/cli/analytics_command.py +616 -0
- doit_cli/cli/context_command.py +213 -0
- doit_cli/cli/diagram_command.py +304 -0
- doit_cli/cli/fixit_command.py +641 -0
- doit_cli/cli/hooks_command.py +211 -0
- doit_cli/cli/init_command.py +613 -0
- doit_cli/cli/memory_command.py +293 -0
- doit_cli/cli/roadmapit_command.py +10 -0
- doit_cli/cli/status_command.py +117 -0
- doit_cli/cli/sync_prompts_command.py +248 -0
- doit_cli/cli/validate_command.py +196 -0
- doit_cli/cli/verify_command.py +204 -0
- doit_cli/cli/workflow_mixin.py +224 -0
- doit_cli/cli/xref_command.py +555 -0
- doit_cli/formatters/__init__.py +8 -0
- doit_cli/formatters/base.py +38 -0
- doit_cli/formatters/json_formatter.py +126 -0
- doit_cli/formatters/markdown_formatter.py +97 -0
- doit_cli/formatters/rich_formatter.py +257 -0
- doit_cli/main.py +51 -0
- doit_cli/models/__init__.py +139 -0
- doit_cli/models/agent.py +74 -0
- doit_cli/models/analytics_models.py +384 -0
- doit_cli/models/context_config.py +464 -0
- doit_cli/models/crossref_models.py +182 -0
- doit_cli/models/diagram_models.py +363 -0
- doit_cli/models/fixit_models.py +355 -0
- doit_cli/models/hook_config.py +125 -0
- doit_cli/models/project.py +91 -0
- doit_cli/models/results.py +121 -0
- doit_cli/models/search_models.py +228 -0
- doit_cli/models/status_models.py +195 -0
- doit_cli/models/sync_models.py +146 -0
- doit_cli/models/template.py +77 -0
- doit_cli/models/validation_models.py +175 -0
- doit_cli/models/workflow_models.py +319 -0
- doit_cli/prompts/__init__.py +5 -0
- doit_cli/prompts/fixit_prompts.py +344 -0
- doit_cli/prompts/interactive.py +390 -0
- doit_cli/rules/__init__.py +5 -0
- doit_cli/rules/builtin_rules.py +160 -0
- doit_cli/services/__init__.py +79 -0
- doit_cli/services/agent_detector.py +168 -0
- doit_cli/services/analytics_service.py +218 -0
- doit_cli/services/architecture_generator.py +290 -0
- doit_cli/services/backup_service.py +204 -0
- doit_cli/services/config_loader.py +113 -0
- doit_cli/services/context_loader.py +1123 -0
- doit_cli/services/coverage_calculator.py +142 -0
- doit_cli/services/crossref_service.py +237 -0
- doit_cli/services/cycle_time_calculator.py +134 -0
- doit_cli/services/date_inferrer.py +349 -0
- doit_cli/services/diagram_service.py +337 -0
- doit_cli/services/drift_detector.py +109 -0
- doit_cli/services/entity_parser.py +301 -0
- doit_cli/services/er_diagram_generator.py +197 -0
- doit_cli/services/fixit_service.py +699 -0
- doit_cli/services/github_service.py +192 -0
- doit_cli/services/hook_manager.py +258 -0
- doit_cli/services/hook_validator.py +528 -0
- doit_cli/services/input_validator.py +322 -0
- doit_cli/services/memory_search.py +527 -0
- doit_cli/services/mermaid_validator.py +334 -0
- doit_cli/services/prompt_transformer.py +91 -0
- doit_cli/services/prompt_writer.py +133 -0
- doit_cli/services/query_interpreter.py +428 -0
- doit_cli/services/report_exporter.py +219 -0
- doit_cli/services/report_generator.py +256 -0
- doit_cli/services/requirement_parser.py +112 -0
- doit_cli/services/roadmap_summarizer.py +209 -0
- doit_cli/services/rule_engine.py +443 -0
- doit_cli/services/scaffolder.py +215 -0
- doit_cli/services/score_calculator.py +172 -0
- doit_cli/services/section_parser.py +204 -0
- doit_cli/services/spec_scanner.py +327 -0
- doit_cli/services/state_manager.py +355 -0
- doit_cli/services/status_reporter.py +143 -0
- doit_cli/services/task_parser.py +347 -0
- doit_cli/services/template_manager.py +710 -0
- doit_cli/services/template_reader.py +158 -0
- doit_cli/services/user_journey_generator.py +214 -0
- doit_cli/services/user_story_parser.py +232 -0
- doit_cli/services/validation_service.py +188 -0
- doit_cli/services/validator.py +232 -0
- doit_cli/services/velocity_tracker.py +173 -0
- doit_cli/services/workflow_engine.py +405 -0
- doit_cli/templates/agent-file-template.md +28 -0
- doit_cli/templates/checklist-template.md +39 -0
- doit_cli/templates/commands/doit.checkin.md +363 -0
- doit_cli/templates/commands/doit.constitution.md +187 -0
- doit_cli/templates/commands/doit.documentit.md +485 -0
- doit_cli/templates/commands/doit.fixit.md +181 -0
- doit_cli/templates/commands/doit.implementit.md +265 -0
- doit_cli/templates/commands/doit.planit.md +262 -0
- doit_cli/templates/commands/doit.reviewit.md +355 -0
- doit_cli/templates/commands/doit.roadmapit.md +389 -0
- doit_cli/templates/commands/doit.scaffoldit.md +458 -0
- doit_cli/templates/commands/doit.specit.md +521 -0
- doit_cli/templates/commands/doit.taskit.md +304 -0
- doit_cli/templates/commands/doit.testit.md +277 -0
- doit_cli/templates/config/context.yaml +134 -0
- doit_cli/templates/config/hooks.yaml +93 -0
- doit_cli/templates/config/validation-rules.yaml +64 -0
- doit_cli/templates/github-issue-templates/epic.yml +78 -0
- doit_cli/templates/github-issue-templates/feature.yml +116 -0
- doit_cli/templates/github-issue-templates/task.yml +129 -0
- doit_cli/templates/hooks/.gitkeep +0 -0
- doit_cli/templates/hooks/post-commit.sh +25 -0
- doit_cli/templates/hooks/post-merge.sh +75 -0
- doit_cli/templates/hooks/pre-commit.sh +17 -0
- doit_cli/templates/hooks/pre-push.sh +18 -0
- doit_cli/templates/memory/completed_roadmap.md +50 -0
- doit_cli/templates/memory/constitution.md +125 -0
- doit_cli/templates/memory/roadmap.md +61 -0
- doit_cli/templates/plan-template.md +146 -0
- doit_cli/templates/scripts/bash/check-prerequisites.sh +166 -0
- doit_cli/templates/scripts/bash/common.sh +156 -0
- doit_cli/templates/scripts/bash/create-new-feature.sh +297 -0
- doit_cli/templates/scripts/bash/setup-plan.sh +61 -0
- doit_cli/templates/scripts/bash/update-agent-context.sh +675 -0
- doit_cli/templates/scripts/powershell/check-prerequisites.ps1 +148 -0
- doit_cli/templates/scripts/powershell/common.ps1 +137 -0
- doit_cli/templates/scripts/powershell/create-new-feature.ps1 +283 -0
- doit_cli/templates/scripts/powershell/setup-plan.ps1 +61 -0
- doit_cli/templates/scripts/powershell/update-agent-context.ps1 +406 -0
- doit_cli/templates/spec-template.md +159 -0
- doit_cli/templates/tasks-template.md +313 -0
- doit_cli/templates/vscode-settings.json +14 -0
- doit_toolkit_cli-0.1.10.dist-info/METADATA +324 -0
- doit_toolkit_cli-0.1.10.dist-info/RECORD +135 -0
- doit_toolkit_cli-0.1.10.dist-info/WHEEL +4 -0
- doit_toolkit_cli-0.1.10.dist-info/entry_points.txt +2 -0
- doit_toolkit_cli-0.1.10.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,355 @@
|
|
|
1
|
+
"""Data models for bug-fix workflow.
|
|
2
|
+
|
|
3
|
+
This module contains all data models, enums, and dataclasses
|
|
4
|
+
for the doit fixit command workflow.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from enum import Enum
|
|
10
|
+
from typing import Optional
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# =============================================================================
|
|
14
|
+
# Enums (T004-T007)
|
|
15
|
+
# =============================================================================
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class FixPhase(Enum):
    """Workflow phase states for the bug-fix (fixit) workflow.

    The lowercase string values are the serialized form: FixWorkflow
    writes ``phase.value`` in to_dict() and reconstructs the member via
    ``FixPhase(data["phase"])`` in from_dict().
    """

    INITIALIZED = "initialized"
    INVESTIGATING = "investigating"
    PLANNING = "planning"
    REVIEWING = "reviewing"
    APPROVED = "approved"
    IMPLEMENTING = "implementing"
    COMPLETED = "completed"
    CANCELLED = "cancelled"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class FindingType(Enum):
    """Categories of facts recorded during bug investigation.

    The string values are what InvestigationFinding serializes under its
    "type" key (see InvestigationFinding.to_dict / from_dict).
    """

    HYPOTHESIS = "hypothesis"
    CONFIRMED_CAUSE = "confirmed_cause"
    AFFECTED_FILE = "affected_file"
    REPRODUCTION_STEP = "reproduction_step"
    RELATED_COMMIT = "related_commit"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class RiskLevel(Enum):
    """Risk levels for fix plans.

    Serialized via ``risk_level.value`` in FixPlan.to_dict(); the
    default when deserializing without a value is "low".
    """

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class PlanStatus(Enum):
    """Status states for fix plans (FixPlan.status).

    Serialized via ``status.value``; FixPlan.from_dict defaults a
    missing status to "draft".
    """

    DRAFT = "draft"
    PENDING_REVIEW = "pending_review"
    REVISION_NEEDED = "revision_needed"
    APPROVED = "approved"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class IssueState(Enum):
    """GitHub issue states.

    GitHubIssue.from_dict lowercases the incoming "state" value before
    constructing a member, so "OPEN"/"open" both map to OPEN.
    """

    OPEN = "open"
    CLOSED = "closed"
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class ChangeType(Enum):
    """Types of file changes in fix plans (FileChange.change_type)."""

    MODIFY = "modify"
    ADD = "add"
    DELETE = "delete"
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
# =============================================================================
|
|
74
|
+
# Models (T008-T011)
|
|
75
|
+
# =============================================================================
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@dataclass
class GitHubIssue:
    """Represents a GitHub issue fetched from the repository.

    Attributes:
        number: Issue number in the repository.
        title: Issue title.
        body: Issue body text (empty string when GitHub reports null).
        state: Open/closed state of the issue.
        labels: Label names attached to the issue.
        created_at: When the issue was created, if known.
    """

    number: int
    title: str
    body: str = ""
    state: IssueState = IssueState.OPEN
    labels: list[str] = field(default_factory=list)
    created_at: Optional[datetime] = None

    @classmethod
    def from_dict(cls, data: dict) -> "GitHubIssue":
        """Create GitHubIssue from gh CLI JSON output or a to_dict() payload.

        Accepts both gh's camelCase ``createdAt`` key and the snake_case
        ``created_at`` key written by to_dict().
        """
        state = IssueState(data.get("state", "open").lower())
        # gh emits labels as objects ({"name": ...}); our own to_dict()
        # stores plain strings — handle both.
        labels = [label["name"] if isinstance(label, dict) else label
                  for label in data.get("labels", [])]
        # Fix: created_at was previously always discarded, so the issue
        # date was lost on every from_dict()/to_dict() round trip.
        raw_created = data.get("created_at") or data.get("createdAt")
        created_at = None
        if raw_created:
            try:
                # gh uses ISO 8601 with a trailing 'Z' (UTC), which
                # datetime.fromisoformat rejects before Python 3.11.
                created_at = datetime.fromisoformat(
                    raw_created.replace("Z", "+00:00")
                )
            except ValueError:
                created_at = None  # tolerate unexpected date formats
        return cls(
            number=data["number"],
            title=data["title"],
            body=data.get("body", "") or "",
            state=state,
            labels=labels,
            created_at=created_at,
        )

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        return {
            "number": self.number,
            "title": self.title,
            "body": self.body,
            "state": self.state.value,
            "labels": self.labels,
            # Fix: include created_at so serialization round-trips.
            "created_at": self.created_at.isoformat() if self.created_at else None,
        }
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
@dataclass
class FixWorkflow:
    """An in-progress bug-fix workflow: its identity, branch, and phase.

    Attributes:
        id: Unique identifier for the workflow.
        issue_id: Number of the GitHub issue being fixed.
        branch_name: Git branch the fix is developed on.
        phase: Current FixPhase of the workflow.
        started_at: When the workflow was created.
        updated_at: When the workflow state last changed.
    """

    id: str
    issue_id: int
    branch_name: str
    phase: FixPhase = FixPhase.INITIALIZED
    started_at: datetime = field(default_factory=datetime.now)
    updated_at: datetime = field(default_factory=datetime.now)

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        payload = {
            "id": self.id,
            "issue_id": self.issue_id,
            "branch_name": self.branch_name,
            "phase": self.phase.value,
        }
        payload["started_at"] = self.started_at.isoformat()
        payload["updated_at"] = self.updated_at.isoformat()
        return payload

    @classmethod
    def from_dict(cls, data: dict) -> "FixWorkflow":
        """Create FixWorkflow from dictionary."""
        started = datetime.fromisoformat(data["started_at"])
        updated = datetime.fromisoformat(data["updated_at"])
        return cls(
            id=data["id"],
            issue_id=data["issue_id"],
            branch_name=data["branch_name"],
            phase=FixPhase(data["phase"]),
            started_at=started,
            updated_at=updated,
        )
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
@dataclass
class InvestigationFinding:
    """A discovered fact recorded during investigation.

    Attributes:
        id: Unique identifier of the finding.
        finding_type: Category of the finding (FindingType).
        description: Human-readable description of the fact.
        evidence: Supporting evidence text, if any.
        file_path: File the finding refers to, if applicable.
        line_number: Line within file_path, if applicable.
    """

    id: str
    finding_type: FindingType
    description: str
    evidence: str = ""
    file_path: Optional[str] = None
    line_number: Optional[int] = None

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization.

        NOTE: the category is stored under the key "type", which is what
        from_dict() reads back.
        """
        return dict(
            id=self.id,
            type=self.finding_type.value,
            description=self.description,
            evidence=self.evidence,
            file_path=self.file_path,
            line_number=self.line_number,
        )

    @classmethod
    def from_dict(cls, data: dict) -> "InvestigationFinding":
        """Create InvestigationFinding from dictionary."""
        category = FindingType(data["type"])
        return cls(
            id=data["id"],
            finding_type=category,
            description=data["description"],
            evidence=data.get("evidence", ""),
            file_path=data.get("file_path"),
            line_number=data.get("line_number"),
        )
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
@dataclass
class InvestigationCheckpoint:
    """Tracks progress through a single investigation step.

    Attributes:
        id: Unique identifier of the checkpoint.
        title: Short description of the step.
        completed: Whether the step has been finished.
        notes: Free-form notes recorded for the step.
    """

    id: str
    title: str
    completed: bool = False
    notes: str = ""

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        return {
            attr: getattr(self, attr)
            for attr in ("id", "title", "completed", "notes")
        }

    @classmethod
    def from_dict(cls, data: dict) -> "InvestigationCheckpoint":
        """Create InvestigationCheckpoint from dictionary."""
        checkpoint = cls(id=data["id"], title=data["title"])
        checkpoint.completed = data.get("completed", False)
        checkpoint.notes = data.get("notes", "")
        return checkpoint
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
@dataclass
class InvestigationPlan:
    """Documents the approach for investigating a bug.

    Attributes:
        id: Unique identifier of the plan.
        workflow_id: Id of the owning FixWorkflow.
        keywords: Search keywords guiding the investigation.
        checkpoints: Ordered investigation steps.
        findings: Facts discovered so far.
        created_at: When the plan was created.
    """

    id: str
    workflow_id: str
    keywords: list[str] = field(default_factory=list)
    checkpoints: list[InvestigationCheckpoint] = field(default_factory=list)
    findings: list[InvestigationFinding] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.now)

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        return {
            "id": self.id,
            "workflow_id": self.workflow_id,
            "keywords": self.keywords,
            "checkpoints": [checkpoint.to_dict() for checkpoint in self.checkpoints],
            "findings": [finding.to_dict() for finding in self.findings],
            "created_at": self.created_at.isoformat(),
        }

    @classmethod
    def from_dict(cls, data: dict) -> "InvestigationPlan":
        """Create InvestigationPlan from dictionary."""
        # Older payloads may lack created_at; fall back to "now".
        if "created_at" in data:
            created = datetime.fromisoformat(data["created_at"])
        else:
            created = datetime.now()
        return cls(
            id=data["id"],
            workflow_id=data["workflow_id"],
            keywords=data.get("keywords", []),
            checkpoints=[InvestigationCheckpoint.from_dict(item)
                         for item in data.get("checkpoints", [])],
            findings=[InvestigationFinding.from_dict(item)
                      for item in data.get("findings", [])],
            created_at=created,
        )
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
@dataclass
class FileChange:
    """Describes a single file modification in a fix plan.

    Attributes:
        file_path: Path of the file being changed.
        change_type: Kind of change (modify/add/delete).
        description: What the change does and why.
    """

    file_path: str
    change_type: ChangeType
    description: str

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        return dict(
            file_path=self.file_path,
            change_type=self.change_type.value,
            description=self.description,
        )

    @classmethod
    def from_dict(cls, data: dict) -> "FileChange":
        """Create FileChange from dictionary."""
        return cls(
            data["file_path"],
            ChangeType(data["change_type"]),
            data["description"],
        )
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
@dataclass
class FixPlan:
    """Documents the approved approach to fix the bug.

    Attributes:
        id: Unique identifier of the plan.
        workflow_id: Id of the owning FixWorkflow.
        root_cause: Identified root cause of the bug.
        proposed_solution: Description of the intended fix.
        risk_level: Assessed risk of applying the fix.
        status: Review status of the plan.
        affected_files: Planned file changes.
        created_at: When the plan was created.
        approved_at: When the plan was approved, if it has been.
    """

    id: str
    workflow_id: str
    root_cause: str = ""
    proposed_solution: str = ""
    risk_level: RiskLevel = RiskLevel.LOW
    status: PlanStatus = PlanStatus.DRAFT
    affected_files: list[FileChange] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.now)
    approved_at: Optional[datetime] = None

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        approved = self.approved_at.isoformat() if self.approved_at else None
        return {
            "id": self.id,
            "workflow_id": self.workflow_id,
            "root_cause": self.root_cause,
            "proposed_solution": self.proposed_solution,
            "risk_level": self.risk_level.value,
            "status": self.status.value,
            "affected_files": [change.to_dict() for change in self.affected_files],
            "created_at": self.created_at.isoformat(),
            "approved_at": approved,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "FixPlan":
        """Create FixPlan from dictionary."""
        # Older payloads may lack created_at; fall back to "now".
        if "created_at" in data:
            created = datetime.fromisoformat(data["created_at"])
        else:
            created = datetime.now()
        approved_raw = data.get("approved_at")
        approved = datetime.fromisoformat(approved_raw) if approved_raw else None
        return cls(
            id=data["id"],
            workflow_id=data["workflow_id"],
            root_cause=data.get("root_cause", ""),
            proposed_solution=data.get("proposed_solution", ""),
            risk_level=RiskLevel(data.get("risk_level", "low")),
            status=PlanStatus(data.get("status", "draft")),
            affected_files=[FileChange.from_dict(change)
                            for change in data.get("affected_files", [])],
            created_at=created,
            approved_at=approved,
        )
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
@dataclass
class FixitWorkflowState:
    """Complete state for a fixit workflow, used for persistence.

    Bundles the workflow record with its optional issue, investigation
    plan, and fix plan so the whole state serializes as one document.
    """

    workflow: FixWorkflow
    issue: Optional[GitHubIssue] = None
    investigation_plan: Optional[InvestigationPlan] = None
    fix_plan: Optional[FixPlan] = None

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        def dump(model):
            # Serialize an optional sub-model, preserving None.
            return model.to_dict() if model else None

        return {
            "workflow": self.workflow.to_dict(),
            "issue": dump(self.issue),
            "investigation_plan": dump(self.investigation_plan),
            "fix_plan": dump(self.fix_plan),
        }

    @classmethod
    def from_dict(cls, data: dict) -> "FixitWorkflowState":
        """Create FixitWorkflowState from dictionary."""
        issue_data = data.get("issue")
        plan_data = data.get("investigation_plan")
        fix_data = data.get("fix_plan")
        return cls(
            workflow=FixWorkflow.from_dict(data["workflow"]),
            issue=GitHubIssue.from_dict(issue_data) if issue_data else None,
            investigation_plan=(InvestigationPlan.from_dict(plan_data)
                                if plan_data else None),
            fix_plan=FixPlan.from_dict(fix_data) if fix_data else None,
        )
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
"""Hook configuration models for workflow enforcement."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
import yaml
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass
class HookRule:
    """Configuration for a specific hook type (pre-commit or pre-push).

    Field names mirror the keys accepted in hooks.yaml; unknown YAML
    keys are filtered out by HookConfig._from_dict before construction.
    """

    enabled: bool = True
    require_spec: bool = True
    require_plan: bool = True
    require_tasks: bool = False
    validate_spec: bool = True  # Run spec validation rules
    validate_spec_threshold: int = 70  # Minimum quality score to pass
    # Spec statuses accepted by the hook check (presumably enforced by
    # the hook scripts/validator elsewhere in the package — confirm there).
    allowed_statuses: list[str] = field(
        default_factory=lambda: ["In Progress", "Complete", "Approved"]
    )
    # Branches exempt from enforcement.
    exempt_branches: list[str] = field(
        default_factory=lambda: ["main", "develop"]
    )
    # Path patterns exempt from enforcement; empty means no exemptions.
    exempt_paths: list[str] = field(default_factory=list)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass
class LoggingConfig:
    """Configuration for hook logging."""

    enabled: bool = True
    log_bypasses: bool = True  # whether hook bypasses are logged
    # Default log location (presumably relative to the project root —
    # confirm against the hook scripts that write it).
    log_path: str = ".doit/logs/hook-bypasses.log"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@dataclass
class HookConfig:
    """Main configuration for Git hooks workflow enforcement.

    Attributes:
        version: Config schema version (defaults to 1).
        pre_commit: Rule applied by the pre-commit hook.
        pre_push: Rule applied by the pre-push hook.
        logging: Logging configuration for hooks.
    """

    version: int = 1
    pre_commit: HookRule = field(default_factory=HookRule)
    pre_push: HookRule = field(default_factory=HookRule)
    logging: LoggingConfig = field(default_factory=LoggingConfig)

    @classmethod
    def load_from_file(cls, config_path: Path) -> "HookConfig":
        """Load configuration from YAML file.

        Args:
            config_path: Path to the hooks.yaml configuration file.

        Returns:
            HookConfig instance with values from file, or all defaults
            when the file is missing or cannot be parsed.
        """
        if not config_path.exists():
            return cls()

        try:
            with open(config_path, encoding="utf-8") as f:
                data = yaml.safe_load(f) or {}
        except yaml.YAMLError:
            # Return default config on parse error
            return cls()

        return cls._from_dict(data)

    @classmethod
    def _from_dict(cls, data: dict) -> "HookConfig":
        """Create HookConfig from dictionary.

        Unknown keys within each section are ignored so older/newer
        config files still load. The input mapping is not modified.
        """
        # Fix: copy the nested sections before normalizing keys. The
        # previous implementation .pop()'d keys directly from the
        # caller's dicts, mutating the parsed YAML data as a side effect.
        # Also coerce None -> {} so an empty YAML section ("pre_commit:")
        # no longer crashes on .items().
        pre_commit_data = dict(data.get("pre_commit") or {})
        pre_push_data = dict(data.get("pre_push") or {})
        logging_data = dict(data.get("logging") or {})

        # Handle the alternate key name for the allowed-statuses list.
        for section in (pre_commit_data, pre_push_data):
            if "require_spec_status" in section:
                section["allowed_statuses"] = section.pop("require_spec_status")

        def build(config_cls, section):
            # Keep only keys the dataclass declares; ignore the rest.
            # An empty section yields all defaults, as before.
            if not section:
                return config_cls()
            known = {k: v for k, v in section.items()
                     if k in config_cls.__dataclass_fields__}
            return config_cls(**known)

        return cls(
            version=data.get("version", 1),
            pre_commit=build(HookRule, pre_commit_data),
            pre_push=build(HookRule, pre_push_data),
            logging=build(LoggingConfig, logging_data),
        )

    @classmethod
    def get_default_config_path(cls) -> Path:
        """Get the default configuration file path (project-relative)."""
        return Path(".doit/config/hooks.yaml")

    @classmethod
    def load_default(cls) -> "HookConfig":
        """Load configuration from the default location."""
        return cls.load_from_file(cls.get_default_config_path())

    def get_rule_for_hook(self, hook_type: str) -> Optional[HookRule]:
        """Get the rule configuration for a specific hook type.

        Args:
            hook_type: Type of hook ('pre-commit' or 'pre-push').

        Returns:
            HookRule for the specified hook type, or None if invalid.
        """
        hook_map = {
            "pre-commit": self.pre_commit,
            "pre-push": self.pre_push,
        }
        return hook_map.get(hook_type)
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
"""Project model representing a directory being initialized for doit workflow."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
from .agent import Agent
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
# Unsafe paths that require confirmation before initialization.
# Compared by exact equality against the resolved project path in
# Project.is_safe_directory() — subdirectories of these are NOT blocked.
# POSIX-style roots; on Windows most entries will simply never match.
UNSAFE_PATHS = [
    Path.home(),
    Path("/"),
    Path("/usr"),
    Path("/etc"),
    Path("/var"),
    Path("/opt"),
    Path("/bin"),
    Path("/sbin"),
]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@dataclass
class Project:
    """Represents a project directory being initialized for the doit workflow.

    Attributes:
        path: Root directory of the project.
        initialized: Whether doit setup has been performed.
        doit_version: Version of doit used to initialize, if known.
        created_at: When the project was initialized, if known.
        agents: Agents configured for this project.
    """

    path: Path
    initialized: bool = False
    doit_version: Optional[str] = None
    created_at: Optional[datetime] = None
    agents: list[Agent] = field(default_factory=list)

    @property
    def doit_folder(self) -> Path:
        """Path to the .doit/ directory."""
        return self.path / ".doit"

    @property
    def memory_folder(self) -> Path:
        """Path to the .doit/memory/ directory."""
        return self.doit_folder / "memory"

    @property
    def templates_folder(self) -> Path:
        """Path to the .doit/templates/ directory."""
        return self.doit_folder / "templates"

    @property
    def scripts_folder(self) -> Path:
        """Path to the .doit/scripts/ directory."""
        return self.doit_folder / "scripts"

    @property
    def backups_folder(self) -> Path:
        """Path to the .doit/backups/ directory."""
        return self.doit_folder / "backups"

    def command_directory(self, agent: Agent) -> Path:
        """Path to the command directory for the given agent."""
        return self.path / agent.command_directory

    def is_safe_directory(self) -> bool:
        """Whether the resolved project path is safe for initialization.

        Only exact matches against UNSAFE_PATHS are rejected;
        subdirectories of those locations are considered safe.
        """
        return self.path.resolve() not in UNSAFE_PATHS

    def has_doit_setup(self) -> bool:
        """Whether the project already contains a .doit/ directory."""
        return self.doit_folder.exists()

    def has_agent_setup(self, agent: Agent) -> bool:
        """Whether the command directory for the given agent exists."""
        return self.command_directory(agent).exists()

    def detect_agents(self) -> list[Agent]:
        """Detect which agents are already configured in this project."""
        found: list[Agent] = []

        # Claude is configured via a .claude/ directory at the project root.
        if (self.path / ".claude").exists():
            found.append(Agent.CLAUDE)

        # Copilot is configured via instructions and/or prompts under .github/.
        github_dir = self.path / ".github"
        has_copilot = (
            (github_dir / "copilot-instructions.md").exists()
            or (github_dir / "prompts").exists()
        )
        if has_copilot:
            found.append(Agent.COPILOT)

        return found
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""Result models for init and verify operations."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
from .project import Project
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class VerifyStatus(str, Enum):
    """Status of a single verification check.

    Inherits from str so members compare/serialize as plain strings.
    """

    PASS = "pass"
    WARN = "warn"  # non-fatal: does not make VerifyResult.passed False
    FAIL = "fail"  # fatal: any FAIL makes VerifyResult.passed False
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class VerifyCheck:
    """Single verification check result."""

    name: str  # short identifier of the check
    status: VerifyStatus  # pass / warn / fail
    message: str  # human-readable outcome description
    suggestion: Optional[str] = None  # optional remediation hint
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass
class InitResult:
    """Result of an initialization operation.

    Attributes:
        success: Whether initialization completed without error.
        project: The project that was initialized.
        created_directories: Directories created during the run.
        created_files: Files created during the run.
        updated_files: Existing files that were updated.
        skipped_files: Files left untouched.
        backup_path: Location of the backup taken, if any.
        error_message: Failure description when success is False.
    """

    success: bool
    project: Project
    created_directories: list[Path] = field(default_factory=list)
    created_files: list[Path] = field(default_factory=list)
    updated_files: list[Path] = field(default_factory=list)
    skipped_files: list[Path] = field(default_factory=list)
    backup_path: Optional[Path] = None
    error_message: Optional[str] = None

    @property
    def total_created(self) -> int:
        """Total number of files and directories created."""
        return len(self.created_directories) + len(self.created_files)

    @property
    def summary(self) -> str:
        """Human-readable summary of the operation."""
        if not self.success:
            return f"Failed: {self.error_message or 'Unknown error'}"

        labeled = [
            (self.created_directories, "directories created"),
            (self.created_files, "files created"),
            (self.updated_files, "files updated"),
            (self.skipped_files, "files skipped"),
        ]
        parts = [f"{len(items)} {label}" for items, label in labeled if items]
        return ", ".join(parts) if parts else "No changes made"
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@dataclass
class VerifyResult:
    """Result of project verification.

    Attributes:
        project: The project that was verified.
        checks: Individual check results.
    """

    project: Project
    checks: list[VerifyCheck] = field(default_factory=list)

    def _count(self, status: VerifyStatus) -> int:
        """Number of checks with the given status."""
        return sum(1 for check in self.checks if check.status == status)

    @property
    def passed(self) -> bool:
        """True when no check has FAIL status (warnings are allowed)."""
        return self._count(VerifyStatus.FAIL) == 0

    @property
    def has_warnings(self) -> bool:
        """True when at least one check has WARN status."""
        return self._count(VerifyStatus.WARN) > 0

    @property
    def pass_count(self) -> int:
        """Number of passed checks."""
        return self._count(VerifyStatus.PASS)

    @property
    def warn_count(self) -> int:
        """Number of warning checks."""
        return self._count(VerifyStatus.WARN)

    @property
    def fail_count(self) -> int:
        """Number of failed checks."""
        return self._count(VerifyStatus.FAIL)

    @property
    def summary(self) -> str:
        """One-line summary of check results."""
        return f"{self.pass_count} passed, {self.warn_count} warnings, {self.fail_count} failed"

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON output."""
        serialized_checks = [
            {
                "name": check.name,
                "status": check.status.value,
                "message": check.message,
                "suggestion": check.suggestion,
            }
            for check in self.checks
        ]
        return {
            "status": "passed" if self.passed else "failed",
            "checks": serialized_checks,
            "summary": {
                "passed": self.pass_count,
                "warnings": self.warn_count,
                "failed": self.fail_count,
            },
        }
|