codd-dev 0.2.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codd/__init__.py +3 -0
- codd/cli.py +344 -0
- codd/config.py +62 -0
- codd/defaults.yaml +30 -0
- codd/generator.py +644 -0
- codd/graph.py +288 -0
- codd/hooks.py +104 -0
- codd/implementer.py +846 -0
- codd/planner.py +576 -0
- codd/propagate.py +308 -0
- codd/scanner.py +445 -0
- codd/validator.py +499 -0
- codd/verifier.py +426 -0
- codd_dev-0.2.0a1.dist-info/METADATA +241 -0
- codd_dev-0.2.0a1.dist-info/RECORD +18 -0
- codd_dev-0.2.0a1.dist-info/WHEEL +4 -0
- codd_dev-0.2.0a1.dist-info/entry_points.txt +2 -0
- codd_dev-0.2.0a1.dist-info/licenses/LICENSE +21 -0
codd/planner.py
ADDED
|
@@ -0,0 +1,576 @@
|
|
|
1
|
+
"""CoDD planner — compute wave readiness from configured artifacts."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations

import heapq
import re
from collections import Counter, defaultdict
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Any

import yaml

import codd.generator as generator_module
from codd.generator import WaveArtifact, _load_project_config, _load_wave_artifacts
from codd.validator import _iter_doc_files, _parse_codd_frontmatter, validate_project
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
STATUS_DONE = "DONE"
|
|
19
|
+
STATUS_READY = "READY"
|
|
20
|
+
STATUS_BLOCKED = "BLOCKED"
|
|
21
|
+
STATUS_ERROR = "ERROR"
|
|
22
|
+
|
|
23
|
+
ICON_BY_STATUS = {
|
|
24
|
+
STATUS_DONE: "✅",
|
|
25
|
+
STATUS_READY: "🔵",
|
|
26
|
+
STATUS_BLOCKED: "🔴",
|
|
27
|
+
STATUS_ERROR: "⚠️",
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
MECE_DOCUMENT_STRUCTURE = """\
|
|
31
|
+
docs/
|
|
32
|
+
├── requirements/ # What — source-of-truth requirements
|
|
33
|
+
├── design/ # How — overview architecture and cross-cutting design
|
|
34
|
+
├── detailed_design/ # How — module ownership, flows, and implementation-ready diagrams
|
|
35
|
+
├── plan/ # When — implementation sequencing and milestones
|
|
36
|
+
├── governance/ # Why — decisions, ADRs, change requests
|
|
37
|
+
├── test/ # Verify — acceptance criteria and test strategy
|
|
38
|
+
└── operations/ # Run — runbooks, monitoring, incident handling
|
|
39
|
+
"""
|
|
40
|
+
|
|
41
|
+
STANDARD_V_MODEL_PATTERNS = """\
|
|
42
|
+
Typical wave patterns:
|
|
43
|
+
- Wave 1: acceptance criteria and decision records derived directly from requirements
|
|
44
|
+
- Wave 2: overview/system design that depends on requirements and wave 1 outputs
|
|
45
|
+
- Wave 3-4: domain design such as API, database, auth, UX, and integration design
|
|
46
|
+
- Wave 5: detailed design artifacts under docs/detailed_design/ with Mermaid diagrams, ownership boundaries, and runtime flows
|
|
47
|
+
- Wave 6: implementation planning that depends on the approved overview + detailed design set
|
|
48
|
+
"""
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@dataclass(frozen=True)
|
|
52
|
+
class PlannedArtifact:
|
|
53
|
+
"""Planner view of one wave-configured artifact."""
|
|
54
|
+
|
|
55
|
+
wave: int
|
|
56
|
+
node_id: str
|
|
57
|
+
path: str
|
|
58
|
+
status: str
|
|
59
|
+
depends_on: list[str]
|
|
60
|
+
blocked_by: list[str]
|
|
61
|
+
validation_errors: list[str]
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@dataclass(frozen=True)
|
|
65
|
+
class PlannedWave:
|
|
66
|
+
"""Planner view of one wave."""
|
|
67
|
+
|
|
68
|
+
wave: int
|
|
69
|
+
status: str
|
|
70
|
+
nodes: list[PlannedArtifact]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@dataclass(frozen=True)
|
|
74
|
+
class PlanResult:
|
|
75
|
+
"""Serializable planner output."""
|
|
76
|
+
|
|
77
|
+
project_root: str
|
|
78
|
+
summary: dict[str, int]
|
|
79
|
+
next_wave: int | None
|
|
80
|
+
waves: list[PlannedWave]
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@dataclass(frozen=True)
|
|
84
|
+
class RequirementDocument:
|
|
85
|
+
"""Requirement document used to synthesize wave_config."""
|
|
86
|
+
|
|
87
|
+
node_id: str
|
|
88
|
+
path: str
|
|
89
|
+
content: str
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@dataclass(frozen=True)
|
|
93
|
+
class PlanInitResult:
|
|
94
|
+
"""Result of initializing wave_config from requirements."""
|
|
95
|
+
|
|
96
|
+
project_root: str
|
|
97
|
+
config_path: str
|
|
98
|
+
requirement_paths: list[str]
|
|
99
|
+
wave_config: dict[str, list[dict[str, Any]]]
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
@dataclass(frozen=True)
|
|
103
|
+
class _ExternalNode:
|
|
104
|
+
path: str
|
|
105
|
+
status: str
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def plan_init(
|
|
109
|
+
project_root: Path,
|
|
110
|
+
*,
|
|
111
|
+
force: bool = False,
|
|
112
|
+
ai_command: str | None = None,
|
|
113
|
+
) -> PlanInitResult:
|
|
114
|
+
"""Initialize wave_config from requirement documents."""
|
|
115
|
+
project_root = project_root.resolve()
|
|
116
|
+
config = _load_project_config(project_root)
|
|
117
|
+
|
|
118
|
+
if config.get("wave_config") and not force:
|
|
119
|
+
raise FileExistsError("codd.yaml already contains wave_config")
|
|
120
|
+
|
|
121
|
+
requirement_documents = _load_requirement_documents(project_root, config)
|
|
122
|
+
if not requirement_documents:
|
|
123
|
+
raise ValueError("no requirement documents with CoDD frontmatter were found under configured doc_dirs")
|
|
124
|
+
|
|
125
|
+
resolved_ai_command = generator_module._resolve_ai_command(config, ai_command)
|
|
126
|
+
prompt = _build_plan_init_prompt(config, requirement_documents)
|
|
127
|
+
raw_wave_config = generator_module._invoke_ai_command(resolved_ai_command, prompt)
|
|
128
|
+
wave_config = _parse_wave_config_output(raw_wave_config)
|
|
129
|
+
|
|
130
|
+
config["wave_config"] = wave_config
|
|
131
|
+
config_path = project_root / "codd" / "codd.yaml"
|
|
132
|
+
config_path.write_text(
|
|
133
|
+
yaml.safe_dump(config, sort_keys=False, allow_unicode=True),
|
|
134
|
+
encoding="utf-8",
|
|
135
|
+
)
|
|
136
|
+
|
|
137
|
+
return PlanInitResult(
|
|
138
|
+
project_root=str(project_root),
|
|
139
|
+
config_path=str(config_path),
|
|
140
|
+
requirement_paths=[document.path for document in requirement_documents],
|
|
141
|
+
wave_config=wave_config,
|
|
142
|
+
)
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def build_plan(project_root: Path) -> PlanResult:
|
|
146
|
+
"""Build wave execution status for a CoDD project."""
|
|
147
|
+
project_root = project_root.resolve()
|
|
148
|
+
config = _load_project_config(project_root)
|
|
149
|
+
artifacts = _load_wave_artifacts(config)
|
|
150
|
+
artifacts_by_node = {artifact.node_id: artifact for artifact in artifacts}
|
|
151
|
+
ordered_node_ids = _topological_order(artifacts)
|
|
152
|
+
|
|
153
|
+
validation = validate_project(project_root, project_root / "codd")
|
|
154
|
+
errors_by_location = _group_validation_errors(validation.issues)
|
|
155
|
+
external_nodes = _index_external_nodes(project_root, config, errors_by_location, set(artifacts_by_node))
|
|
156
|
+
|
|
157
|
+
planned_nodes: dict[str, PlannedArtifact] = {}
|
|
158
|
+
for node_id in ordered_node_ids:
|
|
159
|
+
artifact = artifacts_by_node[node_id]
|
|
160
|
+
location = Path(artifact.output).as_posix()
|
|
161
|
+
doc_path = project_root / artifact.output
|
|
162
|
+
validation_errors = sorted(set(errors_by_location.get(location, [])))
|
|
163
|
+
depends_on = _dependency_ids(artifact)
|
|
164
|
+
|
|
165
|
+
if doc_path.exists():
|
|
166
|
+
status = STATUS_ERROR if validation_errors else STATUS_DONE
|
|
167
|
+
blocked_by: list[str] = []
|
|
168
|
+
else:
|
|
169
|
+
blocked_by = [
|
|
170
|
+
dependency_id
|
|
171
|
+
for dependency_id in depends_on
|
|
172
|
+
if _dependency_status(dependency_id, planned_nodes, external_nodes) != STATUS_DONE
|
|
173
|
+
]
|
|
174
|
+
status = STATUS_READY if not blocked_by else STATUS_BLOCKED
|
|
175
|
+
|
|
176
|
+
planned_nodes[node_id] = PlannedArtifact(
|
|
177
|
+
wave=artifact.wave,
|
|
178
|
+
node_id=artifact.node_id,
|
|
179
|
+
path=location,
|
|
180
|
+
status=status,
|
|
181
|
+
depends_on=depends_on,
|
|
182
|
+
blocked_by=blocked_by,
|
|
183
|
+
validation_errors=validation_errors,
|
|
184
|
+
)
|
|
185
|
+
|
|
186
|
+
waves = [
|
|
187
|
+
PlannedWave(
|
|
188
|
+
wave=wave,
|
|
189
|
+
status=_wave_status([planned_nodes[artifact.node_id] for artifact in artifacts if artifact.wave == wave]),
|
|
190
|
+
nodes=[planned_nodes[artifact.node_id] for artifact in artifacts if artifact.wave == wave],
|
|
191
|
+
)
|
|
192
|
+
for wave in sorted({artifact.wave for artifact in artifacts})
|
|
193
|
+
]
|
|
194
|
+
|
|
195
|
+
summary = {
|
|
196
|
+
"done": sum(1 for node in planned_nodes.values() if node.status == STATUS_DONE),
|
|
197
|
+
"ready": sum(1 for node in planned_nodes.values() if node.status == STATUS_READY),
|
|
198
|
+
"blocked": sum(1 for node in planned_nodes.values() if node.status == STATUS_BLOCKED),
|
|
199
|
+
"error": sum(1 for node in planned_nodes.values() if node.status == STATUS_ERROR),
|
|
200
|
+
}
|
|
201
|
+
next_wave = next((wave.wave for wave in waves if any(node.status == STATUS_READY for node in wave.nodes)), None)
|
|
202
|
+
|
|
203
|
+
return PlanResult(
|
|
204
|
+
project_root=str(project_root),
|
|
205
|
+
summary=summary,
|
|
206
|
+
next_wave=next_wave,
|
|
207
|
+
waves=waves,
|
|
208
|
+
)
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def render_plan_text(plan: PlanResult) -> str:
|
|
212
|
+
"""Render a human-readable wave plan."""
|
|
213
|
+
lines: list[str] = []
|
|
214
|
+
|
|
215
|
+
for index, wave in enumerate(plan.waves):
|
|
216
|
+
if index:
|
|
217
|
+
lines.append("")
|
|
218
|
+
lines.append(f"Wave {wave.wave}: {wave.status}")
|
|
219
|
+
for node in wave.nodes:
|
|
220
|
+
lines.append(f" {ICON_BY_STATUS[node.status]} {node.node_id} [{node.status}] {node.path}")
|
|
221
|
+
if node.status == STATUS_READY and node.depends_on:
|
|
222
|
+
lines.append(f" depends_on: {', '.join(node.depends_on)}")
|
|
223
|
+
elif node.status == STATUS_BLOCKED:
|
|
224
|
+
blocked_text = ", ".join(node.blocked_by) if node.blocked_by else "(unknown)"
|
|
225
|
+
lines.append(f" blocked_by: {blocked_text}")
|
|
226
|
+
elif node.status == STATUS_ERROR:
|
|
227
|
+
for message in node.validation_errors:
|
|
228
|
+
lines.append(f" error: {message}")
|
|
229
|
+
|
|
230
|
+
lines.append("")
|
|
231
|
+
lines.append(
|
|
232
|
+
"Summary: "
|
|
233
|
+
f"{plan.summary['done']} DONE, "
|
|
234
|
+
f"{plan.summary['ready']} READY, "
|
|
235
|
+
f"{plan.summary['blocked']} BLOCKED, "
|
|
236
|
+
f"{plan.summary['error']} ERROR"
|
|
237
|
+
)
|
|
238
|
+
if plan.next_wave is not None:
|
|
239
|
+
lines.append(f"Next action: codd generate --wave {plan.next_wave}")
|
|
240
|
+
elif plan.summary["error"]:
|
|
241
|
+
lines.append("Next action: resolve validation errors")
|
|
242
|
+
else:
|
|
243
|
+
lines.append("Next action: all waves DONE")
|
|
244
|
+
|
|
245
|
+
return "\n".join(lines)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def plan_to_dict(plan: PlanResult) -> dict:
|
|
249
|
+
"""Convert planner output to plain Python data for JSON serialization."""
|
|
250
|
+
return asdict(plan)
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def _load_requirement_documents(project_root: Path, config: dict[str, Any]) -> list[RequirementDocument]:
|
|
254
|
+
documents: list[RequirementDocument] = []
|
|
255
|
+
|
|
256
|
+
for doc_path in _iter_doc_files(project_root, config):
|
|
257
|
+
parsed = _parse_codd_frontmatter(doc_path)
|
|
258
|
+
if parsed.error:
|
|
259
|
+
continue
|
|
260
|
+
|
|
261
|
+
codd = parsed.codd or {}
|
|
262
|
+
if codd.get("type") != "requirement":
|
|
263
|
+
continue
|
|
264
|
+
|
|
265
|
+
node_id = codd.get("node_id")
|
|
266
|
+
if not isinstance(node_id, str) or not node_id.strip():
|
|
267
|
+
continue
|
|
268
|
+
|
|
269
|
+
documents.append(
|
|
270
|
+
RequirementDocument(
|
|
271
|
+
node_id=node_id.strip(),
|
|
272
|
+
path=doc_path.relative_to(project_root).as_posix(),
|
|
273
|
+
content=doc_path.read_text(encoding="utf-8"),
|
|
274
|
+
)
|
|
275
|
+
)
|
|
276
|
+
|
|
277
|
+
return documents
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
def _build_plan_init_prompt(config: dict[str, Any], requirement_documents: list[RequirementDocument]) -> str:
|
|
281
|
+
project = config.get("project") or {}
|
|
282
|
+
scan = config.get("scan") or {}
|
|
283
|
+
doc_dirs = scan.get("doc_dirs") or []
|
|
284
|
+
project_name = project.get("name") or "(unknown)"
|
|
285
|
+
language = project.get("language") or "(unknown)"
|
|
286
|
+
|
|
287
|
+
lines = [
|
|
288
|
+
"You are initializing CoDD wave_config from requirement documents.",
|
|
289
|
+
f"Project name: {project_name}",
|
|
290
|
+
f"Primary language: {language}",
|
|
291
|
+
"Configured doc_dirs: " + (", ".join(str(item) for item in doc_dirs) if doc_dirs else "(none)"),
|
|
292
|
+
"",
|
|
293
|
+
"MECE Document Structure (7 categories):",
|
|
294
|
+
MECE_DOCUMENT_STRUCTURE.rstrip(),
|
|
295
|
+
"",
|
|
296
|
+
"Standard V-model artifact patterns:",
|
|
297
|
+
STANDARD_V_MODEL_PATTERNS.rstrip(),
|
|
298
|
+
"",
|
|
299
|
+
"Instructions:",
|
|
300
|
+
"- Read the requirement documents below and decide the minimum complete document set needed for this project.",
|
|
301
|
+
"- Output ONLY YAML for the wave_config mapping. Do not emit prose or Markdown fences.",
|
|
302
|
+
"- Use string wave numbers as the top-level keys.",
|
|
303
|
+
"- Each artifact entry must include node_id, output, title, depends_on, and conventions.",
|
|
304
|
+
"- Insert a dedicated detailed design wave between overview design and implementation planning when the project has multiple modules, integrations, workflows, or shared domain concepts.",
|
|
305
|
+
"- Detailed design artifacts must live under docs/detailed_design/ and stay Markdown + Mermaid (text-first, no binary diagrams).",
|
|
306
|
+
"- Decide which detailed design artifacts are necessary from the project context; do not hardcode a fixed set. Good candidates include shared domain ownership, component dependency maps, ER/CRUD views, key sequence diagrams, and state machines.",
|
|
307
|
+
"- conventions are release-blocking constraints. If a convention is violated, the project is not releasable.",
|
|
308
|
+
"- Extract conventions from the requirement documents for these categories:",
|
|
309
|
+
" security constraints (tenant isolation, authentication, authorization, auditability),",
|
|
310
|
+
" technical constraints (required stack, forbidden libraries, mandated integrations),",
|
|
311
|
+
" legal/regulatory requirements (privacy, GDPR, APPI, contractual obligations), and",
|
|
312
|
+
" non-functional requirements (SLA, latency, throughput, availability, recovery thresholds).",
|
|
313
|
+
"- Assign the relevant conventions to each artifact entry. Use conventions: [] only when an artifact truly has no release-blocking constraints.",
|
|
314
|
+
"- Do not add requirement documents themselves to wave_config.",
|
|
315
|
+
"- Keep output paths under docs/design/, docs/detailed_design/, docs/plan/, docs/governance/, docs/test/, or docs/operations/.",
|
|
316
|
+
"- Set dependencies so earlier waves unlock later waves in a realistic order.",
|
|
317
|
+
"- Do not emit explanatory headings or summaries such as 'Key conventions extracted:' or 'Notes:' before the YAML.",
|
|
318
|
+
"",
|
|
319
|
+
"Required schema (JSON notation):",
|
|
320
|
+
"{",
|
|
321
|
+
' "<wave-number>": [',
|
|
322
|
+
" {",
|
|
323
|
+
' "node_id": "category:name",',
|
|
324
|
+
' "output": "docs/.../file.md",',
|
|
325
|
+
' "title": "Document Title",',
|
|
326
|
+
' "depends_on": [{"id": "node:id", "relation": "derives_from", "semantic": "governance"}],',
|
|
327
|
+
' "conventions": [{"targets": ["node:id"], "reason": "release-blocking constraint"}]',
|
|
328
|
+
" }",
|
|
329
|
+
" ]",
|
|
330
|
+
"}",
|
|
331
|
+
"",
|
|
332
|
+
"Example output shape (YAML mapping only; do not wrap it in a top-level wave_config key):",
|
|
333
|
+
'"1":',
|
|
334
|
+
' - node_id: "design:acceptance-criteria"',
|
|
335
|
+
' output: "docs/test/acceptance_criteria.md"',
|
|
336
|
+
' title: "Acceptance Criteria"',
|
|
337
|
+
" depends_on:",
|
|
338
|
+
' - id: "req:project-requirements"',
|
|
339
|
+
' relation: "derives_from"',
|
|
340
|
+
' semantic: "governance"',
|
|
341
|
+
" conventions:",
|
|
342
|
+
" - targets:",
|
|
343
|
+
' - "db:rls_policies"',
|
|
344
|
+
' - "module:auth"',
|
|
345
|
+
' reason: "Tenant isolation and authenticated access are release-blocking constraints."',
|
|
346
|
+
'"2":',
|
|
347
|
+
' - node_id: "design:system-design"',
|
|
348
|
+
' output: "docs/design/system_design.md"',
|
|
349
|
+
' title: "System Design"',
|
|
350
|
+
" depends_on:",
|
|
351
|
+
' - id: "design:acceptance-criteria"',
|
|
352
|
+
' relation: "constrained_by"',
|
|
353
|
+
' semantic: "governance"',
|
|
354
|
+
" conventions:",
|
|
355
|
+
" - targets:",
|
|
356
|
+
' - "db:rls_policies"',
|
|
357
|
+
' - "service:auth"',
|
|
358
|
+
' reason: "Security, privacy, and access-control constraints must be reflected explicitly."',
|
|
359
|
+
'"3":',
|
|
360
|
+
' - node_id: "design:shared-domain-model"',
|
|
361
|
+
' output: "docs/detailed_design/shared_domain_model.md"',
|
|
362
|
+
' title: "Shared Domain Model"',
|
|
363
|
+
" depends_on:",
|
|
364
|
+
' - id: "design:system-design"',
|
|
365
|
+
' relation: "depends_on"',
|
|
366
|
+
' semantic: "technical"',
|
|
367
|
+
" conventions:",
|
|
368
|
+
" - targets:",
|
|
369
|
+
' - "module:auth"',
|
|
370
|
+
' - "db:rls_policies"',
|
|
371
|
+
' reason: "Canonical ownership of shared types and tenant boundaries must be implementation-ready before coding begins."',
|
|
372
|
+
"",
|
|
373
|
+
"User instruction:",
|
|
374
|
+
"以下の要件定義書を読み、このプロジェクトに必要な設計成果物・依存順序・artifactごとのconventionsを判断し、wave_config形式のYAMLを出力せよ。",
|
|
375
|
+
"conventionsは『違反したらリリース不可の制約』として抽出し、各artifactへ必ず割り当てること。",
|
|
376
|
+
"詳細設計waveが必要な場合は docs/detailed_design/ 配下に Mermaid 図を含む artifact を提案せよ。",
|
|
377
|
+
"",
|
|
378
|
+
"Requirement documents:",
|
|
379
|
+
]
|
|
380
|
+
|
|
381
|
+
for document in requirement_documents:
|
|
382
|
+
lines.extend(
|
|
383
|
+
[
|
|
384
|
+
f"--- BEGIN REQUIREMENT {document.path} ({document.node_id}) ---",
|
|
385
|
+
document.content.rstrip(),
|
|
386
|
+
f"--- END REQUIREMENT {document.path} ---",
|
|
387
|
+
"",
|
|
388
|
+
]
|
|
389
|
+
)
|
|
390
|
+
|
|
391
|
+
return "\n".join(lines).rstrip() + "\n"
|
|
392
|
+
|
|
393
|
+
|
|
394
|
+
def _parse_wave_config_output(raw_output: str) -> dict[str, list[dict[str, Any]]]:
|
|
395
|
+
cleaned_output = _clean_wave_config_output(raw_output)
|
|
396
|
+
if not cleaned_output:
|
|
397
|
+
raise ValueError("AI command returned empty wave_config output")
|
|
398
|
+
|
|
399
|
+
try:
|
|
400
|
+
payload = yaml.safe_load(cleaned_output)
|
|
401
|
+
except yaml.YAMLError as exc:
|
|
402
|
+
trimmed_output = _clean_wave_config_output(_trim_to_wave_config_mapping(cleaned_output))
|
|
403
|
+
if trimmed_output == cleaned_output:
|
|
404
|
+
raise ValueError(f"AI command returned invalid wave_config YAML: {exc}") from exc
|
|
405
|
+
|
|
406
|
+
try:
|
|
407
|
+
payload = yaml.safe_load(trimmed_output)
|
|
408
|
+
except yaml.YAMLError as trimmed_exc:
|
|
409
|
+
raise ValueError(f"AI command returned invalid wave_config YAML: {trimmed_exc}") from trimmed_exc
|
|
410
|
+
|
|
411
|
+
if isinstance(payload, dict) and isinstance(payload.get("wave_config"), dict):
|
|
412
|
+
payload = payload["wave_config"]
|
|
413
|
+
|
|
414
|
+
if not isinstance(payload, dict):
|
|
415
|
+
raise ValueError("AI command must return a YAML mapping of wave numbers to artifact lists")
|
|
416
|
+
|
|
417
|
+
try:
|
|
418
|
+
artifacts = _load_wave_artifacts({"wave_config": payload})
|
|
419
|
+
except ValueError as exc:
|
|
420
|
+
trimmed_output = _clean_wave_config_output(_trim_to_wave_config_mapping(cleaned_output))
|
|
421
|
+
if trimmed_output == cleaned_output:
|
|
422
|
+
raise
|
|
423
|
+
|
|
424
|
+
try:
|
|
425
|
+
trimmed_payload = yaml.safe_load(trimmed_output)
|
|
426
|
+
except yaml.YAMLError as trimmed_exc:
|
|
427
|
+
raise ValueError(f"AI command returned invalid wave_config YAML: {trimmed_exc}") from trimmed_exc
|
|
428
|
+
|
|
429
|
+
if isinstance(trimmed_payload, dict) and isinstance(trimmed_payload.get("wave_config"), dict):
|
|
430
|
+
trimmed_payload = trimmed_payload["wave_config"]
|
|
431
|
+
if not isinstance(trimmed_payload, dict):
|
|
432
|
+
raise ValueError("AI command must return a YAML mapping of wave numbers to artifact lists") from exc
|
|
433
|
+
|
|
434
|
+
artifacts = _load_wave_artifacts({"wave_config": trimmed_payload})
|
|
435
|
+
|
|
436
|
+
return _serialize_wave_config(artifacts)
|
|
437
|
+
|
|
438
|
+
|
|
439
|
+
def _strip_code_fences(text: str) -> str:
|
|
440
|
+
stripped = text.strip()
|
|
441
|
+
fenced = re.match(r"^```(?:yaml|yml)?\s*\n(?P<body>.*)\n```$", stripped, re.DOTALL)
|
|
442
|
+
if fenced:
|
|
443
|
+
return fenced.group("body")
|
|
444
|
+
return stripped
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
def _clean_wave_config_output(text: str) -> str:
|
|
448
|
+
stripped = _strip_code_fences(text).strip()
|
|
449
|
+
lines = [line for line in stripped.splitlines() if not re.match(r"^\s*```(?:yaml|yml)?\s*$", line)]
|
|
450
|
+
return "\n".join(lines).strip()
|
|
451
|
+
|
|
452
|
+
|
|
453
|
+
def _trim_to_wave_config_mapping(text: str) -> str:
|
|
454
|
+
lines = text.splitlines()
|
|
455
|
+
for index, line in enumerate(lines):
|
|
456
|
+
if re.match(r'^\s*(?:wave_config|["\']?\d+["\']?)\s*:\s*(?:#.*)?$', line):
|
|
457
|
+
return "\n".join(lines[index:]).strip()
|
|
458
|
+
return text
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
def _serialize_wave_config(artifacts: list[WaveArtifact]) -> dict[str, list[dict[str, Any]]]:
|
|
462
|
+
grouped: dict[str, list[dict[str, Any]]] = defaultdict(list)
|
|
463
|
+
|
|
464
|
+
for artifact in artifacts:
|
|
465
|
+
entry: dict[str, Any] = {
|
|
466
|
+
"node_id": artifact.node_id,
|
|
467
|
+
"output": artifact.output,
|
|
468
|
+
"title": artifact.title,
|
|
469
|
+
}
|
|
470
|
+
if artifact.depends_on:
|
|
471
|
+
entry["depends_on"] = artifact.depends_on
|
|
472
|
+
if artifact.conventions:
|
|
473
|
+
entry["conventions"] = artifact.conventions
|
|
474
|
+
grouped[str(artifact.wave)].append(entry)
|
|
475
|
+
|
|
476
|
+
return {wave: grouped[wave] for wave in sorted(grouped, key=int)}
|
|
477
|
+
|
|
478
|
+
|
|
479
|
+
def _group_validation_errors(issues) -> dict[str, list[str]]:
|
|
480
|
+
errors_by_location: dict[str, list[str]] = defaultdict(list)
|
|
481
|
+
for issue in issues:
|
|
482
|
+
if issue.level != STATUS_ERROR:
|
|
483
|
+
continue
|
|
484
|
+
errors_by_location[issue.location].append(issue.message)
|
|
485
|
+
return dict(errors_by_location)
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
def _index_external_nodes(
|
|
489
|
+
project_root: Path,
|
|
490
|
+
config: dict,
|
|
491
|
+
errors_by_location: dict[str, list[str]],
|
|
492
|
+
planned_nodes: set[str],
|
|
493
|
+
) -> dict[str, _ExternalNode]:
|
|
494
|
+
nodes: dict[str, _ExternalNode] = {}
|
|
495
|
+
|
|
496
|
+
for doc_path in _iter_doc_files(project_root, config):
|
|
497
|
+
relative_path = doc_path.relative_to(project_root).as_posix()
|
|
498
|
+
parsed = _parse_codd_frontmatter(doc_path)
|
|
499
|
+
if parsed.error:
|
|
500
|
+
continue
|
|
501
|
+
|
|
502
|
+
codd = parsed.codd or {}
|
|
503
|
+
node_id = codd.get("node_id")
|
|
504
|
+
if not isinstance(node_id, str) or node_id in planned_nodes:
|
|
505
|
+
continue
|
|
506
|
+
|
|
507
|
+
status = STATUS_ERROR if errors_by_location.get(relative_path) else STATUS_DONE
|
|
508
|
+
nodes[node_id] = _ExternalNode(path=relative_path, status=status)
|
|
509
|
+
|
|
510
|
+
return nodes
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def _dependency_ids(artifact: WaveArtifact) -> list[str]:
|
|
514
|
+
return [entry["id"] for entry in artifact.depends_on]
|
|
515
|
+
|
|
516
|
+
|
|
517
|
+
def _dependency_status(
|
|
518
|
+
dependency_id: str,
|
|
519
|
+
planned_nodes: dict[str, PlannedArtifact],
|
|
520
|
+
external_nodes: dict[str, _ExternalNode],
|
|
521
|
+
) -> str:
|
|
522
|
+
if dependency_id.startswith("req:"):
|
|
523
|
+
return STATUS_DONE
|
|
524
|
+
if dependency_id in planned_nodes:
|
|
525
|
+
return planned_nodes[dependency_id].status
|
|
526
|
+
if dependency_id in external_nodes:
|
|
527
|
+
return external_nodes[dependency_id].status
|
|
528
|
+
return STATUS_BLOCKED
|
|
529
|
+
|
|
530
|
+
|
|
531
|
+
def _wave_status(nodes: list[PlannedArtifact]) -> str:
|
|
532
|
+
statuses = {node.status for node in nodes}
|
|
533
|
+
if STATUS_ERROR in statuses:
|
|
534
|
+
return STATUS_ERROR
|
|
535
|
+
if statuses == {STATUS_DONE}:
|
|
536
|
+
return STATUS_DONE
|
|
537
|
+
if STATUS_READY in statuses:
|
|
538
|
+
return STATUS_READY
|
|
539
|
+
return STATUS_BLOCKED
|
|
540
|
+
|
|
541
|
+
|
|
542
|
+
def _topological_order(artifacts: list[WaveArtifact]) -> list[str]:
|
|
543
|
+
artifacts_by_node = {artifact.node_id: artifact for artifact in artifacts}
|
|
544
|
+
indegree = {artifact.node_id: 0 for artifact in artifacts}
|
|
545
|
+
adjacency = {artifact.node_id: set() for artifact in artifacts}
|
|
546
|
+
|
|
547
|
+
for artifact in artifacts:
|
|
548
|
+
for dependency_id in _dependency_ids(artifact):
|
|
549
|
+
if dependency_id not in indegree:
|
|
550
|
+
continue
|
|
551
|
+
if artifact.node_id in adjacency[dependency_id]:
|
|
552
|
+
continue
|
|
553
|
+
adjacency[dependency_id].add(artifact.node_id)
|
|
554
|
+
indegree[artifact.node_id] += 1
|
|
555
|
+
|
|
556
|
+
ready = sorted(
|
|
557
|
+
[node_id for node_id, degree in indegree.items() if degree == 0],
|
|
558
|
+
key=lambda node_id: (artifacts_by_node[node_id].wave, node_id),
|
|
559
|
+
)
|
|
560
|
+
order: list[str] = []
|
|
561
|
+
|
|
562
|
+
while ready:
|
|
563
|
+
node_id = ready.pop(0)
|
|
564
|
+
order.append(node_id)
|
|
565
|
+
|
|
566
|
+
for child_id in sorted(adjacency[node_id], key=lambda child: (artifacts_by_node[child].wave, child)):
|
|
567
|
+
indegree[child_id] -= 1
|
|
568
|
+
if indegree[child_id] == 0:
|
|
569
|
+
ready.append(child_id)
|
|
570
|
+
ready.sort(key=lambda candidate: (artifacts_by_node[candidate].wave, candidate))
|
|
571
|
+
|
|
572
|
+
if len(order) != len(artifacts):
|
|
573
|
+
cycle_nodes = sorted(node_id for node_id, degree in indegree.items() if degree > 0)
|
|
574
|
+
raise ValueError(f"wave_config contains a dependency cycle: {', '.join(cycle_nodes)}")
|
|
575
|
+
|
|
576
|
+
return order
|