tapps-agents 3.5.39__py3-none-any.whl → 3.5.40__py3-none-any.whl
This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
- tapps_agents/__init__.py +2 -2
- tapps_agents/agents/enhancer/agent.py +2728 -2728
- tapps_agents/agents/implementer/agent.py +35 -13
- tapps_agents/agents/reviewer/agent.py +43 -10
- tapps_agents/agents/reviewer/scoring.py +59 -68
- tapps_agents/agents/reviewer/tools/__init__.py +24 -0
- tapps_agents/agents/reviewer/tools/ruff_grouping.py +250 -0
- tapps_agents/agents/reviewer/tools/scoped_mypy.py +284 -0
- tapps_agents/beads/__init__.py +11 -0
- tapps_agents/beads/hydration.py +213 -0
- tapps_agents/beads/specs.py +206 -0
- tapps_agents/cli/commands/health.py +19 -3
- tapps_agents/cli/commands/simple_mode.py +842 -676
- tapps_agents/cli/commands/task.py +219 -0
- tapps_agents/cli/commands/top_level.py +13 -0
- tapps_agents/cli/main.py +658 -651
- tapps_agents/cli/parsers/top_level.py +1978 -1881
- tapps_agents/core/config.py +1622 -1622
- tapps_agents/core/init_project.py +3012 -2897
- tapps_agents/epic/markdown_sync.py +105 -0
- tapps_agents/epic/orchestrator.py +1 -2
- tapps_agents/epic/parser.py +427 -423
- tapps_agents/experts/adaptive_domain_detector.py +0 -2
- tapps_agents/experts/knowledge/api-design-integration/api-security-patterns.md +15 -15
- tapps_agents/experts/knowledge/api-design-integration/external-api-integration.md +19 -44
- tapps_agents/health/checks/outcomes.backup_20260204_064058.py +324 -0
- tapps_agents/health/checks/outcomes.backup_20260204_064256.py +324 -0
- tapps_agents/health/checks/outcomes.backup_20260204_064600.py +324 -0
- tapps_agents/health/checks/outcomes.py +134 -46
- tapps_agents/health/orchestrator.py +12 -4
- tapps_agents/hooks/__init__.py +33 -0
- tapps_agents/hooks/config.py +140 -0
- tapps_agents/hooks/events.py +135 -0
- tapps_agents/hooks/executor.py +128 -0
- tapps_agents/hooks/manager.py +143 -0
- tapps_agents/session/__init__.py +19 -0
- tapps_agents/session/manager.py +256 -0
- tapps_agents/simple_mode/code_snippet_handler.py +382 -0
- tapps_agents/simple_mode/intent_parser.py +29 -4
- tapps_agents/simple_mode/orchestrators/base.py +185 -59
- tapps_agents/simple_mode/orchestrators/build_orchestrator.py +2667 -2642
- tapps_agents/simple_mode/orchestrators/fix_orchestrator.py +2 -2
- tapps_agents/simple_mode/workflow_suggester.py +37 -3
- tapps_agents/workflow/agent_handlers/implementer_handler.py +18 -3
- tapps_agents/workflow/cursor_executor.py +2196 -2118
- tapps_agents/workflow/direct_execution_fallback.py +16 -3
- tapps_agents/workflow/message_formatter.py +2 -1
- tapps_agents/workflow/parallel_executor.py +43 -4
- tapps_agents/workflow/parser.py +375 -357
- tapps_agents/workflow/rules_generator.py +337 -337
- tapps_agents/workflow/skill_invoker.py +9 -3
- {tapps_agents-3.5.39.dist-info → tapps_agents-3.5.40.dist-info}/METADATA +5 -1
- {tapps_agents-3.5.39.dist-info → tapps_agents-3.5.40.dist-info}/RECORD +57 -53
- tapps_agents/agents/analyst/SKILL.md +0 -85
- tapps_agents/agents/architect/SKILL.md +0 -80
- tapps_agents/agents/debugger/SKILL.md +0 -66
- tapps_agents/agents/designer/SKILL.md +0 -78
- tapps_agents/agents/documenter/SKILL.md +0 -95
- tapps_agents/agents/enhancer/SKILL.md +0 -189
- tapps_agents/agents/implementer/SKILL.md +0 -117
- tapps_agents/agents/improver/SKILL.md +0 -55
- tapps_agents/agents/ops/SKILL.md +0 -64
- tapps_agents/agents/orchestrator/SKILL.md +0 -238
- tapps_agents/agents/planner/story_template.md +0 -37
- tapps_agents/agents/reviewer/templates/quality-dashboard.html.j2 +0 -150
- tapps_agents/agents/tester/SKILL.md +0 -71
- {tapps_agents-3.5.39.dist-info → tapps_agents-3.5.40.dist-info}/WHEEL +0 -0
- {tapps_agents-3.5.39.dist-info → tapps_agents-3.5.40.dist-info}/entry_points.txt +0 -0
- {tapps_agents-3.5.39.dist-info → tapps_agents-3.5.40.dist-info}/licenses/LICENSE +0 -0
- {tapps_agents-3.5.39.dist-info → tapps_agents-3.5.40.dist-info}/top_level.txt +0 -0
tapps_agents/workflow/parser.py
CHANGED
@@ -1,357 +1,375 @@
(previous version: 357 lines removed; the module docstring, directive comments, imports, _err_prefix, and parse_file were identical to the new version below, and the rest of the old implementation is not preserved in this diff view)
"""
Workflow Parser - Parse YAML workflow definitions.
"""

# @ai-prime-directive: This file implements the YAML workflow parser with strict schema validation.
# The parser supports both wrapped ({workflow: {...}}) and legacy ({id: ..., steps: [...]}) formats
# and enforces schema compliance per ADR-004. This is critical infrastructure for the YAML-first workflow architecture.

# @ai-constraints:
# - Must support both wrapped and legacy workflow formats for backward compatibility
# - Schema validation must occur before workflow object construction
# - Step validation must check required fields (id, agent, instruction) before schema validation
# - Error messages must include file path and step ID for debugging
# - Performance: Parsing should complete in <100ms for typical workflows

# @note[2025-03-15]: YAML-first workflow architecture per ADR-004.
# The parser enforces strict schema compliance while maintaining backward compatibility.
# See docs/architecture/decisions/ADR-004-yaml-first-workflows.md

import re
from pathlib import Path
from typing import Any

import yaml

from .models import (
    Workflow,
    WorkflowSettings,
    WorkflowStep,
    WorkflowType,
)
from .schema_validator import SchemaVersion, WorkflowSchemaValidator


class WorkflowParser:
    """Parser for YAML workflow definitions."""

    @staticmethod
    def _err_prefix(file_path: Path | None = None, step_id: str | None = None) -> str:
        parts: list[str] = []
        if file_path:
            parts.append(str(file_path))
        if step_id:
            parts.append(f"step:{step_id}")
        return ": ".join(parts) + (": " if parts else "")

    @staticmethod
    def parse_file(file_path: Path) -> Workflow:
        """
        Parse a workflow YAML file.

        Args:
            file_path: Path to workflow YAML file

        Returns:
            Parsed Workflow object
        """
        with open(file_path, encoding="utf-8") as f:
            content = yaml.safe_load(f)

        return WorkflowParser.parse(content, file_path=file_path)

    @staticmethod
    def parse_yaml(yaml_string: str, file_path: Path | None = None) -> Workflow:
        """
        Parse workflow from a YAML string.

        Use this when you have workflow content as a string (e.g. from
        _create_story_workflow or in-memory YAML). For file paths use parse_file.

        Args:
            yaml_string: Workflow YAML as string
            file_path: Optional path for error messages

        Returns:
            Parsed Workflow object
        """
        content = yaml.safe_load(yaml_string)
        return WorkflowParser.parse(content, file_path=file_path)

    @staticmethod
    def parse(content: Any, file_path: Path | None = None) -> Workflow:
        """
        Parse workflow content from dictionary.

        Args:
            content: Workflow YAML content as dictionary

        Returns:
            Parsed Workflow object
        """
        if not isinstance(content, dict):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow file must parse to a mapping/object"
            )

        # Support both:
        # 1) New schema: {"workflow": {...}}
        # 2) Legacy/utility schema: {"id": "...", "steps": [...], ...}
        is_wrapped = isinstance(content.get("workflow"), dict)
        workflow_data: dict[str, Any] | None = content.get("workflow") if is_wrapped else None
        if not workflow_data:
            if isinstance(content.get("id"), str) and isinstance(content.get("steps"), list):
                workflow_data = content  # legacy/utility format
                is_wrapped = False
            else:
                raise ValueError(
                    f"{WorkflowParser._err_prefix(file_path)}Missing required top-level key 'workflow'"
                )

        # Quick validation of required step fields before schema validation
        # This ensures we get the expected error message format for missing required fields
        steps_data = workflow_data.get("steps", [])
        if isinstance(steps_data, list):
            for step_data in steps_data:
                if isinstance(step_data, dict):
                    step_id = step_data.get("id", "<unknown>")
                    agent = step_data.get("agent")
                    action = step_data.get("action")

                    # Check required fields and raise with expected message format
                    if not isinstance(step_data.get("id"), str) or not step_data.get("id", "").strip():
                        raise ValueError(
                            f"{WorkflowParser._err_prefix(file_path)}Step must have id, agent, and action"
                        )
                    if not isinstance(agent, str) or not agent.strip():
                        raise ValueError(
                            f"{WorkflowParser._err_prefix(file_path, step_id)}Step must have id, agent, and action"
                        )
                    if not isinstance(action, str) or not action.strip():
                        raise ValueError(
                            f"{WorkflowParser._err_prefix(file_path, step_id)}Step must have id, agent, and action"
                        )

        # Determine schema version for validation
        schema_version = workflow_data.get("schema_version")
        if schema_version is None:
            schema_version = SchemaVersion.LATEST.value
        elif not isinstance(schema_version, str):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}schema_version must be a string"
            )

        # Validate against schema (strict mode enabled by default)
        validator = WorkflowSchemaValidator(schema_version=schema_version, strict=True)
        validation_errors = validator.validate_workflow(content, file_path=file_path)
        if validation_errors:
            error_messages = [str(err) for err in validation_errors]
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Schema validation failed:\n"
                + "\n".join(f" - {msg}" for msg in error_messages)
            )

        # Parse workflow metadata
        workflow_id = workflow_data.get("id")
        if not isinstance(workflow_id, str) or not workflow_id.strip():
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow must have a non-empty string 'id'"
            )

        name = workflow_data.get("name", "")
        if name is None:
            name = ""
        if not isinstance(name, str):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow 'name' must be a string"
            )

        description = workflow_data.get("description", "")
        if description is None:
            description = ""
        if not isinstance(description, str):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow 'description' must be a string"
            )

        version = workflow_data.get("version", "1.0.0")
        if not isinstance(version, str) or not version.strip():
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow 'version' must be a non-empty string"
            )

        # Parse workflow type
        workflow_type_raw = workflow_data.get("type", None)
        if workflow_type_raw is None:
            workflow_type = WorkflowType.GREENFIELD
        else:
            if not isinstance(workflow_type_raw, str):
                raise ValueError(
                    f"{WorkflowParser._err_prefix(file_path)}Workflow 'type' must be a string"
                )
            try:
                workflow_type = WorkflowType(workflow_type_raw.lower())
            except ValueError as e:
                if is_wrapped:
                    allowed = ", ".join([t.value for t in WorkflowType])
                    raise ValueError(
                        f"{WorkflowParser._err_prefix(file_path)}Invalid workflow type '{workflow_type_raw}'. "
                        f"Allowed: {allowed}"
                    ) from e
                # Legacy/utility workflows may use non-standard types (e.g., "utility").
                workflow_type = WorkflowType.GREENFIELD

        # Parse settings
        settings_data = workflow_data.get("settings", {})
        if settings_data is None:
            settings_data = {}
        if not isinstance(settings_data, dict):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow 'settings' must be an object/mapping"
            )
        # Support auto_detect at workflow level (legacy) or in settings
        auto_detect = workflow_data.get("auto_detect")
        if auto_detect is None:
            auto_detect = settings_data.get("auto_detect", True)
        settings = WorkflowSettings(
            quality_gates=settings_data.get("quality_gates", True),
            code_scoring=settings_data.get("code_scoring", True),
            context_tier_default=settings_data.get("context_tier_default", 2),
            auto_detect=auto_detect if isinstance(auto_detect, bool) else True,
        )

        # Parse steps
        steps_data = workflow_data.get("steps", [])
        if steps_data is None:
            steps_data = []
        if not isinstance(steps_data, list):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow 'steps' must be a list"
            )

        steps: list[WorkflowStep] = []
        for step_data in steps_data:
            step = WorkflowParser._parse_step(step_data, file_path=file_path)
            steps.append(step)

        # Parse metadata
        metadata = workflow_data.get("metadata", {})
        if metadata is None:
            metadata = {}
        if not isinstance(metadata, dict):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Workflow 'metadata' must be an object/mapping"
            )
        if (not is_wrapped) and isinstance(workflow_type_raw, str):
            metadata.setdefault("raw_type", workflow_type_raw)

        WorkflowParser._validate_references(steps=steps, file_path=file_path)

        return Workflow(
            id=workflow_id,
            name=name,
            description=description,
            version=version,
            type=workflow_type,
            settings=settings,
            steps=steps,
            metadata=metadata,
        )

    @staticmethod
    def _validate_str_list(
        value: Any, *, field: str, file_path: Path | None, step_id: str
    ) -> list[str]:
        if value is None:
            return []
        if not isinstance(value, list) or any(not isinstance(v, str) for v in value):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path, step_id)}Step '{field}' must be a list of strings"
            )
        return value

    @staticmethod
    def _parse_step(step_data: Any, file_path: Path | None = None) -> WorkflowStep:
        """Parse a workflow step."""
        if not isinstance(step_data, dict):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Each step must be an object/mapping"
            )

        step_id = step_data.get("id")
        agent = step_data.get("agent")
        action = step_data.get("action")

        if not isinstance(step_id, str) or not step_id.strip():
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Step must have id, agent, and action"
            )
        if not isinstance(agent, str) or not agent.strip():
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path, step_id)}Step must have id, agent, and action"
            )
        if not isinstance(action, str) or not action.strip():
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path, step_id)}Step must have id, agent, and action"
            )

        context_tier = step_data.get("context_tier", 2)
        if isinstance(context_tier, str):
            txt = context_tier.strip()
            m = re.match(r"(?i)^tier(\d+)$", txt)
            if m:
                context_tier = int(m.group(1))
            elif txt.isdigit():
                context_tier = int(txt)
        if not isinstance(context_tier, int):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path, step_id)}Step 'context_tier' must be an int (or 'TIER<n>')"
            )

        creates = WorkflowParser._validate_str_list(
            step_data.get("creates", []), field="creates", file_path=file_path, step_id=step_id
        )
        requires = WorkflowParser._validate_str_list(
            step_data.get("requires", []), field="requires", file_path=file_path, step_id=step_id
        )
        consults = WorkflowParser._validate_str_list(
            step_data.get("consults", []), field="consults", file_path=file_path, step_id=step_id
        )
        optional_steps = WorkflowParser._validate_str_list(
            step_data.get("optional_steps", []),
            field="optional_steps",
            file_path=file_path,
            step_id=step_id,
        )

        next_step = step_data.get("next")
        if next_step is not None and not isinstance(next_step, str):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path, step_id)}Step 'next' must be a string"
            )

        gate = step_data.get("gate")
        if gate is not None and not isinstance(gate, dict):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path, step_id)}Step 'gate' must be an object/mapping"
            )

        return WorkflowStep(
            id=step_id,
            agent=agent,
            action=action,
            context_tier=context_tier,
            creates=creates,
            requires=requires,
            condition=step_data.get("condition", "required"),
            next=next_step,
            gate=gate,
            consults=consults,
            optional_steps=optional_steps,
            notes=step_data.get("notes"),
            repeats=step_data.get("repeats", False),
            scoring=step_data.get("scoring"),
            metadata=step_data.get("metadata", {}),
        )

    @staticmethod
    def _validate_references(steps: list[WorkflowStep], file_path: Path | None) -> None:
        step_ids = [s.id for s in steps]
        if len(step_ids) != len(set(step_ids)):
            raise ValueError(
                f"{WorkflowParser._err_prefix(file_path)}Duplicate step ids are not allowed"
            )

        step_id_set = set(step_ids)
        for s in steps:
            if s.next and s.next not in step_id_set:
                raise ValueError(
                    f"{WorkflowParser._err_prefix(file_path, s.id)}Step 'next' references unknown step id: {s.next}"
                )
            for opt in (s.optional_steps or []):
                if opt not in step_id_set:
                    raise ValueError(
                        f"{WorkflowParser._err_prefix(file_path, s.id)}Step 'optional_steps' references unknown step id: {opt}"
                    )